Dataset columns:

  repository_name      string, lengths 7-107
  function_path        string, lengths 4-190
  function_identifier  string, lengths 1-236
  language             string, 1 class
  function             string, lengths 9-647k
  docstring            string, lengths 5-488k
  function_url         string, lengths 71-285
  context              string, lengths 0-2.51M
  license              string, 5 classes
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/jpeg_save_options_data.py
JpegSaveOptionsData.metafile_rendering_options
python
def metafile_rendering_options(self):
    return self._metafile_rendering_options
Gets the metafile_rendering_options of this JpegSaveOptionsData.

Gets or sets the metafile rendering options.

:return: The metafile_rendering_options of this JpegSaveOptionsData.
:rtype: MetafileRenderingOptionsData
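A hedged usage sketch for this getter: it assumes the asposewordscloud package is installed and re-exports its models at the top level, and that MetafileRenderingOptionsData (named in this record) can be constructed without arguments; both are assumptions, not confirmed by the record.

import asposewordscloud

options = asposewordscloud.JpegSaveOptionsData()
options.jpeg_quality = 90
# Assumption: a no-argument constructor; real code may set its fields here.
options.metafile_rendering_options = asposewordscloud.MetafileRenderingOptionsData()
print(options.metafile_rendering_options)  # the getter shown above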
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/jpeg_save_options_data.py#L610-L618
import pprint import re import datetime import six import json class JpegSaveOptionsData(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'allow_embedding_post_script_fonts': 'bool', 'custom_time_zone_info_data': 'TimeZoneInfoData', 'dml3_d_effects_rendering_mode': 'str', 'dml_effects_rendering_mode': 'str', 'dml_rendering_mode': 'str', 'file_name': 'str', 'flat_opc_xml_mapping_only': 'bool', 'iml_rendering_mode': 'str', 'save_format': 'str', 'update_created_time_property': 'bool', 'update_fields': 'bool', 'update_last_printed_property': 'bool', 'update_last_saved_time_property': 'bool', 'update_sdt_content': 'bool', 'zip_output': 'bool', 'color_mode': 'str', 'jpeg_quality': 'int', 'metafile_rendering_options': 'MetafileRenderingOptionsData', 'numeral_format': 'str', 'optimize_output': 'bool', 'page_count': 'int', 'page_index': 'int', 'horizontal_resolution': 'float', 'image_brightness': 'float', 'image_color_mode': 'str', 'image_contrast': 'float', 'paper_color': 'str', 'pixel_format': 'str', 'resolution': 'float', 'scale': 'float', 'use_anti_aliasing': 'bool', 'use_gdi_emf_renderer': 'bool', 'use_high_quality_rendering': 'bool', 'vertical_resolution': 'float' } attribute_map = { 'allow_embedding_post_script_fonts': 'AllowEmbeddingPostScriptFonts', 'custom_time_zone_info_data': 'CustomTimeZoneInfoData', 'dml3_d_effects_rendering_mode': 'Dml3DEffectsRenderingMode', 'dml_effects_rendering_mode': 'DmlEffectsRenderingMode', 'dml_rendering_mode': 'DmlRenderingMode', 'file_name': 'FileName', 'flat_opc_xml_mapping_only': 'FlatOpcXmlMappingOnly', 'iml_rendering_mode': 'ImlRenderingMode', 'save_format': 'SaveFormat', 'update_created_time_property': 'UpdateCreatedTimeProperty', 'update_fields': 'UpdateFields', 'update_last_printed_property': 'UpdateLastPrintedProperty', 'update_last_saved_time_property': 'UpdateLastSavedTimeProperty', 'update_sdt_content': 'UpdateSdtContent', 'zip_output': 'ZipOutput', 'color_mode': 'ColorMode', 'jpeg_quality': 'JpegQuality', 'metafile_rendering_options': 'MetafileRenderingOptions', 'numeral_format': 'NumeralFormat', 'optimize_output': 'OptimizeOutput', 'page_count': 'PageCount', 'page_index': 'PageIndex', 'horizontal_resolution': 'HorizontalResolution', 'image_brightness': 'ImageBrightness', 'image_color_mode': 'ImageColorMode', 'image_contrast': 'ImageContrast', 'paper_color': 'PaperColor', 'pixel_format': 'PixelFormat', 'resolution': 'Resolution', 'scale': 'Scale', 'use_anti_aliasing': 'UseAntiAliasing', 'use_gdi_emf_renderer': 'UseGdiEmfRenderer', 'use_high_quality_rendering': 'UseHighQualityRendering', 'vertical_resolution': 'VerticalResolution' } def __init__(self, allow_embedding_post_script_fonts=None, custom_time_zone_info_data=None, dml3_d_effects_rendering_mode=None, dml_effects_rendering_mode=None, dml_rendering_mode=None, file_name=None, flat_opc_xml_mapping_only=None, iml_rendering_mode=None, save_format=None, update_created_time_property=None, update_fields=None, update_last_printed_property=None, update_last_saved_time_property=None, update_sdt_content=None, zip_output=None, color_mode=None, jpeg_quality=None, metafile_rendering_options=None, numeral_format=None, optimize_output=None, page_count=None, page_index=None, horizontal_resolution=None, image_brightness=None, image_color_mode=None, image_contrast=None, paper_color=None, pixel_format=None, resolution=None, scale=None, 
use_anti_aliasing=None, use_gdi_emf_renderer=None, use_high_quality_rendering=None, vertical_resolution=None): self._allow_embedding_post_script_fonts = None self._custom_time_zone_info_data = None self._dml3_d_effects_rendering_mode = None self._dml_effects_rendering_mode = None self._dml_rendering_mode = None self._file_name = None self._flat_opc_xml_mapping_only = None self._iml_rendering_mode = None self._save_format = None self._update_created_time_property = None self._update_fields = None self._update_last_printed_property = None self._update_last_saved_time_property = None self._update_sdt_content = None self._zip_output = None self._color_mode = None self._jpeg_quality = None self._metafile_rendering_options = None self._numeral_format = None self._optimize_output = None self._page_count = None self._page_index = None self._horizontal_resolution = None self._image_brightness = None self._image_color_mode = None self._image_contrast = None self._paper_color = None self._pixel_format = None self._resolution = None self._scale = None self._use_anti_aliasing = None self._use_gdi_emf_renderer = None self._use_high_quality_rendering = None self._vertical_resolution = None self.discriminator = None if allow_embedding_post_script_fonts is not None: self.allow_embedding_post_script_fonts = allow_embedding_post_script_fonts if custom_time_zone_info_data is not None: self.custom_time_zone_info_data = custom_time_zone_info_data if dml3_d_effects_rendering_mode is not None: self.dml3_d_effects_rendering_mode = dml3_d_effects_rendering_mode if dml_effects_rendering_mode is not None: self.dml_effects_rendering_mode = dml_effects_rendering_mode if dml_rendering_mode is not None: self.dml_rendering_mode = dml_rendering_mode if file_name is not None: self.file_name = file_name if flat_opc_xml_mapping_only is not None: self.flat_opc_xml_mapping_only = flat_opc_xml_mapping_only if iml_rendering_mode is not None: self.iml_rendering_mode = iml_rendering_mode if save_format is not None: self.save_format = save_format if update_created_time_property is not None: self.update_created_time_property = update_created_time_property if update_fields is not None: self.update_fields = update_fields if update_last_printed_property is not None: self.update_last_printed_property = update_last_printed_property if update_last_saved_time_property is not None: self.update_last_saved_time_property = update_last_saved_time_property if update_sdt_content is not None: self.update_sdt_content = update_sdt_content if zip_output is not None: self.zip_output = zip_output if color_mode is not None: self.color_mode = color_mode if jpeg_quality is not None: self.jpeg_quality = jpeg_quality if metafile_rendering_options is not None: self.metafile_rendering_options = metafile_rendering_options if numeral_format is not None: self.numeral_format = numeral_format if optimize_output is not None: self.optimize_output = optimize_output if page_count is not None: self.page_count = page_count if page_index is not None: self.page_index = page_index if horizontal_resolution is not None: self.horizontal_resolution = horizontal_resolution if image_brightness is not None: self.image_brightness = image_brightness if image_color_mode is not None: self.image_color_mode = image_color_mode if image_contrast is not None: self.image_contrast = image_contrast if paper_color is not None: self.paper_color = paper_color if pixel_format is not None: self.pixel_format = pixel_format if resolution is not None: self.resolution = resolution if scale is not 
None: self.scale = scale if use_anti_aliasing is not None: self.use_anti_aliasing = use_anti_aliasing if use_gdi_emf_renderer is not None: self.use_gdi_emf_renderer = use_gdi_emf_renderer if use_high_quality_rendering is not None: self.use_high_quality_rendering = use_high_quality_rendering if vertical_resolution is not None: self.vertical_resolution = vertical_resolution @property def allow_embedding_post_script_fonts(self): return self._allow_embedding_post_script_fonts @allow_embedding_post_script_fonts.setter def allow_embedding_post_script_fonts(self, allow_embedding_post_script_fonts): self._allow_embedding_post_script_fonts = allow_embedding_post_script_fonts @property def custom_time_zone_info_data(self): return self._custom_time_zone_info_data @custom_time_zone_info_data.setter def custom_time_zone_info_data(self, custom_time_zone_info_data): self._custom_time_zone_info_data = custom_time_zone_info_data @property def dml3_d_effects_rendering_mode(self): return self._dml3_d_effects_rendering_mode @dml3_d_effects_rendering_mode.setter def dml3_d_effects_rendering_mode(self, dml3_d_effects_rendering_mode): allowed_values = ["Basic", "Advanced"] if not dml3_d_effects_rendering_mode.isdigit(): if dml3_d_effects_rendering_mode not in allowed_values: raise ValueError( "Invalid value for `dml3_d_effects_rendering_mode` ({0}), must be one of {1}" .format(dml3_d_effects_rendering_mode, allowed_values)) self._dml3_d_effects_rendering_mode = dml3_d_effects_rendering_mode else: self._dml3_d_effects_rendering_mode = allowed_values[int(dml3_d_effects_rendering_mode) if six.PY3 else long(dml3_d_effects_rendering_mode)] @property def dml_effects_rendering_mode(self): return self._dml_effects_rendering_mode @dml_effects_rendering_mode.setter def dml_effects_rendering_mode(self, dml_effects_rendering_mode): self._dml_effects_rendering_mode = dml_effects_rendering_mode @property def dml_rendering_mode(self): return self._dml_rendering_mode @dml_rendering_mode.setter def dml_rendering_mode(self, dml_rendering_mode): self._dml_rendering_mode = dml_rendering_mode @property def file_name(self): return self._file_name @file_name.setter def file_name(self, file_name): self._file_name = file_name @property def flat_opc_xml_mapping_only(self): return self._flat_opc_xml_mapping_only @flat_opc_xml_mapping_only.setter def flat_opc_xml_mapping_only(self, flat_opc_xml_mapping_only): self._flat_opc_xml_mapping_only = flat_opc_xml_mapping_only @property def iml_rendering_mode(self): return self._iml_rendering_mode @iml_rendering_mode.setter def iml_rendering_mode(self, iml_rendering_mode): self._iml_rendering_mode = iml_rendering_mode @property def save_format(self): return self._save_format @save_format.setter def save_format(self, save_format): self._save_format = save_format @property def update_created_time_property(self): return self._update_created_time_property @update_created_time_property.setter def update_created_time_property(self, update_created_time_property): self._update_created_time_property = update_created_time_property @property def update_fields(self): return self._update_fields @update_fields.setter def update_fields(self, update_fields): self._update_fields = update_fields @property def update_last_printed_property(self): return self._update_last_printed_property @update_last_printed_property.setter def update_last_printed_property(self, update_last_printed_property): self._update_last_printed_property = update_last_printed_property @property def update_last_saved_time_property(self): return 
self._update_last_saved_time_property @update_last_saved_time_property.setter def update_last_saved_time_property(self, update_last_saved_time_property): self._update_last_saved_time_property = update_last_saved_time_property @property def update_sdt_content(self): return self._update_sdt_content @update_sdt_content.setter def update_sdt_content(self, update_sdt_content): self._update_sdt_content = update_sdt_content @property def zip_output(self): return self._zip_output @zip_output.setter def zip_output(self, zip_output): self._zip_output = zip_output @property def color_mode(self): return self._color_mode @color_mode.setter def color_mode(self, color_mode): self._color_mode = color_mode @property def jpeg_quality(self): return self._jpeg_quality @jpeg_quality.setter def jpeg_quality(self, jpeg_quality): self._jpeg_quality = jpeg_quality @property
MIT License
dirty-cat/dirty_cat
dirty_cat/super_vectorizer.py
SuperVectorizer._auto_cast
python
def _auto_cast(X: pd.DataFrame) -> pd.DataFrame:
    from pandas.core.dtypes.base import ExtensionDtype

    for col in X.columns:
        dtype = X[col].dtype
        contains_missing: bool = _has_missing_values(X[col])
        if not contains_missing:
            X[col] = X[col].convert_dtypes()
            if issubclass(dtype.__class__, ExtensionDtype):
                try:
                    X[col] = X[col].astype(dtype.type, errors='ignore')
                except (TypeError, ValueError):
                    pass
        if contains_missing:
            if pd.api.types.is_numeric_dtype(X[col]):
                X[col] = X[col].astype(np.float64)
                X[col].fillna(value=np.nan, inplace=True)
    return X
Takes a pandas DataFrame and tries to convert its columns to the best possible data type.

Parameters
----------
X : pd.DataFrame
    Input data as a pandas DataFrame.

Returns
-------
pd.DataFrame
    The same DataFrame, with its columns cast to the best possible data type.
    Columns with missing values won't be modified.
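A minimal sketch of the casting behavior described above, using only pandas and numpy; the column names are illustrative.

import numpy as np
import pandas as pd

df = pd.DataFrame({
    'complete': ['1', '2', '3'],        # no missing values
    'with_nan': [1.0, np.nan, 3.0],     # has missing values
})
# Mirrors _auto_cast: columns without missing values get convert_dtypes(),
# numeric columns with missing values are kept as float64 with NaN markers.
df['complete'] = df['complete'].convert_dtypes()
df['with_nan'] = df['with_nan'].astype(np.float64)
print(df.dtypes)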
https://github.com/dirty-cat/dirty_cat/blob/3aeb866a11b869a360155043ede6868c7dfb8306/dirty_cat/super_vectorizer.py#L172-L212
import sklearn import numpy as np import pandas as pd from warnings import warn from typing import Union, Optional, List from distutils.version import LooseVersion from sklearn.base import BaseEstimator from sklearn.compose import ColumnTransformer from sklearn.preprocessing import OneHotEncoder from dirty_cat import GapEncoder _sklearn_loose_version = LooseVersion(sklearn.__version__) def _has_missing_values(df: Union[pd.DataFrame, pd.Series]) -> bool: return any(df.isnull()) def _replace_missing_in_col(df: pd.Series, value: str = "missing") -> pd.Series: dtype_name = df.dtype.name if dtype_name == 'category' and (value not in df.cat.categories): df = df.cat.add_categories(value) df = df.fillna(value=value) return df class SuperVectorizer(ColumnTransformer): _required_parameters = [] OptionalEstimator = Optional[Union[BaseEstimator, str]] def __init__(self, *, cardinality_threshold: int = 40, low_card_cat_transformer: Optional[Union[BaseEstimator, str]] = OneHotEncoder(), high_card_cat_transformer: Optional[Union[BaseEstimator, str]] = GapEncoder(n_components=30), numerical_transformer: Optional[Union[BaseEstimator, str]] = None, datetime_transformer: Optional[Union[BaseEstimator, str]] = None, auto_cast: bool = True, impute_missing: str = 'auto', remainder: str = 'passthrough', sparse_threshold: float = 0.3, n_jobs: int = None, transformer_weights=None, verbose: bool = False, ): super().__init__(transformers=[]) self.cardinality_threshold = cardinality_threshold self.low_card_cat_transformer = low_card_cat_transformer self.high_card_cat_transformer = high_card_cat_transformer self.numerical_transformer = numerical_transformer self.datetime_transformer = datetime_transformer self.auto_cast = auto_cast self.impute_missing = impute_missing self.remainder = remainder self.sparse_threshold = sparse_threshold self.n_jobs = n_jobs self.transformer_weights = transformer_weights self.verbose = verbose @staticmethod
BSD 3-Clause New or Revised License
interactive-sonification/sc3nb
src/sc3nb/sc_objects/buffer.py
Buffer.zero
python
def zero(self) -> "Buffer":
    if not self._allocated:
        raise RuntimeError("Buffer object is not initialized!")
    self._server.msg(BufferCommand.ZERO, [self._bufnum], bundle=True)
    return self
Set buffer data to zero.

Returns
-------
self : Buffer
    the Buffer object itself, allowing method chaining

Raises
------
RuntimeError
    If the Buffer is not allocated yet.
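A hedged usage sketch: it assumes a local SuperCollider installation and that sc3nb.startup() and the Buffer.alloc() signature behave as in this record's context; treat the boot step as an assumption.

import sc3nb

sc = sc3nb.startup()                # assumption: boots sclang/scsynth locally
buf = sc3nb.Buffer().alloc(44100)   # one second of mono audio at 44.1 kHz
buf.zero()                          # clears all samples; RuntimeError if unallocated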
https://github.com/interactive-sonification/sc3nb/blob/7d7fbd9178fe804c5c8ddd0ddd4075579221b7c4/src/sc3nb/sc_objects/buffer.py#L496-L512
import os import warnings from enum import Enum, unique from pathlib import Path from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Any, List, NamedTuple, Optional, Sequence, Union import numpy as np import scipy.io.wavfile as wavfile import sc3nb from sc3nb.sc_objects.node import Synth from sc3nb.sc_objects.synthdef import SynthDef if TYPE_CHECKING: import pya from sc3nb.sc_objects.server import SCServer @unique class BufferReply(str, Enum): INFO = "/b_info" @unique class BufferCommand(str, Enum): ALLOC = "/b_alloc" ALLOC_READ = "/b_allocRead" ALLOC_READ_CHANNEL = "/b_allocReadChannel" READ = "/b_read" READ_CHANNEL = "/b_readChannel" WRITE = "/b_write" FREE = "/b_free" ZERO = "/b_zero" SET = "/b_set" SETN = "/b_setn" FILL = "/b_fill" GEN = "/b_gen" CLOSE = "/b_close" QUERY = "/b_query" GET = "/b_get" GETN = "/b_getn" @unique class BufferAllocationMode(str, Enum): FILE = "file" ALLOC = "alloc" DATA = "data" EXISTING = "existing" COPY = "copy" NONE = "none" class BufferInfo(NamedTuple): bufnum: int num_frames: int num_channels: int sample_rate: float class Buffer: def __init__( self, bufnum: Optional[int] = None, server: Optional["SCServer"] = None ) -> None: self._server = server or sc3nb.SC.get_default().server self._bufnum_set_manually = bufnum is not None self._bufnum = bufnum self._sr = None self._channels = None self._samples = None self._alloc_mode = BufferAllocationMode.NONE self._allocated = False self._path = None self._synth_def = None self._synth = None def read( self, path: str, starting_frame: int = 0, num_frames: int = -1, channels: Optional[Union[int, Sequence[int]]] = None, ) -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") if self._bufnum is None: self._bufnum = self._server.buffer_ids.allocate(num=1)[0] self._alloc_mode = BufferAllocationMode.FILE self._path = Path(path).resolve(strict=True) self._sr, data = wavfile.read( self._path ) server_sr = self._server.nominal_sr if self._sr != server_sr: warnings.warn( f"Sample rate of file ({self._sr}) does not " f"match the SC Server sample rate ({server_sr})" ) self._samples = data.shape[0] if num_frames <= 0 else num_frames if channels is None: channels = [0] if len(data.shape) == 1 else range(data.shape[1]) elif isinstance(channels, int): channels = [channels] self._channels = len(channels) self._server.msg( BufferCommand.ALLOC_READ_CHANNEL, [self._bufnum, str(self._path), starting_frame, num_frames, *channels], bundle=True, ) self._allocated = True return self def alloc(self, size: int, sr: int = 44100, channels: int = 1) -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") if self._bufnum is None: self._bufnum = self._server.buffer_ids.allocate(num=1)[0] self._sr = sr self._alloc_mode = BufferAllocationMode.ALLOC self._channels = channels self._samples = int(size) self._server.msg( BufferCommand.ALLOC, [self._bufnum, size, channels], bundle=True ) self._allocated = True return self def load_data( self, data: np.ndarray, sr: int = 44100, mode: str = "file", sync: bool = True, ) -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") if self._bufnum is None: self._bufnum = self._server.buffer_ids.allocate(num=1)[0] self._alloc_mode = BufferAllocationMode.DATA self._sr = sr self._samples = data.shape[0] self._channels = 1 if len(data.shape) == 1 else data.shape[1] if mode == "file": tempfile = NamedTemporaryFile(delete=False) try: wavfile.write(tempfile, self._sr, data) finally: 
tempfile.close() self._server.msg( BufferCommand.ALLOC_READ, [self._bufnum, tempfile.name], await_reply=True, ) if os.path.exists(tempfile.name): os.remove(tempfile.name) elif mode == "osc": self._server.msg( BufferCommand.ALLOC, [self._bufnum, data.shape[0]], bundle=True ) blocksize = 1000 if self._channels > 1: data = data.reshape(-1, 1) if data.shape[0] < blocksize: self._server.msg( BufferCommand.SETN, [self._bufnum, [0, data.shape[0], data.tolist()]], bundle=True, ) else: splitdata = np.array_split(data, data.shape[0] / blocksize) for i, chunk in enumerate(splitdata): self._server.msg( BufferCommand.SETN, [self._bufnum, i * blocksize, chunk.shape[0], chunk.tolist()], await_reply=False, bundle=True, ) if sync: self._server.sync() else: raise ValueError(f"Unsupported mode '{mode}'.") self._allocated = True return self def load_collection( self, data: np.ndarray, mode: str = "file", sr: int = 44100 ) -> "Buffer": return self.load_data(data, sr=sr, mode=mode) def load_asig(self, asig: "pya.Asig", mode: str = "file") -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") return self.load_data(asig.sig, sr=asig.sr, mode=mode) def use_existing(self, bufnum: int, sr: int = 44100) -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") self._alloc_mode = BufferAllocationMode.EXISTING self._sr = sr self._bufnum = bufnum self._allocated = True info = self.query() self._samples = info.num_frames self._channels = info.num_channels return self def copy_existing(self, buffer: "Buffer") -> "Buffer": if self._allocated: raise RuntimeError("Buffer object is already initialized!") if not buffer.allocated: raise RuntimeError("Other Buffer object is not initialized!") if self._server is buffer._server: self.alloc(buffer.samples, buffer.sr, buffer.channels) self.gen_copy(buffer, 0, 0, -1) else: self._sr = buffer.sr tempfile = NamedTemporaryFile(delete=False) tempfile.close() try: buffer.write(tempfile.name) self.read(tempfile.name) finally: if os.path.exists(tempfile.name): os.remove(tempfile.name) self._alloc_mode = BufferAllocationMode.COPY return self def fill(self, start: int = 0, count: int = 0, value: float = 0) -> "Buffer": if not self._allocated: raise RuntimeError("Buffer object is not initialized!") values = [start, count, value] if not isinstance(start, list) else start self._server.msg(BufferCommand.FILL, [self._bufnum] + values, bundle=True) return self def gen(self, command: str, args: List[Any]) -> "Buffer": if not self._allocated: raise RuntimeError("Buffer object is not initialized!") self._server.msg(BufferCommand.GEN, [self._bufnum, command] + args, bundle=True) return self
MIT License
cstein/neb
neb/bond.py
Bond.getNbrAtomIdx
python
def getNbrAtomIdx(self, value):
    if self._id1 == value:
        return self._id2
    if self._id2 == value:
        return self._id1
    raise ValueError("The atom index {0:d} is not in the bond.".format(value))
Returns the neighboring atom index in the bond
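A short self-contained sketch using the Bond class from this record's context.

bond = Bond(3, 7)
assert bond.getNbrAtomIdx(3) == 7   # given one index, returns the other
assert bond.getNbrAtomIdx(7) == 3
# bond.getNbrAtomIdx(5) raises ValueError: the index is not in the bond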
https://github.com/cstein/neb/blob/8bf59ef16819841d0839d6875c5935a681e0c405/neb/bond.py#L44-L48
import numpy


class Bond(object):
    def __init__(self, id1, id2):
        self._id1 = id1
        self._id2 = id2
        assert self._id1 != -1
        assert self._id2 != -1
        assert self._id1 != self._id2, "indices cannot refer to same atom."

    def sharesAtom(self, other):
        if self == other:
            return -1
        if self._id1 == other._id1 and self._id2 != other._id2:
            return self._id1
        if self._id1 == other._id2 and self._id2 != other._id1:
            return self._id1
        if self._id2 == other._id1 and self._id1 != other._id2:
            return self._id2
        if self._id2 == other._id2 and self._id1 != other._id1:
            return self._id2
        return -1
MIT License
pylast/pylast
src/pylast/__init__.py
_Network.scrobble_many
python
def scrobble_many(self, tracks):
    tracks_to_scrobble = tracks[:50]
    if len(tracks) > 50:
        remaining_tracks = tracks[50:]
    else:
        remaining_tracks = None

    params = {}
    for i in range(len(tracks_to_scrobble)):
        params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"]
        params["track[%d]" % i] = tracks_to_scrobble[i]["title"]

        additional_args = (
            "timestamp",
            "album",
            "album_artist",
            "context",
            "stream_id",
            "track_number",
            "mbid",
            "duration",
        )
        args_map_to = {
            "album_artist": "albumArtist",
            "track_number": "trackNumber",
            "stream_id": "streamID",
        }

        for arg in additional_args:
            if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]:
                if arg in args_map_to:
                    maps_to = args_map_to[arg]
                else:
                    maps_to = arg
                params["%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg]

    _Request(self, "track.scrobble", params).execute()

    if remaining_tracks:
        self.scrobble_many(remaining_tracks)
Used to scrobble a batch of tracks at once. The parameter tracks is a sequence of dicts, one per track, each containing the keyword arguments that would be passed to the scrobble() method.
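A hedged usage sketch; the API key, secret, and session key are placeholders you must supply, and the track dicts follow the keyword arguments of scrobble() as described above.

import time
import pylast

network = pylast.LastFMNetwork(
    api_key="YOUR_API_KEY",
    api_secret="YOUR_API_SECRET",
    session_key="YOUR_SESSION_KEY",
)
network.scrobble_many([
    {"artist": "Artist A", "title": "Track 1", "timestamp": int(time.time()) - 300},
    {"artist": "Artist B", "title": "Track 2", "timestamp": int(time.time())},
])
# Batches of more than 50 tracks are split and submitted recursively.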
https://github.com/pylast/pylast/blob/3db88e98ce77bc86bf533389b06a028a75251409/src/pylast/__init__.py#L588-L636
import collections import hashlib import html.entities import logging import os import shelve import ssl import tempfile import time import xml.dom from http.client import HTTPSConnection from urllib.parse import quote_plus from xml.dom import Node, minidom import pkg_resources __author__ = "Amr Hassan, hugovk, Mice Pápai" __copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2021 hugovk, 2017 Mice Pápai" __license__ = "apache2" __email__ = "amr.hassan@gmail.com" __version__ = pkg_resources.get_distribution(__name__).version STATUS_INVALID_SERVICE = 2 STATUS_INVALID_METHOD = 3 STATUS_AUTH_FAILED = 4 STATUS_INVALID_FORMAT = 5 STATUS_INVALID_PARAMS = 6 STATUS_INVALID_RESOURCE = 7 STATUS_OPERATION_FAILED = 8 STATUS_INVALID_SK = 9 STATUS_INVALID_API_KEY = 10 STATUS_OFFLINE = 11 STATUS_SUBSCRIBERS_ONLY = 12 STATUS_INVALID_SIGNATURE = 13 STATUS_TOKEN_UNAUTHORIZED = 14 STATUS_TOKEN_EXPIRED = 15 STATUS_TEMPORARILY_UNAVAILABLE = 16 STATUS_LOGIN_REQUIRED = 17 STATUS_TRIAL_EXPIRED = 18 STATUS_NOT_ENOUGH_CONTENT = 20 STATUS_NOT_ENOUGH_MEMBERS = 21 STATUS_NOT_ENOUGH_FANS = 22 STATUS_NOT_ENOUGH_NEIGHBOURS = 23 STATUS_NO_PEAK_RADIO = 24 STATUS_RADIO_NOT_FOUND = 25 STATUS_API_KEY_SUSPENDED = 26 STATUS_DEPRECATED = 27 STATUS_RATE_LIMIT_EXCEEDED = 29 PERIOD_OVERALL = "overall" PERIOD_7DAYS = "7day" PERIOD_1MONTH = "1month" PERIOD_3MONTHS = "3month" PERIOD_6MONTHS = "6month" PERIOD_12MONTHS = "12month" DOMAIN_ENGLISH = 0 DOMAIN_GERMAN = 1 DOMAIN_SPANISH = 2 DOMAIN_FRENCH = 3 DOMAIN_ITALIAN = 4 DOMAIN_POLISH = 5 DOMAIN_PORTUGUESE = 6 DOMAIN_SWEDISH = 7 DOMAIN_TURKISH = 8 DOMAIN_RUSSIAN = 9 DOMAIN_JAPANESE = 10 DOMAIN_CHINESE = 11 SIZE_SMALL = 0 SIZE_MEDIUM = 1 SIZE_LARGE = 2 SIZE_EXTRA_LARGE = 3 SIZE_MEGA = 4 IMAGES_ORDER_POPULARITY = "popularity" IMAGES_ORDER_DATE = "dateadded" SCROBBLE_SOURCE_USER = "P" SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R" SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E" SCROBBLE_SOURCE_LASTFM = "L" SCROBBLE_SOURCE_UNKNOWN = "U" SCROBBLE_MODE_PLAYED = "" SCROBBLE_MODE_LOVED = "L" SCROBBLE_MODE_BANNED = "B" SCROBBLE_MODE_SKIPPED = "S" DELAY_TIME = 0.2 SSL_CONTEXT = ssl.create_default_context() logger = logging.getLogger(__name__) logging.getLogger(__name__).addHandler(logging.NullHandler()) class _Network: def __init__( self, name, homepage, ws_server, api_key, api_secret, session_key, username, password_hash, domain_names, urls, token=None, ): self.name = name self.homepage = homepage self.ws_server = ws_server self.api_key = api_key self.api_secret = api_secret self.session_key = session_key self.username = username self.password_hash = password_hash self.domain_names = domain_names self.urls = urls self.cache_backend = None self.proxy_enabled = False self.proxy = None self.last_call_time = 0 self.limit_rate = False if token and not self.session_key: sk_gen = SessionKeyGenerator(self) self.session_key, self.username = sk_gen.get_web_auth_session_key_username( url=None, token=token ) if ( (self.api_key and self.api_secret) and not self.session_key and (self.username and self.password_hash) ): sk_gen = SessionKeyGenerator(self) self.session_key = sk_gen.get_session_key(self.username, self.password_hash) def __str__(self): return "%s Network" % self.name def get_artist(self, artist_name): return Artist(artist_name, self) def get_track(self, artist, title): return Track(artist, title, self) def get_album(self, artist, title): return Album(artist, title, self) def get_authenticated_user(self): return AuthenticatedUser(self) def get_country(self, country_name): return 
Country(country_name, self) def get_user(self, username): return User(username, self) def get_tag(self, name): return Tag(name, self) def _get_language_domain(self, domain_language): if domain_language in self.domain_names: return self.domain_names[domain_language] def _get_url(self, domain, url_type): return "https://{}/{}".format( self._get_language_domain(domain), self.urls[url_type] ) def _get_ws_auth(self): return self.api_key, self.api_secret, self.session_key def _delay_call(self): now = time.time() time_since_last = now - self.last_call_time if time_since_last < DELAY_TIME: time.sleep(DELAY_TIME - time_since_last) self.last_call_time = now def get_top_artists(self, limit=None, cacheable=True): params = {} if limit: params["limit"] = limit doc = _Request(self, "chart.getTopArtists", params).execute(cacheable) return _extract_top_artists(doc, self) def get_top_tracks(self, limit=None, cacheable=True): params = {} if limit: params["limit"] = limit doc = _Request(self, "chart.getTopTracks", params).execute(cacheable) seq = [] for node in doc.getElementsByTagName("track"): title = _extract(node, "name") artist = _extract(node, "name", 1) track = Track(artist, title, self) weight = _number(_extract(node, "playcount")) seq.append(TopItem(track, weight)) return seq def get_top_tags(self, limit=None, cacheable=True): doc = _Request(self, "tag.getTopTags").execute(cacheable) seq = [] for node in doc.getElementsByTagName("tag"): if limit and len(seq) >= limit: break tag = Tag(_extract(node, "name"), self) weight = _number(_extract(node, "count")) seq.append(TopItem(tag, weight)) return seq def get_geo_top_artists(self, country, limit=None, cacheable=True): params = {"country": country} if limit: params["limit"] = limit doc = _Request(self, "geo.getTopArtists", params).execute(cacheable) return _extract_top_artists(doc, self) def get_geo_top_tracks(self, country, location=None, limit=None, cacheable=True): params = {"country": country} if location: params["location"] = location if limit: params["limit"] = limit doc = _Request(self, "geo.getTopTracks", params).execute(cacheable) tracks = doc.getElementsByTagName("track") seq = [] for track in tracks: title = _extract(track, "name") artist = _extract(track, "name", 1) listeners = _extract(track, "listeners") seq.append(TopItem(Track(artist, title, self), listeners)) return seq def enable_proxy(self, host, port): self.proxy = [host, _number(port)] self.proxy_enabled = True def disable_proxy(self): self.proxy_enabled = False def is_proxy_enabled(self): return self.proxy_enabled def _get_proxy(self): return self.proxy def enable_rate_limit(self): self.limit_rate = True def disable_rate_limit(self): self.limit_rate = False def is_rate_limited(self): return self.limit_rate def enable_caching(self, file_path=None): if not file_path: self.cache_backend = _ShelfCacheBackend.create_shelf() return self.cache_backend = _ShelfCacheBackend(file_path) def disable_caching(self): self.cache_backend = None def is_caching_enabled(self): return not (self.cache_backend is None) def _get_cache_backend(self): return self.cache_backend def search_for_album(self, album_name): return AlbumSearch(album_name, self) def search_for_artist(self, artist_name): return ArtistSearch(artist_name, self) def search_for_track(self, artist_name, track_name): return TrackSearch(artist_name, track_name, self) def get_track_by_mbid(self, mbid): params = {"mbid": mbid} doc = _Request(self, "track.getInfo", params).execute(True) return Track(_extract(doc, "name", 1), _extract(doc, 
"name"), self) def get_artist_by_mbid(self, mbid): params = {"mbid": mbid} doc = _Request(self, "artist.getInfo", params).execute(True) return Artist(_extract(doc, "name"), self) def get_album_by_mbid(self, mbid): params = {"mbid": mbid} doc = _Request(self, "album.getInfo", params).execute(True) return Album(_extract(doc, "artist"), _extract(doc, "name"), self) def update_now_playing( self, artist, title, album=None, album_artist=None, duration=None, track_number=None, mbid=None, context=None, ): params = {"track": title, "artist": artist} if album: params["album"] = album if album_artist: params["albumArtist"] = album_artist if context: params["context"] = context if track_number: params["trackNumber"] = track_number if mbid: params["mbid"] = mbid if duration: params["duration"] = duration _Request(self, "track.updateNowPlaying", params).execute() def scrobble( self, artist, title, timestamp, album=None, album_artist=None, track_number=None, duration=None, stream_id=None, context=None, mbid=None, ): return self.scrobble_many( ( { "artist": artist, "title": title, "timestamp": timestamp, "album": album, "album_artist": album_artist, "track_number": track_number, "duration": duration, "stream_id": stream_id, "context": context, "mbid": mbid, }, ) )
Apache License 2.0
app-sre/qontract-reconcile
reconcile/queries.py
get_gitlab_instance
python
def get_gitlab_instance():
    gqlapi = gql.get_api()
    return gqlapi.query(GITLAB_INSTANCES_QUERY)['instances'][0]
Returns a single GitLab instance (the first instance defined in app-interface)
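A hedged call sketch; it assumes the GraphQL client behind reconcile.utils.gql has already been initialized against an app-interface endpoint.

from reconcile import queries

instance = queries.get_gitlab_instance()
print(instance['url'])  # fields follow GITLAB_INSTANCES_QUERY in the context below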
https://github.com/app-sre/qontract-reconcile/blob/67af2226f58f538b626f51700109ff5074911160/reconcile/queries.py#L184-L188
import logging import itertools from textwrap import indent from jinja2 import Template from reconcile.utils import gql APP_INTERFACE_SETTINGS_QUERY = """ { settings: app_interface_settings_v1 { vault kubeBinary mergeRequestGateway saasDeployJobTemplate hashLength dependencies { type services { name } } credentials { name secret { path field } } sqlQuery { imageRepository pullSecret { path version labels annotations type } } } } """ def get_app_interface_settings(): gqlapi = gql.get_api() settings = gqlapi.query(APP_INTERFACE_SETTINGS_QUERY)['settings'] if settings: return settings[0] return None APP_INTERFACE_EMAILS_QUERY = """ { emails: app_interface_emails_v1 { name subject to { aliases services { serviceOwners { email } } clusters { name } namespaces { name } aws_accounts { accountOwners { email } } roles { users { org_username } } users { org_username } } body } } """ def get_app_interface_emails(): gqlapi = gql.get_api() return gqlapi.query(APP_INTERFACE_EMAILS_QUERY)['emails'] CREDENTIALS_REQUESTS_QUERY = """ { credentials_requests: credentials_requests_v1 { name description user { org_username public_gpg_key } credentials } } """ def get_credentials_requests(): gqlapi = gql.get_api() return gqlapi.query(CREDENTIALS_REQUESTS_QUERY)['credentials_requests'] def get_integrations(): gqlapi = gql.get_api() return gqlapi.query(gql.INTEGRATIONS_QUERY)['integrations'] JENKINS_INSTANCES_QUERY = """ { instances: jenkins_instances_v1 { name serverUrl token { path field } previousUrls plugins deleteMethod managedProjects buildsCleanupRules { name keep_hours } } } """ def get_jenkins_instances(): gqlapi = gql.get_api() return gqlapi.query(JENKINS_INSTANCES_QUERY)['instances'] def get_jenkins_instances_previous_urls(): instances = get_jenkins_instances() all_previous_urls = [] for instance in instances: previous_urls = instance.get('previousUrls') if previous_urls: all_previous_urls.extend(previous_urls) return all_previous_urls GITLAB_INSTANCES_QUERY = """ { instances: gitlabinstance_v1 { url token { path field } managedGroups projectRequests { group projects } sslVerify } } """
Apache License 2.0
ywangd/pybufrkit
pybufrkit/coder.py
Coder.process_operator_descriptor
python
def process_operator_descriptor(self, state, bit_operator, descriptor):
    operator_code, operand_value = descriptor.operator_code, descriptor.operand_value

    if operator_code == 201:
        state.nbits_offset = (operand_value - 128) if operand_value else 0

    elif operator_code == 202:
        state.scale_offset = (operand_value - 128) if operand_value else 0

    elif operator_code == 203:
        if operand_value == 255:
            state.nbits_of_new_refval = 0
        else:
            state.nbits_of_new_refval = operand_value
            if operand_value == 0:
                state.new_refvals = {}

    elif operator_code == 204:
        if operand_value == 0:
            state.nbits_of_associated.pop()
        else:
            state.nbits_of_associated.append(operand_value)

    elif operator_code == 205:
        self.process_string(state, bit_operator, descriptor, operand_value)

    elif operator_code == 206:
        state.nbits_of_skipped_local_descriptor = operand_value

    elif operator_code == 207:
        if operand_value == 0:
            state.bsr_modifier = BSRModifier(
                nbits_increment=0, scale_increment=0, refval_factor=1
            )
        else:
            state.bsr_modifier = BSRModifier(
                nbits_increment=(10 * operand_value + 2) // 3,
                scale_increment=operand_value,
                refval_factor=10 ** operand_value,
            )

    elif operator_code == 208:
        state.new_nbytes = operand_value

    elif operator_code == 221:
        state.data_not_present_count = operand_value

    elif operator_code in (222, 223, 224, 225, 232):
        if operand_value == 0:
            state.bitmap_definition_state = BITMAP_INDICATOR
            state.mark_back_reference_boundary()
            self.process_constant(state, bit_operator, descriptor, 0)
            if operator_code == 222:
                state.status_qa_info_follows = QA_INFO_WAITING
        else:
            self.process_marker_operator_descriptor(state, bit_operator, descriptor)

    elif operator_code == 235:
        state.cancel_all_back_references()

    elif operator_code == 236:
        self.process_constant(state, bit_operator, descriptor, 0)

    elif operator_code == 237:
        if operand_value == 0:
            state.recall_bitmap()
        else:
            if state.most_recent_bitmap_is_for_reuse:
                state.cancel_bitmap()
        self.process_constant(state, bit_operator, descriptor, 0)

    else:
        raise NotImplementedError('Operator Descriptor {} not implemented'.format(descriptor))
Process Operator Descriptor.

:param state:
:param bit_operator:
:type descriptor: OperatorDescriptor
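A small worked example of the 207YYY branch above: for an operand value (YYY) of 2, the width, scale, and reference-value adjustments come out as follows.

operand_value = 2                                 # YYY of operator 207
nbits_increment = (10 * operand_value + 2) // 3   # (20 + 2) // 3 = 7 extra bits
scale_increment = operand_value                   # scale grows by 2
refval_factor = 10 ** operand_value               # reference values scaled by 100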
https://github.com/ywangd/pybufrkit/blob/6fc51f1b9c5e1fefcec872667c7466bd61679a76/pybufrkit/coder.py#L508-L592
from __future__ import absolute_import from __future__ import print_function import logging import abc import functools from collections import namedtuple from six.moves import range, zip from pybufrkit.constants import (DEFAULT_TABLES_DIR, UNITS_CODE_TABLE, UNITS_FLAG_TABLE, UNITS_STRING) from pybufrkit.errors import PyBufrKitError, UnknownDescriptor from pybufrkit.bufr import SectionConfigurer from pybufrkit.descriptors import (ElementDescriptor, FixedReplicationDescriptor, DelayedReplicationDescriptor, OperatorDescriptor, SequenceDescriptor, AssociatedDescriptor, MarkerDescriptor, SkippedLocalDescriptor) BITMAP_NA = 0 BITMAP_INDICATOR = 1 BITMAP_WAITING_FOR_BIT = 4 BITMAP_BIT_COUNTING = 5 QA_INFO_NA = 0 QA_INFO_WAITING = 1 QA_INFO_PROCESSING = 2 BSRModifier = namedtuple('BSRModifier', ['nbits_increment', 'scale_increment', 'refval_factor']) log = logging.getLogger(__file__) class AuditedList(list): def append(self, p_object): log.debug('{!r}'.format(p_object)) super(AuditedList, self).append(p_object) def __getitem__(self, item): value = super(AuditedList, self).__getitem__(item) log.debug('{!r}'.format(value)) return value class CoderState(object): def __init__(self, is_compressed, n_subsets, decoded_values_all_subsets=None): self.is_compressed = is_compressed self.n_subsets = n_subsets self.idx_subset = 0 if is_compressed: self.decoded_descriptors_all_subsets = [[]] * n_subsets self.bitmap_links_all_subsets = [{}] * n_subsets else: self.decoded_descriptors_all_subsets = [[] for _ in range(n_subsets)] self.bitmap_links_all_subsets = [{} for _ in range(n_subsets)] self.decoded_descriptors = [] if n_subsets == 0 else self.decoded_descriptors_all_subsets[0] self.bitmap_links = [] if n_subsets == 0 else self.bitmap_links_all_subsets[0] if logging.root.level == logging.getLevelName('DEBUG'): if decoded_values_all_subsets: self.decoded_values_all_subsets = [AuditedList(vals) for vals in decoded_values_all_subsets] else: self.decoded_values_all_subsets = [AuditedList() for _ in range(n_subsets)] else: self.decoded_values_all_subsets = decoded_values_all_subsets or [[] for _ in range(n_subsets)] self.decoded_values = [] if n_subsets == 0 else self.decoded_values_all_subsets[0] self.idx_value = 0 self.nbits_offset = 0 self.scale_offset = 0 self.nbits_of_new_refval = 0 self.new_refvals = {} self.nbits_of_associated = [] self.nbits_of_skipped_local_descriptor = 0 self.bsr_modifier = BSRModifier( nbits_increment=0, scale_increment=0, refval_factor=1 ) self.new_nbytes = 0 self.data_not_present_count = 0 self.status_qa_info_follows = QA_INFO_NA self.bitmap = None self.bitmapped_descriptors = None self.bitmap_definition_state = BITMAP_NA self.most_recent_bitmap_is_for_reuse = False self.n_031031 = 0 self.next_bitmapped_descriptor = None self.back_reference_boundary = 0 self.back_referenced_descriptors = None def switch_subset_context(self, idx_subset): self.idx_subset = idx_subset self.new_refvals = {} self.decoded_descriptors = self.decoded_descriptors_all_subsets[idx_subset] self.decoded_values = self.decoded_values_all_subsets[idx_subset] self.bitmap_links = self.bitmap_links_all_subsets[idx_subset] self.idx_value = 0 def mark_back_reference_boundary(self): self.back_reference_boundary = len(self.decoded_descriptors) def recall_bitmap(self): self.next_bitmapped_descriptor = functools.partial(next, iter(self.bitmapped_descriptors)) return self.bitmap def cancel_bitmap(self): self.bitmap = None def cancel_all_back_references(self): self.back_referenced_descriptors = None self.bitmap = None 
self.bitmapped_descriptors = None def add_bitmap_link(self): idx_descriptor, _ = self.next_bitmapped_descriptor() self.bitmap_links[len(self.decoded_descriptors)] = idx_descriptor def get_value_for_delayed_replication_factor(self, idx): if self.is_compressed: self._assert_equal_values_of_index(idx) value = self.decoded_values_all_subsets[0][idx] else: value = self.decoded_values[idx] if value is None or value < 0: raise PyBufrKitError('Delayed replication factor must be >= 0: got ({!r})'.format(value)) return value def build_bitmapped_descriptors(self, bitmap): if not self.back_referenced_descriptors: self.back_referenced_descriptors = [] for idx in range(self.back_reference_boundary - 1, -1, -1): descriptor = self.decoded_descriptors[idx] if type(descriptor) is ElementDescriptor: self.back_referenced_descriptors.insert(0, (idx, descriptor)) if len(self.back_referenced_descriptors) == len(bitmap): break if len(self.back_referenced_descriptors) != len(bitmap): raise PyBufrKitError('Back referenced descriptors not matching defined Bitmap') self.bitmapped_descriptors = [ (idx, d) for bit, (idx, d) in zip( bitmap, self.back_referenced_descriptors ) if bit == 0 ] self.next_bitmapped_descriptor = functools.partial(next, iter(self.bitmapped_descriptors)) def _assert_equal_values_of_index(self, idx): minv, maxv = CoderState.minmax([values[idx] for values in self.decoded_values_all_subsets]) assert minv == maxv, 'Values from all subsets are NOT identical' @staticmethod def minmax(values): mn, mx = None, None for v in values: if v is not None: if mn is None or mn > v: mn = v if mx is None or mx < v: mx = v return mn, mx class Coder(object): def __init__(self, definitions_dir=None, tables_root_dir=None): self.section_configurer = SectionConfigurer(definitions_dir=definitions_dir) self.tables_root_dir = tables_root_dir or DEFAULT_TABLES_DIR @abc.abstractmethod def process(self, *args, **kwargs): @abc.abstractmethod def process_section(self, bufr_message, bit_operator, section): def process_template(self, state, bit_operator, template): self.process_members(state, bit_operator, template.members) def process_members(self, state, bit_operator, members): for member in members: member_type = type(member) log.debug('Processing {} {}'.format(member, member.name if hasattr(member, 'name') else '')) if state.data_not_present_count: state.data_not_present_count -= 1 log.debug('Data not present: {} to go'.format(state.data_not_present_count)) if member_type is ElementDescriptor: X = member.X if not (1 <= X <= 9 or X == 31): continue if state.nbits_of_new_refval and member_type is ElementDescriptor: self.process_define_new_refval(state, bit_operator, member) continue if state.nbits_of_skipped_local_descriptor: self.process_skipped_local_descriptor(state, bit_operator, member) continue if state.bitmap_definition_state != BITMAP_NA: self.process_bitmap_definition(state, bit_operator, member) if member_type is ElementDescriptor: self.process_element_descriptor(state, bit_operator, member) elif member_type is FixedReplicationDescriptor: self.process_fixed_replication_descriptor(state, bit_operator, member) elif member_type is DelayedReplicationDescriptor: self.process_delayed_replication_descriptor(state, bit_operator, member) elif member_type is OperatorDescriptor: self.process_operator_descriptor(state, bit_operator, member) elif member_type is SequenceDescriptor: self.process_sequence_descriptor(state, bit_operator, member) else: raise UnknownDescriptor('Cannot process descriptor {} of type: {}'.format( member, 
member_type.__name__)) def process_define_new_refval(self, state, bit_operator, descriptor): log.debug('Defining new reference value for {}'.format(descriptor)) if descriptor.unit == UNITS_STRING: raise PyBufrKitError('Cannot define new reference value for descriptor of string value') self.process_new_refval(state, bit_operator, descriptor, state.nbits_of_new_refval) def process_skipped_local_descriptor(self, state, bit_operator, descriptor): log.debug('Skipping {} bits for local descriptor {}'.format( state.nbits_of_skipped_local_descriptor, descriptor)) self.process_codeflag( state, bit_operator, SkippedLocalDescriptor(descriptor.id, state.nbits_of_skipped_local_descriptor), state.nbits_of_skipped_local_descriptor ) state.nbits_of_skipped_local_descriptor = 0 def process_bitmap_definition(self, state, bit_operator, descriptor): if state.bitmap_definition_state == BITMAP_INDICATOR: if descriptor.id == 236000: log.debug('Defining bitmap for reuse') state.most_recent_bitmap_is_for_reuse = True state.bitmap_definition_state = BITMAP_WAITING_FOR_BIT state.n_031031 = 0 elif descriptor.id == 237000: log.debug('Recall most recently defined bitmap') state.bitmap_definition_state = BITMAP_NA else: log.debug('Defining non-reuse bitmap') state.most_recent_bitmap_is_for_reuse = False state.bitmap_definition_state = BITMAP_WAITING_FOR_BIT state.n_031031 = 0 elif state.bitmap_definition_state == BITMAP_WAITING_FOR_BIT: if descriptor.id == 31031: state.bitmap_definition_state = BITMAP_BIT_COUNTING state.n_031031 += 1 elif state.bitmap_definition_state == BITMAP_BIT_COUNTING: if descriptor.id == 31031: state.n_031031 += 1 else: log.debug('Bitmap defined with {} bits'.format(state.n_031031)) self.define_bitmap(state, state.most_recent_bitmap_is_for_reuse) state.bitmap_definition_state = BITMAP_NA def process_element_descriptor(self, state, bit_operator, descriptor): X = descriptor.X if state.nbits_of_associated and X != 31: log.debug('Processing associated field of {} bits'.format(state.nbits_of_associated)) self.process_associated_field(state, bit_operator, descriptor) if X == 33: if state.status_qa_info_follows == QA_INFO_WAITING: state.status_qa_info_follows = QA_INFO_PROCESSING if state.status_qa_info_follows == QA_INFO_PROCESSING: state.add_bitmap_link() else: if state.status_qa_info_follows == QA_INFO_PROCESSING: state.status_qa_info_follows = QA_INFO_NA if descriptor.unit == UNITS_STRING: nbytes = state.new_nbytes if state.new_nbytes else descriptor.nbits // 8 self.process_string(state, bit_operator, descriptor, nbytes) elif descriptor.unit in (UNITS_FLAG_TABLE, UNITS_CODE_TABLE): self.process_codeflag(state, bit_operator, descriptor, descriptor.nbits) else: nbits = (descriptor.nbits + state.nbits_offset + state.bsr_modifier.nbits_increment) scale = (descriptor.scale + state.scale_offset + state.bsr_modifier.scale_increment) scale_powered = 1.0 * 10 ** scale if descriptor.id not in state.new_refvals: refval = descriptor.refval * state.bsr_modifier.refval_factor self.process_numeric(state, bit_operator, descriptor, nbits, scale_powered, refval) else: self.process_numeric_of_new_refval(state, bit_operator, descriptor, nbits, scale_powered, state.bsr_modifier.refval_factor) def process_fixed_replication_descriptor(self, state, bit_operator, descriptor): for _ in range(descriptor.n_repeats): self.process_members(state, bit_operator, descriptor.members) def process_delayed_replication_descriptor(self, state, bit_operator, descriptor): if descriptor.id in (31011, 31012): raise 
NotImplementedError('delayed repetition descriptor') log.debug('Processing {}'.format(descriptor.factor)) self.process_element_descriptor(state, bit_operator, descriptor.factor) for _ in range(self.get_value_for_delayed_replication_factor(state)): self.process_members(state, bit_operator, descriptor.members)
MIT License
docusign/docusign-python-client
docusign_esign/models/date_signed.py
DateSigned.conditional_parent_value
python
def conditional_parent_value(self):
    return self._conditional_parent_value
Gets the conditional_parent_value of this DateSigned.

For conditional fields, this is the value of the parent tab that controls the tab's visibility. If the parent tab is a Checkbox, Radio button, Optional Signature, or Optional Initial, use "on" as the value to show that the parent tab is active.

:return: The conditional_parent_value of this DateSigned.
:rtype: str
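A usage sketch grounded in this record's context: DateSigned.__init__ accepts the model's attributes as keyword arguments, so a conditional tab can be configured as below; the parent label value is illustrative.

from docusign_esign import DateSigned

date_signed = DateSigned(
    conditional_parent_label="ApproveCheckbox",  # hypothetical parent tab label
    conditional_parent_value="on",               # "on" = show when the checkbox is checked
)
print(date_signed.conditional_parent_value)      # the getter shown above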
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/date_signed.py#L931-L939
import pprint import re import six from docusign_esign.client.configuration import Configuration class DateSigned(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'anchor_allow_white_space_in_characters': 'str', 'anchor_allow_white_space_in_characters_metadata': 'PropertyMetadata', 'anchor_case_sensitive': 'str', 'anchor_case_sensitive_metadata': 'PropertyMetadata', 'anchor_horizontal_alignment': 'str', 'anchor_horizontal_alignment_metadata': 'PropertyMetadata', 'anchor_ignore_if_not_present': 'str', 'anchor_ignore_if_not_present_metadata': 'PropertyMetadata', 'anchor_match_whole_word': 'str', 'anchor_match_whole_word_metadata': 'PropertyMetadata', 'anchor_string': 'str', 'anchor_string_metadata': 'PropertyMetadata', 'anchor_tab_processor_version': 'str', 'anchor_tab_processor_version_metadata': 'PropertyMetadata', 'anchor_units': 'str', 'anchor_units_metadata': 'PropertyMetadata', 'anchor_x_offset': 'str', 'anchor_x_offset_metadata': 'PropertyMetadata', 'anchor_y_offset': 'str', 'anchor_y_offset_metadata': 'PropertyMetadata', 'bold': 'str', 'bold_metadata': 'PropertyMetadata', 'conditional_parent_label': 'str', 'conditional_parent_label_metadata': 'PropertyMetadata', 'conditional_parent_value': 'str', 'conditional_parent_value_metadata': 'PropertyMetadata', 'custom_tab_id': 'str', 'custom_tab_id_metadata': 'PropertyMetadata', 'document_id': 'str', 'document_id_metadata': 'PropertyMetadata', 'error_details': 'ErrorDetails', 'font': 'str', 'font_color': 'str', 'font_color_metadata': 'PropertyMetadata', 'font_metadata': 'PropertyMetadata', 'font_size': 'str', 'font_size_metadata': 'PropertyMetadata', 'form_order': 'str', 'form_order_metadata': 'PropertyMetadata', 'form_page_label': 'str', 'form_page_label_metadata': 'PropertyMetadata', 'form_page_number': 'str', 'form_page_number_metadata': 'PropertyMetadata', 'height': 'str', 'height_metadata': 'PropertyMetadata', 'italic': 'str', 'italic_metadata': 'PropertyMetadata', 'locale_policy': 'LocalePolicyTab', 'merge_field': 'MergeField', 'merge_field_xml': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'page_number': 'str', 'page_number_metadata': 'PropertyMetadata', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_id_guid_metadata': 'PropertyMetadata', 'recipient_id_metadata': 'PropertyMetadata', 'smart_contract_information': 'SmartContractInformation', 'source': 'str', 'status': 'str', 'status_metadata': 'PropertyMetadata', 'tab_group_labels': 'list[str]', 'tab_group_labels_metadata': 'PropertyMetadata', 'tab_id': 'str', 'tab_id_metadata': 'PropertyMetadata', 'tab_label': 'str', 'tab_label_metadata': 'PropertyMetadata', 'tab_order': 'str', 'tab_order_metadata': 'PropertyMetadata', 'tab_type': 'str', 'tab_type_metadata': 'PropertyMetadata', 'template_locked': 'str', 'template_locked_metadata': 'PropertyMetadata', 'template_required': 'str', 'template_required_metadata': 'PropertyMetadata', 'tooltip': 'str', 'tool_tip_metadata': 'PropertyMetadata', 'underline': 'str', 'underline_metadata': 'PropertyMetadata', 'value': 'str', 'value_metadata': 'PropertyMetadata', 'width': 'str', 'width_metadata': 'PropertyMetadata', 'x_position': 'str', 'x_position_metadata': 'PropertyMetadata', 'y_position': 'str', 'y_position_metadata': 'PropertyMetadata' } attribute_map = { 'anchor_allow_white_space_in_characters': 'anchorAllowWhiteSpaceInCharacters', 
'anchor_allow_white_space_in_characters_metadata': 'anchorAllowWhiteSpaceInCharactersMetadata', 'anchor_case_sensitive': 'anchorCaseSensitive', 'anchor_case_sensitive_metadata': 'anchorCaseSensitiveMetadata', 'anchor_horizontal_alignment': 'anchorHorizontalAlignment', 'anchor_horizontal_alignment_metadata': 'anchorHorizontalAlignmentMetadata', 'anchor_ignore_if_not_present': 'anchorIgnoreIfNotPresent', 'anchor_ignore_if_not_present_metadata': 'anchorIgnoreIfNotPresentMetadata', 'anchor_match_whole_word': 'anchorMatchWholeWord', 'anchor_match_whole_word_metadata': 'anchorMatchWholeWordMetadata', 'anchor_string': 'anchorString', 'anchor_string_metadata': 'anchorStringMetadata', 'anchor_tab_processor_version': 'anchorTabProcessorVersion', 'anchor_tab_processor_version_metadata': 'anchorTabProcessorVersionMetadata', 'anchor_units': 'anchorUnits', 'anchor_units_metadata': 'anchorUnitsMetadata', 'anchor_x_offset': 'anchorXOffset', 'anchor_x_offset_metadata': 'anchorXOffsetMetadata', 'anchor_y_offset': 'anchorYOffset', 'anchor_y_offset_metadata': 'anchorYOffsetMetadata', 'bold': 'bold', 'bold_metadata': 'boldMetadata', 'conditional_parent_label': 'conditionalParentLabel', 'conditional_parent_label_metadata': 'conditionalParentLabelMetadata', 'conditional_parent_value': 'conditionalParentValue', 'conditional_parent_value_metadata': 'conditionalParentValueMetadata', 'custom_tab_id': 'customTabId', 'custom_tab_id_metadata': 'customTabIdMetadata', 'document_id': 'documentId', 'document_id_metadata': 'documentIdMetadata', 'error_details': 'errorDetails', 'font': 'font', 'font_color': 'fontColor', 'font_color_metadata': 'fontColorMetadata', 'font_metadata': 'fontMetadata', 'font_size': 'fontSize', 'font_size_metadata': 'fontSizeMetadata', 'form_order': 'formOrder', 'form_order_metadata': 'formOrderMetadata', 'form_page_label': 'formPageLabel', 'form_page_label_metadata': 'formPageLabelMetadata', 'form_page_number': 'formPageNumber', 'form_page_number_metadata': 'formPageNumberMetadata', 'height': 'height', 'height_metadata': 'heightMetadata', 'italic': 'italic', 'italic_metadata': 'italicMetadata', 'locale_policy': 'localePolicy', 'merge_field': 'mergeField', 'merge_field_xml': 'mergeFieldXml', 'name': 'name', 'name_metadata': 'nameMetadata', 'page_number': 'pageNumber', 'page_number_metadata': 'pageNumberMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_id_guid_metadata': 'recipientIdGuidMetadata', 'recipient_id_metadata': 'recipientIdMetadata', 'smart_contract_information': 'smartContractInformation', 'source': 'source', 'status': 'status', 'status_metadata': 'statusMetadata', 'tab_group_labels': 'tabGroupLabels', 'tab_group_labels_metadata': 'tabGroupLabelsMetadata', 'tab_id': 'tabId', 'tab_id_metadata': 'tabIdMetadata', 'tab_label': 'tabLabel', 'tab_label_metadata': 'tabLabelMetadata', 'tab_order': 'tabOrder', 'tab_order_metadata': 'tabOrderMetadata', 'tab_type': 'tabType', 'tab_type_metadata': 'tabTypeMetadata', 'template_locked': 'templateLocked', 'template_locked_metadata': 'templateLockedMetadata', 'template_required': 'templateRequired', 'template_required_metadata': 'templateRequiredMetadata', 'tooltip': 'tooltip', 'tool_tip_metadata': 'toolTipMetadata', 'underline': 'underline', 'underline_metadata': 'underlineMetadata', 'value': 'value', 'value_metadata': 'valueMetadata', 'width': 'width', 'width_metadata': 'widthMetadata', 'x_position': 'xPosition', 'x_position_metadata': 'xPositionMetadata', 'y_position': 'yPosition', 'y_position_metadata': 
'yPositionMetadata' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._anchor_allow_white_space_in_characters = None self._anchor_allow_white_space_in_characters_metadata = None self._anchor_case_sensitive = None self._anchor_case_sensitive_metadata = None self._anchor_horizontal_alignment = None self._anchor_horizontal_alignment_metadata = None self._anchor_ignore_if_not_present = None self._anchor_ignore_if_not_present_metadata = None self._anchor_match_whole_word = None self._anchor_match_whole_word_metadata = None self._anchor_string = None self._anchor_string_metadata = None self._anchor_tab_processor_version = None self._anchor_tab_processor_version_metadata = None self._anchor_units = None self._anchor_units_metadata = None self._anchor_x_offset = None self._anchor_x_offset_metadata = None self._anchor_y_offset = None self._anchor_y_offset_metadata = None self._bold = None self._bold_metadata = None self._conditional_parent_label = None self._conditional_parent_label_metadata = None self._conditional_parent_value = None self._conditional_parent_value_metadata = None self._custom_tab_id = None self._custom_tab_id_metadata = None self._document_id = None self._document_id_metadata = None self._error_details = None self._font = None self._font_color = None self._font_color_metadata = None self._font_metadata = None self._font_size = None self._font_size_metadata = None self._form_order = None self._form_order_metadata = None self._form_page_label = None self._form_page_label_metadata = None self._form_page_number = None self._form_page_number_metadata = None self._height = None self._height_metadata = None self._italic = None self._italic_metadata = None self._locale_policy = None self._merge_field = None self._merge_field_xml = None self._name = None self._name_metadata = None self._page_number = None self._page_number_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_id_guid_metadata = None self._recipient_id_metadata = None self._smart_contract_information = None self._source = None self._status = None self._status_metadata = None self._tab_group_labels = None self._tab_group_labels_metadata = None self._tab_id = None self._tab_id_metadata = None self._tab_label = None self._tab_label_metadata = None self._tab_order = None self._tab_order_metadata = None self._tab_type = None self._tab_type_metadata = None self._template_locked = None self._template_locked_metadata = None self._template_required = None self._template_required_metadata = None self._tooltip = None self._tool_tip_metadata = None self._underline = None self._underline_metadata = None self._value = None self._value_metadata = None self._width = None self._width_metadata = None self._x_position = None self._x_position_metadata = None self._y_position = None self._y_position_metadata = None self.discriminator = None setattr(self, "_{}".format('anchor_allow_white_space_in_characters'), kwargs.get('anchor_allow_white_space_in_characters', None)) setattr(self, "_{}".format('anchor_allow_white_space_in_characters_metadata'), kwargs.get('anchor_allow_white_space_in_characters_metadata', None)) setattr(self, "_{}".format('anchor_case_sensitive'), kwargs.get('anchor_case_sensitive', None)) setattr(self, "_{}".format('anchor_case_sensitive_metadata'), kwargs.get('anchor_case_sensitive_metadata', None)) setattr(self, "_{}".format('anchor_horizontal_alignment'), 
kwargs.get('anchor_horizontal_alignment', None)) setattr(self, "_{}".format('anchor_horizontal_alignment_metadata'), kwargs.get('anchor_horizontal_alignment_metadata', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present'), kwargs.get('anchor_ignore_if_not_present', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present_metadata'), kwargs.get('anchor_ignore_if_not_present_metadata', None)) setattr(self, "_{}".format('anchor_match_whole_word'), kwargs.get('anchor_match_whole_word', None)) setattr(self, "_{}".format('anchor_match_whole_word_metadata'), kwargs.get('anchor_match_whole_word_metadata', None)) setattr(self, "_{}".format('anchor_string'), kwargs.get('anchor_string', None)) setattr(self, "_{}".format('anchor_string_metadata'), kwargs.get('anchor_string_metadata', None)) setattr(self, "_{}".format('anchor_tab_processor_version'), kwargs.get('anchor_tab_processor_version', None)) setattr(self, "_{}".format('anchor_tab_processor_version_metadata'), kwargs.get('anchor_tab_processor_version_metadata', None)) setattr(self, "_{}".format('anchor_units'), kwargs.get('anchor_units', None)) setattr(self, "_{}".format('anchor_units_metadata'), kwargs.get('anchor_units_metadata', None)) setattr(self, "_{}".format('anchor_x_offset'), kwargs.get('anchor_x_offset', None)) setattr(self, "_{}".format('anchor_x_offset_metadata'), kwargs.get('anchor_x_offset_metadata', None)) setattr(self, "_{}".format('anchor_y_offset'), kwargs.get('anchor_y_offset', None)) setattr(self, "_{}".format('anchor_y_offset_metadata'), kwargs.get('anchor_y_offset_metadata', None)) setattr(self, "_{}".format('bold'), kwargs.get('bold', None)) setattr(self, "_{}".format('bold_metadata'), kwargs.get('bold_metadata', None)) setattr(self, "_{}".format('conditional_parent_label'), kwargs.get('conditional_parent_label', None)) setattr(self, "_{}".format('conditional_parent_label_metadata'), kwargs.get('conditional_parent_label_metadata', None)) setattr(self, "_{}".format('conditional_parent_value'), kwargs.get('conditional_parent_value', None)) setattr(self, "_{}".format('conditional_parent_value_metadata'), kwargs.get('conditional_parent_value_metadata', None)) setattr(self, "_{}".format('custom_tab_id'), kwargs.get('custom_tab_id', None)) setattr(self, "_{}".format('custom_tab_id_metadata'), kwargs.get('custom_tab_id_metadata', None)) setattr(self, "_{}".format('document_id'), kwargs.get('document_id', None)) setattr(self, "_{}".format('document_id_metadata'), kwargs.get('document_id_metadata', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('font'), kwargs.get('font', None)) setattr(self, "_{}".format('font_color'), kwargs.get('font_color', None)) setattr(self, "_{}".format('font_color_metadata'), kwargs.get('font_color_metadata', None)) setattr(self, "_{}".format('font_metadata'), kwargs.get('font_metadata', None)) setattr(self, "_{}".format('font_size'), kwargs.get('font_size', None)) setattr(self, "_{}".format('font_size_metadata'), kwargs.get('font_size_metadata', None)) setattr(self, "_{}".format('form_order'), kwargs.get('form_order', None)) setattr(self, "_{}".format('form_order_metadata'), kwargs.get('form_order_metadata', None)) setattr(self, "_{}".format('form_page_label'), kwargs.get('form_page_label', None)) setattr(self, "_{}".format('form_page_label_metadata'), kwargs.get('form_page_label_metadata', None)) setattr(self, "_{}".format('form_page_number'), kwargs.get('form_page_number', None)) setattr(self, 
"_{}".format('form_page_number_metadata'), kwargs.get('form_page_number_metadata', None)) setattr(self, "_{}".format('height'), kwargs.get('height', None)) setattr(self, "_{}".format('height_metadata'), kwargs.get('height_metadata', None)) setattr(self, "_{}".format('italic'), kwargs.get('italic', None)) setattr(self, "_{}".format('italic_metadata'), kwargs.get('italic_metadata', None)) setattr(self, "_{}".format('locale_policy'), kwargs.get('locale_policy', None)) setattr(self, "_{}".format('merge_field'), kwargs.get('merge_field', None)) setattr(self, "_{}".format('merge_field_xml'), kwargs.get('merge_field_xml', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('page_number'), kwargs.get('page_number', None)) setattr(self, "_{}".format('page_number_metadata'), kwargs.get('page_number_metadata', None)) setattr(self, "_{}".format('recipient_id'), kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_id_guid_metadata'), kwargs.get('recipient_id_guid_metadata', None)) setattr(self, "_{}".format('recipient_id_metadata'), kwargs.get('recipient_id_metadata', None)) setattr(self, "_{}".format('smart_contract_information'), kwargs.get('smart_contract_information', None)) setattr(self, "_{}".format('source'), kwargs.get('source', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_metadata'), kwargs.get('status_metadata', None)) setattr(self, "_{}".format('tab_group_labels'), kwargs.get('tab_group_labels', None)) setattr(self, "_{}".format('tab_group_labels_metadata'), kwargs.get('tab_group_labels_metadata', None)) setattr(self, "_{}".format('tab_id'), kwargs.get('tab_id', None)) setattr(self, "_{}".format('tab_id_metadata'), kwargs.get('tab_id_metadata', None)) setattr(self, "_{}".format('tab_label'), kwargs.get('tab_label', None)) setattr(self, "_{}".format('tab_label_metadata'), kwargs.get('tab_label_metadata', None)) setattr(self, "_{}".format('tab_order'), kwargs.get('tab_order', None)) setattr(self, "_{}".format('tab_order_metadata'), kwargs.get('tab_order_metadata', None)) setattr(self, "_{}".format('tab_type'), kwargs.get('tab_type', None)) setattr(self, "_{}".format('tab_type_metadata'), kwargs.get('tab_type_metadata', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_locked_metadata'), kwargs.get('template_locked_metadata', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('template_required_metadata'), kwargs.get('template_required_metadata', None)) setattr(self, "_{}".format('tooltip'), kwargs.get('tooltip', None)) setattr(self, "_{}".format('tool_tip_metadata'), kwargs.get('tool_tip_metadata', None)) setattr(self, "_{}".format('underline'), kwargs.get('underline', None)) setattr(self, "_{}".format('underline_metadata'), kwargs.get('underline_metadata', None)) setattr(self, "_{}".format('value'), kwargs.get('value', None)) setattr(self, "_{}".format('value_metadata'), kwargs.get('value_metadata', None)) setattr(self, "_{}".format('width'), kwargs.get('width', None)) setattr(self, "_{}".format('width_metadata'), kwargs.get('width_metadata', None)) setattr(self, "_{}".format('x_position'), kwargs.get('x_position', None)) setattr(self, 
"_{}".format('x_position_metadata'), kwargs.get('x_position_metadata', None)) setattr(self, "_{}".format('y_position'), kwargs.get('y_position', None)) setattr(self, "_{}".format('y_position_metadata'), kwargs.get('y_position_metadata', None)) @property def anchor_allow_white_space_in_characters(self): return self._anchor_allow_white_space_in_characters @anchor_allow_white_space_in_characters.setter def anchor_allow_white_space_in_characters(self, anchor_allow_white_space_in_characters): self._anchor_allow_white_space_in_characters = anchor_allow_white_space_in_characters @property def anchor_allow_white_space_in_characters_metadata(self): return self._anchor_allow_white_space_in_characters_metadata @anchor_allow_white_space_in_characters_metadata.setter def anchor_allow_white_space_in_characters_metadata(self, anchor_allow_white_space_in_characters_metadata): self._anchor_allow_white_space_in_characters_metadata = anchor_allow_white_space_in_characters_metadata @property def anchor_case_sensitive(self): return self._anchor_case_sensitive @anchor_case_sensitive.setter def anchor_case_sensitive(self, anchor_case_sensitive): self._anchor_case_sensitive = anchor_case_sensitive @property def anchor_case_sensitive_metadata(self): return self._anchor_case_sensitive_metadata @anchor_case_sensitive_metadata.setter def anchor_case_sensitive_metadata(self, anchor_case_sensitive_metadata): self._anchor_case_sensitive_metadata = anchor_case_sensitive_metadata @property def anchor_horizontal_alignment(self): return self._anchor_horizontal_alignment @anchor_horizontal_alignment.setter def anchor_horizontal_alignment(self, anchor_horizontal_alignment): self._anchor_horizontal_alignment = anchor_horizontal_alignment @property def anchor_horizontal_alignment_metadata(self): return self._anchor_horizontal_alignment_metadata @anchor_horizontal_alignment_metadata.setter def anchor_horizontal_alignment_metadata(self, anchor_horizontal_alignment_metadata): self._anchor_horizontal_alignment_metadata = anchor_horizontal_alignment_metadata @property def anchor_ignore_if_not_present(self): return self._anchor_ignore_if_not_present @anchor_ignore_if_not_present.setter def anchor_ignore_if_not_present(self, anchor_ignore_if_not_present): self._anchor_ignore_if_not_present = anchor_ignore_if_not_present @property def anchor_ignore_if_not_present_metadata(self): return self._anchor_ignore_if_not_present_metadata @anchor_ignore_if_not_present_metadata.setter def anchor_ignore_if_not_present_metadata(self, anchor_ignore_if_not_present_metadata): self._anchor_ignore_if_not_present_metadata = anchor_ignore_if_not_present_metadata @property def anchor_match_whole_word(self): return self._anchor_match_whole_word @anchor_match_whole_word.setter def anchor_match_whole_word(self, anchor_match_whole_word): self._anchor_match_whole_word = anchor_match_whole_word @property def anchor_match_whole_word_metadata(self): return self._anchor_match_whole_word_metadata @anchor_match_whole_word_metadata.setter def anchor_match_whole_word_metadata(self, anchor_match_whole_word_metadata): self._anchor_match_whole_word_metadata = anchor_match_whole_word_metadata @property def anchor_string(self): return self._anchor_string @anchor_string.setter def anchor_string(self, anchor_string): self._anchor_string = anchor_string @property def anchor_string_metadata(self): return self._anchor_string_metadata @anchor_string_metadata.setter def anchor_string_metadata(self, anchor_string_metadata): self._anchor_string_metadata = anchor_string_metadata 
@property def anchor_tab_processor_version(self): return self._anchor_tab_processor_version @anchor_tab_processor_version.setter def anchor_tab_processor_version(self, anchor_tab_processor_version): self._anchor_tab_processor_version = anchor_tab_processor_version @property def anchor_tab_processor_version_metadata(self): return self._anchor_tab_processor_version_metadata @anchor_tab_processor_version_metadata.setter def anchor_tab_processor_version_metadata(self, anchor_tab_processor_version_metadata): self._anchor_tab_processor_version_metadata = anchor_tab_processor_version_metadata @property def anchor_units(self): return self._anchor_units @anchor_units.setter def anchor_units(self, anchor_units): self._anchor_units = anchor_units @property def anchor_units_metadata(self): return self._anchor_units_metadata @anchor_units_metadata.setter def anchor_units_metadata(self, anchor_units_metadata): self._anchor_units_metadata = anchor_units_metadata @property def anchor_x_offset(self): return self._anchor_x_offset @anchor_x_offset.setter def anchor_x_offset(self, anchor_x_offset): self._anchor_x_offset = anchor_x_offset @property def anchor_x_offset_metadata(self): return self._anchor_x_offset_metadata @anchor_x_offset_metadata.setter def anchor_x_offset_metadata(self, anchor_x_offset_metadata): self._anchor_x_offset_metadata = anchor_x_offset_metadata @property def anchor_y_offset(self): return self._anchor_y_offset @anchor_y_offset.setter def anchor_y_offset(self, anchor_y_offset): self._anchor_y_offset = anchor_y_offset @property def anchor_y_offset_metadata(self): return self._anchor_y_offset_metadata @anchor_y_offset_metadata.setter def anchor_y_offset_metadata(self, anchor_y_offset_metadata): self._anchor_y_offset_metadata = anchor_y_offset_metadata @property def bold(self): return self._bold @bold.setter def bold(self, bold): self._bold = bold @property def bold_metadata(self): return self._bold_metadata @bold_metadata.setter def bold_metadata(self, bold_metadata): self._bold_metadata = bold_metadata @property def conditional_parent_label(self): return self._conditional_parent_label @conditional_parent_label.setter def conditional_parent_label(self, conditional_parent_label): self._conditional_parent_label = conditional_parent_label @property def conditional_parent_label_metadata(self): return self._conditional_parent_label_metadata @conditional_parent_label_metadata.setter def conditional_parent_label_metadata(self, conditional_parent_label_metadata): self._conditional_parent_label_metadata = conditional_parent_label_metadata @property
MIT License
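The generated model above routes every declared attribute through a private backing field with setattr. A minimal self-contained sketch of that same pattern; the class and attribute names here are hypothetical, not from the SDK:

class TabSketch(object):
    swagger_types = {'anchor_string': 'str', 'bold': 'bool'}

    def __init__(self, **kwargs):
        # Every declared attribute gets a private backing field, populated
        # from kwargs (defaulting to None), as in the generated model above.
        for attr in self.swagger_types:
            setattr(self, "_{}".format(attr), kwargs.get(attr, None))

tab = TabSketch(anchor_string='/sn1/', bold=True)
print(tab._anchor_string)  # '/sn1/'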
geyingli/unif
uf/modeling/util.py
get_shape_list
python
def get_shape_list(tensor, expected_rank=None, name=None):
    if name is None:
        name = tensor.name
    if expected_rank is not None:
        assert_rank(tensor, expected_rank, name)

    shape = tensor.shape.as_list()

    non_static_indexes = []
    for (index, dim) in enumerate(shape):
        if dim is None:
            non_static_indexes.append(index)

    if not non_static_indexes:
        return shape

    dyn_shape = tf.shape(tensor)
    for index in non_static_indexes:
        shape[index] = dyn_shape[index]
    return shape
Returns a list of the shape of tensor, preferring static dimensions.
https://github.com/geyingli/unif/blob/a6c9c94f60a7b906d9bd410bb446c4e3f2540ffc/uf/modeling/util.py#L173-L194
import numpy as np from ..tools import tf def gelu(num): cdf = 0.5 * (1.0 + tf.tanh((np.sqrt(2 / np.pi) * (num + 0.044715 * tf.pow(num, 3))))) return num * cdf def get_activation(activation_string): if not isinstance(activation_string, str): return activation_string if not activation_string: return None act = activation_string.lower() if act == 'linear': return None if act == 'relu': return tf.nn.relu if act == 'gelu': return gelu if act == 'tanh': return tf.tanh raise ValueError('Unsupported activation: %s' % act) def dropout(input_tensor, dropout_prob): if dropout_prob is None or dropout_prob == 0.0: return input_tensor try: output = tf.nn.dropout(input_tensor, keep_prob=1.0 - dropout_prob) except: output = tf.nn.dropout(input_tensor, rate=dropout_prob) return output def layer_norm(input_tensor, center=True, scale=True, activation_fn=None, variables_collections=None, outputs_collections=None, begin_norm_axis=-1, begin_params_axis=-1, trainable=True, name='LayerNorm'): with tf.variable_scope(name): inputs_shape = input_tensor.shape inputs_rank = inputs_shape.ndims if inputs_rank is None: raise ValueError('Inputs %s has undefined rank.' % input_tensor.name) dtype = input_tensor.dtype.base_dtype if begin_norm_axis < 0: begin_norm_axis = inputs_rank + begin_norm_axis if begin_params_axis >= inputs_rank or begin_norm_axis >= inputs_rank: raise ValueError( 'begin_params_axis (%d) and begin_norm_axis (%d) ' 'must be < rank(inputs) (%d)' % (begin_params_axis, begin_norm_axis, inputs_rank)) params_shape = inputs_shape[begin_params_axis:] if not params_shape.is_fully_defined(): raise ValueError( 'Inputs %s: shape(inputs)[%s:] is not fully defined: %s' % (input_tensor.name, begin_params_axis, inputs_shape)) beta, gamma = None, None if center: beta = tf.get_variable( 'beta', shape=params_shape, dtype=dtype, initializer=tf.zeros_initializer(), trainable=trainable) if scale: gamma = tf.get_variable( 'gamma', shape=params_shape, dtype=dtype, initializer=tf.ones_initializer(), trainable=trainable) norm_axes = list(range(begin_norm_axis, inputs_rank)) mean, variance = tf.nn.moments(input_tensor, norm_axes, keep_dims=True) variance_epsilon = 1e-12 if dtype != tf.float16 else 1e-3 outputs = tf.nn.batch_normalization( input_tensor, mean, variance, offset=beta, scale=gamma, variance_epsilon=variance_epsilon) outputs.set_shape(inputs_shape) if activation_fn is not None: outputs = activation_fn(outputs) return outputs def layer_norm_and_dropout(input_tensor, dropout_prob, trainable=True): output_tensor = layer_norm(input_tensor, trainable=trainable) output_tensor = dropout(output_tensor, dropout_prob) return output_tensor def create_initializer(initializer_range=0.02): return tf.truncated_normal_initializer(stddev=initializer_range)
Apache License 2.0
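A short usage sketch for get_shape_list, assuming the TF 1.x graph-mode API the repository uses (tf.placeholder, tensor.name):

import tensorflow as tf  # TF 1.x assumed, matching the repo's API

x = tf.placeholder(tf.float32, shape=[None, 128, 768], name='x')
shape = get_shape_list(x, expected_rank=3)
# shape[1] and shape[2] are the static Python ints 128 and 768, while
# shape[0] is a dynamic scalar tensor because the batch size is unknown
# at graph-construction time.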
yuanbaonet/baoaiback
flask_restplus/mask.py
Mask.apply
python
def apply(self, data):
    from . import fields
    if isinstance(data, (list, tuple, set)):
        return [self.apply(d) for d in data]
    elif isinstance(data, (fields.Nested, fields.List, fields.Polymorph)):
        return data.clone(self)
    elif type(data) == fields.Raw:
        return fields.Raw(default=data.default, attribute=data.attribute, mask=self)
    elif data == fields.Raw:
        return fields.Raw(mask=self)
    elif isinstance(data, fields.Raw) or isclass(data) and issubclass(data, fields.Raw):
        raise MaskError('Mask is inconsistent with model')
    elif (not isinstance(data, (dict, OrderedDict)) and hasattr(data, '__dict__')):
        data = data.__dict__
    return self.filter_data(data)
Apply a fields mask to the data.

:param data: The data or model to apply the mask to
:raises MaskError: when unable to apply the mask
https://github.com/yuanbaonet/baoaiback/blob/0fb1b604185a8bd8b72c1d2d527fb94bbaf46a86/flask_restplus/mask.py#L103-L128
from __future__ import unicode_literals, absolute_import import logging import re import six from collections import OrderedDict from inspect import isclass from .errors import RestError log = logging.getLogger(__name__) LEXER = re.compile(r'\{|\}|\,|[\w_:\-\*]+') class MaskError(RestError): pass class ParseError(MaskError): pass class Mask(OrderedDict): def __init__(self, mask=None, skip=False, **kwargs): self.skip = skip if isinstance(mask, six.string_types): super(Mask, self).__init__() self.parse(mask) elif isinstance(mask, (dict, OrderedDict)): super(Mask, self).__init__(mask, **kwargs) else: self.skip = skip super(Mask, self).__init__(**kwargs) def parse(self, mask): if not mask: return mask = self.clean(mask) fields = self previous = None stack = [] for token in LEXER.findall(mask): if token == '{': if previous not in fields: raise ParseError('Unexpected opening bracket') fields[previous] = Mask(skip=self.skip) stack.append(fields) fields = fields[previous] elif token == '}': if not stack: raise ParseError('Unexpected closing bracket') fields = stack.pop() elif token == ',': if previous in (',', '{', None): raise ParseError('Unexpected comma') else: fields[token] = True previous = token if stack: raise ParseError('Missing closing bracket') def clean(self, mask): mask = mask.replace('\n', '').strip() if mask[0] == '{': if mask[-1] != '}': raise ParseError('Missing closing bracket') mask = mask[1:-1] return mask
Apache License 2.0
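For orientation, a hedged example of the mask syntax Mask.parse accepts (the field names here are made up):

mask = Mask('{name, pet{name}}')
# mask is an OrderedDict subclass roughly equal to
# {'name': True, 'pet': Mask({'name': True})}; Mask.apply then clones
# or filters fields and plain dicts according to it.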
zachchristensen28/ta-opnsense
bin/ta_opnsense/aob_py3/cloudconnectlib/core/ext.py
time_str2str
python
def time_str2str(date_string, from_format, to_format):
    if not isinstance(date_string, six.string_types):
        _logger.warning(
            '"date_string" must be a string type, found %s,'
            ' return the original date_string directly.', type(date_string)
        )
        return date_string
    try:
        dt = datetime.strptime(date_string, from_format)
        if to_format:
            timestamp = calendar.timegm(dt.timetuple())
            to_format = _fix_timestamp_format(to_format, str(timestamp))
            to_format = _fix_microsecond_format(to_format, str(dt.microsecond))
        return dt.strftime(to_format)
    except Exception:
        _logger.warning(
            'Unable to convert date_string "%s" from format "%s" to "%s",'
            ' return the original date_string, cause=%s',
            date_string, from_format, to_format, traceback.format_exc()
        )
    return date_string
Convert a date string with a given format to another format. Return the original date string if its type is not string, or if parsing or converting it with the given formats fails.
https://github.com/zachchristensen28/ta-opnsense/blob/fc736f4c6f0fa7866b4f6d2dcf9761b6b693d6cf/bin/ta_opnsense/aob_py3/cloudconnectlib/core/ext.py#L306-L338
from builtins import str from builtins import range import calendar import json import re import traceback from collections import Iterable from datetime import datetime import six from jsonpath_ng import parse from .exceptions import FuncException, StopCCEIteration, QuitJobError from .pipemgr import PipeManager from ..common import util, log _logger = log.get_cc_logger() def regex_search(pattern, source, flags=0): if not isinstance(source, six.string_types): _logger.warning('Cannot apply regex search on non-string: %s', type(source)) return {} try: matches = re.search(pattern=pattern, string=source, flags=flags) except Exception: _logger.warning('Unable to search pattern=%s and flags=%s in string, error=%s', pattern, flags, traceback.format_exc()) return {} else: return matches.groupdict() if matches else {} def regex_match(pattern, source, flags=0): try: return re.match(pattern, source, flags) is not None except Exception: _logger.warning( 'Unable to match source with pattern=%s, cause=%s', pattern, traceback.format_exc() ) return False def regex_not_match(pattern, source, flags=0): return not regex_match(pattern, source, flags) def json_path(source, json_path_expr): if not source: _logger.debug('source to apply JSONPATH is empty, return empty.') return '' if isinstance(source, six.string_types): _logger.debug( 'source expected is a JSON, not %s. Attempt to' ' convert it to JSON', type(source) ) try: source = json.loads(source) except Exception as ex: _logger.warning( 'Unable to load JSON from source: %s. ' 'Attempt to apply JSONPATH "%s" on source directly.', ex, json_path_expr ) try: expression = parse(json_path_expr) results = [match.value for match in expression.find(source)] _logger.debug( 'Got %s elements extracted with JSONPATH expression "%s"', len(results), json_path_expr ) if not results: return '' return results[0] or '' if len(results) == 1 else results except Exception as ex: _logger.warning( 'Unable to apply JSONPATH expression "%s" on source,' ' message=%s cause=%s', json_path_expr, ex, traceback.format_exc() ) return '' def splunk_xml(candidates, time=None, index=None, host=None, source=None, sourcetype=None): if not isinstance(candidates, (list, tuple)): candidates = [candidates] time = time or None if time: try: time = float(time) except ValueError: _logger.warning( '"time" %s is expected to be a float, set "time" to None', time ) time = None xml_events = util.format_events( candidates, time=time, index=index, host=host, source=source, sourcetype=sourcetype ) _logger.info( "[%s] events are formated as splunk stream xml", len(candidates) ) return xml_events def std_output(candidates): if isinstance(candidates, six.string_types): candidates = [candidates] all_str = True for candidate in candidates: if all_str and not isinstance(candidate, six.string_types): all_str = False _logger.debug( 'The type of data needs to print is "%s" rather than %s', type(candidate), str(six.string_types) ) try: candidate = json.dumps(candidate) except: _logger.exception('The type of data needs to print is "%s"' ' rather than %s', type(candidate), str(six.string_types)) if not PipeManager().write_events(candidate): raise FuncException('Fail to output data to stdout. 
The event' ' writer is stopped or encountered exception') _logger.debug('Writing events to stdout finished.') return True def _parse_json(source, json_path_expr=None): if not source: _logger.debug('Unable to parse JSON from empty source, return empty.') return {} if json_path_expr: _logger.debug( 'Try to extract JSON from source with JSONPATH expression: %s, ', json_path_expr ) source = json_path(source, json_path_expr) elif isinstance(source, six.string_types): source = json.loads(source) return source def json_empty(source, json_path_expr=None): try: data = _parse_json(source, json_path_expr) if isinstance(data, (list, tuple)): return all(len(ele) == 0 for ele in data) return len(data) == 0 except Exception as ex: _logger.warning( 'Unable to determine whether source is json_empty, treat it as ' 'not json_empty: %s', ex ) return False def json_not_empty(source, json_path_expr=None): try: data = _parse_json(source, json_path_expr) if isinstance(data, (list, tuple)): return any(len(ele) > 0 for ele in data) return len(data) > 0 except Exception as ex: _logger.warning( 'Unable to determine whether source is json_not_empty, ' 'treat it as not json_not_empty: %s', ex ) return False def set_var(value): return value def _fix_microsecond_format(fmt, micros): micros = str(micros).zfill(6) def do_replacement(x, micros): if int(x.group(1)) in range(1, 7) and len(x.group()) % 2: return x.group().replace('%' + x.group(1) + 'f', micros[:min(int(x.group(1)), len(micros))]) return x.group() return re.sub(r'%+([1-6])f', lambda x: do_replacement(x, micros), fmt) def _fix_timestamp_format(fmt, timestamp): return re.sub( r'%+s', ( lambda x: x.group() if len(x.group()) % 2 else x.group().replace('%s', timestamp) ), fmt )
MIT License
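A quick usage sketch of time_str2str; the formats are ordinary strptime/strftime directives:

print(time_str2str('2018-01-31 16:30:00', '%Y-%m-%d %H:%M:%S', '%m/%d/%Y'))
# -> '01/31/2018'; on any parse/convert failure the input is returned as-is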
petuum/autodist
autodist/strategy/base.py
StrategyCompiler.compile
python
def compile(self, strategy):
    strategy = self._prune_nodes(strategy)
    if self._device_resolver:
        strategy = self._resolve_devices(strategy)
    return strategy
Compile the strategy.
https://github.com/petuum/autodist/blob/9bf89c90cb41eafa82d5660b979e7d0797e3cdf5/autodist/strategy/base.py#L163-L168
import os from abc import ABC, abstractmethod from datetime import datetime from autodist.const import DEFAULT_SERIALIZATION_DIR from autodist.graph_item import GraphItem from autodist.kernel.common.utils import get_op_name from autodist.proto import strategy_pb2 from autodist.resource_spec import ResourceSpec class Strategy: def __init__(self, strategy=None): self._strategy = strategy or strategy_pb2.Strategy() if strategy is None: self._strategy.id = datetime.utcnow().strftime('%Y%m%dT%H%M%SM%f') @property def id(self): return self._strategy.id @property def path(self): return self._strategy.path @property def node_config(self): return self._strategy.node_config @node_config.setter def node_config(self, value): if self._strategy.node_config is not value: del self._strategy.node_config[:] self._strategy.node_config.extend(value) @property def graph_config(self): return self._strategy.graph_config @graph_config.setter def graph_config(self, value): self._strategy.graph_config = value def copy(self): other_strategy = strategy_pb2.Strategy() other_strategy.CopyFrom(self._strategy) return Strategy(strategy=other_strategy) def __str__(self): return self._strategy.__str__() def serialize(self, path=None): if path is None: os.makedirs(DEFAULT_SERIALIZATION_DIR, exist_ok=True) path = os.path.join(DEFAULT_SERIALIZATION_DIR, self._strategy.id) self._strategy.path = path with open(path, "wb+") as f: f.write(self._strategy.SerializeToString()) @classmethod def deserialize(cls, strategy_id=None, path=None): if path is None: assert strategy_id is not None path = os.path.join(DEFAULT_SERIALIZATION_DIR, strategy_id) with open(path, 'rb') as f: data = f.read() new_strategy = strategy_pb2.Strategy() new_strategy.ParseFromString(data) return cls(strategy=new_strategy) class StrategyBuilder(ABC): @abstractmethod def build(self, graph_item: GraphItem, resource_spec: ResourceSpec) -> Strategy: raise NotImplementedError class StrategyCompiler: def __init__(self, graph_item): self._graph_item = graph_item self._device_resolver = None def set_device_resolver(self, resolver): self._device_resolver = resolver return self def _resolve_reduction_destination(self, node): synchronizer = getattr(node, node.WhichOneof('synchronizer')) if hasattr(synchronizer, 'reduction_destination'): d = synchronizer.reduction_destination synchronizer.reduction_destination = self._device_resolver(d) def _resolve_devices(self, strategy): s = strategy.copy() for n in s.node_config: if n.partitioner: for part in n.part_config: self._resolve_reduction_destination(part) else: self._resolve_reduction_destination(n) d = s.graph_config.replicas s.graph_config.replicas[:] = self._device_resolver(d) return s def _prune_nodes(self, strategy): s = strategy.copy() s.node_config = [n for n in strategy.node_config if get_op_name(n.var_name) in self._graph_item.var_op_name_to_grad_info] return s
Apache License 2.0
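A sketch of the intended call chain: set_device_resolver returns self, so the compiler supports chaining. The resolver callable below is hypothetical, and graph_item/strategy are assumed to exist:

def resolve(device_or_devices):
    # hypothetical resolver mapping symbolic device strings to addresses
    return device_or_devices

compiled = (StrategyCompiler(graph_item)
            .set_device_resolver(resolve)
            .compile(strategy))  # prunes node configs, then resolves devices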
irfanchahyadi/whatsapp-chat-analyzer
src/chat_parser.py
decode_emoji
python
def decode_emoji(text):
    replaced = emoji_pattern.sub(
        lambda x: '<Emoji_' + str(EMOJI.get(x.group(0))) + '>', text)
    cleaned = cleaner_pattern.sub('', replaced)
    if cleaned[-1:] == '\n':
        cleaned = cleaned[:-1]
    return cleaned
Convert unicode emoji characters into their string representations.
https://github.com/irfanchahyadi/whatsapp-chat-analyzer/blob/184ae0d9c9ddb9a7aa7daa81e29989f81b13766d/src/chat_parser.py#L28-L34
import re import json from datetime import datetime import pandas as pd import numpy as np from src import db_handler as db from src.settings import LANGUAGE, PATTERN, LINK_LOCATION from src.emoji import EMOJI, DEMOJI, CLEANER emoji_pattern = re.compile('|'.join(sorted([re.escape(emo) for emo in EMOJI], key=len, reverse=True))) demoji_pattern = re.compile('|'.join(DEMOJI)) cleaner_pattern = re.compile('|'.join([re.escape(c) for c in CLEANER])) def get_pattern(key, lang='en'): re_patterns = PATTERN[key] if isinstance(re_patterns, list): re_patterns = [re_pattern.format(**LANGUAGE[lang]) for re_pattern in re_patterns] elif key != 'mention': re_patterns = re_patterns.format(**LANGUAGE[lang]) return re_patterns def translate_event_type(event_type, lang): event_index = {v: k for k, v in LANGUAGE[lang].items()}[event_type] return LANGUAGE['en'][event_index]
MIT License
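An illustration of the substitution decode_emoji performs; the actual placeholder depends on what src/emoji.py maps each emoji to, so the value below is assumed:

# Supposing EMOJI contains this emoji and maps it to 'smile':
decode_emoji('ok \U0001F604')   # -> 'ok <Emoji_smile>'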
jozefstefaninstitute/newsfeed
util.py
decodeText_simple
python
def decodeText_simple(text, headers):
    contentType = headers.get('content-type', 'text/html; charset=latin1')
    if not contentType.startswith('text/'):
        raise ValueError("Can only convert HTTP responses with mime type text/*; got '%s' instead" % contentType)
    m = re.search('''<meta \s+ http-equiv \s* = \s* .?Content-Type.? \s+
                     content \s* = \s* .?text/\w+;? \s+ charset=( [^"';> ]+ ) ''',
                  text, re.IGNORECASE | re.VERBOSE)
    if not m:
        m = re.search('charset=([\w0-9\-]+)', contentType)
    if m:
        charset = m.group(1).replace('windows-', 'cp')
    else:
        charset = 'latin1'
    return text.decode(charset, 'ignore')
Takes an HTTP response body (=text) and the corresponding headers (a dict or dict-like object; httplib.HTTPResponse will do); outputs the text as a unicode string. The encoding is guessed using a combination of the HTTP headers and the META tag inside the HTML. If no encoding can be inferred, latin1 is assumed. Characters that can't be decoded are ignored. Throws ValueError if the headers do not indicate a text/* mime type. Does not use any extra libraries, unlike decodeText(), which is more accurate.
https://github.com/jozefstefaninstitute/newsfeed/blob/cacbced16b049b94084732bebb966dfbefbe3c9a/util.py#L186-L215
import os, sys import re, htmlentitydefs, string import urllib2 import time, datetime import random import gzip, StringIO import inspect, traceback import socket socket.setdefaulttimeout(5) def htmlUnescape(text): def fixup(m): text = m.group(0) if text[:2] == "&#": try: if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) except ValueError: pass else: try: text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) except KeyError: pass return text return re.sub("&#?\w+;", fixup, text) def textifyHtml(html): if html is None: return None txt = html txt = re.sub('</?\s*(br|p|div|td|th|h\d)[^>]*>', '\n', txt) txt = re.sub("<.*?>", "", txt) txt = normalizePunctuation(txt, normalizeWhitespace=True) txt = '\n'.join(line.strip() for line in txt.splitlines()) txt = re.sub(" +"," ", txt) txt = re.sub("\n+","\n", txt) return txt def xmlEscape(txt, errors='ignore'): allowedChars = set( string.uppercase + string.lowercase + string.digits + '.,;:!?_-+/!@#$%*()=[]\\\'"| \t\n\r') knownMappings = {'&':'&amp;', '<':'&lt;', '>':'&gt;'} chars = list(txt) for (i,c) in enumerate(chars): if c not in allowedChars: cc = ord(c) if 0x20<cc<0xD7FF or cc in (0x9, 0xA, 0xD) or 0xE000<cc<0xFFFD or 0x10000<cc<0x10FFFF: chars[i] = knownMappings.get(c, '&#%d;' % ord(c)) else: if errors != 'ignore': raise ValueError(u"Character is not XML-encodable: %r" % c) else: chars[i] = '\x00' return ''.join(c for c in chars if c!='\x00') _normalizedPunctuation = { 0x00A0: u' ', 0x2013: u'-', 0x2014: u' -- ', 0x2015: u' -- ', 0x2212: u'-', 0x2500: u'--', 0x2501: u'|', 0x2215: u'/', 0x2044: u'/', 0x2018: u"'", 0x2019: u"'", 0x201A: u"'", 0x201B: u"'", 0x201C: u'"', 0x201D: u'"', 0x201E: u'"', 0x00BB: u'"', 0x00AB: u'"', 0x2039: u'"', 0x203A: u'"', 0x2022: u'*', 0x2032: u"'", 0x2033: u"''", 0x0060: u"'", 0x02DD: u'"', 0x02DC: u'~', 0x00A6: u'|', 0x2026: u'...', 0x0133: u'ij', 0xFB00: u'ff', 0xFB01: u'fi', 0xFB02: u'fl', 0xFB03: u'ffi', 0xFB04: u'ffl', 0xFB06: u'st', 0x0091: u"'", 0x0092: u"'", 0x0082: u"'", 0x0084: u'"', 0x0093: u'"', 0x0094: u'"', 0x0095: u'*', 0x0096: u'-', 0x0097: u' -- ', 0x0085: u'...', } _normalizedWhitespace = { 0x000A: u' ', 0x000D: u' ', 0x0009: u' ', } def normalizePunctuation(txt, normalizeWhitespace=False): if normalizeWhitespace: mapping = _normalizedPunctuation.copy() mapping.update(_normalizedWhitespace) else: mapping = _normalizedPunctuation return unicode(txt).translate(mapping) def iso_utc_time(t): return datetime.datetime.utcfromtimestamp(time.mktime(t.timetuple())).isoformat()+'Z' def unique(lst, sorted=False): if sorted: ilst = iter(lst) lastSeen = ilst.next() yield lastSeen for el in ilst: if el==lastSeen: continue lastSeen = el yield el else: seen = set() addToSeen = seen.add for el in lst: if not hasattr(el,'__hash__') or el.__hash__==None: seen = list(seen) addToSeen = seen.append if el not in seen: addToSeen(el) yield el
BSD 3-Clause New or Revised License
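A Python 2 usage sketch (the module predates Python 3; note unichr in the surrounding code). Here the charset is sniffed from the META tag because the header carries none:

body = ('<html><head><meta http-equiv="Content-Type" '
        'content="text/html; charset=utf-8"></head><body>caf\xc3\xa9</body></html>')
print(decodeText_simple(body, {'content-type': 'text/html'}))
# the META tag wins, so the byte string is decoded as UTF-8 ('café')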
googleapis/python-logging
google/cloud/logging_v2/metric.py
Metric.update
python
def update(self, *, client=None):
    client = self._require_client(client)
    client.metrics_api.metric_update(
        self.project, self.name, self.filter_, self.description
    )
API call: update metric configuration via a PUT request.

See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update

Args:
    client (Optional[~logging_v2.client.Client]): The client to use.
        If not passed, falls back to the ``client`` stored on the
        current metric.
https://github.com/googleapis/python-logging/blob/3ab386102d06637c3b0ba100c7a36a30d0ada26e/google/cloud/logging_v2/metric.py#L152-L166
from google.cloud.exceptions import NotFound class Metric(object): def __init__(self, name, *, filter_=None, client=None, description=""): self.name = name self._client = client self.filter_ = filter_ self.description = description @property def client(self): return self._client @property def project(self): return self._client.project @property def full_name(self): return f"projects/{self.project}/metrics/{self.name}" @property def path(self): return f"/{self.full_name}" @classmethod def from_api_repr(cls, resource, client): metric_name = resource["name"] filter_ = resource["filter"] description = resource.get("description", "") return cls(metric_name, filter_=filter_, client=client, description=description) def _require_client(self, client): if client is None: client = self._client return client def create(self, *, client=None): client = self._require_client(client) client.metrics_api.metric_create( self.project, self.name, self.filter_, self.description ) def exists(self, *, client=None): client = self._require_client(client) try: client.metrics_api.metric_get(self.project, self.name) except NotFound: return False else: return True def reload(self, *, client=None): client = self._require_client(client) data = client.metrics_api.metric_get(self.project, self.name) self.description = data.get("description", "") self.filter_ = data["filter"]
Apache License 2.0
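A typical lifecycle around update(), built only from methods visible in this record (create, exists and reload appear in the context above); the client object is assumed to exist:

metric = Metric('error-count', filter_='severity>=ERROR', client=client)
if not metric.exists():
    metric.create()
metric.reload()                     # pull the stored description/filter
metric.description = 'All errors'   # local edit
metric.update()                     # PUT the new configuration back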
dmitriy-serdyuk/twinnet-asr
libs/Theano/theano/tensor/nnet/conv3d2d.py
conv3d
python
def conv3d(signals, filters, signals_shape=None, filters_shape=None,
           border_mode='valid'):
    if isinstance(border_mode, str):
        border_mode = (border_mode, border_mode, border_mode)

    if signals_shape is None:
        _signals_shape_5d = signals.shape
    else:
        _signals_shape_5d = signals_shape

    if filters_shape is None:
        _filters_shape_5d = filters.shape
    else:
        _filters_shape_5d = filters_shape

    _signals_shape_4d = (
        _signals_shape_5d[0] * _signals_shape_5d[1],
        _signals_shape_5d[2],
        _signals_shape_5d[3],
        _signals_shape_5d[4],
    )
    _filters_shape_4d = (
        _filters_shape_5d[0] * _filters_shape_5d[1],
        _filters_shape_5d[2],
        _filters_shape_5d[3],
        _filters_shape_5d[4],
    )

    if border_mode[1] != border_mode[2]:
        raise NotImplementedError('height and width bordermodes must match')
    conv2d_signal_shape = _signals_shape_4d
    conv2d_filter_shape = _filters_shape_4d
    if signals_shape is None:
        conv2d_signal_shape = None
    if filters_shape is None:
        conv2d_filter_shape = None

    out_4d = tensor.nnet.conv2d(
        signals.reshape(_signals_shape_4d),
        filters.reshape(_filters_shape_4d),
        input_shape=conv2d_signal_shape,
        filter_shape=conv2d_filter_shape,
        border_mode=border_mode[1])

    if border_mode[1] == 'valid':
        out_tmp = out_4d.reshape((
            _signals_shape_5d[0],
            _signals_shape_5d[1],
            _filters_shape_5d[0],
            _filters_shape_5d[1],
            _signals_shape_5d[3] - _filters_shape_5d[3] + 1,
            _signals_shape_5d[4] - _filters_shape_5d[4] + 1,
        ))
    elif border_mode[1] == 'full':
        out_tmp = out_4d.reshape((
            _signals_shape_5d[0],
            _signals_shape_5d[1],
            _filters_shape_5d[0],
            _filters_shape_5d[1],
            _signals_shape_5d[3] + _filters_shape_5d[3] - 1,
            _signals_shape_5d[4] + _filters_shape_5d[4] - 1,
        ))
    elif border_mode[1] == 'same':
        raise NotImplementedError()
    else:
        raise ValueError('invalid border mode', border_mode[1])

    if border_mode[0] == 'valid':
        if _filters_shape_5d[1] != 1:
            out_5d = diagonal_subtensor(out_tmp, 1, 3).sum(axis=3)
        else:
            out_5d = out_tmp.reshape((
                _signals_shape_5d[0],
                _signals_shape_5d[1],
                _filters_shape_5d[0],
                _signals_shape_5d[3] - _filters_shape_5d[3] + 1,
                _signals_shape_5d[4] - _filters_shape_5d[4] + 1,
            ))
    elif border_mode[0] in ('full', 'same'):
        raise NotImplementedError('sequence border mode', border_mode[0])
    else:
        raise ValueError('invalid border mode', border_mode[1])
    return out_5d
Convolve spatio-temporal filters with a movie. It flips the filters.

Parameters
----------
signals
    Timeseries of images whose pixels have color channels.
    Shape: [Ns, Ts, C, Hs, Ws].
filters
    Spatio-temporal filters. Shape: [Nf, Tf, C, Hf, Wf].
signals_shape
    None or a tuple/list with the shape of signals.
filters_shape
    None or a tuple/list with the shape of filters.
border_mode
    The only one tested is 'valid'.

Notes
-----
Another way to define signals: (batch, time, in channel, row, column)
Another way to define filters: (out channel, time, in channel, row, column)

For the GPU, you can use this implementation or
:func:`conv3d_fft <theano.sandbox.cuda.fftconv.conv3d_fft>`.

See Also
--------
Someone made a script that shows how to swap the axes between both 3d
convolution implementations in Theano. See the last
`attachment <https://groups.google.com/d/msg/theano-users/1S9_bZgHxVw/0cQR9a4riFUJ>`_
https://github.com/dmitriy-serdyuk/twinnet-asr/blob/799220d682306467a2b401e42e788f8c33382b00/libs/Theano/theano/tensor/nnet/conv3d2d.py#L170-L293
import theano from theano.gradient import DisconnectedType from theano.gof import Op, Apply, TopoOptimizer from theano import tensor import theano.sandbox.cuda as cuda def get_diagonal_subtensor_view(x, i0, i1): i0 = int(i0) i1 = int(i1) if x.shape[i0] < x.shape[i1]: raise NotImplementedError('is this allowed?') idx = [slice(None)] * x.ndim idx[i0] = slice(x.shape[i1] - 1, None, None) xview = x.__getitem__(tuple(idx)) strides = list(xview.strides) strides[i1] -= strides[i0] xview.strides = strides return xview class DiagonalSubtensor(Op): __props__ = ("inplace",) def __str__(self): if self.inplace: return "%s{inplace}" % self.__class__.__name__ return "%s" % self.__class__.__name__ def __init__(self, inplace=False): self.inplace = inplace if inplace: self.view_map = {0: [0]} def make_node(self, x, i0, i1): _i0 = tensor.as_tensor_variable(i0) _i1 = tensor.as_tensor_variable(i1) return Apply(self, [x, _i0, _i1], [x.type()]) def perform(self, node, inputs, output_storage): xview = get_diagonal_subtensor_view(*inputs) if self.inplace: output_storage[0][0] = xview else: output_storage[0][0] = xview.copy() def grad(self, inputs, g_outputs): z = tensor.zeros_like(inputs[0]) gx = inc_diagonal_subtensor(z, inputs[1], inputs[2], g_outputs[0]) return [gx, DisconnectedType()(), DisconnectedType()()] def connection_pattern(self, node): rval = [[True], [False], [False]] return rval diagonal_subtensor = DiagonalSubtensor(False) class IncDiagonalSubtensor(Op): __props__ = ("inplace",) def __str__(self): if self.inplace: return "%s{inplace}" % self.__class__.__name__ return "%s" % self.__class__.__name__ def __init__(self, inplace=False): self.inplace = inplace if inplace: self.destroy_map = {0: [0]} def make_node(self, x, i0, i1, amt): _i0 = tensor.as_tensor_variable(i0) _i1 = tensor.as_tensor_variable(i1) return Apply(self, [x, _i0, _i1, amt], [x.type()]) def perform(self, node, inputs, output_storage): x, i0, i1, amt = inputs if not self.inplace: x = x.copy() xview = get_diagonal_subtensor_view(x, i0, i1) xview += amt output_storage[0][0] = x def grad(self, inputs, g_outputs): x, i0, i1, amt = inputs gy = g_outputs[0] return [gy, DisconnectedType()(), DisconnectedType()(), diagonal_subtensor(gy, i0, i1)] def connection_pattern(self, node): rval = [[True], [False], [False], [True]] return rval inc_diagonal_subtensor = IncDiagonalSubtensor(False)
MIT License
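A shape sanity check for border_mode='valid', assuming a working Theano install; for valid mode the output is [Ns, Ts-Tf+1, Nf, Hs-Hf+1, Ws-Wf+1]:

import numpy as np

sig = theano.shared(np.random.randn(2, 7, 3, 32, 32).astype('float32'))  # [Ns,Ts,C,Hs,Ws]
filt = theano.shared(np.random.randn(8, 3, 3, 5, 5).astype('float32'))   # [Nf,Tf,C,Hf,Wf]
out = conv3d(sig, filt, border_mode='valid')
print(out.eval().shape)  # expected (2, 5, 8, 28, 28)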
nextdoor/ndscheduler
ndscheduler/corescheduler/datastore/base.py
DatastoreBase.get_db_url
python
def get_db_url(self):
    raise NotImplementedError('Please implement this function.')
We can use the dict passed from db_config_dict to construct a db url.

:return: Database url. See: http://docs.sqlalchemy.org/en/latest/core/engines.html
:rtype: str
https://github.com/nextdoor/ndscheduler/blob/d31016aaca480e38a69d75a66a9978a937c6a0b0/ndscheduler/corescheduler/datastore/base.py#L62-L67
import dateutil.tz import dateutil.parser from apscheduler.jobstores import sqlalchemy as sched_sqlalchemy from sqlalchemy import desc, select, MetaData from ndscheduler.corescheduler import constants from ndscheduler.corescheduler import utils from ndscheduler.corescheduler.datastore import tables class DatastoreBase(sched_sqlalchemy.SQLAlchemyJobStore): instance = None @classmethod def get_instance(cls, db_config=None, table_names=None): if not cls.instance: cls.instance = cls(db_config, table_names) return cls.instance @classmethod def destroy_instance(cls): cls.instance = None def __init__(self, db_config, table_names): self.metadata = MetaData() self.table_names = table_names self.db_config = db_config executions_tablename = constants.DEFAULT_EXECUTIONS_TABLENAME jobs_tablename = constants.DEFAULT_JOBS_TABLENAME auditlogs_tablename = constants.DEFAULT_AUDIT_LOGS_TABLENAME if table_names: if 'executions_tablename' in table_names: executions_tablename = table_names['executions_tablename'] if 'jobs_tablename' in table_names: jobs_tablename = table_names['jobs_tablename'] if 'auditlogs_tablename' in table_names: auditlogs_tablename = table_names['auditlogs_tablename'] self.executions_table = tables.get_execution_table(self.metadata, executions_tablename) self.auditlogs_table = tables.get_auditlogs_table(self.metadata, auditlogs_tablename) super(DatastoreBase, self).__init__(url=self.get_db_url(), tablename=jobs_tablename) self.metadata.create_all(self.engine)
BSD 2-Clause Simplified License
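A minimal concrete subclass sketch of the abstract hook; the project ships real datastore providers, but this exact class and its 'file_path' key are illustrative assumptions:

class DatastoreSqlite(DatastoreBase):
    def get_db_url(self):
        # self.db_config is whatever dict was handed to get_instance()
        return 'sqlite:///%s' % self.db_config['file_path']  # 'file_path' key assumed

# datastore = DatastoreSqlite.get_instance({'file_path': 'scheduler.db'}, None)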
mjhoptics/ray-optics
src/rayoptics/codev/cmdproc.py
read_lens
python
def read_lens(filename, **kwargs):
    global _glass_handler, _track_contents
    global _reading_private_catalog
    logging.basicConfig(filename='cv_cmd_proc.log',
                        filemode='w',
                        level=logging.DEBUG)
    _reading_private_catalog = False
    _track_contents = util.Counter()
    opt_model = opticalmodel.OpticalModel(do_init=False)
    _glass_handler = CVGlassHandler(filename)
    cmds = cvr.read_seq_file(filename)
    for i, c in enumerate(cmds):
        cmd_fct, tla, qlist, dlist = process_command(c)
        if cmd_fct:
            eval_str = cmd_fct + '(opt_model, tla, qlist, dlist)'
            eval(eval_str)
        else:
            logging.info('Line %d: Command %s not supported', i+1, c[0])

    post_process_input(opt_model, filename, **kwargs)
    _glass_handler.save_replacements()
    _track_contents.update(_glass_handler.track_contents)

    opt_model.update_model()

    info = _track_contents, _glass_handler.glasses_not_found
    return opt_model, info
Given a CODE V .seq filename, return an OpticalModel.

Args:
    filename (pathlib.Path): a CODE V .seq file path
    kwargs (dict): keyword args passed to the reader functions

Returns:
    an OpticalModel instance and an info tuple
https://github.com/mjhoptics/ray-optics/blob/3b2c9ab9100dd9e0cc9c52c33655dc69286ad40e/src/rayoptics/codev/cmdproc.py#L49-L84
import logging import math from . import tla from . import reader as cvr import rayoptics.optical.opticalmodel as opticalmodel from rayoptics.elem.surface import (DecenterData, Circular, Rectangular, Elliptical) from rayoptics.elem import profiles from rayoptics.oprops import doe from rayoptics.seq.medium import (Air, Glass, InterpolatedGlass, Medium, GlassHandlerBase) from rayoptics.raytr.opticalspec import Field from rayoptics.util.misc_math import isanumber from opticalglass import util _tla = tla.MapTLA() _reading_private_catalog = False _private_catalog_wvls = None _private_catalog_glasses = {} _glass_handler = None _track_contents = None def fictitious_glass_decode(gc): rindex_part = int(gc) magnitude = int(math.floor(math.log10(rindex_part))) + 1 n = 1.0 + rindex_part/10**magnitude v = round(100.0*(gc - int(gc)), 6) return n, v
BSD 3-Clause New or Revised License
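A hedged usage sketch (the .seq filename here is hypothetical):

from pathlib import Path

opt_model, (track_contents, glasses_not_found) = read_lens(Path('double_gauss.seq'))
print(glasses_not_found)  # catalog glasses the importer could not resolve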
xarray-contrib/xarray-simlab
xsimlab/xr_accessor.py
_maybe_get_model_from_context
python
def _maybe_get_model_from_context(model):
    if model is None:
        if not Model.active:
            raise ValueError("No model found in context")
        model = Model.active[0]

    if not isinstance(model, Model):
        raise TypeError(f"{model} is not an instance of xsimlab.Model")

    return model
Return the given model or try to find it in the context if there was none supplied.
https://github.com/xarray-contrib/xarray-simlab/blob/33a4c83b3046153ff4a63c44925d67b54d42beb0/xsimlab/xr_accessor.py#L31-L44
from collections import defaultdict import warnings import attr import numpy as np from xarray import as_variable, Dataset, register_dataset_accessor from .drivers import XarraySimulationDriver from .model import get_model_variables, Model from .utils import Frozen, variables_dict from .variable import VarType @register_dataset_accessor("filter") def filter_accessor(dataset): def filter(func=None, like=None, regex=None): variables = {k: v for k, v in dataset._variables.items() if func(v)} coord_names = [c for c in dataset._coord_names if c in variables] return dataset._replace_vars_and_dims(variables, coord_names=coord_names) return filter
BSD 3-Clause New or Revised License
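A sketch of the context lookup, assuming Model implements the context-manager protocol that populates Model.active (which is what this helper reads):

import xsimlab as xs

model = xs.Model({})  # empty process dict, purely for illustration
with model:
    assert _maybe_get_model_from_context(None) is model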
lisa-lab/pylearn2
pylearn2/expr/normalize.py
CrossChannelNormalizationBC01.__call__
python
def __call__(self, bc01):
    half = self.n // 2
    sq = T.sqr(bc01)
    b, ch, r, c = bc01.shape
    extra_channels = T.alloc(0., b, ch + 2*half, r, c)
    sq = T.set_subtensor(extra_channels[:, half:half+ch, :, :], sq)
    scale = self.k
    for i in xrange(self.n):
        scale += self.alpha * sq[:, i:i+ch, :, :]
    scale = scale ** self.beta
    return bc01 / scale
Apply cross-channel local response normalization to a bc01 (batch, channel, row, column) tensor, dividing each activation by (k + alpha * sum of squared activations over a window of n adjacent channels) ** beta.
https://github.com/lisa-lab/pylearn2/blob/af81e5c362f0df4df85c3e54e23b2adeec026055/pylearn2/expr/normalize.py#L35-L58
__authors__ = "Ian Goodfellow and David Warde-Farley" __copyright__ = "Copyright 2013, Universite de Montreal" __credits__ = ["Ian Goodfellow and David Warde-Farley"] __license__ = "3-clause BSD" __maintainer__ = "LISA Lab" __email__ = "pylearn-dev@googlegroups" from theano.compat.six.moves import xrange import theano.tensor as T from pylearn2.sandbox.cuda_convnet.response_norm import CrossMapNorm class CrossChannelNormalizationBC01(object): def __init__(self, alpha = 1e-4, k=2, beta=0.75, n=5): self.__dict__.update(locals()) del self.self if n % 2 == 0: raise NotImplementedError("Only works with odd n for now")
BSD 3-Clause New or Revised License
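A plain-numpy restatement of the Theano graph above, handy for checking the math: out[b, c] = x[b, c] / (k + alpha * sum of x**2 over the n-channel window centered at c) ** beta.

import numpy as np

def lrn_across_channels_ref(bc01, alpha=1e-4, k=2, beta=0.75, n=5):
    half = n // 2
    b, ch, r, c = bc01.shape
    sq = np.zeros((b, ch + 2 * half, r, c), dtype=bc01.dtype)
    sq[:, half:half + ch, :, :] = np.square(bc01)  # zero-padded channel axis
    scale = k + alpha * sum(sq[:, i:i + ch, :, :] for i in range(n))
    return bc01 / scale ** beta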
ibm/matrix-capsules-with-em-routing
utils.py
compute_votes
python
def compute_votes(poses_i, o, regularizer, tag=False):
    batch_size = int(poses_i.get_shape()[0])
    kh_kw_i = int(poses_i.get_shape()[1])

    output = tf.reshape(poses_i, shape=[batch_size, kh_kw_i, 1, 4, 4])

    w = slim.model_variable('w', shape=[1, kh_kw_i, o, 4, 4],
                            dtype=tf.float32,
                            initializer=tf.truncated_normal_initializer(
                                mean=0.0, stddev=1.0),
                            regularizer=regularizer)
    w = tf.tile(w, [batch_size, 1, 1, 1, 1])

    output = tf.tile(output, [1, 1, o, 1, 1])

    mult = tf.matmul(output, w)
    votes = tf.reshape(mult, [batch_size, kh_kw_i, o, 16])
    return votes
Compute the votes by multiplying input poses by the transformation matrix.

Multiply the poses of layer i by the transform matrix to compute the votes
for layer j.

Author:
    Ashley Gritzman 19/10/2018

Credit:
    Suofei Zhang's implementation on GitHub, "Matrix-Capsules-EM-Tensorflow"
    https://github.com/www0wwwjs1/Matrix-Capsules-EM-Tensorflow

Args:
    poses_i: poses in layer i tiled according to the kernel
        (N*OH*OW, kh*kw*i, 16)
        (64*5*5, 9*8, 16)
    o: number of output capsules, also called "parent_caps"
    regularizer:

Returns:
    votes:
        (N*OH*OW, kh*kw*i, o, 16)
        (64*5*5, 9*8, 32, 16)
https://github.com/ibm/matrix-capsules-with-em-routing/blob/2da9cdf9e1787f0b0984f7673f644d47b08f220c/utils.py#L107-L165
import tensorflow as tf import tensorflow.contrib.slim as slim import numpy as np def create_routing_map(child_space, k, s): parent_space = int((child_space - k)/s + 1) binmap = np.zeros((child_space**2, parent_space**2)) for r in range(parent_space): for c in range(parent_space): p_idx = r*parent_space + c for i in range(k): c_idx = r*s*child_space + c*s + child_space*i binmap[(c_idx):(c_idx + k), p_idx] = 1 return binmap def kernel_tile(input, kernel, stride): input_shape = input.get_shape() batch_size = int(input_shape[0]) spatial_size = int(input_shape[1]) n_capsules = int(input_shape[3]) parent_spatial_size = int((spatial_size - kernel)/stride + 1) assert input_shape[1] == input_shape[2] if len(input_shape) > 5: size = input_shape[4]*input_shape[5] else: size = 1 child_parent_matrix = create_routing_map(spatial_size, kernel, stride) child_to_parent_idx = group_children_by_parent(child_parent_matrix) input = tf.reshape(input, [batch_size, spatial_size*spatial_size, -1]) tiled = tf.gather(input, child_to_parent_idx, axis=1) tiled = tf.squeeze(tiled) tiled = tf.reshape(tiled, [batch_size, parent_spatial_size, parent_spatial_size, kernel*kernel, n_capsules, -1]) return tiled, child_parent_matrix
Apache License 2.0
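The same shape algebra in plain numpy, following the docstring's dimensions; matmul broadcasting stands in for the explicit tf.tile calls:

import numpy as np

N, ki, o = 4, 72, 32                         # N*OH*OW patches, kh*kw*i children, o parents
poses_i = np.random.randn(N, ki, 16)
w = np.random.randn(1, ki, o, 4, 4)          # one shared 4x4 transform per (child, parent)

votes = np.matmul(
    poses_i.reshape(N, ki, 1, 4, 4),         # child poses as 4x4 matrices
    np.broadcast_to(w, (N, ki, o, 4, 4)),    # tiled over the batch, as tf.tile does
).reshape(N, ki, o, 16)
print(votes.shape)                           # (4, 72, 32, 16)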
google/clusterfuzz
src/clusterfuzz/_internal/bot/tasks/setup.py
_is_search_index_data_bundle
python
def _is_search_index_data_bundle(data_bundle_name):
    return data_bundle_name.startswith(
        testcase_manager.SEARCH_INDEX_BUNDLE_PREFIX)
Return true if this is a search index data bundle, false otherwise.
https://github.com/google/clusterfuzz/blob/e9e105d66f009356c4f3fe9ae7873ffff126b234/src/clusterfuzz/_internal/bot/tasks/setup.py#L570-L573
import datetime import os import shlex import time import zipfile import six from clusterfuzz._internal.base import dates from clusterfuzz._internal.base import errors from clusterfuzz._internal.base import tasks from clusterfuzz._internal.base import utils from clusterfuzz._internal.bot import testcase_manager from clusterfuzz._internal.build_management import revisions from clusterfuzz._internal.datastore import data_handler from clusterfuzz._internal.datastore import data_types from clusterfuzz._internal.datastore import locks from clusterfuzz._internal.datastore import ndb_utils from clusterfuzz._internal.fuzzing import leak_blacklist from clusterfuzz._internal.google_cloud_utils import blobs from clusterfuzz._internal.google_cloud_utils import storage from clusterfuzz._internal.metrics import fuzzer_logs from clusterfuzz._internal.metrics import logs from clusterfuzz._internal.platforms import android from clusterfuzz._internal.system import archive from clusterfuzz._internal.system import environment from clusterfuzz._internal.system import shell _BOT_DIR = 'bot' _DATA_BUNDLE_CACHE_COUNT = 10 _DATA_BUNDLE_SYNC_INTERVAL_IN_SECONDS = 6 * 60 * 60 _DATA_BUNDLE_LOCK_INTERVAL_IN_SECONDS = 3 * 60 * 60 _SYNC_FILENAME = '.sync' _TESTCASE_ARCHIVE_EXTENSION = '.zip' def _set_timeout_value_from_user_upload(testcase_id): metadata = data_types.TestcaseUploadMetadata.query( data_types.TestcaseUploadMetadata.testcase_id == int(testcase_id)).get() if metadata and metadata.timeout: environment.set_value('TEST_TIMEOUT', metadata.timeout) def _copy_testcase_to_device_and_setup_environment(testcase, testcase_file_path): android.device.push_testcases_to_device() job_type_has_privileged_access = environment.get_value('PRIVILEGED_ACCESS') if not job_type_has_privileged_access: return package_name = android.app.get_package_name(testcase_file_path) if package_name: environment.set_value('PKG_NAME', package_name) android.device.install_application_if_needed( testcase_file_path, force_update=True) app_launch_command = testcase.get_metadata('app_launch_command') if app_launch_command: environment.set_value('APP_LAUNCH_COMMAND', app_launch_command) local_testcases_directory = environment.get_value('FUZZ_INPUTS') if (testcase_file_path and testcase_file_path.startswith(local_testcases_directory)): relative_testcase_file_path = ( testcase_file_path[len(local_testcases_directory) + 1:]) device_testcase_file_path = os.path.join( android.constants.DEVICE_TESTCASES_DIR, relative_testcase_file_path) android.adb.run_shell_command(['chmod', '0755', device_testcase_file_path]) def _get_application_arguments(testcase, job_type, task_name): testcase_args = testcase.minimized_arguments if not testcase_args: return None if task_name != 'variant': return testcase_args if environment.is_afl_job(job_type): return '%TESTCASE%' job_args = data_handler.get_value_from_job_definition( job_type, 'APP_ARGS', default='') job_args_list = shlex.split(job_args) testcase_args_list = shlex.split(testcase_args) testcase_args_filtered_list = [ arg for arg in testcase_args_list if arg not in job_args_list ] app_args = ' '.join(testcase_args_filtered_list) if job_args: if app_args: app_args += ' ' app_args += job_args return app_args def _setup_memory_tools_environment(testcase): env = testcase.get_metadata('env') if not env: environment.reset_current_memory_tool_options( redzone_size=testcase.redzone, disable_ubsan=testcase.disable_ubsan) return for options_name, options_value in six.iteritems(env): if not options_value: 
environment.remove_key(options_name) continue environment.set_memory_tool_options(options_name, options_value) def prepare_environment_for_testcase(testcase, job_type, task_name): _setup_memory_tools_environment(testcase) environment.set_value('WINDOW_ARG', testcase.window_argument) if testcase.timeout_multiplier: test_timeout = environment.get_value('TEST_TIMEOUT') environment.set_value('TEST_TIMEOUT', int(test_timeout * testcase.timeout_multiplier)) fuzz_target = testcase.get_metadata('fuzzer_binary_name') if fuzz_target: environment.set_value('FUZZ_TARGET', fuzz_target) app_args = _get_application_arguments(testcase, job_type, task_name) if app_args: environment.set_value('APP_ARGS', app_args) def setup_testcase(testcase, job_type, fuzzer_override=None): fuzzer_name = fuzzer_override or testcase.fuzzer_name task_name = environment.get_value('TASK_NAME') testcase_fail_wait = environment.get_value('FAIL_WAIT') testcase_id = testcase.key.id() shell.clear_testcase_directories() if testcase.uploader_email: _set_timeout_value_from_user_upload(testcase_id) if fuzzer_name: try: update_successful = update_fuzzer_and_data_bundles(fuzzer_name) except errors.InvalidFuzzerError: testcase.open = False testcase.fixed = 'NA' testcase.set_metadata('fuzzer_was_deleted', True) logs.log_error('Closed testcase %d with invalid fuzzer %s.' % (testcase_id, fuzzer_name)) error_message = 'Fuzzer %s no longer exists' % fuzzer_name data_handler.update_testcase_comment(testcase, data_types.TaskState.ERROR, error_message) return None, None, None if not update_successful: error_message = 'Unable to setup fuzzer %s' % fuzzer_name data_handler.update_testcase_comment(testcase, data_types.TaskState.ERROR, error_message) tasks.add_task( task_name, testcase_id, job_type, wait_time=testcase_fail_wait) return None, None, None file_list, input_directory, testcase_file_path = unpack_testcase(testcase) if not file_list: error_message = 'Unable to setup testcase %s' % testcase_file_path data_handler.update_testcase_comment(testcase, data_types.TaskState.ERROR, error_message) tasks.add_task( task_name, testcase_id, job_type, wait_time=testcase_fail_wait) return None, None, None if environment.is_android(): _copy_testcase_to_device_and_setup_environment(testcase, testcase_file_path) if environment.is_trusted_host(): from clusterfuzz._internal.bot.untrusted_runner import file_host file_host.push_testcases_to_worker() is_lsan_enabled = environment.get_value('LSAN') if is_lsan_enabled: leak_blacklist.copy_global_to_local_blacklist(excluded_testcase=testcase) prepare_environment_for_testcase(testcase, job_type, task_name) return file_list, input_directory, testcase_file_path def _get_testcase_file_and_path(testcase): testcase_absolute_path = testcase.absolute_path testcase_path_is_absolute = ( testcase_absolute_path[1:3] == ':\\' or os.path.isabs(testcase_absolute_path)) if environment.platform() != 'WINDOWS' and '\\' in testcase_absolute_path: testcase_absolute_path = testcase_absolute_path.replace('\\', os.sep) input_directory = environment.get_value('FUZZ_INPUTS') if not testcase_path_is_absolute: testcase_path = os.path.join(input_directory, testcase_absolute_path) return input_directory, testcase_path nfs_root = environment.get_value('NFS_ROOT') if nfs_root and testcase_absolute_path.startswith(nfs_root): return input_directory, testcase_absolute_path root_directory = environment.get_value('ROOT_DIR') search_string = '%s%s%s' % (os.sep, _BOT_DIR, os.sep) search_index = testcase_absolute_path.find(search_string) relative_path = 
testcase_absolute_path[search_index + len(search_string):] testcase_path = os.path.join(root_directory, _BOT_DIR, relative_path) return input_directory, testcase_path def unpack_testcase(testcase): input_directory, testcase_file_path = _get_testcase_file_and_path(testcase) minimized = testcase.minimized_keys and testcase.minimized_keys != 'NA' if minimized: key = testcase.minimized_keys archived = bool(testcase.archive_state & data_types.ArchiveStatus.MINIMIZED) else: key = testcase.fuzzed_keys archived = bool(testcase.archive_state & data_types.ArchiveStatus.FUZZED) if archived: if minimized: temp_filename = ( os.path.join(input_directory, str(testcase.key.id()) + _TESTCASE_ARCHIVE_EXTENSION)) else: temp_filename = os.path.join(input_directory, testcase.archive_filename) else: temp_filename = testcase_file_path if not blobs.read_blob_to_disk(key, temp_filename): return None, input_directory, testcase_file_path file_list = [] if archived: archive.unpack(temp_filename, input_directory) file_list = archive.get_file_list(temp_filename) shell.remove_file(temp_filename) file_exists = False for file_name in file_list: if os.path.basename(file_name) == os.path.basename(testcase_file_path): file_exists = True break if not file_exists: logs.log_error( 'Expected file to run %s is not in archive. Base directory is %s and ' 'files in archive are [%s].' % (testcase_file_path, input_directory, ','.join(file_list))) return None, input_directory, testcase_file_path else: file_list.append(testcase_file_path) return file_list, input_directory, testcase_file_path def _get_data_bundle_update_lock_name(data_bundle_name): return 'update:data_bundle:%s' % data_bundle_name def _get_data_bundle_sync_file_path(data_bundle_directory): return os.path.join(data_bundle_directory, _SYNC_FILENAME) def _fetch_lock_for_data_bundle_update(data_bundle): if data_bundle.is_local: return True data_bundle_lock_name = _get_data_bundle_update_lock_name(data_bundle.name) return locks.acquire_lock( data_bundle_lock_name, max_hold_seconds=_DATA_BUNDLE_LOCK_INTERVAL_IN_SECONDS, by_zone=True) def _clear_old_data_bundles_if_needed(): data_bundles_directory = environment.get_value('DATA_BUNDLES_DIR') dirs = [] for filename in os.listdir(data_bundles_directory): file_path = os.path.join(data_bundles_directory, filename) if not os.path.isdir(file_path): continue dirs.append(file_path) dirs_to_remove = sorted( dirs, key=os.path.getmtime, reverse=True)[_DATA_BUNDLE_CACHE_COUNT:] for dir_to_remove in dirs_to_remove: logs.log('Removing data bundle directory to keep disk cache small: %s' % dir_to_remove) shell.remove_directory(dir_to_remove) def update_data_bundle(fuzzer, data_bundle): from clusterfuzz._internal.google_cloud_utils import gsutil if not data_bundle.is_local: testcase_disk_directory = environment.get_value('FUZZ_INPUTS_DISK') environment.set_value('FUZZ_INPUTS', testcase_disk_directory) data_bundle_directory = get_data_bundle_directory(fuzzer.name) if not data_bundle_directory: logs.log_error('Failed to setup data bundle %s.' % data_bundle.name) return False if not shell.create_directory( data_bundle_directory, create_intermediates=True): logs.log_error( 'Failed to create data bundle %s directory.' % data_bundle.name) return False if _is_data_bundle_up_to_date(data_bundle, data_bundle_directory): logs.log('Data bundle was recently synced, skip.') return True if not _fetch_lock_for_data_bundle_update(data_bundle): logs.log_error('Failed to lock data bundle %s.' 
% data_bundle.name) return False if _is_data_bundle_up_to_date(data_bundle, data_bundle_directory): logs.log('Another bot finished the sync, skip.') _release_lock_for_data_bundle_update(data_bundle) return True time_before_sync_start = time.time() if not _is_search_index_data_bundle(data_bundle.name): bucket_url = data_handler.get_data_bundle_bucket_url(data_bundle.name) if environment.is_trusted_host() and data_bundle.sync_to_worker: from clusterfuzz._internal.bot.untrusted_runner import corpus_manager from clusterfuzz._internal.bot.untrusted_runner import file_host worker_data_bundle_directory = file_host.rebase_to_worker_root( data_bundle_directory) file_host.create_directory( worker_data_bundle_directory, create_intermediates=True) result = corpus_manager.RemoteGSUtilRunner().rsync( bucket_url, worker_data_bundle_directory, delete=False) else: result = gsutil.GSUtilRunner().rsync( bucket_url, data_bundle_directory, delete=False) if result.return_code != 0: logs.log_error('Failed to sync data bundle %s: %s.' % (data_bundle.name, result.output)) _release_lock_for_data_bundle_update(data_bundle) return False testcase_manager.create_testcase_list_file(data_bundle_directory) sync_file_path = _get_data_bundle_sync_file_path(data_bundle_directory) utils.write_data_to_file(time_before_sync_start, sync_file_path) if environment.is_trusted_host() and data_bundle.sync_to_worker: from clusterfuzz._internal.bot.untrusted_runner import file_host worker_sync_file_path = file_host.rebase_to_worker_root(sync_file_path) file_host.copy_file_to_worker(sync_file_path, worker_sync_file_path) _release_lock_for_data_bundle_update(data_bundle) return True def update_fuzzer_and_data_bundles(fuzzer_name): fuzzer = data_types.Fuzzer.query(data_types.Fuzzer.name == fuzzer_name).get() if not fuzzer: logs.log_error('No fuzzer exists with name %s.' 
% fuzzer_name) raise errors.InvalidFuzzerError fuzzer_directory = get_fuzzer_directory(fuzzer_name) environment.set_value('FUZZER_DIR', fuzzer_directory) environment.set_value('UNTRUSTED_CONTENT', fuzzer.untrusted_content) if fuzzer.has_large_testcases: testcase_disk_directory = environment.get_value('FUZZ_INPUTS_DISK') environment.set_value('FUZZ_INPUTS', testcase_disk_directory) if fuzzer.timeout: environment.set_value('TEST_TIMEOUT', fuzzer.timeout) fuzz_test_timeout = environment.get_value('FUZZ_TEST_TIMEOUT') if fuzz_test_timeout and fuzz_test_timeout < fuzzer.timeout: environment.set_value('FUZZ_TEST_TIMEOUT', fuzzer.timeout) max_testcases = environment.get_value('MAX_TESTCASES') if fuzzer.max_testcases and fuzzer.max_testcases < max_testcases: environment.set_value('MAX_TESTCASES', fuzzer.max_testcases) version_file = os.path.join(fuzzer_directory, '.%s_version' % fuzzer_name) if (not fuzzer.builtin and revisions.needs_update(version_file, fuzzer.revision)): logs.log('Fuzzer update was found, updating.') if not shell.remove_directory(fuzzer_directory, recreate=True): logs.log_error('Failed to clear fuzzer directory.') return None archive_path = os.path.join(fuzzer_directory, fuzzer.filename) if not blobs.read_blob_to_disk(fuzzer.blobstore_key, archive_path): logs.log_error('Failed to copy fuzzer archive.') return None try: archive.unpack(archive_path, fuzzer_directory) except Exception: error_message = ('Failed to unpack fuzzer archive %s ' '(bad archive or unsupported format).') % fuzzer.filename logs.log_error(error_message) fuzzer_logs.upload_script_log( 'Fatal error: ' + error_message, fuzzer_name=fuzzer_name) return None fuzzer_path = os.path.join(fuzzer_directory, fuzzer.executable_path) if not os.path.exists(fuzzer_path): error_message = ('Fuzzer executable %s not found. ' 'Check fuzzer configuration.') % fuzzer.executable_path logs.log_error(error_message) fuzzer_logs.upload_script_log( 'Fatal error: ' + error_message, fuzzer_name=fuzzer_name) return None os.chmod(fuzzer_path, 0o750) shell.remove_file(archive_path) revisions.write_revision_to_revision_file(version_file, fuzzer.revision) logs.log('Updated fuzzer to revision %d.' % fuzzer.revision) _clear_old_data_bundles_if_needed() data_bundles = ndb_utils.get_all_from_query( data_types.DataBundle.query( data_types.DataBundle.name == fuzzer.data_bundle_name)) for data_bundle in data_bundles: if not update_data_bundle(fuzzer, data_bundle): return None if fuzzer.launcher_script: fuzzer_launcher_path = os.path.join(fuzzer_directory, fuzzer.launcher_script) environment.set_value('LAUNCHER_PATH', fuzzer_launcher_path) if environment.is_trusted_host(): from clusterfuzz._internal.bot.untrusted_runner import file_host worker_fuzzer_directory = file_host.rebase_to_worker_root( fuzzer_directory) file_host.copy_directory_to_worker( fuzzer_directory, worker_fuzzer_directory, replace=True) return fuzzer
Apache License 2.0
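The context above caps the on-disk data bundle cache at _DATA_BUNDLE_CACHE_COUNT directories, evicting the least recently modified ones. A minimal standalone sketch of that keep-newest-N eviction pattern (the name prune_cache and the shutil-based removal are illustrative, not ClusterFuzz's helpers):

import os
import shutil

def prune_cache(cache_root, keep=10):
    # Keep the `keep` most recently modified subdirectories, evict the rest.
    dirs = [os.path.join(cache_root, name)
            for name in os.listdir(cache_root)
            if os.path.isdir(os.path.join(cache_root, name))]
    for stale in sorted(dirs, key=os.path.getmtime, reverse=True)[keep:]:
        shutil.rmtree(stale, ignore_errors=True)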
xolox/python-coloredlogs
coloredlogs/__init__.py
find_program_name
python
def find_program_name(): return ((os.path.basename(sys.argv[0]) if sys.argv and sys.argv[0] != '-c' else '') or (os.path.basename(sys.executable) if sys.executable else '') or 'python')
Select a suitable program name to embed in log messages. :returns: One of the following strings (in decreasing order of preference): 1. The base name of the currently running Python program or script (based on the value at index zero of :data:`sys.argv`). 2. The base name of the Python executable (based on :data:`sys.executable`). 3. The string 'python'.
https://github.com/xolox/python-coloredlogs/blob/65bdfe976ac0bf81e8c0bd9a98242b9d666b2859/coloredlogs/__init__.py#L804-L819
import collections import logging import os import re import socket import sys from humanfriendly import coerce_boolean from humanfriendly.compat import coerce_string, is_string, on_windows from humanfriendly.terminal import ANSI_COLOR_CODES, ansi_wrap, enable_ansi_support, terminal_supports_colors from humanfriendly.text import format, split __version__ = '15.0.1' DEFAULT_LOG_LEVEL = logging.INFO DEFAULT_LOG_FORMAT = '%(asctime)s %(hostname)s %(name)s[%(process)d] %(levelname)s %(message)s' DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S' CHROOT_FILES = ['/etc/debian_chroot'] DEFAULT_FIELD_STYLES = dict( asctime=dict(color='green'), hostname=dict(color='magenta'), levelname=dict(color='black', bold=True), name=dict(color='blue'), programname=dict(color='cyan'), username=dict(color='yellow'), ) DEFAULT_LEVEL_STYLES = dict( spam=dict(color='green', faint=True), debug=dict(color='green'), verbose=dict(color='blue'), info=dict(), notice=dict(color='magenta'), warning=dict(color='yellow'), success=dict(color='green', bold=True), error=dict(color='red'), critical=dict(color='red', bold=True), ) DEFAULT_FORMAT_STYLE = '%' FORMAT_STYLE_PATTERNS = { '%': r'%\((\w+)\)[#0 +-]*\d*(?:\.\d+)?[hlL]?[diouxXeEfFgGcrs%]', '{': r'{(\w+)[^}]*}', '$': r'\$(\w+)|\${(\w+)}', } def auto_install(): if coerce_boolean(os.environ.get('COLOREDLOGS_AUTO_INSTALL', 'false')): install() def install(level=None, **kw): logger = kw.get('logger') or logging.getLogger() reconfigure = kw.get('reconfigure', True) stream = kw.get('stream') or sys.stderr style = check_style(kw.get('style') or DEFAULT_FORMAT_STYLE) if level is None: level = os.environ.get('COLOREDLOGS_LOG_LEVEL', DEFAULT_LOG_LEVEL) level = level_to_number(level) match_streams = ([sys.stdout, sys.stderr] if stream in [sys.stdout, sys.stderr, None] else [stream]) match_handler = lambda handler: match_stream_handler(handler, match_streams) handler, logger = replace_handler(logger, match_handler, reconfigure) if not (handler and not reconfigure): syslog_enabled = kw.get('syslog') if syslog_enabled not in (None, False): from coloredlogs.syslog import enable_system_logging if syslog_enabled is True: enable_system_logging() else: enable_system_logging(level=syslog_enabled) use_colors = kw.get('isatty', None) if use_colors or (use_colors is None): if use_colors is None and 'NO_COLOR' in os.environ: use_colors = False if (use_colors or use_colors is None) and on_windows(): use_colors = enable_ansi_support() if use_colors is None: use_colors = terminal_supports_colors(stream) filters = handler.filters if handler else None if stream is sys.stderr: handler = StandardErrorHandler() else: handler = logging.StreamHandler(stream) handler.setLevel(level) if filters: handler.filters = filters formatter_options = dict(fmt=kw.get('fmt'), datefmt=kw.get('datefmt')) if style != DEFAULT_FORMAT_STYLE: formatter_options['style'] = style if not formatter_options['fmt']: formatter_options['fmt'] = os.environ.get('COLOREDLOGS_LOG_FORMAT') or DEFAULT_LOG_FORMAT if not formatter_options['datefmt']: formatter_options['datefmt'] = os.environ.get('COLOREDLOGS_DATE_FORMAT') or DEFAULT_DATE_FORMAT if kw.get('milliseconds'): parser = FormatStringParser(style=style) if not (parser.contains_field(formatter_options['fmt'], 'msecs') or '%f' in formatter_options['datefmt']): pattern = parser.get_pattern('asctime') replacements = {'%': '%(msecs)03d', '{': '{msecs:03}', '$': '${msecs}'} formatter_options['fmt'] = pattern.sub( r'\g<0>,' + replacements[style], formatter_options['fmt'], ) HostNameFilter.install( 
fmt=formatter_options['fmt'], handler=handler, style=style, use_chroot=kw.get('use_chroot', True), ) ProgramNameFilter.install( fmt=formatter_options['fmt'], handler=handler, programname=kw.get('programname'), style=style, ) UserNameFilter.install( fmt=formatter_options['fmt'], handler=handler, username=kw.get('username'), style=style, ) if use_colors: for name, environment_name in (('field_styles', 'COLOREDLOGS_FIELD_STYLES'), ('level_styles', 'COLOREDLOGS_LEVEL_STYLES')): value = kw.get(name) if value is None: environment_value = os.environ.get(environment_name) if environment_value is not None: value = parse_encoded_styles(environment_value) if value is not None: formatter_options[name] = value formatter_type = ColoredFormatter if use_colors else BasicFormatter handler.setFormatter(formatter_type(**formatter_options)) adjust_level(logger, level) logger.addHandler(handler) def check_style(value): if sys.version_info[:2] >= (3, 2): if value not in FORMAT_STYLE_PATTERNS: msg = "Unsupported logging format style! (%r)" raise ValueError(format(msg, value)) elif value != DEFAULT_FORMAT_STYLE: msg = "Format string styles other than %r require Python 3.2+!" raise ValueError(msg, DEFAULT_FORMAT_STYLE) return value def increase_verbosity(): defined_levels = sorted(set(find_defined_levels().values())) current_index = defined_levels.index(get_level()) selected_index = max(0, current_index - 1) set_level(defined_levels[selected_index]) def decrease_verbosity(): defined_levels = sorted(set(find_defined_levels().values())) current_index = defined_levels.index(get_level()) selected_index = min(current_index + 1, len(defined_levels) - 1) set_level(defined_levels[selected_index]) def is_verbose(): return get_level() < DEFAULT_LOG_LEVEL def get_level(): handler, logger = find_handler(logging.getLogger(), match_stream_handler) return handler.level if handler else DEFAULT_LOG_LEVEL def set_level(level): handler, logger = find_handler(logging.getLogger(), match_stream_handler) if handler and logger: handler.setLevel(level_to_number(level)) adjust_level(logger, level) else: install(level=level) def adjust_level(logger, level): level = level_to_number(level) if logger.getEffectiveLevel() > level: logger.setLevel(level) def find_defined_levels(): defined_levels = {} for name in dir(logging): if name.isupper(): value = getattr(logging, name) if isinstance(value, int): defined_levels[name] = value return defined_levels def level_to_number(value): if is_string(value): try: defined_levels = find_defined_levels() value = defined_levels[value.upper()] except KeyError: value = DEFAULT_LOG_LEVEL return value def find_level_aliases(): mapping = collections.defaultdict(list) for name, value in find_defined_levels().items(): mapping[value].append(name) aliases = {} for value, names in mapping.items(): if len(names) > 1: names = sorted(names, key=lambda n: len(n)) canonical_name = names.pop() for alias in names: aliases[alias] = canonical_name return aliases def parse_encoded_styles(text, normalize_key=None): parsed_styles = {} for assignment in split(text, ';'): name, _, styles = assignment.partition('=') target = parsed_styles.setdefault(name, {}) for token in split(styles, ','): if token.isdigit(): target['color'] = int(token) elif token in ANSI_COLOR_CODES: target['color'] = token elif '=' in token: name, _, value = token.partition('=') if name in ('color', 'background'): if value.isdigit(): target[name] = int(value) elif value in ANSI_COLOR_CODES: target[name] = value else: target[token] = True return parsed_styles def 
find_hostname(use_chroot=True): for chroot_file in CHROOT_FILES: try: with open(chroot_file) as handle: first_line = next(handle) name = first_line.strip() if name: return name except Exception: pass return socket.gethostname()
MIT License
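find_program_name() feeds the %(programname)s field that ProgramNameFilter installs, so its result shows up in formatted log lines. A minimal usage sketch of install() with that field (format string chosen for illustration):

import logging
import coloredlogs

# %(programname)s is populated by ProgramNameFilter, which falls back to
# find_program_name() when no explicit programname is passed to install().
coloredlogs.install(
    level='INFO',
    fmt='%(asctime)s %(programname)s[%(process)d] %(levelname)s %(message)s',
)
logging.getLogger(__name__).info('started')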
fengliu90/dk-for-tst
utils.py
MatConvert
python
def MatConvert(x, device, dtype): x = torch.from_numpy(x).to(device, dtype) return x
Convert a numpy array to a torch tensor.
https://github.com/fengliu90/dk-for-tst/blob/1c4065e81fb902e46e3316bfd98eadd0b7f22d74/utils.py#L38-L41
import numpy as np import torch import torch.utils.data import freqopttest.data as data import freqopttest.tst as tst is_cuda = True class ModelLatentF(torch.nn.Module): def __init__(self, x_in, H, x_out): super(ModelLatentF, self).__init__() self.restored = False self.latent = torch.nn.Sequential( torch.nn.Linear(x_in, H, bias=True), torch.nn.Softplus(), torch.nn.Linear(H, H, bias=True), torch.nn.Softplus(), torch.nn.Linear(H, H, bias=True), torch.nn.Softplus(), torch.nn.Linear(H, x_out, bias=True), ) def forward(self, input): fealant = self.latent(input) return fealant def get_item(x, is_cuda): if is_cuda: x = x.cpu().detach().numpy() else: x = x.detach().numpy() return x
MIT License
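A quick usage sketch of MatConvert as defined above; the random input shape and float32 dtype are arbitrary choices for illustration:

import numpy as np
import torch

def MatConvert(x, device, dtype):
    # Convert a numpy array to a torch tensor on the given device/dtype.
    return torch.from_numpy(x).to(device, dtype)

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
t = MatConvert(np.random.randn(4, 2), device, torch.float32)
print(t.dtype, t.device)  # torch.float32 on cuda:0 or cpu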
openstack/openstacksdk
openstack/cloud/_baremetal.py
BaremetalCloudMixin.inspect_machine
python
def inspect_machine(self, name_or_id, wait=False, timeout=3600): return_to_available = False node = self.baremetal.get_node(name_or_id) if node.provision_state == 'available': if node.instance_id: raise exc.OpenStackCloudException( "Refusing to inspect available machine %(node)s " "which is associated with an instance " "(instance_uuid %(inst)s)" % {'node': node.id, 'inst': node.instance_id}) return_to_available = True node = self.baremetal.set_node_provision_state(node, 'manage', wait=True, timeout=timeout) if node.provision_state not in ('manageable', 'inspect failed'): raise exc.OpenStackCloudException( "Machine %(node)s must be in 'manageable', 'inspect failed' " "or 'available' provision state to start inspection, the " "current state is %(state)s" % {'node': node.id, 'state': node.provision_state}) node = self.baremetal.set_node_provision_state(node, 'inspect', wait=True, timeout=timeout) if return_to_available: node = self.baremetal.set_node_provision_state(node, 'provide', wait=True, timeout=timeout) return self._normalize_machine(node)
Inspect a Baremetal machine. Engages the Ironic node inspection behavior in order to collect metadata about the baremetal machine. :param name_or_id: String representing machine name or UUID value in order to identify the machine. :param wait: Boolean value controlling if the method is to wait for the desired state to be reached or a failure to occur. :param timeout: Integer value, defaulting to 3600 seconds, for the wait state to reach completion. :returns: ``munch.Munch`` representing the current state of the machine upon exit of the method.
https://github.com/openstack/openstacksdk/blob/b38f16e0e8f47f5bdbfd57506869bb6ee2533005/openstack/cloud/_baremetal.py#L105-L162
import types import warnings import jsonpatch from openstack.cloud import _normalize from openstack.cloud import _utils from openstack.cloud import exc from openstack import utils class BaremetalCloudMixin(_normalize.Normalizer): @property def _baremetal_client(self): if 'baremetal' not in self._raw_clients: client = self._get_raw_client('baremetal') client = self._get_versioned_client( 'baremetal', min_version=1, max_version='1.latest') self._raw_clients['baremetal'] = client return self._raw_clients['baremetal'] def list_nics(self): return [nic._to_munch() for nic in self.baremetal.ports(details=True)] def list_nics_for_machine(self, uuid): return [nic._to_munch() for nic in self.baremetal.ports(details=True, node_id=uuid)] def get_nic_by_mac(self, mac): results = [nic._to_munch() for nic in self.baremetal.ports(address=mac, details=True)] try: return results[0] except IndexError: return None def list_machines(self): return [self._normalize_machine(node) for node in self.baremetal.nodes()] def get_machine(self, name_or_id): try: return self._normalize_machine(self.baremetal.get_node(name_or_id)) except exc.OpenStackCloudResourceNotFound: return None def get_machine_by_mac(self, mac): nic = self.get_nic_by_mac(mac) if nic is None: return None else: return self.get_machine(nic['node_uuid'])
Apache License 2.0
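inspect_machine() drives the Ironic provision-state machine: an 'available' node is moved to 'manage', then 'inspect', then back via 'provide'. A hedged usage sketch, assuming a Connection obtained from openstack.connect() exposes the mixin and that 'mycloud' and 'node-0' are hypothetical clouds.yaml and node names:

import openstack

cloud = openstack.connect(cloud='mycloud')  # 'mycloud' is a hypothetical config entry
machine = cloud.inspect_machine('node-0', timeout=3600)
print(machine['provision_state'])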
neuralmagic/sparseml
src/sparseml/keras/utils/logger.py
KerasLogger.log_scalar
python
def log_scalar( self, tag: str, value: float, step: Union[None, int] = None, **kwargs ): raise NotImplementedError()
:param tag: identifying tag to log the value with :param value: value to save :param step: global step for when the value was taken
https://github.com/neuralmagic/sparseml/blob/e2dcb66bad713542158dfe54cba113a0cc02ed39/src/sparseml/keras/utils/logger.py#L94-L102
import logging import os import time from abc import ABC, abstractmethod from enum import Enum, unique from logging import Logger from typing import Union import tensorflow from tensorflow.summary import create_file_writer __all__ = ["KerasLogger", "PythonLogger", "TensorBoardLogger", "LoggingMode"] @unique class LoggingMode(Enum): TRAIN = "train" TEST = "validation" PREDICT = "predict" class KerasLogger(ABC): def __init__(self, name: str, update_freq: Union[str, int] = "epoch", **kwargs): self._name = name self._update_freq = update_freq self._mode = LoggingMode.TRAIN @property def name(self) -> str: return self._name @property def update_freq(self): return self._update_freq @property def mode(self): return self._mode @mode.setter def mode(self, value: LoggingMode): if not isinstance(value, LoggingMode): raise ValueError("Expected LoggingMode for mode, got {}".format(value)) self._mode = value @abstractmethod
Apache License 2.0
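log_scalar() is the abstract hook concrete loggers fill in. A minimal sketch of a subclass, assuming log_scalar is the only abstract method in this excerpt (the real class may declare more, in which case this subclass would also need to implement them):

from sparseml.keras.utils.logger import KerasLogger

class StdoutLogger(KerasLogger):
    # Minimal concrete logger: print each scalar with its step and mode.
    def log_scalar(self, tag, value, step=None, **kwargs):
        prefix = '' if step is None else 'step {}: '.format(step)
        print('[{} | {}] {}{} = {}'.format(self.name, self.mode.value, prefix, tag, value))

logger = StdoutLogger(name='console')
logger.log_scalar('loss', 0.42, step=100)  # -> [console | train] step 100: loss = 0.42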
bd-j/sedpy
sedpy/attenuation.py
powerlaw
python
def powerlaw(wave, tau_v=1, alpha=1.0, **kwargs): return tau_v * (wave / 5500)**(-alpha)
Simple power-law attenuation, normalized to 5500\AA. :param wave: The wavelengths at which optical depth estimates are desired. :param tau_v: (default: 1) The optical depth at 5500\AA, used to normalize the attenuation curve. :returns tau: The optical depth at each wavelength.
https://github.com/bd-j/sedpy/blob/aa9f4d1ac753c5e346e84f99ec7ff4bb2a96f4cf/sedpy/attenuation.py#L19-L32
import numpy as np import warnings, sys __all__ = ["calzetti", "chevallard", "conroy", "noll", "powerlaw", "drude", "broken_powerlaw", "cardelli", "smc", "lmc"]
MIT License
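Because the curve is normalized at 5500 Angstroms, tau equals tau_v there exactly, rising toward the blue and falling toward the red for alpha > 0. A quick worked example of the function above (wavelengths chosen to make the ratios exact):

import numpy as np

def powerlaw(wave, tau_v=1, alpha=1.0, **kwargs):
    # tau(lambda) = tau_v * (lambda / 5500 A)**(-alpha)
    return tau_v * (wave / 5500)**(-alpha)

wave = np.array([2750.0, 5500.0, 11000.0])  # Angstroms
print(powerlaw(wave, tau_v=1.0, alpha=1.0))  # [2.  1.  0.5]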
crowdcomms/faker-e164
faker_e164/providers.py
E164Provider._e164
python
def _e164(self, region_code: str, is_valid=True, is_possible=True) -> PhoneNumber: assert not (is_valid and not is_possible), 'is_valid must be False if is_possible is False' e164_numerify_pattern = self._get_e164_numerify_pattern(region_code, is_possible=is_possible) phone_number = self.numerify(e164_numerify_pattern) while not isinstance(phone_number, PhoneNumber): try: phone_number = phonenumbers.parse(phone_number, region_code) except phonenumbers.phonenumberutil.NumberParseException: phone_number = self.numerify(e164_numerify_pattern) continue if is_valid and not phonenumbers.is_valid_number(phone_number): phone_number = self.numerify(e164_numerify_pattern) continue elif not is_valid and phonenumbers.is_valid_number(phone_number): phone_number = self.numerify(e164_numerify_pattern) continue if is_possible and not phonenumbers.is_possible_number(phone_number): phone_number = self.numerify(e164_numerify_pattern) continue elif not is_possible and phonenumbers.is_possible_number(phone_number): phone_number = self.numerify(e164_numerify_pattern) continue return phone_number
Generate an E.164 phone number.
https://github.com/crowdcomms/faker-e164/blob/d25e095356cae02ab3ca1baf3fe3281857d2f35b/faker_e164/providers.py#L70-L98
import os import logging import phonenumbers from phonenumbers import PhoneNumber from faker.providers import BaseProvider logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO")) logger = logging.getLogger(__name__) safe_numbers = { 'AU': [ '+61491570156', '+61491570157', '+61491570158', '+61491570159', '+61491570110', ], 'US': [ '+12025550191', '+12025550188', '+12025550187', '+12025550137', '+12025550105', '+12025550124', ], 'GB': [ '+441632960600', '+441632960541', '+441632960702', '+441632960979', '+441632960570', '+441632960864', ], 'CA': [ '+16135550110', '+16135550120', '+16135550109', '+16135550151', '+16135550136', '+16135550119', ] } class E164Provider(BaseProvider): _e164_numerify_pattern = '%######!!!!!!!!' def _get_e164_numerify_pattern(self, region_code: str, is_possible=True): if not is_possible: return '#!!!!!!' country_code = phonenumbers.country_code_for_region(region_code) return str(country_code)+self._e164_numerify_pattern[len(str(country_code)):]
Apache License 2.0
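A hedged usage sketch of the provider. The public e164()/safe_e164() wrappers around _e164() lie past the truncated excerpt above, so treat them as assumed API rather than confirmed signatures:

from faker import Faker
from faker_e164.providers import E164Provider

fake = Faker()
fake.add_provider(E164Provider)
# e164()/safe_e164() are assumed public wrappers around _e164(); they are
# not shown in the excerpt above.
print(fake.e164())
print(fake.safe_e164())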
napalm-automation/napalm-yang
napalm_yang/models/openconfig/interfaces/interface/ethernet/state/counters/__init__.py
counters._get_in_fragment_frames
python
def _get_in_fragment_frames(self): return self.__in_fragment_frames
Getter method for in_fragment_frames, mapped from YANG variable /interfaces/interface/ethernet/state/counters/in_fragment_frames (yang:counter64) YANG Description: Number of fragment frames received on the interface.
https://github.com/napalm-automation/napalm-yang/blob/9148e015b086ebe311c07deb92e168ea36fd7771/napalm_yang/models/openconfig/interfaces/interface/ethernet/state/counters/__init__.py#L548-L554
from operator import attrgetter from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType from pyangbind.lib.yangtypes import RestrictedClassType from pyangbind.lib.yangtypes import TypedListType from pyangbind.lib.yangtypes import YANGBool from pyangbind.lib.yangtypes import YANGListType from pyangbind.lib.yangtypes import YANGDynClass from pyangbind.lib.yangtypes import ReferenceType from pyangbind.lib.base import PybindBase from collections import OrderedDict from decimal import Decimal from bitarray import bitarray import six if six.PY3: import builtins as __builtin__ long = int elif six.PY2: import __builtin__ class counters(PybindBase): __slots__ = ( "_path_helper", "_extmethods", "__in_mac_control_frames", "__in_mac_pause_frames", "__in_oversize_frames", "__in_jabber_frames", "__in_fragment_frames", "__in_8021q_frames", "__in_crc_errors", "__out_mac_control_frames", "__out_mac_pause_frames", "__out_8021q_frames", ) _yang_name = "counters" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__in_mac_control_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-control-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_mac_pause_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-pause-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_oversize_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-oversize-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_jabber_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-jabber-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_fragment_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-fragment-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_8021q_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-8021q-frames", parent=self, 
path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__in_crc_errors = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-crc-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__out_mac_control_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="out-mac-control-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__out_mac_pause_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="out-mac-pause-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) self.__out_8021q_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="out-8021q-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return ["interfaces", "interface", "ethernet", "state", "counters"] def _get_in_mac_control_frames(self): return self.__in_mac_control_frames def _set_in_mac_control_frames(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-control-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) except (TypeError, ValueError): raise ValueError( { "error-string": """in_mac_control_frames must be of a type compatible with yang:counter64""", "defined-type": "yang:counter64", "generated-type": 
"""YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mac-control-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ethernet', defining_module='openconfig-if-ethernet', yang_type='yang:counter64', is_config=False)""", } ) self.__in_mac_control_frames = t if hasattr(self, "_set"): self._set() def _unset_in_mac_control_frames(self): self.__in_mac_control_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-control-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) def _get_in_mac_pause_frames(self): return self.__in_mac_pause_frames def _set_in_mac_pause_frames(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-pause-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) except (TypeError, ValueError): raise ValueError( { "error-string": """in_mac_pause_frames must be of a type compatible with yang:counter64""", "defined-type": "yang:counter64", "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mac-pause-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ethernet', defining_module='openconfig-if-ethernet', yang_type='yang:counter64', is_config=False)""", } ) self.__in_mac_pause_frames = t if hasattr(self, "_set"): self._set() def _unset_in_mac_pause_frames(self): self.__in_mac_pause_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-mac-pause-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) def _get_in_oversize_frames(self): return self.__in_oversize_frames def _set_in_oversize_frames(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-oversize-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) except (TypeError, ValueError): raise ValueError( { "error-string": """in_oversize_frames must be of a type compatible with yang:counter64""", "defined-type": "yang:counter64", "generated-type": 
"""YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-oversize-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ethernet', defining_module='openconfig-if-ethernet', yang_type='yang:counter64', is_config=False)""", } ) self.__in_oversize_frames = t if hasattr(self, "_set"): self._set() def _unset_in_oversize_frames(self): self.__in_oversize_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-oversize-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) def _get_in_jabber_frames(self): return self.__in_jabber_frames def _set_in_jabber_frames(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-jabber-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, ) except (TypeError, ValueError): raise ValueError( { "error-string": """in_jabber_frames must be of a type compatible with yang:counter64""", "defined-type": "yang:counter64", "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-jabber-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ethernet', defining_module='openconfig-if-ethernet', yang_type='yang:counter64', is_config=False)""", } ) self.__in_jabber_frames = t if hasattr(self, "_set"): self._set() def _unset_in_jabber_frames(self): self.__in_jabber_frames = YANGDynClass( base=RestrictedClassType( base_type=long, restriction_dict={"range": ["0..18446744073709551615"]}, int_size=64, ), is_leaf=True, yang_name="in-jabber-frames", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace="http://openconfig.net/yang/interfaces/ethernet", defining_module="openconfig-if-ethernet", yang_type="yang:counter64", is_config=False, )
Apache License 2.0
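pyangbind-generated classes like the one above expose each YANG leaf through paired _get_*/_set_* accessors, with the setter coercing values into the declared type range. A hedged sketch, assuming napalm_yang is installed and the module path matches the record's function_path:

from napalm_yang.models.openconfig.interfaces.interface.ethernet.state.counters import (
    counters,
)

c = counters()
c._set_in_fragment_frames(42)        # coerced into the yang:counter64 range
print(c._get_in_fragment_frames())   # 42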
philchristensen/antioch
antioch/core/exchange.py
ObjectExchange.load
python
def load(self, obj_type, obj_id): obj_key = '%s-%s' % (obj_type, obj_id) if(obj_key in self.cache): return self.cache[obj_key] items = self.connection.runQuery(sql.build_select(obj_type, id=obj_id)) if not(items): raise errors.NoSuchObjectError("%s #%s" % (obj_type, obj_id)) def fail(record): raise RuntimeError("Don't know how to make an object of type '%s'" % obj_type) maker = getattr(self, '_mk%s' % obj_type, fail) obj = maker(items[0]) if not(obj.get_id()): obj.set_id(obj_id) self.cache[obj_key] = obj return obj
Load a specific object from the database.
https://github.com/philchristensen/antioch/blob/7fe27c961ae81b7655c6428038c85eefad27e980/antioch/core/exchange.py#L406-L427
import crypt, string, random, time, logging, collections from antioch import celery_config from antioch.core import interface, errors, models from antioch.util import sql, ason, hash_password from django.conf import settings from django.db import transaction from pygments import highlight from pygments.lexers.python import Python3TracebackLexer from pygments.formatters import HtmlFormatter group_definitions = dict( owners = lambda x,a,s: a.owns(s), wizards = lambda x,a,s: x.is_wizard(a.get_id()), everyone = lambda x,a,s: True, ) rollback_after_fatal_errors = True log = logging.getLogger(__name__) def extract_id(literal): if(isinstance(literal, str) and literal.startswith('#')): end = literal.find("(") if(end == -1): end = literal.find( " ") if(end == -1): end = len(literal) return int(literal[1:end]) if(isinstance(literal, int)): return literal return None class ConnectionWrapper(object): def __init__(self, connection): self.connection = connection def isType(self, type): return self.connection.vendor == type def getLastInsertId(self, obj_type): if(self.isType('postgresql')): result = self.runQuery("SELECT currval(pg_get_serial_sequence('%s','id'));" % obj_type) return result[0]['currval'] elif(self.isType('sqlite')): result = self.runQuery("SELECT last_insert_rowid();") return result[0]['last_insert_rowid()'] elif(self.isType('mysql')): result = self.runQuery("SELECT LAST_INSERT_ID();") return result[0]['LAST_INSERT_ID()'] else: raise UnsupportedError("Unsupported database type.") def runOperation(self, query, *args, **kwargs): with self.connection.cursor() as cursor: cursor.execute(query, *args) def runQuery(self, query, *args, **kwargs): with self.connection.cursor() as cursor: cursor.execute(query, *args) columns = [col[0] for col in cursor.description] return [ dict(list(zip(columns, row))) for row in cursor.fetchall() ] class ObjectExchange(object): permission_list = None def __init__(self, connection=None, wrapper=None, queue=False, ctx=None): self.cache = collections.OrderedDict() self.connection = wrapper or ConnectionWrapper(connection) if(self.connection is None): raise RuntimeError("No connection provided.") self.use_queue = queue self.queue = [] if queue else None self.default_grants_active = False self.load_permissions() if(queue and not ctx): raise RuntimeError("Exchanges can't use queues without a context.") self.ctx = ctx if(isinstance(ctx, int)): self.ctx_id = ctx self.ctx = self.get_object(ctx) elif(ctx): self.ctx_id = ctx.id def __enter__(self): self.begin() return self def __exit__(self, etype, e, trace): try: show_all_traces = self.ctx and self.ctx.get('show_all_traces', False).value except: show_all_traces = False try: if(etype is errors.TestError): self.commit() return False elif(etype is EnvironmentError): self.rollback() return False elif(isinstance(e, errors.UserError) and not show_all_traces): self.commit() err = str(e) log.info('Sending normal exception to user: %s' % err) if(self.queue is not None): self.send_message(self.ctx.get_id(), dict( command = 'write', text = highlight(err, Python3TracebackLexer(), HtmlFormatter()), is_error = True, escape_html = False )) return True elif(etype is not None): if(rollback_after_fatal_errors): self.rollback() else: self.commit() import traceback, io io = io.StringIO() traceback.print_exception(etype, e, trace, None, io) log.error('Sending fatal exception to user: %s' % str(e)) if(self.queue is not None): self.send_message(self.ctx.get_id(), dict( command = 'write', text = highlight(io.getvalue(), Python3TracebackLexer(), 
HtmlFormatter()), is_error = True, escape_html = False )) return True else: self.commit() finally: self.flush() def begin(self): self.sid = transaction.savepoint() def commit(self): transaction.savepoint_commit(self.sid) def rollback(self): transaction.savepoint_rollback(self.sid) def send_message(self, user_id, msg): if not(self.use_queue): log.warning("attempted to send a message to user #%s on an unqueued exchange: %s" % (user_id, msg)) return self.queue.append((self.get_object(user_id), msg)) def flush(self): self.cache.clear() self.cache._order = [] if(self.queue): with celery_config.app.default_connection() as conn: from kombu import Exchange, Queue unbound_exchange = Exchange('antioch', type = 'direct', auto_delete = False, durable = True, ) channel = conn.channel() exchange = unbound_exchange(channel) exchange.declare() for user, msg in self.queue: if not(user.is_connected_player()): log.debug("ignoring message for unconnected player %s" % user) continue queue_id = '-'.join([settings.USER_QUEUE, str(user.id)]) log.debug("flushing message to #%s: %s" % (queue_id, msg)) exchange.publish(exchange.Message(ason.dumps(msg), content_type="application/json"), routing_key=queue_id) def get_context(self): return self.ctx def load_permissions(self): if not(ObjectExchange.permission_list): results = self.connection.runQuery(sql.build_select('permission')) ObjectExchange.permission_list = dict([(x['name'], x['id']) for x in results]) def activate_default_grants(self): if(self.default_grants_active): return system = self.instantiate('object', default_permissions=False, id=1) result = self.connection.runQuery(sql.interp( """SELECT v.* FROM verb_name vn INNER JOIN verb v ON v.id = vn.verb_id WHERE vn.name = 'set_default_permissions' AND v.origin_id = %s """, system.get_id())) self.instantiate('verb', default_permissions=False, *result) self.default_grants_active = True def instantiate(self, obj_type, record=None, *additions, **fields): records = [] if(record): records.append(record) if(additions): records.extend(additions) default_permissions = fields.pop('default_permissions', True) if(fields): records.append(fields) results = [] for record in records: object_id = record.get('id', None) object_key = '%s-%s' % (obj_type, object_id) if(object_key in self.cache): obj = self.cache[object_key] else: if(object_id is None): def fail(record): raise RuntimeError("Don't know how to make an object of type '%s'" % obj_type) if(self.ctx and 'owner_id' not in record): record['owner_id'] = ctx.get_id() maker = getattr(self, '_mk%s' % obj_type, fail) obj = maker(record) self.save(obj) if(default_permissions): try: self.activate_default_grants() system = self.get_object(1) set_default_permissions = system.set_default_permissions except (errors.NoSuchObjectError, errors.NoSuchVerbError) as e: set_default_permissions = lambda *a: None set_default_permissions(obj) else: obj = self.load(obj_type, object_id) results.append(obj) if(len(records) == 1): return results[0] return results def _mkobject(self, record): obj = interface.Object(self) obj._name = record.get('name', '') obj._unique_name = record.get('unique_name', False) obj._owner_id = record.get('owner_id', None) obj._location_id = record.get('location_id', None) return obj def _mkverb(self, record): origin = self.instantiate('object', id=record['origin_id']) v = interface.Verb(origin) v._code = record.get('code', '') v._filename = record.get('filename', None) v._ref = record.get('ref', None) v._owner_id = record.get('owner_id', None) v._ability = 
record.get('ability', False) v._method = record.get('method', False) v._origin_id = record['origin_id'] if('repo' in record): repo = models.Repository.objects.get(slug=record['repo']) v._repo_id = repo.id if('name' in record): self.save(v) v.add_name(record['name']) return v def _mkproperty(self, record): origin = self.instantiate('object', id=record['origin_id']) p = interface.Property(origin) p._name = record['name'] p._origin_id = record['origin_id'] p._type = record.get('type', 'string') p._owner_id = record.get('owner_id', None) val = record.get('value', '') p._value = ason.loads(val, exchange=self) if val else val return p def _mkpermission(self, record): origin = None for origin_type in ('object', 'verb', 'property'): origin_id = record.get('%s_id' % origin_type, None) if(origin_id): origin = self.instantiate(origin_type, id=origin_id) break assert origin is not None, "Can't determine an origin for permission record: %s" % record perm = interface.Permission(origin) perm.object_id = record.get('object_id', None) perm.verb_id = record.get('verb_id', None) perm.property_id = record.get('property_id', None) perm.rule = record.get('rule', 'allow') perm.permission_id = record.get('permission_id', None) perm.type = record.get('type', 'group') perm.subject_id = record.get('subject_id', None) perm.group = record.get('group', 'everyone') return perm
MIT License
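ObjectExchange.load() follows a cache-then-query-then-memoize shape: check the keyed cache, hit the database on a miss, raise if nothing is found, and cache the result. A generic sketch of that pattern with illustrative names, not antioch's API:

class Exchange:
    def __init__(self, query):
        self.query = query   # callable: (obj_type, obj_id) -> record or None
        self.cache = {}

    def load(self, obj_type, obj_id):
        key = '%s-%s' % (obj_type, obj_id)
        if key in self.cache:
            return self.cache[key]
        record = self.query(obj_type, obj_id)
        if record is None:
            raise LookupError('%s #%s' % (obj_type, obj_id))
        self.cache[key] = record
        return record

ex = Exchange(lambda t, i: {'id': i, 'type': t})
assert ex.load('object', 7) is ex.load('object', 7)  # second call hits the cache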
foglamp/foglamp
python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py
RestoreProcess.backup_status_update
python
def backup_status_update(self, backup_id, status): _logger.debug("{func} - backup id |{id}| ".format(func="backup_status_update", id=backup_id)) sql_cmd = """ UPDATE backups SET status={status} WHERE id='{id}'; """.format(status=status, id=backup_id, ) self.storage_update(sql_cmd)
Updates the status of the backup in the Storage layer. Args: backup_id: int - status: BackupStatus - Returns: Raises:
https://github.com/foglamp/foglamp/blob/918dff88b440e6ad580efdaa5f0fbdf4143a73d4/python/foglamp/plugins/storage/sqlite/backup_restore/restore_sqlite.py#L606-L626
import time import sys import os import signal import sqlite3 from foglamp.common.parser import Parser from foglamp.common.process import FoglampProcess from foglamp.common import logger import foglamp.plugins.storage.common.lib as lib import foglamp.plugins.storage.common.exceptions as exceptions __author__ = "Stefano Simonelli" __copyright__ = "Copyright (c) 2018 OSIsoft, LLC" __license__ = "Apache 2.0" __version__ = "${VERSION}" _MODULE_NAME = "foglamp_restore_sqlite_module" _MESSAGES_LIST = { "i000001": "Execution started.", "i000002": "Execution completed.", "e000001": "cannot initialize the logger - error details |{0}|", "e000002": "an error occurred during the restore operation - error details |{0}|", "e000003": "invalid command line arguments - error details |{0}|", "e000004": "cannot complete the initialization - error details |{0}|", } _logger = None _LOG_LEVEL_DEBUG = 10 _LOG_LEVEL_INFO = 20 _LOGGER_LEVEL = _LOG_LEVEL_INFO _LOGGER_DESTINATION = logger.SYSLOG class RestoreProcess(FoglampProcess): _MODULE_NAME = "foglamp_restore_sqlite_process" _FOGLAMP_ENVIRONMENT_DEV = "dev" _FOGLAMP_ENVIRONMENT_DEPLOY = "deploy" _FOGLAMP_CMD_PATH_DEV = "scripts/foglamp" _FOGLAMP_CMD_PATH_DEPLOY = "bin/foglamp" _foglamp_environment = _FOGLAMP_ENVIRONMENT_DEV _foglamp_cmd = _FOGLAMP_CMD_PATH_DEV + " {0}" _MESSAGES_LIST = { "i000001": "Execution started.", "i000002": "Execution completed.", "e000000": "general error", "e000001": "Invalid file name", "e000002": "cannot retrieve the configuration from the manager, trying retrieving from file " "- error details |{0}|", "e000003": "cannot retrieve the configuration from file - error details |{0}|", "e000004": "cannot restore the backup, file doesn't exists - file name |{0}|", "e000006": "cannot start FogLAMP after the restore - error details |{0}|", "e000007": "cannot restore the backup, restarting FogLAMP - error details |{0}|", "e000008": "cannot identify FogLAMP status, the maximum number of retries has been reached " "- error details |{0}|", "e000009": "cannot restore the backup, either a backup or a restore is already running - pid |{0}|", "e000010": "cannot retrieve the FogLamp status - error details |{0}|", "e000011": "cannot restore the backup, the selected backup doesn't exists - backup id |{0}|", "e000012": "cannot restore the backup, the selected backup doesn't exists - backup file name |{0}|", "e000013": "cannot proceed the execution, " "It is not possible to determine the environment in which the code is running" " neither Deployment nor Development", } _logger = None _backup_id = None _file_name = None class FogLampStatus(object): NOT_DEFINED = 0 STOPPED = 1 RUNNING = 2 @staticmethod def _signal_handler(_signo, _stack_frame): short_stack_frame = str(_stack_frame)[:100] _logger.debug("{func} - signal |{signo}| - info |{ssf}| ".format( func="_signal_handler", signo=_signo, ssf=short_stack_frame)) def __init__(self): super().__init__() if not self._logger: self._logger = logger.setup(self._MODULE_NAME, destination=_LOGGER_DESTINATION, level=_LOGGER_LEVEL) try: self._backup_id = Parser.get('--backup-id') self._file_name = Parser.get('--file') except Exception as _ex: _message = _MESSAGES_LIST["e000003"].format(_ex) _logger.exception(_message) raise exceptions.ArgumentParserError(_message) self._restore_lib = lib.BackupRestoreLib(self._storage_async, self._logger) self._job = lib.Job() self._force_restore = True lib._logger = self._logger lib._storage = self._storage_async def _identifies_backup_to_restore(self): backup_id = None file_name = 
None if self._backup_id is None and self._file_name is None: backup_id, file_name = self._identify_last_backup() elif self._backup_id is not None: try: backup_info = self._restore_lib.sl_get_backup_details(self._backup_id) backup_id = backup_info["id"] file_name = backup_info["file_name"] except exceptions.DoesNotExist: _message = self._MESSAGES_LIST["e000011"].format(self._backup_id) _logger.error(_message) raise exceptions.DoesNotExist(_message) elif self._file_name is not None: try: backup_info = self._restore_lib.sl_get_backup_details_from_file_name(self._file_name) backup_id = backup_info["id"] file_name = backup_info["file_name"] except exceptions.DoesNotExist: if self._force_restore: file_name = self._file_name else: _message = self._MESSAGES_LIST["e000012"].format(self._file_name) _logger.error(_message) raise exceptions.DoesNotExist(_message) if not os.path.exists(file_name): _message = self._MESSAGES_LIST["e000004"].format(file_name) _logger.error(_message) raise FileNotFoundError(_message) return backup_id, file_name def storage_retrieve(self, sql_cmd): _logger.debug("{func} - sql cmd |{cmd}| ".format(func="storage_retrieve", cmd=sql_cmd)) db_connection_string = "{path}/{db}".format( path=self._restore_lib.dir_foglamp_data, db=self._restore_lib.config['database-filename'] ) comm = sqlite3.connect(db_connection_string) cur = comm.cursor() cur.execute(sql_cmd) raw_data = cur.fetchall() cur.close() return raw_data def storage_update(self, sql_cmd): _logger.debug("{func} - sql cmd |{cmd}| ".format( func="storage_update", cmd=sql_cmd)) db_connection_string = "{path}/{db}".format( path=self._restore_lib.dir_foglamp_data, db=self._restore_lib.config['database-filename'] ) comm = sqlite3.connect(db_connection_string) cur = comm.cursor() cur.execute(sql_cmd) comm.commit() comm.close() def _identify_last_backup(self): self._logger.debug("{func} ".format(func="_identify_last_backup")) sql_cmd = """ SELECT id, file_name FROM backups WHERE id= (SELECT MAX(id) FROM backups WHERE status={0} or status={1}); """.format(lib.BackupStatus.COMPLETED, lib.BackupStatus.RESTORED) data = self.storage_retrieve(sql_cmd) if len(data) == 0: raise exceptions.NoBackupAvailableError elif len(data) == 1: _backup_id = data[0][0] _file_name = data[0][1] else: raise exceptions.FileNameError return _backup_id, _file_name def get_backup_details_from_file_name(self, _file_name): self._logger.debug("{func} ".format(func="get_backup_details_from_file_name")) sql_cmd = """ SELECT * FROM backups WHERE file_name='{file}' """.format(file=_file_name) data = self.storage_retrieve(sql_cmd) if len(data) == 0: raise exceptions.NoBackupAvailableError elif len(data) == 1: backup_information = data[0] else: raise exceptions.FileNameError return backup_information def _foglamp_stop(self): self._logger.debug("{func}".format(func="_foglamp_stop")) cmd = "{path}/{cmd}".format( path=self._restore_lib.dir_foglamp_root, cmd=self._foglamp_cmd.format("stop") ) status, output = lib.exec_wait_retry(cmd, True, max_retry=self._restore_lib.config['max_retry'], timeout=self._restore_lib.config['timeout']) self._logger.debug("{func} - status |{status}| - cmd |{cmd}| - output |{output}| ".format( func="_foglamp_stop", status=status, cmd=cmd, output=output)) if status == 0: if self._foglamp_status() != self.FogLampStatus.STOPPED: raise exceptions.FogLAMPStopError(output) else: raise exceptions.FogLAMPStopError(output) def _decode_foglamp_status(self, text): text_upper = text.upper() if 'FOGLAMP UPTIME' in text_upper: status = 
self.FogLampStatus.RUNNING elif 'FOGLAMP NOT RUNNING.' in text_upper: status = self.FogLampStatus.STOPPED else: status = self.FogLampStatus.NOT_DEFINED return status def _check_wait_foglamp_start(self): self._logger.debug("{func}".format(func="_check_wait_foglamp_start")) status = self.FogLampStatus.NOT_DEFINED n_retry = 0 max_reties = self._restore_lib.config['restart-max-retries'] sleep_time = self._restore_lib.config['restart-sleep'] while n_retry < max_reties: self._logger.debug("{func}".format(func="_check_wait_foglamp_start - checks FogLamp status")) status = self._foglamp_status() if status == self.FogLampStatus.RUNNING: break self._logger.debug("{func}".format(func="_check_wait_foglamp_start - sleep {0}".format(sleep_time))) time.sleep(sleep_time) n_retry += 1 return status def _foglamp_status(self): status = self.FogLampStatus.NOT_DEFINED num_exec = 0 max_exec = 10 same_status = 0 same_status_ok = 3 sleep_time = 1 while (same_status < same_status_ok) and (num_exec <= max_exec): try: cmd = "{path}/{cmd}".format( path=self._restore_lib.dir_foglamp_root, cmd=self._foglamp_cmd.format("status") ) cmd_status, output = lib.exec_wait(cmd, True, _timeout=self._restore_lib.config['timeout']) self._logger.debug("{func} - output |{output}| \r - status |{status}| ".format( func="_foglamp_status", output=output, status=cmd_status)) num_exec += 1 new_status = self._decode_foglamp_status(output) except Exception as _ex: _message = self._MESSAGES_LIST["e000010"].format(_ex) _logger.error(_message) raise else: if new_status == status: same_status += 1 time.sleep(sleep_time) else: status = new_status same_status = 0 if num_exec >= max_exec: _message = self._MESSAGES_LIST["e000008"] self._logger.error(_message) status = self.FogLampStatus.NOT_DEFINED return status def _run_restore_command(self, backup_file): self._logger.debug("{func} - Restore starts - file name |{file}|".format( func="_run_restore_command", file=backup_file)) cmd = "{cmd} {file} {path}/{db} ".format( cmd=self._restore_lib.SQLITE_RESTORE, file=backup_file, path=self._restore_lib.dir_foglamp_data, db=self._restore_lib.config['database-filename'] ) status, output = lib.exec_wait_retry(cmd, True, timeout=self._restore_lib.config['timeout']) self._logger.debug("{func} - Restore ends - status |{status}| - cmd |{cmd}| - output |{output}|".format( func="_run_restore_command", status=status, cmd=cmd, output=output)) if status != 0: raise exceptions.RestoreFailed cmd = "rm {path}/foglamp.db-shm ".format(path=self._restore_lib.dir_foglamp_data) status, output = lib.exec_wait_retry(cmd, True, timeout=self._restore_lib.config['timeout']) cmd = "rm {path}/foglamp.db-wal ".format(path=self._restore_lib.dir_foglamp_data) status, output = lib.exec_wait_retry(cmd, True, timeout=self._restore_lib.config['timeout']) def _foglamp_start(self): cmd = "{path}/{cmd}".format( path=self._restore_lib.dir_foglamp_root, cmd=self._foglamp_cmd.format("start") ) exit_code, output = lib.exec_wait_retry( cmd, True, max_retry=self._restore_lib.config['max_retry'], timeout=self._restore_lib.config['timeout']) self._logger.debug("{func} - exit_code |{exit_code}| - cmd |{cmd}| - output |{output}|".format( func="_foglamp_start", exit_code=exit_code, cmd=cmd, output=output)) if exit_code == 0: if self._check_wait_foglamp_start() != self.FogLampStatus.RUNNING: raise exceptions.FogLAMPStartError else: raise exceptions.FogLAMPStartError
Apache License 2.0
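The restore path above resolves the most recent usable backup with one SQL query (see _identify_last_backup). A minimal standalone sketch of that lookup, using an in-memory SQLite database and hypothetical status codes in place of lib.BackupStatus.COMPLETED and lib.BackupStatus.RESTORED:

import sqlite3

# Hypothetical status codes standing in for lib.BackupStatus.
COMPLETED, RESTORED = 2, 4

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE backups (id INTEGER, file_name TEXT, status INTEGER)")
conn.executemany(
    "INSERT INTO backups VALUES (?, ?, ?)",
    [(1, "/data/backup_1.db", COMPLETED),
     (2, "/data/backup_2.db", 9),           # e.g. a failed backup, ignored
     (3, "/data/backup_3.db", RESTORED)])

# Same shape as _identify_last_backup: the highest id among backups
# whose status marks them as usable.
cur = conn.execute(
    "SELECT id, file_name FROM backups "
    "WHERE id = (SELECT MAX(id) FROM backups WHERE status = ? OR status = ?)",
    (COMPLETED, RESTORED))
print(cur.fetchone())  # (3, '/data/backup_3.db')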
centerforopenscience/waterbutler
waterbutler/core/metadata.py
BaseFileMetadata.serialized
python
def serialized(self) -> dict:
    return dict(super().serialized(), **{
        'contentType': self.content_type,
        'modified': self.modified,
        'modified_utc': self.modified_utc,
        'created_utc': self.created_utc,
        'size': self.size,
        'sizeInt': self.size_as_int,
    })
Returns a dict representing the file's metadata suitable to be serialized into JSON. :rtype: dict
https://github.com/centerforopenscience/waterbutler/blob/47ad15e8fbe64a5b0afc9d5ffdeac3b206ef7225/waterbutler/core/metadata.py#L210-L222
import abc import typing import hashlib import furl from waterbutler.core import utils from waterbutler.server import settings class BaseMetadata(metaclass=abc.ABCMeta): def __init__(self, raw: dict) -> None: self.raw = raw def serialized(self) -> dict: return { 'extra': self.extra, 'kind': self.kind, 'name': self.name, 'path': self.path, 'provider': self.provider, 'materialized': self.materialized_path, 'etag': hashlib.sha256('{}::{}'.format(self.provider, self.etag).encode('utf-8')).hexdigest(), } def json_api_serialized(self, resource: str) -> dict: json_api = { 'id': self.provider + self.path, 'type': 'files', 'attributes': self.serialized(), 'links': self._json_api_links(resource), } json_api['attributes']['resource'] = resource return json_api def _json_api_links(self, resource: str) -> dict: entity_url = self._entity_url(resource) actions = { 'move': entity_url, 'upload': entity_url + '?kind=file', 'delete': entity_url, } return actions def _entity_url(self, resource: str) -> str: url = furl.furl(settings.DOMAIN) segments = ['v1', 'resources', resource, 'providers', self.provider] segments += self.path.split('/')[1:] url.path.segments.extend(segments) return url.url def build_path(self, path) -> str: if not path.startswith('/'): path = '/' + path if self.kind == 'folder' and not path.endswith('/'): path += '/' return path @property def is_folder(self) -> bool: return self.kind == 'folder' @property def is_file(self) -> bool: return self.kind == 'file' @property @abc.abstractmethod def provider(self) -> str: raise NotImplementedError @property @abc.abstractmethod def kind(self) -> str: raise NotImplementedError @property @abc.abstractmethod def name(self) -> str: raise NotImplementedError @property @abc.abstractmethod def path(self) -> str: raise NotImplementedError @property def etag(self) -> str: raise NotImplementedError @property def materialized_path(self) -> str: return self.path @property def extra(self) -> dict: return {} def __eq__(self, other: 'BaseMetadata') -> bool: return isinstance(other, self.__class__) and self.serialized() == other.serialized() class BaseFileMetadata(BaseMetadata):
Apache License 2.0
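BaseFileMetadata.serialized layers file-specific keys on top of the base metadata via dict(super().serialized(), **{...}). A plain-Python sketch of that merge pattern, without importing waterbutler (class and field names here are illustrative):

class BaseMeta:
    def serialized(self) -> dict:
        return {'kind': 'file', 'name': self.name}

class FileMeta(BaseMeta):
    def __init__(self, name, size, content_type):
        self.name = name
        self.size = size
        self.content_type = content_type

    def serialized(self) -> dict:
        # Base dict first, file-specific keys merged on top, mirroring
        # dict(super().serialized(), **{...}) in the record above.
        return dict(super().serialized(), **{
            'contentType': self.content_type,
            'size': self.size,
        })

print(FileMeta('report.csv', 2048, 'text/csv').serialized())
# {'kind': 'file', 'name': 'report.csv', 'contentType': 'text/csv', 'size': 2048}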
lockout/bbuzz
bbuzz/mutate/binary.py
bitflip
python
def bitflip(case, caselen):
    mask = "1" * caselen
    flip = str(bin(int(case, 2) ^ int(mask, 2)))[2:]
    return flip.zfill(caselen)
Flip 1 to 0 and 0 to 1
https://github.com/lockout/bbuzz/blob/dc33854b3b746fa132be3e1403509c2e9daf3803/bbuzz/mutate/binary.py#L35-L39
import bbuzz.common def binary(case, caselen): mutations = [] mutations.append(case) if bbuzz.common.zerocase(case): mutations = mutations + bitshift_right(case, caselen) mutations = mutations + knownvalues(caselen) elif bbuzz.common.onecase(case): mutations = mutations + bitshift_left(case, caselen) mutations = mutations + knownvalues(caselen) else: mutations.append(bitflip(case, caselen)) mutations = mutations + bitshift_left(case, caselen) mutations = mutations + bitshift_right(case, caselen) mutations = mutations + knownvalues(caselen) endianess = endian(case, caselen) if endianess: mutations.append(endianess) return mutations
MIT License
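bitflip complements a fixed-width bit string by XOR-ing it with an all-ones mask of the same length; zfill restores any leading zeros that the int-to-bin round trip drops. A quick demonstration, with the function repeated verbatim so the snippet runs on its own:

def bitflip(case, caselen):
    mask = "1" * caselen
    flip = str(bin(int(case, 2) ^ int(mask, 2)))[2:]
    return flip.zfill(caselen)

print(bitflip("1010", 4))  # 0101
print(bitflip("1110", 4))  # 0001 (zfill restores the leading zeros)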
olitheolix/aiokubernetes
aiokubernetes/models/v1beta1_token_review.py
V1beta1TokenReview.spec
python
def spec(self):
    return self._spec
Gets the spec of this V1beta1TokenReview. # noqa: E501 Spec holds information about the request being evaluated # noqa: E501 :return: The spec of this V1beta1TokenReview. # noqa: E501 :rtype: V1beta1TokenReviewSpec
https://github.com/olitheolix/aiokubernetes/blob/266718b210dff2a9b2212183261ea89adf89115e/aiokubernetes/models/v1beta1_token_review.py#L140-L148
import pprint import re from aiokubernetes.models.v1_object_meta import V1ObjectMeta from aiokubernetes.models.v1beta1_token_review_spec import V1beta1TokenReviewSpec from aiokubernetes.models.v1beta1_token_review_status import V1beta1TokenReviewStatus class V1beta1TokenReview(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'api_version': 'str', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'spec': 'V1beta1TokenReviewSpec', 'status': 'V1beta1TokenReviewStatus' } attribute_map = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status' } def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None): self._api_version = None self._kind = None self._metadata = None self._spec = None self._status = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata self.spec = spec if status is not None: self.status = status @property def api_version(self): return self._api_version @api_version.setter def api_version(self, api_version): self._api_version = api_version @property def kind(self): return self._kind @kind.setter def kind(self, kind): self._kind = kind @property def metadata(self): return self._metadata @metadata.setter def metadata(self, metadata): self._metadata = metadata @property
Apache License 2.0
nasa-ammos/ait-core
ait/core/server/plugins/openmct.py
AITOpenMctPlugin._get_tlm_packet_def
python
def _get_tlm_packet_def(self, uid):
    pkt_defn = self._uidToPktDefMap[uid]
    return pkt_defn
Return packet definition based on packet unique id
https://github.com/nasa-ammos/ait-core/blob/40717498c20358303521cbd94e9bb2528b7a1f09/ait/core/server/plugins/openmct.py#L243-L246
import pickle import datetime import json import random import struct import sys import time import urllib import webbrowser import gevent import gevent.monkey; gevent.monkey.patch_all() import geventwebsocket import bottle import copy import importlib import datetime import ait.core from ait.core import api, dtype, log, tlm, db from ait.core.server.plugin import Plugin class AITOpenMctPlugin(Plugin): DEFAULT_PORT = 8082 DEFAULT_DEBUG = False DEFAULT_DEBUG_MAX_LEN = 512 DEFAULT_DATABASE_ENABLED = False def __init__(self, inputs, outputs, zmq_args=None, datastore='ait.core.db.InfluxDBBackend', **kwargs): super(AITOpenMctPlugin, self).__init__(inputs, outputs, zmq_args, **kwargs) log.info('Running AIT OpenMCT Plugin') self._datastore = datastore self._debugEnabled = AITOpenMctPlugin.DEFAULT_DEBUG self._debugMimicRepeat = False self._servicePort = AITOpenMctPlugin.DEFAULT_PORT self._databaseEnabled = AITOpenMctPlugin.DEFAULT_DATABASE_ENABLED self._checkConfig() self._app = bottle.Bottle() self._servers = [] self._tlmQueue = api.GeventDeque(maxlen=100) self._logQueue = api.GeventDeque(maxlen=100) self._aitTlmDict = tlm.getDefaultDict() self._mctTlmDict = self.format_tlmdict_for_openmct(self._aitTlmDict) self._uidToPktDefMap = self.create_uid_pkt_map(self._aitTlmDict) self._database = self.load_database(**kwargs) gevent.spawn(self.init) def _checkConfig(self): if hasattr(self, "debug_enabled"): if isinstance(self.debug_enabled, bool): self._debugEnabled = self.debug_enabled elif isinstance(self.debug_enabled, str): self._debugEnabled = self.debug_enabled in ['true', '1', 'TRUE', 'enabled', 'ENABLED'] self.dbg_message("Debug flag = " + str(self._debugEnabled)) if hasattr(self, "service_port"): try: self._servicePort = int(self.service_port) except ValueError: self._servicePort = AITOpenMctPlugin.DEFAULT_PORT self.dbg_message("Service Port = " + str(self._servicePort)) if hasattr(self, "database_enabled"): if isinstance(self.database_enabled, bool): self._databaseEnabled = self.database_enabled elif isinstance(self.database_enabled, str): self._databaseEnabled = self.database_enabled in ['true', '1', 'TRUE', 'enabled', 'ENABLED'] self.dbg_message("Database flag = " + str(self._databaseEnabled)) def load_database(self, **kwargs): dbconn = None if self._databaseEnabled: db_cfg = ait.config.get('database', kwargs.get('database', None)) if not db_cfg: log.error('[OpenMCT] Plugin configured to use database but no database configuration was found') log.warn('Disabling historical queries.') else: try: db_mod, db_cls = self._datastore.rsplit('.', 1) dbconn = getattr(importlib.import_module(db_mod), db_cls)() dbconn.connect(**kwargs) except Exception as ex: log.error('Error connecting to database: {}'.format(ex)) log.warn('Disabling historical queries.') else: msg = ( '[OpenMCT Database Configuration]' 'This plugin is not configured with a database enabled. ' 'Historical telemetry queries ' 'will be disabled from this server endpoint.' 
) log.warn(msg) return dbconn def process(self, input_data, topic=None): processed = False if hasattr(self, 'telem_stream_names'): if topic in self.telem_stream_names: self._process_telem_msg(input_data) processed = True if not processed: if 'telem_stream' in topic: self._process_telem_msg(input_data) processed = True if not processed: raise ValueError('Topic of received message not recognized as telem stream.') def _process_telem_msg(self, input_data): msg = pickle.loads(input_data) uid = int(msg[0]) packet = msg[1] tlm_entry = (uid, packet) self._tlmQueue.append(tlm_entry) def dbg_message(self, msg): if self._debugEnabled: max_len = self.DEFAULT_DEBUG_MAX_LEN max_msg = (msg[:max_len] + '...') if len(msg) > max_len else msg log.info('AitOpenMctPlugin: ' + max_msg) @staticmethod def datetime_jsonifier(obj): if isinstance(obj, datetime.datetime): return obj.isoformat() else: return None @staticmethod def get_browser_name(browser): return getattr(browser, 'name', getattr(browser, '_name', '(none)'))
MIT License
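_get_tlm_packet_def is a plain dictionary lookup over the uid-to-definition map the plugin builds at start-up (create_uid_pkt_map). A sketch of the pattern with hypothetical packet definitions standing in for AIT's telemetry dictionary:

# Hypothetical stand-ins for AIT packet definitions; the real map is
# built by create_uid_pkt_map() from tlm.getDefaultDict().
uid_to_pkt_def = {
    0: {"name": "1553_HS_Packet"},
    1: {"name": "Ethernet_HS_Packet"},
}

def get_tlm_packet_def(uid):
    # Mirrors _get_tlm_packet_def: unknown uids raise KeyError, just as
    # they would on self._uidToPktDefMap.
    return uid_to_pkt_def[uid]

print(get_tlm_packet_def(1)["name"])  # Ethernet_HS_Packet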
ralphm/idavoll
idavoll/iidavoll.py
IStorage.createNode
python
def createNode(nodeIdentifier, owner, config):
Create a new node. The implementation should make sure the passed owner JID is stripped of the resource (e.g. using C{owner.userhostJID()}). The passed config is expected to have values for the fields returned by L{getDefaultConfiguration}, as well as a value for C{'pubsub#node_type'}. @param nodeIdentifier: NodeID of the new node. @type nodeIdentifier: C{unicode} @param owner: JID of the new node's owner. @type owner: L{JID<twisted.words.protocols.jabber.jid.JID>} @param config: Node configuration. @type config: C{dict} @return: deferred that fires on creation.
https://github.com/ralphm/idavoll/blob/8bb96845813bc9624cc0c0609a3deb7177964956/idavoll/iidavoll.py#L242-L259
from zope.interface import Attribute, Interface class IBackendService(Interface): def __init__(storage): def supportsPublisherAffiliation(): def supportsOutcastAffiliation(): def supportsPersistentItems(): def getNodeType(nodeIdentifier): def getNodes(): def getNodeMetaData(nodeIdentifier): def createNode(nodeIdentifier, requestor): def registerPreDelete(preDeleteFn): def deleteNode(nodeIdentifier, requestor): def purgeNode(nodeIdentifier, requestor): def subscribe(nodeIdentifier, subscriber, requestor): def unsubscribe(nodeIdentifier, subscriber, requestor): def getSubscribers(nodeIdentifier): def getSubscriptions(entity): def getAffiliations(entity): def publish(nodeIdentifier, items, requestor): def registerNotifier(observerfn, *args, **kwargs): def getNotifications(nodeIdentifier, items): def getItems(nodeIdentifier, requestor, maxItems=None, itemIdentifiers=[]): def retractItem(nodeIdentifier, itemIdentifier, requestor): class IStorage(Interface): def getNode(nodeIdentifier): def getNodeIds():
MIT License
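The contract above asks implementations to strip the resource part from the owner JID and to return a Deferred. A toy in-memory sketch of the storage side, with a string split standing in for twisted's jid.JID.userhostJID() and the Deferred omitted (all names here are illustrative):

class MemoryStorage:
    """Toy, synchronous take on IStorage.createNode's contract."""

    def __init__(self):
        self.nodes = {}

    def createNode(self, nodeIdentifier, owner, config):
        if nodeIdentifier in self.nodes:
            raise KeyError("node already exists: %s" % nodeIdentifier)
        # Stand-in for owner.userhostJID(): drop the resource part.
        bare_owner = owner.split('/', 1)[0]
        self.nodes[nodeIdentifier] = {'owner': bare_owner, 'config': config}

storage = MemoryStorage()
storage.createNode('news', 'alice@example.org/laptop',
                   {'pubsub#node_type': 'leaf'})
print(storage.nodes['news']['owner'])  # alice@example.org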
lisa-lab/pylearn2
pylearn2/costs/cost.py
DefaultDataSpecsMixin.get_data_specs
python
def get_data_specs(self, model):
    if self.supervised:
        space = CompositeSpace([model.get_input_space(),
                                model.get_target_space()])
        sources = (model.get_input_source(), model.get_target_source())
        return (space, sources)
    else:
        return (model.get_input_space(), model.get_input_source())
Provides a default data specification. The cost requests input features from the model's input space and input source. `self` must contain a bool field called `supervised`. If this field is True, the cost requests targets as well. Parameters ---------- model : pylearn2.models.Model TODO WRITEME
https://github.com/lisa-lab/pylearn2/blob/af81e5c362f0df4df85c3e54e23b2adeec026055/pylearn2/costs/cost.py#L548-L567
import functools import logging import warnings from theano.compat.six.moves import reduce import theano.tensor as T from theano.compat.six.moves import zip as izip from pylearn2.compat import OrderedDict from pylearn2.utils import safe_zip from pylearn2.utils import safe_union from pylearn2.space import CompositeSpace, NullSpace from pylearn2.utils.data_specs import DataSpecsMapping from pylearn2.utils.exc import reraise_as logger = logging.getLogger(__name__) class Cost(object): supervised = False def expr(self, model, data, ** kwargs): try: per_example = self.cost_per_example(self, model, data, **kwargs) except NotImplementedError: raise NotImplementedError(str(type(self)) + " does not implement " "expr.") if per_example is None: return None assert per_example.ndim == 1 return per_example.mean() def cost_per_example(self, model, data, ** kwargs): raise NotImplementedError(str(type(self)) + "does not implement " "cost_per_example.") def get_gradients(self, model, data, ** kwargs): try: cost = self.expr(model=model, data=data, **kwargs) except TypeError: message = "Error while calling " + str(type(self)) + ".expr" reraise_as(TypeError(message)) if cost is None: raise NotImplementedError(str(type(self)) + " represents an intractable cost and " "does not provide a gradient " "approximation scheme.") params = list(model.get_params()) grads = T.grad(cost, params, disconnected_inputs='ignore') gradients = OrderedDict(izip(params, grads)) updates = OrderedDict() return gradients, updates def get_monitoring_channels(self, model, data, **kwargs): self.get_data_specs(model)[0].validate(data) return OrderedDict() def get_fixed_var_descr(self, model, data): self.get_data_specs(model)[0].validate(data) fixed_var_descr = FixedVarDescr() return fixed_var_descr def get_data_specs(self, model): raise NotImplementedError(str(type(self)) + " does not implement " + "get_data_specs.") def is_stochastic(self): raise NotImplementedError(str(type(self)) + " needs to implement " "is_stochastic.") class SumOfCosts(Cost): def __init__(self, costs): assert isinstance(costs, list) assert len(costs) > 0 self.costs = [] self.coeffs = [] for cost in costs: if isinstance(cost, (list, tuple)): coeff, cost = cost else: coeff = 1. 
self.coeffs.append(coeff) self.costs.append(cost) if not isinstance(cost, Cost): raise ValueError("one of the costs is not " "Cost instance") self.supervised = any([cost_.supervised for cost_ in self.costs]) def expr(self, model, data, ** kwargs): self.get_data_specs(model)[0].validate(data) composite_specs, mapping = self.get_composite_specs_and_mapping(model) nested_data = mapping.nest(data) costs = [] for cost, cost_data in safe_zip(self.costs, nested_data): costs.append(cost.expr(model, cost_data, **kwargs)) assert len(costs) > 0 if any([cost is None for cost in costs]): sum_of_costs = None else: costs = [coeff * cost for coeff, cost in safe_zip(self.coeffs, costs)] assert len(costs) > 0 sum_of_costs = reduce(lambda x, y: x + y, costs) return sum_of_costs def get_composite_data_specs(self, model): spaces = [] sources = [] for cost in self.costs: space, source = cost.get_data_specs(model) spaces.append(space) sources.append(source) composite_space = CompositeSpace(spaces) sources = tuple(sources) return (composite_space, sources) def get_composite_specs_and_mapping(self, model): composite_space, sources = self.get_composite_data_specs(model) mapping = DataSpecsMapping((composite_space, sources)) return (composite_space, sources), mapping def get_data_specs(self, model): composite_specs, mapping = self.get_composite_specs_and_mapping(model) composite_space, sources = composite_specs flat_composite_space = mapping.flatten(composite_space) flat_sources = mapping.flatten(sources) data_specs = (flat_composite_space, flat_sources) return data_specs @functools.wraps(Cost.get_gradients) def get_gradients(self, model, data, ** kwargs): indiv_results = [] composite_specs, mapping = self.get_composite_specs_and_mapping(model) nested_data = mapping.nest(data) for cost, cost_data in safe_zip(self.costs, nested_data): result = cost.get_gradients(model, cost_data, ** kwargs) indiv_results.append(result) grads = OrderedDict() updates = OrderedDict() params = model.get_params() for coeff, packed in zip(self.coeffs, indiv_results): g, u = packed for param in g: if param not in params: raise ValueError("A shared variable (" + str(param) + ") that is not a parameter appeared " "a cost gradient dictionary.") for param in g: assert param.ndim == g[param].ndim v = coeff * g[param] if param not in grads: grads[param] = v else: grads[param] = grads[param] + v assert grads[param].ndim == param.ndim assert not any([state in updates for state in u]) assert not any([state in params for state in u]) updates.update(u) return grads, updates @functools.wraps(Cost.get_monitoring_channels) def get_monitoring_channels(self, model, data, ** kwargs): self.get_data_specs(model)[0].validate(data) rval = OrderedDict() composite_specs, mapping = self.get_composite_specs_and_mapping(model) nested_data = mapping.nest(data) for i, cost in enumerate(self.costs): cost_data = nested_data[i] try: channels = cost.get_monitoring_channels(model, cost_data, **kwargs) rval.update(channels) except TypeError: reraise_as(Exception('SumOfCosts.get_monitoring_channels ' 'encountered TypeError while calling {0}' '.get_monitoring_channels'.format( type(cost)))) value = cost.expr(model, cost_data, ** kwargs) if value is not None: name = '' if hasattr(value, 'name') and value.name is not None: name = '_' + value.name rval['term_' + str(i) + name] = value return rval def get_fixed_var_descr(self, model, data): data_specs = self.get_data_specs(model) data_specs[0].validate(data) composite_specs, mapping = self.get_composite_specs_and_mapping(model) 
nested_data = mapping.nest(data) descrs = [cost.get_fixed_var_descr(model, cost_data) for cost, cost_data in safe_zip(self.costs, nested_data)] return reduce(merge, descrs) class NullDataSpecsMixin(object): def get_data_specs(self, model): return (NullSpace(), '') class DefaultDataSpecsMixin(object):
BSD 3-Clause New or Revised License
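Whether the mixin requests targets hinges entirely on the supervised flag: supervised costs get a composite (input, target) spec, unsupervised ones only the input spec. A framework-free sketch of that branching, with strings and tuples standing in for pylearn2's Space objects:

class DummyModel:
    # Illustrative stand-ins for the model's space/source accessors.
    def get_input_space(self):   return 'VectorSpace(dim=784)'
    def get_target_space(self):  return 'VectorSpace(dim=10)'
    def get_input_source(self):  return 'features'
    def get_target_source(self): return 'targets'

class DefaultDataSpecs:
    def __init__(self, supervised):
        self.supervised = supervised

    def get_data_specs(self, model):
        if self.supervised:
            # CompositeSpace([...]) in pylearn2; a plain tuple here.
            space = (model.get_input_space(), model.get_target_space())
            sources = (model.get_input_source(), model.get_target_source())
            return (space, sources)
        return (model.get_input_space(), model.get_input_source())

m = DummyModel()
print(DefaultDataSpecs(supervised=True).get_data_specs(m))
print(DefaultDataSpecs(supervised=False).get_data_specs(m))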
ibm/mi-prometheus
miprometheus/models/VWM_model/attention_module.py
AttentionModule.forward
python
def forward(self, q, keys, values=None):
    if values is None:
        values = keys

    assert q.size(-1) == self.dim, 'Dimension mismatch in query'
    assert keys.size(-1) == self.dim, 'Dimension mismatch in keys'
    assert values.size(-2) == keys.size(-2), \
        'Num slots mismatch between keys and values'

    ca = self.attn(q[:, None, :] * keys)
    c = (ca * values).sum(1)
    ca = ca.squeeze(-1)
    return c, ca
Forward pass of the ``VWM model Attention_Module``. :param q : query [batch_size x dim] :type tensor :param keys : Keys [batch_size x N x dim] :type tensor :param values : Values [batch_size x N x dim_other] :type tensor :return: c : content [batch_size x dim_other] :type tensor :return: ca : attention [batch_size x N] :type tensor
https://github.com/ibm/mi-prometheus/blob/a8e8a5b339598b0637a251834c560bc24d5a9500/miprometheus/models/VWM_model/attention_module.py#L75-L112
__author__ = "Vincent Albouy, T.S. Jayram" import torch from torch.nn import Module from miprometheus.models.VWM_model.utils_VWM import linear class AttentionModule(Module): def __init__(self, dim): super(AttentionModule, self).__init__() self.attn = torch.nn.Sequential(linear(dim, 1, bias=False), torch.nn.Softmax(dim=1)) self.dim = dim
Apache License 2.0
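Stripped of the module wrapper, the forward pass scores each memory slot with a learned scalar projection of q * keys, softmaxes the scores over slots, and returns the attention-weighted sum of the values. A self-contained torch sketch of that computation, with a fixed random weight standing in for the module's linear(dim, 1) layer:

import torch

batch, N, dim = 2, 5, 8
q = torch.randn(batch, dim)
keys = torch.randn(batch, N, dim)
values = keys  # forward() defaults values to keys

w = torch.randn(dim, 1)                # stand-in for linear(dim, 1, bias=False)
scores = (q[:, None, :] * keys) @ w    # [batch, N, 1]
ca = torch.softmax(scores, dim=1)      # attention over the N slots
c = (ca * values).sum(1)               # [batch, dim] content read-out

print(c.shape, ca.squeeze(-1).shape)   # torch.Size([2, 8]) torch.Size([2, 5])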
spritelink/nipap
nipap-www/nipapwww/controllers/pool.py
PoolController.add_prefix
python
def add_prefix(self, id):
    if 'prefix' not in request.params:
        abort(400, 'Missing prefix.')

    pool = Pool.get(int(id))
    prefix = Prefix.get(int(request.params['prefix']))
    prefix.pool = pool
    prefix.save()

    redirect(url(controller = 'pool', action = 'edit', id = id))
Add a prefix to pool 'id'
https://github.com/spritelink/nipap/blob/2e765db3b27eabfad81ee6e49f96a504c9d636f8/nipap-www/nipapwww/controllers/pool.py#L107-L120
import logging from pylons import request, response, session, tmpl_context as c, url from pylons.controllers.util import abort, redirect from nipapwww.lib.base import BaseController, render from pynipap import Pool, Prefix log = logging.getLogger(__name__) class PoolController(BaseController): def index(self): redirect(url(controller = 'pool', action = 'list')) def list(self): return render('/pool_list.html') def add(self): if request.method == 'POST': p = Pool() p.name = request.params.get('name') p.description = request.params.get('description') p.default_type = request.params.get('default_type') if request.params['ipv4_default_prefix_length'].strip() != '': p.ipv4_default_prefix_length = request.params['ipv4_default_prefix_length'] if request.params['ipv6_default_prefix_length'].strip() != '': p.ipv6_default_prefix_length = request.params['ipv6_default_prefix_length'] p.save() redirect(url(controller = 'pool', action = 'list')) return render("/pool_add.html") def edit(self, id): c.pool = Pool.get(int(id)) c.prefix_list = Prefix.list({ 'pool_id': c.pool.id }) c.prefix = '' if request.method == 'POST': c.pool.name = request.params['name'] c.pool.description = request.params['description'] c.pool.default_type = request.params['default_type'] if request.params['ipv4_default_prefix_length'].strip() == '': c.pool.ipv4_default_prefix_length = None else: c.pool.ipv4_default_prefix_length = request.params['ipv4_default_prefix_length'] if request.params['ipv6_default_prefix_length'].strip() == '': c.pool.ipv6_default_prefix_length = None else: c.pool.ipv6_default_prefix_length = request.params['ipv6_default_prefix_length'] c.pool.save() redirect(url(controller = 'pool', action = 'list')) c.search_opt_parent = 'all' c.search_opt_child = 'none' return render("/pool_edit.html") def remove(self, id): p = Pool.get(int(id)) p.remove() redirect(url(controller = 'pool', action = 'list')) def remove_prefix(self, id): if 'prefix' not in request.params: abort(400, 'Missing prefix.') prefix = Prefix.get(int(request.params['prefix'])) prefix.pool = None prefix.save() redirect(url(controller = 'pool', action = 'edit', id = id))
MIT License
bayespy/bayespy
bayespy/inference/vmp/nodes/gaussian.py
GaussianGammaDistribution.compute_message_to_parent
python
def compute_message_to_parent(self, parent, index, u, u_mu_Lambda, u_a, u_b):
    x_tau = u[0]
    xx_tau = u[1]
    tau = u[2]
    logtau = u[3]

    if index == 0:
        m0 = x_tau
        m1 = -0.5 * tau
        m2 = -0.5 * xx_tau
        m3 = 0.5
        return [m0, m1, m2, m3]
    elif index == 1:
        logb = u_b[1]
        m0 = logtau + logb
        m1 = -1
        return [m0, m1]
    elif index == 2:
        a = u_a[0]
        m0 = -tau
        m1 = a
        return [m0, m1]
    else:
        raise ValueError("Index out of bounds")
r""" Compute the message to a parent node. - Parent :math:`(\boldsymbol{\mu}, \mathbf{\Lambda})` Moments: .. math:: \begin{bmatrix} \mathbf{\Lambda}\boldsymbol{\mu} \\ \boldsymbol{\mu}^T\mathbf{\Lambda}\boldsymbol{\mu} \\ \mathbf{\Lambda} \\ \log|\mathbf{\Lambda}| \end{bmatrix} Message: .. math:: \begin{bmatrix} \langle \tau \mathbf{x} \rangle \\ - \frac{1}{2} \langle \tau \rangle \\ - \frac{1}{2} \langle \tau \mathbf{xx}^T \rangle \\ \frac{1}{2} \end{bmatrix} - Parent :math:`a`: Moments: .. math:: \begin{bmatrix} a \\ \log \Gamma(a) \end{bmatrix} Message: .. math:: \begin{bmatrix} \langle \log\tau \rangle + \langle \log b \rangle \\ -1 \end{bmatrix} - Parent :math:`b`: Moments: .. math:: \begin{bmatrix} b \\ \log b \end{bmatrix} Message: .. math:: \begin{bmatrix} - \langle \tau \rangle \\ \langle a \rangle \end{bmatrix}
https://github.com/bayespy/bayespy/blob/0e6e6130c888a4295cc9421d61d4ad27b2960ebb/bayespy/inference/vmp/nodes/gaussian.py#L913-L1014
import numpy as np from scipy import special from bayespy.utils import (random, misc, linalg) from bayespy.utils.linalg import dot, mvdot from .expfamily import (ExponentialFamily, ExponentialFamilyDistribution, useconstructor) from .wishart import (WishartMoments, WishartPriorMoments) from .gamma import (GammaMoments, GammaDistribution, GammaPriorMoments) from .deterministic import Deterministic from .node import (Moments, ensureparents) class GaussianMoments(Moments): def __init__(self, shape): self.shape = shape self.ndim = len(shape) self.dims = (shape, 2*shape) super().__init__() def compute_fixed_moments(self, x): x = np.asanyarray(x) x = misc.atleast_nd(x, self.ndim) return [x, linalg.outer(x, x, ndim=self.ndim)] @classmethod def from_values(cls, x, ndim): if ndim == 0: return cls(()) else: return cls(np.shape(x)[-ndim:]) def get_instance_conversion_kwargs(self): return dict(ndim=self.ndim) def get_instance_converter(self, ndim): if ndim == self.ndim or ndim is None: return None return GaussianToGaussian(self, ndim) class GaussianToGaussian(): def __init__(self, moments_from, ndim_to): if not isinstance(moments_from, GaussianMoments): raise ValueError() if ndim_to < 0: return ValueError("ndim_to must be non-negative") self.shape_from = moments_from.shape self.ndim_from = moments_from.ndim self.ndim_to = ndim_to if self.ndim_to > self.ndim_from: raise ValueError() if self.ndim_to == 0: self.moments = GaussianMoments(()) else: self.moments = GaussianMoments(self.shape_from[-self.ndim_to:]) return def compute_moments(self, u): if self.ndim_to == self.ndim_from: return u u0 = u[0] u1 = misc.get_diag(u[1], ndim=self.ndim_from, ndim_to=self.ndim_to) return [u0, u1] def compute_message_to_parent(self, m, u_parent): m0 = m[0] * np.ones(self.shape_from) m1 = ( misc.make_diag(m[1], ndim=self.ndim_from, ndim_from=self.ndim_to) * misc.identity(*self.shape_from) ) return [m0, m1] def compute_weights_to_parent(self, weights): diff = self.ndim_from - self.ndim_to if diff == 0: return weights return np.sum( weights * np.ones(self.shape_from[:diff]), axis=tuple(range(-diff, 0)) ) def plates_multiplier_from_parent(self, plates_multiplier): diff = self.ndim_from - self.ndim_to return plates_multiplier + diff * (1,) def plates_from_parent(self, plates): diff = self.ndim_from - self.ndim_to if diff == 0: return plates return plates + self.shape_from[:diff] def plates_to_parent(self, plates): diff = self.ndim_from - self.ndim_to if diff == 0: return plates return plates[:-diff] class GaussianGammaMoments(Moments): def __init__(self, shape): self.shape = shape self.ndim = len(shape) self.dims = (shape, 2*shape, (), ()) super().__init__() def compute_fixed_moments(self, x_alpha): (x, alpha) = x_alpha x = np.asanyarray(x) alpha = np.asanyarray(alpha) u0 = x * misc.add_trailing_axes(alpha, self.ndim) u1 = (linalg.outer(x, x, ndim=self.ndim) * misc.add_trailing_axes(alpha, 2*self.ndim)) u2 = np.copy(alpha) u3 = np.log(alpha) u = [u0, u1, u2, u3] return u @classmethod def from_values(cls, x_alpha, ndim): (x, alpha) = x_alpha if ndim == 0: shape = ( (), (), (), () ) else: shape = np.shape(x)[-ndim:] return cls(shape) def get_instance_conversion_kwargs(self): return dict(ndim=self.ndim) def get_instance_converter(self, ndim): if ndim != self.ndim: raise NotImplementedError( "Conversion to different ndim in GaussianMoments not yet " "implemented." 
) return None class GaussianWishartMoments(Moments): def __init__(self, shape): self.shape = shape self.ndim = len(shape) self.dims = ( shape, (), 2*shape, () ) super().__init__() def compute_fixed_moments(self, x, Lambda): x = np.asanyarray(x) Lambda = np.asanyarray(Lambda) u0 = linalg.mvdot(Lambda, x, ndim=self.ndim) u1 = np.einsum( '...i,...ij,...j->...', misc.flatten_axes(x, self.ndim), misc.flatten_axes(Lambda, self.ndim, self.ndim), misc.flatten_axes(x, self.ndim) ) u2 = np.copy(Lambda) u3 = linalg.logdet_cov(Lambda, ndim=self.ndim) return [u0, u1, u2, u3] @classmethod def from_values(self, x, Lambda, ndim): if ndim == 0: return cls(()) else: if np.ndim(x) < ndim: raise ValueError("Mean must be a vector") shape = np.shape(x)[-ndim:] if np.shape(Lambda)[-2*ndim:] != shape + shape: raise ValueError("Shapes inconsistent") return cls(shape) class GaussianDistribution(ExponentialFamilyDistribution): def __init__(self, shape): self.shape = shape self.ndim = len(shape) super().__init__() def compute_message_to_parent(self, parent, index, u, u_mu_Lambda): if index == 0: x = u[0] xx = u[1] m0 = x m1 = -0.5 m2 = -0.5*xx m3 = 0.5 return [m0, m1, m2, m3] else: raise ValueError("Index out of bounds") def compute_phi_from_parents(self, u_mu_Lambda, mask=True): Lambda_mu = u_mu_Lambda[0] Lambda = u_mu_Lambda[2] return [Lambda_mu, -0.5 * Lambda] def compute_moments_and_cgf(self, phi, mask=True): L = linalg.chol(-2*phi[1], ndim=self.ndim) k = np.shape(phi[0])[-1] u0 = linalg.chol_solve(L, phi[0], ndim=self.ndim) u1 = (linalg.outer(u0, u0, ndim=self.ndim) + linalg.chol_inv(L, ndim=self.ndim)) u = [u0, u1] g = (-0.5 * linalg.inner(u[0], phi[0], ndim=self.ndim) + 0.5 * linalg.chol_logdet(L, ndim=self.ndim)) return (u, g) def compute_cgf_from_parents(self, u_mu_Lambda): mu_Lambda_mu = u_mu_Lambda[1] logdet_Lambda = u_mu_Lambda[3] g = -0.5*mu_Lambda_mu + 0.5*logdet_Lambda return g def compute_fixed_moments_and_f(self, x, mask=True): k = np.shape(x)[-1] u = [x, linalg.outer(x, x, ndim=self.ndim)] f = -k/2*np.log(2*np.pi) return (u, f) def compute_gradient(self, g, u, phi): ndim = 1 x = u[0] xx = u[1] x_x = linalg.outer(x, x, ndim=self.ndim) Cov = xx - x_x cov_g0 = linalg.mvdot(Cov, g[0], ndim=self.ndim) cov_g0_x = linalg.outer(cov_g0, x, ndim=self.ndim) g1_x = linalg.mvdot(g[1], x, ndim=self.ndim) d0 = cov_g0 + 2 * linalg.mvdot(Cov, g1_x, ndim=self.ndim) d1 = (cov_g0_x + linalg.transpose(cov_g0_x, ndim=self.ndim) + 2 * linalg.mmdot(xx, linalg.mmdot(g[1], xx, ndim=self.ndim), ndim=self.ndim) - 2 * x_x * misc.add_trailing_axes(linalg.inner(g1_x, x, ndim=self.ndim), 2*self.ndim)) return [d0, d1] def random(self, *phi, plates=None): U = linalg.chol(-2*phi[1], ndim=self.ndim) mu = linalg.chol_solve(U, phi[0], ndim=self.ndim) shape = plates + self.shape z = np.random.randn(*shape) z = linalg.solve_triangular(U, z, trans='N', lower=False, ndim=self.ndim) return mu + z class GaussianARDDistribution(ExponentialFamilyDistribution): def __init__(self, shape): self.shape = shape self.ndim = len(shape) super().__init__() def compute_message_to_parent(self, parent, index, u, u_mu_alpha): if index == 0: x = u[0] x2 = misc.get_diag(u[1], ndim=self.ndim) m0 = x m1 = -0.5 * np.ones(self.shape) m2 = -0.5 * x2 m3 = 0.5 * np.ones(self.shape) return [m0, m1, m2, m3] else: raise ValueError("Invalid parent index") def compute_weights_to_parent(self, index, weights): if index != 0: raise IndexError() return misc.add_trailing_axes(weights, self.ndim) def compute_phi_from_parents(self, u_mu_alpha, mask=True): alpha_mu = 
u_mu_alpha[0] alpha = u_mu_alpha[2] phi0 = alpha_mu phi1 = -0.5 * alpha if self.ndim > 0: ones = np.ones(self.shape) phi0 = ones * phi0 phi1 = ones * phi1 phi1 = misc.diag(phi1, ndim=self.ndim) return [phi0, phi1] def compute_moments_and_cgf(self, phi, mask=True): if self.ndim == 0: u0 = -phi[0] / (2*phi[1]) u1 = u0**2 - 1 / (2*phi[1]) u = [u0, u1] g = (-0.5 * u[0] * phi[0] + 0.5 * np.log(-2*phi[1])) else: D = np.prod(self.shape) phi0 = np.reshape(phi[0], phi[0].shape[:-self.ndim] + (D,)) phi1 = np.reshape(phi[1], phi[1].shape[:-2*self.ndim] + (D,D)) L = linalg.chol(-2*phi1) Cov = linalg.chol_inv(L) u0 = linalg.chol_solve(L, phi0) u1 = linalg.outer(u0, u0) + Cov g = (- 0.5 * np.einsum('...i,...i', u0, phi0) + 0.5 * linalg.chol_logdet(L)) u0 = np.reshape(u0, u0.shape[:-1] + self.shape) u1 = np.reshape(u1, u1.shape[:-2] + self.shape + self.shape) u = [u0, u1] return (u, g) def compute_cgf_from_parents(self, u_mu_alpha): alpha_mu2 = u_mu_alpha[1] logdet_alpha = u_mu_alpha[3] axes = tuple(range(-self.ndim, 0)) if self.ndim > 0: alpha_mu2 = misc.sum_multiply(alpha_mu2, np.ones(self.shape), axis=axes) if self.ndim > 0: logdet_alpha = misc.sum_multiply(logdet_alpha, np.ones(self.shape), axis=axes) g = -0.5*alpha_mu2 + 0.5*logdet_alpha return g def compute_fixed_moments_and_f(self, x, mask=True): if self.ndim > 0 and np.shape(x)[-self.ndim:] != self.shape: raise ValueError("Invalid shape") k = np.prod(self.shape) u = [x, linalg.outer(x, x, ndim=self.ndim)] f = -k/2*np.log(2*np.pi) return (u, f) def plates_to_parent(self, index, plates): if index != 0: raise IndexError() return plates + self.shape def plates_from_parent(self, index, plates): if index != 0: raise IndexError() if self.ndim == 0: return plates else: return plates[:-self.ndim] def random(self, *phi, plates=None): D = self.ndim if D == 0: dims = () else: dims = np.shape(phi[0])[-D:] if np.prod(dims) == 1.0: phi1 = phi[1] if D > 0: phi1 = np.reshape(phi1, np.shape(phi1)[:-2*D] + D*(1,)) var = -0.5 / phi1 std = np.sqrt(var) mu = var * phi[0] shape = plates + dims z = np.random.randn(*shape) x = mu + std * z else: N = np.prod(dims) dims_cov = dims + dims plates_cov = np.shape(phi[1])[:-2*D] V = -2 * np.reshape(phi[1], plates_cov + (N,N)) U = linalg.chol(V) plates_phi0 = np.shape(phi[0])[:-D] phi0 = np.reshape(phi[0], plates_phi0 + (N,)) mu = linalg.chol_solve(U, phi0) shape = plates + (N,) z = np.random.randn(*shape) x = mu + linalg.solve_triangular(U, z, trans='N', lower=False) x = np.reshape(x, plates + dims) return x def compute_gradient(self, g, u, phi): ndim = self.ndim x = u[0] xx = u[1] x_x = linalg.outer(x, x, ndim=ndim) Cov = xx - x_x cov_g0 = linalg.mvdot(Cov, g[0], ndim=ndim) cov_g0_x = linalg.outer(cov_g0, x, ndim=ndim) g1_x = linalg.mvdot(g[1], x, ndim=ndim) d0 = cov_g0 + 2 * linalg.mvdot(Cov, g1_x, ndim=ndim) d1 = (cov_g0_x + linalg.transpose(cov_g0_x, ndim=ndim) + 2 * linalg.mmdot(xx, linalg.mmdot(g[1], xx, ndim=ndim), ndim=ndim) - 2 * x_x * misc.add_trailing_axes(linalg.inner(g1_x, x, ndim=ndim), 2*ndim)) return [d0, d1] class GaussianGammaDistribution(ExponentialFamilyDistribution): def __init__(self, shape): self.shape = shape self.ndim = len(shape) super().__init__()
MIT License
rjt1990/pyflux
pyflux/ssm/nllt.py
NLLT._model
python
def _model(self, data, beta):
    T, Z, R, Q, H = self._ss_matrices(beta)
    return univariate_kalman(data, Z, H, T, Q, R, 0.0)
Creates the structure of the model. Parameters ---------- data : np.array Contains the time series beta : np.array Contains untransformed starting values for latent variables Returns ---------- a,P,K,F,v : np.array Filtered states, filtered variances, Kalman gains, F matrix, residuals
https://github.com/rjt1990/pyflux/blob/297f2afc2095acd97c12e827dd500e8ea5da0c0f/pyflux/ssm/nllt.py#L320-L339
import sys if sys.version_info < (3,): range = xrange import numpy as np import pandas as pd import scipy.stats as ss from scipy import optimize from .. import inference as ifr from .. import families as fam from .. import output as op from .. import tsm as tsm from .. import data_check as dc from .. import covariances as cov from .. import results as res from .. import gas as gas from .kalman import * from .llt import * class NLLT(tsm.TSM): def __init__(self, data, family, integ=0, target=None): super(NLLT,self).__init__('NLLT') self.integ = integ self.target = target self.max_lag = 0 self._z_hide = 0 self.supported_methods = ["MLE", "PML", "Laplace", "M-H", "BBVI"] self.default_method = "MLE" self.multivariate_model = False self.state_no = 2 self.data, self.data_name, self.is_pandas, self.index = dc.data_check(data,target) self.data = self.data.astype(np.float) self.data_original = self.data X = self.data for order in range(self.integ): X = np.diff(X) self.data_name = "Differenced " + self.data_name self.data = X self.cutoff = 0 self.data_length = self.data.shape[0] self._create_latent_variables() self.family = family self.model_name2, self.link, self.scale, self.shape, self.skewness, self.mean_transform, self.cythonized = self.family.setup() self.model_name = self.model_name2 + " Local Linear Trend Model" for no, i in enumerate(self.family.build_latent_variables()): self.latent_variables.add_z(i[0],i[1],i[2]) self.latent_variables.z_list[no+1].start = i[3] self.z_no = len(self.latent_variables.z_list) def _get_scale_and_shape(self, parm): if self.scale is True: if self.shape is True: model_shape = parm[-1] model_scale = parm[-2] else: model_shape = 0 model_scale = parm[-1] else: model_scale = 0 model_shape = 0 if self.skewness is True: model_skewness = parm[-3] else: model_skewness = 0 return model_scale, model_shape, model_skewness def neg_loglik(self, beta): Z = np.zeros(2) Z[0] = 1 states = np.zeros([self.state_no, self.data.shape[0]]) states[0,:] = beta[self.z_no:self.z_no+self.data.shape[0]] states[1,:] = beta[self.z_no+self.data.shape[0]:] parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(self.z_no)]) scale, shape, skewness = self._get_scale_and_shape(parm) return self.state_likelihood(beta, states) + self.family.neg_loglikelihood(self.data, self.link(np.dot(Z, states)), scale, shape, skewness) def likelihood_markov_blanket(self, beta): states = np.zeros([self.state_no, self.data_length]) for state_i in range(self.state_no): states[state_i,:] = beta[(self.z_no + (self.data_length*state_i)):(self.z_no + (self.data_length*(state_i+1)))] parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(self.z_no)]) scale, shape, skewness = self._get_scale_and_shape(parm) Z = np.zeros(2) Z[0] = 1 return self.family.markov_blanket(self.data, self.link(np.dot(Z, states)), scale, shape, skewness) def state_likelihood(self, beta, alpha): _, _, _, Q = self._ss_matrices(beta) residuals_1 = alpha[0][1:alpha[0].shape[0]]-alpha[0][0:alpha[0].shape[0]-1] residuals_2 = alpha[1][1:alpha[1].shape[0]]-alpha[1][0:alpha[1].shape[0]-1] return np.sum(ss.norm.logpdf(residuals_1,loc=0,scale=np.power(Q[0][0],0.5))) + np.sum(ss.norm.logpdf(residuals_2,loc=0,scale=np.power(Q[1][1],0.5))) def state_likelihood_markov_blanket(self, beta, alpha, col_no): _, _, _, Q = self._ss_matrices(beta) blanket = np.append(0,ss.norm.logpdf(alpha[col_no][1:]-alpha[col_no][:-1],loc=0,scale=np.sqrt(Q[col_no][col_no]))) blanket[:-1] = blanket[:-1] + blanket[1:] 
return blanket def neg_logposterior(self, beta): post = self.neg_loglik(beta) for k in range(0,self.z_no): post += -self.latent_variables.z_list[k].prior.logpdf(beta[k]) return post def markov_blanket(self, beta, alpha): likelihood_blanket = self.likelihood_markov_blanket(beta) state_blanket = self.state_likelihood_markov_blanket(beta,alpha,0) for i in range(self.state_no-1): likelihood_blanket = np.append(likelihood_blanket,self.likelihood_markov_blanket(beta)) state_blanket = np.append(state_blanket,self.state_likelihood_markov_blanket(beta,alpha,i+1)) return likelihood_blanket + state_blanket def evo_blanket(self, beta, alpha): evo_blanket = np.zeros(self.state_no) for i in range(evo_blanket.shape[0]): evo_blanket[i] = self.state_likelihood_markov_blanket(beta, alpha, i).sum() if self.z_no > 2: evo_blanket = np.append([self.likelihood_markov_blanket(beta).sum()]*(self.z_no-1),evo_blanket) return evo_blanket def log_p_blanket(self, beta): states = np.zeros([self.state_no, self.data_length]) for state_i in range(self.state_no): states[state_i,:] = beta[(self.z_no + (self.data_length*state_i)):(self.z_no + (self.data_length*(state_i+1)))] return np.append(self.evo_blanket(beta,states),self.markov_blanket(beta,states)) def _animate_bbvi(self, stored_parameters, stored_predictive_likelihood): from matplotlib.animation import FuncAnimation, writers import matplotlib.pyplot as plt import seaborn as sns fig = plt.figure() ax = fig.add_subplot(1, 1, 1) ud = BBVINLLTAnimate(ax,self.data,stored_parameters,self.index,self.z_no,self.link) anim = FuncAnimation(fig, ud, frames=np.arange(stored_parameters.shape[0]), init_func=ud.init, interval=10, blit=True) plt.plot(self.data) plt.xlabel("Time") plt.ylabel(self.data_name) plt.show() def _create_latent_variables(self): self.latent_variables.add_z('Sigma^2 level', fam.Flat(transform='exp'), fam.Normal(0,3)) self.latent_variables.add_z('Sigma^2 trend', fam.Flat(transform='exp'), fam.Normal(0,3))
BSD 3-Clause New or Revised License
oemof/tespy
src/tespy/components/turbomachinery/pump.py
Pump.eta_s_deriv
python
def eta_s_deriv(self, increment_filter, k):
    f = self.eta_s_func
    if not increment_filter[0, 1]:
        self.jacobian[k, 0, 1] = self.numeric_deriv(f, 'p', 0)
    if not increment_filter[1, 1]:
        self.jacobian[k, 1, 1] = self.numeric_deriv(f, 'p', 1)
    if not increment_filter[0, 2]:
        self.jacobian[k, 0, 2] = self.numeric_deriv(f, 'h', 0)
    self.jacobian[k, 1, 2] = -self.eta_s.val
r""" Partial derivatives for isentropic efficiency function. Parameters ---------- increment_filter : ndarray Matrix for filtering non-changing variables. k : int Position of derivatives in Jacobian matrix (k-th equation).
https://github.com/oemof/tespy/blob/70bf8da9fd8521a1177613a894829cd1fa78c663/src/tespy/components/turbomachinery/pump.py#L226-L245
import logging import numpy as np from tespy.components.turbomachinery.turbomachine import Turbomachine from tespy.tools.data_containers import ComponentCharacteristics as dc_cc from tespy.tools.data_containers import ComponentProperties as dc_cp from tespy.tools.document_models import generate_latex_eq from tespy.tools.fluid_properties import isentropic from tespy.tools.fluid_properties import v_mix_ph class Pump(Turbomachine): @staticmethod def component(): return 'pump' def get_variables(self): return { 'P': dc_cp( min_val=0, num_eq=1, deriv=self.energy_balance_deriv, func=self.energy_balance_func, latex=self.energy_balance_func_doc), 'eta_s': dc_cp( min_val=0, max_val=1, num_eq=1, deriv=self.eta_s_deriv, func=self.eta_s_func, latex=self.eta_s_func_doc), 'pr': dc_cp( min_val=1, num_eq=1, deriv=self.pr_deriv, func=self.pr_func, func_params={'pr': 'pr'}, latex=self.pr_func_doc), 'eta_s_char': dc_cc( param='v', num_eq=1, deriv=self.eta_s_char_deriv, func=self.eta_s_char_func, latex=self.eta_s_char_func_doc), 'flow_char': dc_cc( param='v', num_eq=1, deriv=self.flow_char_deriv, func=self.flow_char_func, char_params={'type': 'abs', 'inconn': 0, 'outconn': 0}, latex=self.flow_char_func_doc) } def eta_s_func(self): return ( -(self.outl[0].h.val_SI - self.inl[0].h.val_SI) * self.eta_s.val + (isentropic( self.inl[0].get_flow(), self.outl[0].get_flow(), T0=self.inl[0].T.val_SI) - self.inl[0].h.val_SI)) def eta_s_func_doc(self, label): latex = ( r'0 =-\left(h_\mathrm{out}-h_\mathrm{in}\right)\cdot' r'\eta_\mathrm{s}+\left(h_\mathrm{out,s}-h_\mathrm{in}\right)') return generate_latex_eq(self, latex, label)
MIT License
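Most Jacobian entries above are filled by tespy's numeric_deriv helper, which perturbs a connection property and re-evaluates the residual; only the entry for the efficiency variable itself has the analytic value -eta_s. A generic central-difference sketch of what such a numeric derivative computes (an illustrative analog, not tespy's actual helper):

def numeric_deriv(f, x, i, d=1e-4):
    # Central difference for df/dx_i; tespy's version instead perturbs
    # pressure/enthalpy on a connection in place.
    x_hi, x_lo = list(x), list(x)
    x_hi[i] += d
    x_lo[i] -= d
    return (f(x_hi) - f(x_lo)) / (2 * d)

# d/dx1 of f(x) = x0 * x1**2 at (3.0, 2.0) is exactly 12.
print(numeric_deriv(lambda x: x[0] * x[1] ** 2, [3.0, 2.0], 1))  # ~12.0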
olitheolix/aiokubernetes
aiokubernetes/models/v1beta2_deployment_condition.py
V1beta2DeploymentCondition.type
python
def type(self, type):
    if type is None:
        raise ValueError("Invalid value for `type`, must not be `None`")

    self._type = type
Sets the type of this V1beta2DeploymentCondition. Type of deployment condition. # noqa: E501 :param type: The type of this V1beta2DeploymentCondition. # noqa: E501 :type: str
https://github.com/olitheolix/aiokubernetes/blob/266718b210dff2a9b2212183261ea89adf89115e/aiokubernetes/models/v1beta2_deployment_condition.py#L201-L212
import pprint import re class V1beta2DeploymentCondition(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'last_transition_time': 'datetime', 'last_update_time': 'datetime', 'message': 'str', 'reason': 'str', 'status': 'str', 'type': 'str' } attribute_map = { 'last_transition_time': 'lastTransitionTime', 'last_update_time': 'lastUpdateTime', 'message': 'message', 'reason': 'reason', 'status': 'status', 'type': 'type' } def __init__(self, last_transition_time=None, last_update_time=None, message=None, reason=None, status=None, type=None): self._last_transition_time = None self._last_update_time = None self._message = None self._reason = None self._status = None self._type = None self.discriminator = None if last_transition_time is not None: self.last_transition_time = last_transition_time if last_update_time is not None: self.last_update_time = last_update_time if message is not None: self.message = message if reason is not None: self.reason = reason self.status = status self.type = type @property def last_transition_time(self): return self._last_transition_time @last_transition_time.setter def last_transition_time(self, last_transition_time): self._last_transition_time = last_transition_time @property def last_update_time(self): return self._last_update_time @last_update_time.setter def last_update_time(self, last_update_time): self._last_update_time = last_update_time @property def message(self): return self._message @message.setter def message(self, message): self._message = message @property def reason(self): return self._reason @reason.setter def reason(self, reason): self._reason = reason @property def status(self): return self._status @status.setter def status(self, status): if status is None: raise ValueError("Invalid value for `status`, must not be `None`") self._status = status @property def type(self): return self._type @type.setter
Apache License 2.0
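These generated Kubernetes models validate required fields in the setter, so assigning None fails immediately instead of surfacing later as an invalid object. The pattern in isolation (an illustrative class, not the generated one):

class Condition:
    def __init__(self, type):
        self._type = None
        self.type = type  # route through the setter so validation runs

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, type):
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")
        self._type = type

c = Condition("Progressing")
print(c.type)        # Progressing
try:
    c.type = None
except ValueError as exc:
    print(exc)       # Invalid value for `type`, must not be `None`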
xuhua123/geekcomputers
primelib/primelib.py
primeFactorization
python
def primeFactorization(number):
    assert isinstance(number, int) and number >= 0, "'number' must be an int and >= 0"

    ans = []
    factor = 2
    quotient = number

    if number == 0 or number == 1:
        ans.append(number)
    elif not isPrime(number):
        while quotient != 1:
            if isPrime(factor) and (quotient % factor == 0):
                ans.append(factor)
                quotient //= factor  # integer division keeps 'quotient' exact
            else:
                factor += 1
    else:
        ans.append(number)

    assert isinstance(ans, list), "'ans' must be a list"
    return ans
input: positive integer 'number'; returns a list of the prime factors of 'number'
https://github.com/xuhua123/geekcomputers/blob/28b25d4b2a0c484b8e905fd54ec5ffeecf6460c8/primelib/primelib.py#L159-L201
def pi(maxK=70, prec=1008, disp=1007): from decimal import Decimal as Dec, getcontext as gc gc().prec = prec K, M, L, X, S = 6, 1, 13591409, 1, 13591409 for k in range(1, maxK+1): M = Dec((K**3 - (K<<4)) * M / k**3) L += 545140134 X *= -262537412640768000 S += Dec(M * L) / X K += 12 pi = 426880 * Dec(10005).sqrt() / S pi = Dec(str(pi)[:disp]) return pi def isPrime(number): import math assert isinstance(number,int) and (number >= 0) , "'number' must been an int and positive" status = True if number <= 1: status = False for divisor in range(2,int(round(math.sqrt(number)))+1): if number % divisor == 0: status = False break assert isinstance(status,bool), "'status' must been from type bool" return status def sieveEr(N): assert isinstance(N,int) and (N > 2), "'N' must been an int and > 2" beginList = [x for x in range(2,N+1)] ans = [] for i in range(len(beginList)): for j in range(i+1,len(beginList)): if (beginList[i] != 0) and (beginList[j] % beginList[i] == 0): beginList[j] = 0 ans = [x for x in beginList if x != 0] assert isinstance(ans,list), "'ans' must been from type list" return ans def getPrimeNumbers(N): assert isinstance(N,int) and (N > 2), "'N' must been an int and > 2" ans = [] for number in range(2,N+1): if isPrime(number): ans.append(number) assert isinstance(ans,list), "'ans' must been from type list" return ans
MIT License
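For comparison, a compact trial-division factorizer for n >= 2 that needs no primality test and stays in integer arithmetic throughout (an illustrative alternative, not primelib's API; primelib's edge cases for 0 and 1 differ):

def prime_factors(n):
    # Divide out each factor fully before moving on; any d that divides
    # n at this point is necessarily prime.
    factors, d = [], 2
    while d * d <= n:
        while n % d == 0:
            factors.append(d)
            n //= d
        d += 1
    if n > 1:
        factors.append(n)  # any leftover > 1 is itself prime
    return factors

print(prime_factors(120))  # [2, 2, 2, 3, 5]
print(prime_factors(97))   # [97]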
salesforce/django-declarative-apis
django_declarative_apis/machinery/__init__.py
BaseEndpointDefinition.response
python
def response(self):
    return self.resource
By default it returns :code:`self.resource` unless it is overridden.
https://github.com/salesforce/django-declarative-apis/blob/5659e63361185aa1b857f784a1b27078378f0be8/django_declarative_apis/machinery/__init__.py#L549-L553
import abc import http.client import itertools import logging import sys import django from django.conf import settings from django.db import models from django.http import HttpResponse from django_declarative_apis.machinery.filtering import apply_filters_to_object from django_declarative_apis.models import BaseConsumer from django_declarative_apis.resources.utils import HttpStatusCode from . import errors from .attributes import ( Aggregate, ConsumerAttribute, DeferrableEndpointTask, EndpointAttribute, EndpointTask, RawRequestObjectProperty, RequestAdhocQuerySet, RequestAttribute, RequestField, RequestProperty, RequestUrlField, RequireAllAttribute, RequireAllIfAnyAttribute, RequireOneAttribute, ResourceField, ) from .attributes import TypedEndpointAttributeMixin, RequestFieldGroup from .utils import locate_object, rate_limit_exceeded logger = logging.getLogger(__name__) class EndpointResourceAttribute(EndpointAttribute): def __init__(self, type, filter=None, returns_list=False, **kwargs): super(EndpointResourceAttribute, self).__init__(**kwargs) self.type = type self.filter = filter self.func = None self.returns_list = returns_list def __call__(self, func): self.func = func return self def get_instance_value(self, owner_instance, owner_class): if not owner_instance: return self try: value = self.func(owner_instance) except django.core.exceptions.ObjectDoesNotExist: raise errors.ClientErrorNotFound( "{0} instance not found".format(self.type.__name__) ) if value.__class__ == dict: return value if not getattr(value, "_api_filter", False): value._api_filter = self.filter return value class EndpointResponseAttribute(EndpointAttribute): def __init__(self, type, filter=None, **kwargs): super(EndpointResponseAttribute, self).__init__(**kwargs) self.type = type self.filter = filter self.func = None def __call__(self, func): self.func = func return self def get_instance_value(self, owner_instance, owner_class): if not owner_instance: return self value = self.func(owner_instance) if not getattr(value, "_api_filter", False): if self.filter: value._api_filter = self.filter return value class EndpointDefinitionMeta(abc.ABCMeta, metaclass=abc.ABCMeta): def __init__(cls, class_name, bases=None, dict=None): super(EndpointDefinitionMeta, cls).__init__(class_name, bases, dict) ancestor_attribs = (ancestor.__dict__.items() for ancestor in cls.mro()) for name, attribute in itertools.chain(dict.items(), *ancestor_attribs): try: if not attribute.name: attribute.name = name except AttributeError as e: pass class EndpointBinder(object): class BoundEndpointManager(object): def __init__(self, manager, bound_endpoint): self.manager = manager self.bound_endpoint = bound_endpoint self.binding_exc_info = None self.validation_exc_info = None def get_response(self): error = self.binding_exc_info or self.validation_exc_info if error: exc_type, exc_value, exc_traceback = error if isinstance(exc_value, errors.ClientError): logger.warning(exc_value.error_message) else: logger.error(str(exc_value.args) + "\n" + str(exc_traceback)) raise exc_value.with_traceback(exc_traceback) resource = self.bound_endpoint.resource if hasattr(resource, "is_dirty"): if resource and resource.is_dirty(check_relationship=True): resource.save() endpoint_tasks = sorted( self.manager.endpoint_tasks, key=lambda t: t.priority ) immediate_tasks = filter( lambda t: not isinstance(t, DeferrableEndpointTask), endpoint_tasks ) deferred_tasks = filter( lambda t: isinstance(t, DeferrableEndpointTask), endpoint_tasks ) try: for immediate_task in 
immediate_tasks: immediate_task.run(self.bound_endpoint) except errors.ClientError as ce: if ce.save_changes and resource and resource.is_dirty(): resource.save() raise if hasattr(resource, "is_dirty"): if resource and resource.is_dirty(check_relationship=True): resource.save() for deferred_task in deferred_tasks: deferred_task.run(self.bound_endpoint) if getattr(resource, "_api_filter", False): filter_def = resource._api_filter else: filter_def = self.bound_endpoint.response_filter data = self.bound_endpoint.response status_code = self.bound_endpoint.http_status if isinstance(data, HttpResponse): if 200 <= status_code <= 299: return status_code, data else: raise HttpStatusCode(data) else: try: x_expand = self.bound_endpoint.request.META.get("HTTP_X_EXPAND") except AttributeError: x_expand = "" return ( status_code, apply_filters_to_object(data, filter_def, x_expand), ) def __init__(self, endpoint_definition): super(EndpointBinder, self).__init__() self.endpoint_definition = endpoint_definition self.endpoint_attributes = endpoint_definition.get_endpoint_attributes() self.request_properties = endpoint_definition.get_request_properties() self.required_request_properties = ( endpoint_definition.get_required_request_properties() ) try: self.consumer_attributes = endpoint_definition.get_consumer_attributes() except AttributeError: self.consumer_attributes = [] self.request_fields = endpoint_definition.get_request_fields() self.required_request_fields = endpoint_definition.get_required_request_fields() self.endpoint_tasks = endpoint_definition.get_tasks() self.url_fields = endpoint_definition.get_url_fields() self.adhoc_queries = endpoint_definition.get_adhoc_queries() def create_bound_endpoint(self, manager, request, *args, **kwargs): endpoint = self.endpoint_definition() for url_field in self.url_fields: if (url_field.api_name or url_field.name) in kwargs: url_field.set_value(kwargs.get(url_field.api_name or url_field.name)) for adhoc_query_field in self.adhoc_queries: adhoc_query_field.set_value( { key: val for (key, val) in request.GET.items() if key.startswith(adhoc_query_field.name) } ) RequestProperty.bind_request_to_instance(endpoint, request) bound_endpoint_manager = EndpointBinder.BoundEndpointManager(manager, endpoint) try: self._bind_endpoint(endpoint) except Exception as e: bound_endpoint_manager.binding_exc_info = sys.exc_info() return bound_endpoint_manager try: self._validate_endpoint(endpoint) except Exception as e: bound_endpoint_manager.validation_exc_info = sys.exc_info() return bound_endpoint_manager def _bind_endpoint(self, endpoint): extra_error_message = "" missing_required_properties = [] invalid_value_properties = [] for request_property in self.request_properties: try: value = getattr(endpoint, request_property.name) if value is None and request_property.required: if isinstance(request_property, ConsumerAttribute): raise errors.ClientErrorForbidden() else: missing_required_properties.append(request_property) except errors.ClientErrorMissingFields as mfe: extra_error_message += mfe.error_message except (ValueError, errors.ClientErrorInvalidFieldValues) as ve: invalid_value_properties.append(request_property) if missing_required_properties or extra_error_message: raise errors.ClientErrorMissingFields( [property.name for property in missing_required_properties], extra_message=extra_error_message, ) if invalid_value_properties: raise errors.ClientErrorInvalidFieldValues( [request_property.name for request_property in invalid_value_properties] ) def _validate_endpoint(self, 
endpoint): try: if not ( endpoint.is_authorized() and endpoint.is_permitted() and endpoint.is_valid() ): raise errors.ClientErrorForbidden( additional_info=getattr(endpoint, "_validation_error_message", None) ) except django.core.exceptions.ObjectDoesNotExist: raise errors.ClientErrorNotFound() rate_limit_key = endpoint.rate_limit_key() if (rate_limit_key is not None) and rate_limit_exceeded( rate_limit_key, endpoint.rate_limit_period() ): raise errors.ClientErrorRequestThrottled() class _EndpointRequestLifecycleManager(object): def __init__(self, endpoint_definition): super(_EndpointRequestLifecycleManager, self).__init__() self.endpoint_definition = endpoint_definition self.binder = EndpointBinder(endpoint_definition) self.endpoint_tasks = endpoint_definition.get_tasks() def bind_endpoint_to_request(self, request, *args, **kwargs): return self.binder.create_bound_endpoint(self, request, *args, **kwargs) def process_request_and_get_response(self, request, *args, **kwargs): bound_endpoint = self.bind_endpoint_to_request(request, *args, **kwargs) return bound_endpoint.get_response() def __str__(self): return self.endpoint_definition.__name__ class BehavioralEndpointDefinitionRouter(object): def __init__(self, *endpoint_definitions): super(BehavioralEndpointDefinitionRouter, self).__init__() self.endpoint_definitions = endpoint_definitions self.endpoint_managers = [ _EndpointRequestLifecycleManager(endpoint) for endpoint in endpoint_definitions ] self.endpoint_manager_names = "({0})".format( ",".join(map(lambda e: e.__name__, endpoint_definitions)) ) def bind_endpoint_to_request(self, request, *args, **kwargs): bound_endpoint = None for candidate_endpoint_manager in self.endpoint_managers: bound_endpoint = candidate_endpoint_manager.bind_endpoint_to_request( request, *args, **kwargs ) if bound_endpoint.binding_exc_info is None: break return bound_endpoint def process_request_and_get_response(self, request, *args, **kwargs): try: bound_endpoint = self.bind_endpoint_to_request(request, *args, **kwargs) logger.info( "Processing request with handler %s", bound_endpoint.bound_endpoint.__class__.__name__, ) return bound_endpoint.get_response() except errors.ApiError: raise except Exception as e: raise errors.ServerError() from e def __call__(self, *args, **kwargs): return self.process_request_and_get_response(*args, **kwargs) def __str__(self): return self.endpoint_manager_names @property def documentation(self): return [x.documentation() for x in self.endpoint_definitions] class EndpointDefinitionMixin(metaclass=EndpointDefinitionMeta): pass class BaseEndpointDefinition(metaclass=EndpointDefinitionMeta): @abc.abstractmethod def is_authorized(self): return False def is_permitted(self): return True def is_valid(self): return True def rate_limit_key(self): return None def rate_limit_period(self): return 1 @property def response_filter(self): filter_def_name = getattr( settings, "DECLARATIVE_ENDPOINT_DEFAULT_FILTERS", None ) if filter_def_name: filter_def = locate_object(filter_def_name) else: filter_def = {} return filter_def @property def http_status(self): return http.client.OK @property @abc.abstractmethod def resource(self): raise NotImplementedError("Endpoints must implement self.resource property") @property
BSD 3-Clause New or Revised License
shellderp/sublime-robot-plugin
lib/robot/libraries/BuiltIn.py
_Variables.set_variable
python
def set_variable(self, *values): if len(values) == 0: return '' elif len(values) == 1: return values[0] else: return list(values)
Returns the given values which can then be assigned to variables. This keyword is mainly used for setting scalar variables. Additionally, it can be used for converting a scalar variable containing a list to a list variable or to multiple scalar variables. It is recommended to use `Create List` when creating new lists. Examples: | ${hi} = | Set Variable | Hello, world! | | ${hi2} = | Set Variable | I said: ${hi} | | ${var1} | ${var2} = | Set Variable | Hello | world | | @{list} = | Set Variable | ${list with some items} | | ${item1} | ${item2} = | Set Variable | ${list with 2 items} | Variables created with this keyword are available only in the scope where they are created. See `Set Global Variable`, `Set Test Variable` and `Set Suite Variable` for information on how to set variables so that they are available also in a larger scope.
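A minimal standalone sketch (the helper name is hypothetical, not part of the Robot Framework API) of the value-count dispatch the keyword implements: zero values yield an empty string, one value yields a scalar, several yield a list.

# Illustrative sketch of Set Variable's return-shape logic.
def set_variable_sketch(*values):
    if len(values) == 0:
        return ''          # no values -> empty string
    elif len(values) == 1:
        return values[0]   # one value -> scalar
    return list(values)    # several values -> list

assert set_variable_sketch() == ''
assert set_variable_sketch('Hello, world!') == 'Hello, world!'
assert set_variable_sketch('Hello', 'world') == ['Hello', 'world']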
https://github.com/shellderp/sublime-robot-plugin/blob/07069ebf20c82663eee24e4c7d82cccca020e8e9/lib/robot/libraries/BuiltIn.py#L921-L946
import os import re import time from robot.output import LOGGER, Message from robot.errors import DataError, ExecutionFailed, ExecutionFailures from robot import utils from robot.utils import asserts from robot.variables import is_var, is_list_var from robot.running import Keyword, RUN_KW_REGISTER from robot.running.context import EXECUTION_CONTEXTS from robot.common import UserErrorHandler from robot.version import get_version from robot.model import TagPatterns if utils.is_jython: from java.lang import String, Number try: bin except NameError: def bin(integer): if not isinstance(integer, (int, long)): raise TypeError if integer >= 0: prefix = '0b' else: prefix = '-0b' integer = abs(integer) bins = [] while integer > 1: integer, remainder = divmod(integer, 2) bins.append(str(remainder)) bins.append(str(integer)) return prefix + ''.join(reversed(bins)) class _Converter: def convert_to_integer(self, item, base=None): self._log_types(item) return self._convert_to_integer(item, base) def _convert_to_integer(self, orig, base=None): try: item = self._handle_java_numbers(orig) item, base = self._get_base(item, base) if base: return int(item, self._convert_to_integer(base)) return int(item) except: raise RuntimeError("'%s' cannot be converted to an integer: %s" % (orig, utils.get_error_message())) def _handle_java_numbers(self, item): if not utils.is_jython: return item if isinstance(item, String): return utils.unic(item) if isinstance(item, Number): return item.doubleValue() return item def _get_base(self, item, base): if not isinstance(item, basestring): return item, base item = utils.normalize(item) if item.startswith(('-', '+')): sign = item[0] item = item[1:] else: sign = '' bases = {'0b': 2, '0o': 8, '0x': 16} if base or not item.startswith(tuple(bases)): return sign+item, base return sign+item[2:], bases[item[:2]] def convert_to_binary(self, item, base=None, prefix=None, length=None): return self._convert_to_bin_oct_hex(bin, item, base, prefix, length) def convert_to_octal(self, item, base=None, prefix=None, length=None): return self._convert_to_bin_oct_hex(oct, item, base, prefix, length) def convert_to_hex(self, item, base=None, prefix=None, length=None, lowercase=False): return self._convert_to_bin_oct_hex(hex, item, base, prefix, length, lowercase) def _convert_to_bin_oct_hex(self, method, item, base, prefix, length, lowercase=False): self._log_types(item) ret = method(self._convert_to_integer(item, base)).upper() prefix = prefix or '' if ret[0] == '-': prefix = '-' + prefix ret = ret[1:] if len(ret) > 1: prefix_length = {bin: 2, oct: 1, hex: 2}[method] ret = ret[prefix_length:] if length: ret = ret.rjust(self._convert_to_integer(length), '0') if lowercase: ret = ret.lower() return prefix + ret def convert_to_number(self, item, precision=None): self._log_types(item) return self._convert_to_number(item, precision) def _convert_to_number(self, item, precision=None): number = self._convert_to_number_without_precision(item) if precision: number = round(number, self._convert_to_integer(precision)) return number def _convert_to_number_without_precision(self, item): try: if utils.is_jython: item = self._handle_java_numbers(item) return float(item) except: error = utils.get_error_message() try: return float(self._convert_to_integer(item)) except RuntimeError: raise RuntimeError("'%s' cannot be converted to a floating " "point number: %s" % (item, error)) def convert_to_string(self, item): self._log_types(item) return self._convert_to_string(item) def _convert_to_string(self, item): return 
utils.unic(item) def convert_to_boolean(self, item): self._log_types(item) if isinstance(item, basestring): if utils.eq(item, 'True'): return True if utils.eq(item, 'False'): return False return bool(item) def create_list(self, *items): return list(items) class _Verify: def fail(self, msg=None, *tags): set_tags = [tag for tag in tags if not tag.startswith('-')] remove_tags = [tag[1:] for tag in tags if tag.startswith('-')] if remove_tags: self.remove_tags(*remove_tags) if set_tags: self.set_tags(*set_tags) raise AssertionError(msg) if msg else AssertionError() def fatal_error(self, msg=None): error = AssertionError(msg) if msg else AssertionError() error.ROBOT_EXIT_ON_FAILURE = True raise error def exit_for_loop(self): error = AssertionError('Exit for loop without enclosing for loop.') error.ROBOT_EXIT_FOR_LOOP = True raise error def should_not_be_true(self, condition, msg=None): if not msg: msg = "'%s' should not be true" % condition asserts.fail_if(self._is_true(condition), msg) def should_be_true(self, condition, msg=None): if not msg: msg = "'%s' should be true" % condition asserts.fail_unless(self._is_true(condition), msg) def should_be_equal(self, first, second, msg=None, values=True): self._log_types(first, second) self._should_be_equal(first, second, msg, values) def _should_be_equal(self, first, second, msg, values): asserts.fail_unless_equal(first, second, msg, self._include_values(values)) def _log_types(self, *args): msg = ["Argument types are:"] + [self._get_type(a) for a in args] self.log('\n'.join(msg)) def _get_type(self, arg): if isinstance(arg, unicode): return "<type 'unicode'>" return str(type(arg)) def _include_values(self, values): if isinstance(values, basestring): return values.lower() not in ['no values', 'false'] return bool(values) def should_not_be_equal(self, first, second, msg=None, values=True): self._log_types(first, second) self._should_not_be_equal(first, second, msg, values) def _should_not_be_equal(self, first, second, msg, values): asserts.fail_if_equal(first, second, msg, self._include_values(values)) def should_not_be_equal_as_integers(self, first, second, msg=None, values=True, base=None): self._log_types(first, second) self._should_not_be_equal(self._convert_to_integer(first, base), self._convert_to_integer(second, base), msg, values) def should_be_equal_as_integers(self, first, second, msg=None, values=True, base=None): self._log_types(first, second) self._should_be_equal(self._convert_to_integer(first, base), self._convert_to_integer(second, base), msg, values) def should_not_be_equal_as_numbers(self, first, second, msg=None, values=True, precision=6): self._log_types(first, second) first = self._convert_to_number(first, precision) second = self._convert_to_number(second, precision) self._should_not_be_equal(first, second, msg, values) def should_be_equal_as_numbers(self, first, second, msg=None, values=True, precision=6): self._log_types(first, second) first = self._convert_to_number(first, precision) second = self._convert_to_number(second, precision) self._should_be_equal(first, second, msg, values) def should_not_be_equal_as_strings(self, first, second, msg=None, values=True): self._log_types(first, second) first, second = [self._convert_to_string(i) for i in first, second] self._should_not_be_equal(first, second, msg, values) def should_be_equal_as_strings(self, first, second, msg=None, values=True): self._log_types(first, second) first, second = [self._convert_to_string(i) for i in first, second] self._should_be_equal(first, second, msg, 
values) def should_not_start_with(self, str1, str2, msg=None, values=True): msg = self._get_string_msg(str1, str2, msg, values, 'starts with') asserts.fail_if(str1.startswith(str2), msg) def should_start_with(self, str1, str2, msg=None, values=True): msg = self._get_string_msg(str1, str2, msg, values, 'does not start with') asserts.fail_unless(str1.startswith(str2), msg) def should_not_end_with(self, str1, str2, msg=None, values=True): msg = self._get_string_msg(str1, str2, msg, values, 'ends with') asserts.fail_if(str1.endswith(str2), msg) def should_end_with(self, str1, str2, msg=None, values=True): msg = self._get_string_msg(str1, str2, msg, values, 'does not end with') asserts.fail_unless(str1.endswith(str2), msg) def should_not_contain(self, item1, item2, msg=None, values=True): msg = self._get_string_msg(item1, item2, msg, values, 'contains') asserts.fail_if(item2 in item1, msg) def should_contain(self, item1, item2, msg=None, values=True): msg = self._get_string_msg(item1, item2, msg, values, 'does not contain') asserts.fail_unless(item2 in item1, msg) def should_contain_x_times(self, item1, item2, count, msg=None): if not msg: msg = "'%s' does not contain '%s' %s times" % (utils.unic(item1), utils.unic(item2), count) self.should_be_equal_as_integers(self.get_count(item1, item2), count, msg, values=False) def get_count(self, item1, item2): if not hasattr(item1, 'count'): try: item1 = list(item1) except: raise RuntimeError("Converting '%s' to list failed: %s" % (item1, utils.get_error_message())) count = item1.count(item2) self.log('Item found from the first item %d time%s' % (count, utils.plural_or_not(count))) return count def should_not_match(self, string, pattern, msg=None, values=True): msg = self._get_string_msg(string, pattern, msg, values, 'matches') asserts.fail_if(self._matches(string, pattern), msg) def should_match(self, string, pattern, msg=None, values=True): msg = self._get_string_msg(string, pattern, msg, values, 'does not match') asserts.fail_unless(self._matches(string, pattern), msg) def should_match_regexp(self, string, pattern, msg=None, values=True): msg = self._get_string_msg(string, pattern, msg, values, 'does not match') res = re.search(pattern, string) asserts.fail_if_none(res, msg, False) match = res.group(0) groups = res.groups() if groups: return [match] + list(groups) return match def should_not_match_regexp(self, string, pattern, msg=None, values=True): msg = self._get_string_msg(string, pattern, msg, values, 'matches') asserts.fail_unless_none(re.search(pattern, string), msg, False) def get_length(self, item): length = self._get_length(item) self.log('Length is %d' % length) return length def _get_length(self, item): try: return len(item) except utils.RERAISED_EXCEPTIONS: raise except: try: return item.length() except utils.RERAISED_EXCEPTIONS: raise except: try: return item.size() except utils.RERAISED_EXCEPTIONS: raise except: try: return item.length except utils.RERAISED_EXCEPTIONS: raise except: raise RuntimeError("Could not get length of '%s'" % item) def length_should_be(self, item, length, msg=None): length = self._convert_to_integer(length) actual = self.get_length(item) if actual != length: raise AssertionError(msg or "Length of '%s' should be %d but is %d" % (item, length, actual)) def should_be_empty(self, item, msg=None): if self.get_length(item) > 0: raise AssertionError(msg or "'%s' should be empty" % item) def should_not_be_empty(self, item, msg=None): if self.get_length(item) == 0: raise AssertionError(msg or "'%s' should not be empty" 
% item) def _get_string_msg(self, str1, str2, msg, values, delim): default = "'%s' %s '%s'" % (utils.unic(str1), delim, utils.unic(str2)) if not msg: msg = default elif values is True: msg = '%s: %s' % (msg, default) return msg class _Variables: def get_variables(self): return utils.NormalizedDict(self._variables.current, ignore='_') def get_variable_value(self, name, default=None): try: return self._variables[self._get_var_name(name)] except DataError: return self._variables.replace_scalar(default) def log_variables(self, level='INFO'): variables = self.get_variables() for name in sorted(variables.keys(), key=lambda s: s.lower()): msg = utils.format_assign_message(name, variables[name], cut_long=False) self.log(msg, level) def variable_should_exist(self, name, msg=None): name = self._get_var_name(name) msg = self._variables.replace_string(msg) if msg else "Variable %s does not exist" % name asserts.fail_unless(name in self._variables, msg) def variable_should_not_exist(self, name, msg=None): name = self._get_var_name(name) msg = self._variables.replace_string(msg) if msg else "Variable %s exists" % name asserts.fail_if(name in self._variables, msg) def replace_variables(self, text): return self._variables.replace_scalar(text)
Apache License 2.0
fdev/bc125csv
bc125csv/scanner.py
DeviceLookup.get_device
python
def get_device(self): for device in self.context.list_devices(): if self.is_scanner(device) and self.is_tty(device): return device for device in self.context.list_devices(): if self.is_scanner(device): return device
Find a compatible scanner and return its usb device. If a tty device is found, it is returned; otherwise the plain usb device is returned.
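A hedged usage sketch of the same lookup: enumerate udev devices and pick a supported Uniden scanner, preferring a tty node as get_device does. It assumes pyudev is installed and a scanner is attached.

# Sketch only; mirrors DeviceLookup.is_scanner / is_tty from the context.
import pyudev

SUPPORTED_MODELS = ("BC125AT", "UBC125XLT", "UBC126AT")

context = pyudev.Context()
scanner = None
for device in context.list_devices():
    if device.get("ID_VENDOR_ID") == "1965" and device.get("ID_MODEL") in SUPPORTED_MODELS:
        scanner = device
        if device.get("SUBSYSTEM") == "tty":
            break  # a tty node is preferred over the bare usb device
print(scanner.device_node if scanner else "no scanner found")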
https://github.com/fdev/bc125csv/blob/a521d636a563fdf7ec450c6a44db775cce11b3b6/bc125csv/scanner.py#L281-L295
from __future__ import print_function from __future__ import division import re import sys try: import pyudev except ImportError: sys.exit("Failed to import pyudev (https://pyudev.readthedocs.org/):," " install using:\n pip install pyudev") try: import serial except ImportError: sys.exit("Failed to import pyserial (http://pyserial.sourceforge.net/)," " install using:\n pip install pyserial") CTCSS_TONES = [ "67.0","69.3","71.9","74.4","77.0","79.7","82.5","85.4","88.5","91.5", "94.8","97.4","100.0","103.5","107.2","110.9","114.8","118.8","123.0", "127.3","131.8","136.5","141.3","146.2","151.4","156.7","159.8","162.2", "165.5","167.9","171.3","173.8","177.3","179.9","183.5","186.2","189.9", "192.8","196.6","199.5","203.5","206.5","210.7","218.1","225.7","229.1", "233.6","241.8","250.3","254.1", ] DCS_CODES = [ "023","025","026","031","032","036","043","047","051","053", "054","065","071","072","073","074","114","115","116","122", "125","131","132","134","143","145","152","155","156","162", "165","172","174","205","212","223","225","226","243","244", "245","246","251","252","255","261","263","265","266","271", "274","306","311","315","325","331","332","343","346","351", "356","364","365","371","411","412","413","423","431","432", "445","446","452","454","455","462","464","465","466","503", "506","516","523","526","532","546","565","606","612","624", "627","631","632","654","662","664","703","712","723","731", "732","734","743","754", ] SUPPORTED_MODELS = ("BC125AT", "UBC125XLT", "UBC126AT") class Channel(object): def __init__(self, index, name, frequency, modulation="AUTO", tqcode=0, delay=2, lockout=False, priority=False): self.index = index self.name = name self.frequency = frequency self.modulation = modulation self.tqcode = tqcode self.delay = delay self.lockout = lockout self.priority = priority @property def tq(self): if self.tqcode == 0: return "none" if self.tqcode == 127: return "search" if self.tqcode == 240: return "no tone" if 64 <= self.tqcode <= 113: return CTCSS_TONES[self.tqcode - 64] + " Hz" if 128 <= self.tqcode <= 231: return "DCS " + DCS_CODES[self.tqcode - 128] @property def freqcode(self): return self.frequency.replace(".", "").zfill(8) def __repr__(self): return "CH%03d: %s %s" % (self.index, self.frequency, self.modulation) class ScannerException(Exception): pass class Scanner(serial.Serial, object): RE_CIN = re.compile(r""" # CIN,[INDEX],[NAME],[FRQ],[MOD],[CTCSS/DCS],[DLY],[LOUT],[PRI] ^ # No characters before CIN, (?P<index>\d{1,3}), (?P<name>[^,]{0,16}), (?P<freq>\d{5,8}), # 4 decimals, so at least 5 digits (?P<modulation>AUTO|AM|FM|NFM), (?P<tq>\d{1,3}), (?P<delay>-10|-5|0|1|2|3|4|5), (?P<lockout>0|1), (?P<priority>0|1) # no comma! 
$ # No characters after """, flags=re.VERBOSE) def __init__(self, port, baudrate=9600): super(Scanner, self).__init__(port=port, baudrate=baudrate) def writeread(self, command): self.write((command + "\r").encode()) self.flush() return self.readlinecr() def send(self, command): result = self.writeread(command) if not re.match(r"(^ERR|,NG$)", result): return result def readlinecr(self): line = "" while True: c = self.read(1).decode() if c == "\r": return line line += c def enter_programming(self): result = self.send("PRG") if not result or result != "PRG,OK": raise ScannerException("Failed to enter programming mode.") def exit_programming(self): result = self.send("EPG") if not result or result != "EPG,OK": raise ScannerException("Failed to leave programming mode.") def get_model(self): result = self.send("MDL") if not result or not result.startswith("MDL,"): raise ScannerException("Could not get model name.") return result[4:] def get_channel(self, index): result = self.send("CIN,%d" % index) if not result: raise ScannerException("Could not read channel %d." % index) match = self.RE_CIN.match(result) if not match: raise ScannerException("Unexpected data for channel %d." % index) data = match.groupdict() if data["freq"] == "00000000": return frequency = "%s.%s" % (data["freq"][:-4].lstrip("0"), data["freq"][-4:]) return Channel(**{ "index": int(data["index"]), "name": data["name"].strip(), "frequency": frequency, "modulation": data["modulation"], "tqcode": int(data["tq"]), "delay": int(data["delay"]), "lockout": data["lockout"] == "1", "priority": data["priority"] == "1", }) def set_channel(self, channel): command = ",".join(map(str, [ "CIN", channel.index, channel.name, channel.freqcode, channel.modulation, channel.tqcode, channel.delay, int(channel.lockout), int(channel.priority), ])) result = self.send(command) if not result or result != "CIN,OK": raise ScannerException("Could not write to channel %d." % channel.index) def delete_channel(self, index): channel = self.get_channel(index) if channel: result = self.send("DCH,%d" % index) if not result or result != "DCH,OK": raise ScannerException("Could not delete channel %d." % index) class VirtualScanner(Scanner): def __init__(self, *args, **kwargs): pass def writeread(self, command): if command == "MDL": return "MDL,VIRTUAL" if command == "PRG": return "PRG,OK" if command == "EPG": return "EPG,OK" if re.match(r"^CIN,([1-9]|1[0-9]|5[1-9])$", command): index = int(command[4:]) lockout = index == 55 priority = index == 15 return "CIN,{0},Channel {0},1{0:02d}0000,FM,0,2,{1:d},{2:d}" .format(index, lockout, priority) elif re.match(r"^CIN,[0-9]+$", command): index = int(command[4:]) tq = (0, 127, 240, 145)[index % 4] return "CIN,{0},,00000000,FM,{1},0,0,0".format(index, tq) if command.startswith("CIN,"): return "CIN,OK" if command.startswith("DCH,"): return "DCH,OK" return "ERR" class DeviceLookup(object): def __init__(self): self.context = pyudev.Context() def is_scanner(self, device): return device.get("ID_VENDOR_ID") == "1965" and device.get("ID_MODEL") in SUPPORTED_MODELS def is_tty(self, device): return device.get("SUBSYSTEM") == "tty"
MIT License
hakiergrzonzo/tinypub
tinyPub/htmlParser/renderer.py
element.render
python
def render(self, force_inline = False): if self.type == 'in-line' or force_inline: text_str = str() for child in self.children: post_render = child.render(force_inline = True) if isinstance(post_render, text): text_str += post_render.render() else: text_str += post_render return text(text_str, self.style) elif self.type in ['block', 'list-item']: result = str() text_children = list() last_margin_bottom_len = 0 for child in self.children: if isinstance(child, text): text_children.append(child) elif child.type == 'in-line': text_children.append(child.render()) else: post_render = child.render() result += self.make_paragraph(text_children) text_children = list() if isinstance(post_render, text): post_render = child.make_paragraph([post_render]) post_render = ' '*child.margins.left + post_render.replace('\n', ' '*child.margins.right + '\n' + ' '*child.margins.left, post_render.count('\n') - 1) result += '\n'*max(last_margin_bottom_len, child.margins.top) + post_render last_margin_bottom_len = child.margins.bottom result += self.make_paragraph(text_children) result = ' '*self.margins.left + result.replace('\n', ' '*self.margins.right + '\n' + ' '*self.margins.left, result.count('\n') - 1) return result else: return str()
Returns the rendered block as str, or the rendered text/inline element as Text. Parameters ---------- force_inline: bool internal flag that renders block children as if they were inline (used when a block element is nested inside inline content)
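A minimal, self-contained toy (hypothetical names, not the tinyPub API) of the dispatch render implements: inline nodes concatenate their children into one string, while block nodes stack each rendered child on its own line.

# Toy model of the render() recursion over inline vs. block nodes.
def render_node(node):
    if isinstance(node, str):
        return node
    kind, children = node
    parts = [render_node(child) for child in children]
    if kind == 'in-line':
        return ''.join(parts)      # inline: one continuous string
    return '\n'.join(parts)        # block: each child on its own line

doc = ('block', [('in-line', ['Hello, ', 'world']), ('block', ['second line'])])
print(render_node(doc))  # -> "Hello, world" then "second line"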
https://github.com/hakiergrzonzo/tinypub/blob/077f39b666c2ee02755eccf7dfa684df301d35b7/tinyPub/htmlParser/renderer.py#L138-L194
from .styles import Margins, unitConverter from bs4 import Tag from .textFormater import bold, italic, justify def debug_printer(item): if isinstance(item, element): res = '<{0}>:'.format(item.type) for x in item.children: res += '\n\t' + debug_printer(x).replace('\n', '\n\t') return res else: return 'text: ' + item.string.replace(' ', '_') class text(): def __init__(self, string, style): if style.get('font-weight') == 'bold': string = bold(string) if style.get('font-style') in ['oblique', 'italic']: string = italic(string) try: string = string.replace('\t', ' ') except: pass self.string = string def render(self, force_inline = False): return self.string class element(): def __init__(self, tag, stylesheet, rootWidth, minimal_content_width = 5, tags_to_ignore = []): self.style = stylesheet.get(tag) self.margins = self.style.get('margins', Margins()) self.type = self.style.get('display', 'in-line') self.children = list() self.width = rootWidth self.minimal_content_width = minimal_content_width for child in tag.children: if isinstance(child, Tag): if tag.name not in tags_to_ignore: self.children.append(element(child, stylesheet, self.content_width(), tags_to_ignore = tags_to_ignore)) else: if len(child.strip()) > 0: self.children.append(text(child, self.style)) def content_width(self): x = self.width - self.margins.left - self.margins.right if x > self.minimal_content_width: return x else: return self.minimal_content_width def make_paragraph(self, text_children): if len(text_children) > 0: text = str() for child in text_children: text += child.render() text = [x.strip() for x in text.split('\n')] text_str = str() for x in text: text_str += x + ' ' text_str = text_str[:len(text_str) -1] i = 0 while True: try: if text_str[i] == ' ' and text_str[i+1] == ' ': text_str = text_str[:i] + text_str[i:] i += 1 except IndexError: break lines = list() current_line = ' ' * unitConverter(self.style.get('text-indent')) for word in text_str.split(' '): if len(current_line) + len(word) + 1 < self.content_width(): current_line += ' ' + word else: lines.append(current_line) current_line = word lines.append(current_line) alingment = self.style.get('text-align', 'justify') if alingment == 'center': lines = [x.center(self.content_width()) for x in lines] elif alingment == 'right': lines = [x.rjust(self.content_width()) for x in lines] elif alingment == 'justify': lines = justify(lines, self.content_width()) elif alingment == 'left': lines = [x.ljust(self.content_width()) for x in lines] res = str() for line in lines: res += line + '\n' return res else: return str()
MIT License
aldebaran/qibuild
python/qidoc/actions/clean.py
do
python
def do(args): doc_builder = qidoc.parsers.get_doc_builder(args) doc_projects = doc_builder.get_dep_projects() to_clean = list() for doc_project in doc_projects: try: build_dir = doc_project.build_dir except AttributeError: continue if not os.path.exists(build_dir): continue if qisys.sh.is_empty(build_dir): qisys.sh.rm(build_dir) continue to_clean.append(build_dir) if not to_clean: ui.info(ui.green, "Nothing to clean") return if not args.force: ui.info(ui.green, "Build directories that will be removed", ui.white, "(use -f to apply)") for i, build_dir in enumerate(to_clean): if args.force: ui.info_count(i, len(to_clean), ui.green, "Cleaning", ui.reset, build_dir) qisys.sh.rm(build_dir) else: ui.info_count(i, len(to_clean), build_dir)
Main Entry Point
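The do() action above follows a dry-run-unless-forced pattern: list the build directories that would be removed, and only delete when -f/--force is passed. A generic sketch of that pattern (names hypothetical, not the qibuild API):

# Dry-run/force cleanup sketch.
import os
import shutil

def clean(build_dirs, force=False):
    to_clean = [d for d in build_dirs if os.path.exists(d)]
    if not to_clean:
        print("Nothing to clean")
        return
    if not force:
        print("Build directories that will be removed (use -f to apply)")
    for i, build_dir in enumerate(to_clean):
        if force:
            print("* (%d/%d) Cleaning %s" % (i + 1, len(to_clean), build_dir))
            shutil.rmtree(build_dir)
        else:
            print("* (%d/%d) %s" % (i + 1, len(to_clean), build_dir))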
https://github.com/aldebaran/qibuild/blob/efea6fa3744664348717fe5e8df708a3cf392072/python/qidoc/actions/clean.py#L28-L58
from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function import os import qidoc.parsers import qidoc.builder import qisys.sh import qisys.parsers from qisys import ui def configure_parser(parser): qisys.parsers.worktree_parser(parser) qisys.parsers.project_parser(parser) group = parser.add_argument_group("qidoc clean options") group.add_argument("-f", "--force", help="force the clean", action="store_true")
BSD 3-Clause New or Revised License
arthurbernard/fynance
fynance/neural_networks/roll_aggregated_multi_neural_networks.py
RollAggrMultiNeuralNet.set_aggregate
python
def set_aggregate(self, *args): self._aggregate = lambda x: x for arg in args: self._aggregate = (lambda prev, fun: lambda x: fun(prev(x)))(self._aggregate, arg) return self
Set your own aggregation method. Parameters ---------- args : tuple of function Any function such that the final value is a numpy array. Returns ------- ramnn : RollAggrMultiNeuralNet
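Composing closures in a loop requires binding the current aggregate and the loop variable eagerly, as done above: a lambda that refers back to self._aggregate would call itself and recurse forever, and a bare reference to arg would see only the last loop value. A standalone sketch of the same composition using functools.reduce:

# Safe left-to-right function composition for an aggregation pipeline.
from functools import reduce
import numpy as np

def compose(*funcs):
    """Return f such that f(x) == funcs[-1](...funcs[0](x)...)."""
    return lambda x: reduce(lambda acc, f: f(acc), funcs, x)

agg = compose(lambda m: np.mean(m, axis=1), np.sign)
print(agg(np.array([[0.2, -0.4], [0.5, 0.1]])))  # -> [-1.  1.]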
https://github.com/arthurbernard/fynance/blob/efd9a2e6f8eddcff017d828972236312f6f24084/fynance/neural_networks/roll_aggregated_multi_neural_networks.py#L220-L236
import numpy as np from matplotlib import pyplot as plt from fynance.backtest.dynamic_plot_backtest import DynaPlotBackTest from fynance.neural_networks.roll_multi_neural_networks import RollMultiNeuralNet plt.style.use('seaborn') class RollAggrMultiNeuralNet(RollMultiNeuralNet): def __init__(self, *args, agg_fun='mean', **kwargs): RollMultiNeuralNet.__init__(self, *args, **kwargs) self.agg_fun = agg_fun def __call__(self, y, X, NN, start=0, end=1e8, x_axis=None): RollMultiNeuralNet.__call__( self, y, X, NN, start=start, end=end, x_axis=x_axis ) self.agg_y = np.zeros([self.T, 1]) return self def run(self, y, X, NN, plot_loss=True, plot_perf=True, x_axis=None): if isinstance(NN, list): self.n_NN = len(NN) else: self.n_NN = 1 self.perf_train = self.V0 * np.ones([y.size, self.n_NN]) self.perf_estim = self.V0 * np.ones([y.size, self.n_NN]) self.perf_agg = self.V0 * np.ones([y.size, 1]) f, ax_loss, ax_perf = self._set_figure(plot_loss, plot_perf) for pred_train, pred_estim in self(y, X, NN, x_axis=x_axis): t, s, t_s = self.t, self.s, min(self.t + self.s, self.T) returns = np.sign(pred_train) * y[t - s: t] cum_ret = np.exp(np.cumsum(returns, axis=0)) self.perf_train[t - s: t] = self.perf_train[t - s - 1] * cum_ret returns = np.sign(pred_estim) * y[t: t_s] cum_ret = np.exp(np.cumsum(returns, axis=0)) self.perf_estim[t: t_s] = self.perf_estim[t - 1] * cum_ret self.aggregate(pred_estim, y[t: t_s], t=t, t_s=t_s) returns = np.sign(self.agg_y[t: t_s]) * y[t: t_s] cum_ret = np.exp(np.cumsum(returns, axis=0)) self.perf_agg[t: t_s] = self.perf_agg[t - 1] * cum_ret self._dynamic_plot(f, ax_loss=ax_loss, ax_perf=ax_perf) return self def aggregate(self, mat_pred, y, t=0, t_s=-1): self.agg_y[t: t_s, 0] = self._aggregate(mat_pred, y) return self def _aggregate(self, mat_pred, y): if self.agg_fun == 'mean': return np.mean(mat_pred, axis=1) elif self.agg_fun == 'sum': return np.sum(mat_pred, axis=1) elif self.agg_fun == 'best': i = np.argmax(self.perf_estim[self.t]) return mat_pred[:, i] elif self.agg_fun == 'bests': perfs = self.perf_estim[self.t] perf_list = [] arg_list = [] for i in range(self.n_NN): if len(perf_list) < 3: perf_list += [perfs[i]] arg_list += [i] elif perfs[i] > min(perf_list): j = np.argmin(perf_list) perf_list[j] = perfs[i] arg_list[j] = i else: pass y = mat_pred[:, arg_list[0]] y += mat_pred[:, arg_list[1]] y += mat_pred[:, arg_list[2]] y /= 3 return y
MIT License
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1alpha1_audit_sink_list.py
V1alpha1AuditSinkList.metadata
python
def metadata(self): return self._metadata
Gets the metadata of this V1alpha1AuditSinkList. # noqa: E501 :return: The metadata of this V1alpha1AuditSinkList. # noqa: E501 :rtype: V1ListMeta
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1alpha1_audit_sink_list.py#L141-L148
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1alpha1AuditSinkList(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_version': 'str', 'items': 'list[V1alpha1AuditSink]', 'kind': 'str', 'metadata': 'V1ListMeta' } attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._items = None self._kind = None self._metadata = None self.discriminator = None if api_version is not None: self.api_version = api_version self.items = items if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata @property def api_version(self): return self._api_version @api_version.setter def api_version(self, api_version): self._api_version = api_version @property def items(self): return self._items @items.setter def items(self, items): if self.local_vars_configuration.client_side_validation and items is None: raise ValueError("Invalid value for `items`, must not be `None`") self._items = items @property def kind(self): return self._kind @kind.setter def kind(self, kind): self._kind = kind @property
Apache License 2.0
plaidweb/publ
publ/user.py
User.auth_type
python
def auth_type(self): return self._auth_type
The type of user authentication (session, token, etc.)
https://github.com/plaidweb/publ/blob/67efc5e32bf25dbac72a83d1167de038b79db5a7/publ/user.py#L132-L134
import ast import collections import configparser import datetime import logging import typing import urllib.parse import arrow import authl.disposition import flask import werkzeug.exceptions as http_error from pony import orm from werkzeug.utils import cached_property from . import caching, model, tokens, utils from .config import config LOGGER = logging.getLogger(__name__) @caching.cache.memoize(timeout=5) def get_groups(identity: str, include_self: bool = True) -> typing.Set[str]: @caching.cache.memoize(timeout=10) def load_groups() -> typing.DefaultDict[str, typing.Set[str]]: cfg = configparser.ConfigParser(delimiters=( '\000'), allow_no_value=True, interpolation=None) cfg.optionxform = lambda option: option cfg.read(config.user_list) groups: typing.DefaultDict[str, typing.Set[str]] = collections.defaultdict(set) for group, members in cfg.items(): for member in members.keys(): groups[member].add(group) return groups groups = load_groups() result: typing.Set[str] = set() pending: typing.Deque[str] = collections.deque() pending.append(identity) while pending: check = pending.popleft() if check not in result: if include_self or check != identity: result.add(check) pending += groups.get(check, []) return result class User(caching.Memoizable): def __init__(self, identity: str, auth_type: typing.Optional[str] = None, scope: typing.Optional[str] = None): self._identity = identity self._auth_type = auth_type self._scope = scope def _key(self): return self._identity, self._auth_type, self._scope def __lt__(self, other): return self.identity < other.identity @cached_property def identity(self): return self._identity @cached_property def humanize(self) -> str: url = self.profile.get('profile_url', self._identity) parsed = urllib.parse.urlparse(url) return ''.join(p for p in ( f'{parsed.scheme}:' if parsed.scheme not in ('http', 'https') else '', parsed.netloc, parsed.path, )) @cached_property def name(self) -> str: if 'name' in self.profile: return self.profile['name'] return self.humanize @property def profile(self) -> dict: return self._info[0] @cached_property def groups(self) -> typing.Set[str]: return get_groups(self._identity, False) @cached_property def auth_groups(self) -> typing.Set[str]: return get_groups(self._identity, True) @cached_property def is_admin(self) -> bool: return bool(config.admin_group and config.admin_group in self.groups) @cached_property
MIT License
quantumiracle/reinforcement_learning_for_traffic_light_control
4.multithread_for_grid/RL_brain.py
DeepQNetwork.full_batch_norm
python
def full_batch_norm(self, x, n_out, phase_train=tf.constant(False, dtype=tf.bool), scope='bn'): with tf.variable_scope(scope): beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True) gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True) batch_mean, batch_var = tf.nn.moments(x, [0], name='moments') ema = tf.train.ExponentialMovingAverage(decay=0.5) def mean_var_with_update(): ema_apply_op = ema.apply([batch_mean, batch_var]) with tf.control_dependencies([ema_apply_op]): return tf.identity(batch_mean), tf.identity(batch_var) mean, var = tf.cond(phase_train, mean_var_with_update, lambda: (ema.average(batch_mean), ema.average(batch_var))) normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3) return normed
Batch normalization for fully-connected layers; statistics are computed over the batch axis only (moments over axis 0 in the body). Args: x: Tensor, 2D [batch, features] input n_out: integer, number of features phase_train: boolean tf.Variable, true indicates training phase scope: string, variable scope Return: normed: batch-normalized maps
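The normalization itself is simple arithmetic; a numpy sketch of the training-time computation (per-feature statistics over the batch axis, then scale and shift, matching the semantics of tf.nn.batch_normalization):

# Training-time batch norm over the batch axis, in plain numpy.
import numpy as np

def batch_norm_train(x, gamma, beta, eps=1e-3):
    mean = x.mean(axis=0)                    # per-feature batch mean
    var = x.var(axis=0)                      # per-feature batch variance
    x_hat = (x - mean) / np.sqrt(var + eps)  # standardize
    return gamma * x_hat + beta              # learned scale and shift

x = np.random.randn(20, 4) * 3.0 + 5.0
y = batch_norm_train(x, gamma=np.ones(4), beta=np.zeros(4))
print(y.mean(axis=0).round(3), y.std(axis=0).round(3))  # ~0 and ~1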
https://github.com/quantumiracle/reinforcement_learning_for_traffic_light_control/blob/464c17ba25ebcb49f78d6cdcc96d7fe3764d7508/4.multithread_for_grid/RL_brain.py#L79-L106
import numpy as np import pandas as pd import tensorflow as tf import scipy import threading import urllib from multiprocessing.dummy import Pool as ThreadPool from multiprocessing import Process import multiprocessing as mp import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt class DeepQNetwork: def __init__( self, n_actions, n_features, reward_decay=0.99, e_greedy=0.9, replace_target_iter=300, memory_size=500, batch_size=20, e_greedy_increment=None, output_graph=False, ): self.n_actions = n_actions self.n_features = n_features self.gamma = reward_decay self.epsilon_max = e_greedy self.replace_target_iter = replace_target_iter self.memory_size = memory_size if not hasattr(self, 'memory_counter'): self.memory_counter = 0 self.batch_size = batch_size self.epsilon_increment = e_greedy_increment self.epsilon = 0 if e_greedy_increment is not None else self.epsilon_max self.save_file = './weights/model.ckpt' self.lo=threading.Lock() self.learn_step_counter = 0 self.memory = np.zeros((self.memory_size, n_features * 2 + 2)) self._build_net() t_params = tf.get_collection('target_net_params') e_params = tf.get_collection('eval_net_params') self.replace_target_op = [tf.assign(t, e) for t, e in zip(t_params, e_params)] config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True) config.gpu_options.per_process_gpu_memory_fraction = 0.4 self.sess = tf.Session(config=config) if output_graph: tf.summary.FileWriter("logs/", self.sess.graph) self.sess.run(tf.global_variables_initializer()) self.cost_his = [] self.step_set=[]
Apache License 2.0
tensorflow/transform
examples/census_example.py
get_feature_columns
python
def get_feature_columns(tf_transform_output): real_valued_columns = [ tf.feature_column.numeric_column(key, shape=()) for key in common.NUMERIC_FEATURE_KEYS ] one_hot_columns = [ tf.feature_column.indicator_column( tf.feature_column.categorical_column_with_identity( key=key, num_buckets=(common.NUM_OOV_BUCKETS + tf_transform_output.vocabulary_size_by_name( vocab_filename=key)))) for key in common.CATEGORICAL_FEATURE_KEYS ] return real_valued_columns + one_hot_columns
Returns the FeatureColumns for the model. Args: tf_transform_output: A `TFTransformOutput` object. Returns: A list of FeatureColumns.
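A hedged usage sketch: given a TFTransformOutput, the resulting columns feed directly into a canned estimator. The working directory below is a placeholder, not a path from the example.

# Hypothetical wiring of the transformed feature columns into an estimator.
import tensorflow as tf
import tensorflow_transform as tft

working_dir = '/tmp/tft_output'  # placeholder: a real tf.Transform output dir
tf_transform_output = tft.TFTransformOutput(working_dir)
estimator = tf.estimator.LinearClassifier(
    feature_columns=get_feature_columns(tf_transform_output),
    config=tf.estimator.RunConfig(tf_random_seed=42))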
https://github.com/tensorflow/transform/blob/6349d7f6d847cb8979f31b9b315981d79ffba3e5/examples/census_example.py#L97-L123
import os import pprint import tempfile import tensorflow as tf import tensorflow_transform as tft import census_example_common as common def _make_training_input_fn(tf_transform_output, transformed_examples, batch_size): def input_fn(): dataset = tf.data.experimental.make_batched_features_dataset( file_pattern=transformed_examples, batch_size=batch_size, features=tf_transform_output.transformed_feature_spec(), reader=tf.data.TFRecordDataset, shuffle=True) transformed_features = tf.compat.v1.data.make_one_shot_iterator( dataset).get_next() transformed_labels = tf.where( tf.equal(transformed_features.pop(common.LABEL_KEY), 1)) return transformed_features, transformed_labels[:, 1] return input_fn def _make_serving_input_fn(tf_transform_output): raw_feature_spec = common.RAW_DATA_FEATURE_SPEC.copy() raw_feature_spec.pop(common.LABEL_KEY) def serving_input_fn(): raw_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn( raw_feature_spec, default_batch_size=None) serving_input_receiver = raw_input_fn() raw_features = serving_input_receiver.features transformed_features = tf_transform_output.transform_raw_features( raw_features) return tf.estimator.export.ServingInputReceiver( transformed_features, serving_input_receiver.receiver_tensors) return serving_input_fn
Apache License 2.0
simpeg/simpeg
SimPEG/maps.py
SphericalSystem.inverse
python
def inverse(self, model): return mat_utils.cartesian2spherical(model.reshape((-1, 3), order="F"))
Cartesian to spherical. :param numpy.ndarray model: physical property vector in Cartesian coordinates :return: the model converted to spherical coordinates (amplitude, theta, phi)
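A standalone numpy sketch of one (amplitude, theta, phi) convention for the conversion, with theta as elevation from the xy-plane, which is consistent with the cos(theta)cos(phi) terms in the sphericalDeriv code from the context; SimPEG's mat_utils is the authoritative implementation, so treat this as illustrative only.

# Cartesian -> (amplitude, elevation, azimuth), columns-of-(x, y, z) input.
import numpy as np

def cartesian2spherical_sketch(m_xyz):
    x, y, z = m_xyz[:, 0], m_xyz[:, 1], m_xyz[:, 2]
    a = np.sqrt(x**2 + y**2 + z**2)                                   # amplitude
    theta = np.arcsin(np.clip(z / np.maximum(a, 1e-32), -1.0, 1.0))   # elevation
    phi = np.arctan2(y, x)                                            # azimuth
    return np.column_stack([a, theta, phi])

m = np.array([[1.0, 1.0, np.sqrt(2.0)]])
print(cartesian2spherical_sketch(m).round(4))  # [[2. 0.7854 0.7854]]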
https://github.com/simpeg/simpeg/blob/a264ba6a32ba3c83d82601add37f51d8e1cc5e90/SimPEG/maps.py#L589-L597
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .utils.code_utils import deprecate_class from six import integer_types from six import string_types from collections import namedtuple import warnings import numpy as np from numpy.polynomial import polynomial import scipy.sparse as sp from scipy.sparse.linalg import LinearOperator from scipy.interpolate import UnivariateSpline from scipy.constants import mu_0 from scipy.sparse import csr_matrix as csr import properties from discretize.tests import checkDerivative from .utils import ( setKwargs, mkvc, rotationMatrixFromNormals, Zero, Identity, sdiag, mat_utils, speye, ) class IdentityMap(properties.HasProperties): def __init__(self, mesh=None, nP=None, **kwargs): setKwargs(self, **kwargs) if nP is not None: if isinstance(nP, string_types): assert nP == "*", "nP must be an integer or '*', not {}".format(nP) assert isinstance( nP, integer_types + (np.int64,) ), "Number of parameters must be an integer. Not `{}`.".format(type(nP)) nP = int(nP) elif mesh is not None: nP = mesh.nC else: nP = "*" self.mesh = mesh self._nP = nP @property def nP(self): if self._nP != "*": return int(self._nP) if self.mesh is None: return "*" return int(self.mesh.nC) @property def shape(self): if self.mesh is None: return (self.nP, self.nP) return (self.mesh.nC, self.nP) def _transform(self, m): return m def inverse(self, D): raise NotImplementedError("The transformInverse is not implemented.") def deriv(self, m, v=None): if v is not None: return v if isinstance(self.nP, integer_types): return sp.identity(self.nP) return Identity() def test(self, m=None, num=4, **kwargs): print("Testing {0!s}".format(str(self))) if m is None: m = abs(np.random.rand(self.nP)) if "plotIt" not in kwargs: kwargs["plotIt"] = False assert isinstance( self.nP, integer_types ), "nP must be an integer for {}".format(self.__class__.__name__) return checkDerivative( lambda m: [self * m, self.deriv(m)], m, num=num, **kwargs ) def testVec(self, m=None, **kwargs): print("Testing {0!s}".format(self)) if m is None: m = abs(np.random.rand(self.nP)) if "plotIt" not in kwargs: kwargs["plotIt"] = False return checkDerivative( lambda m: [self * m, lambda x: self.deriv(m, x)], m, num=4, **kwargs ) def _assertMatchesPair(self, pair): assert ( isinstance(self, pair) or isinstance(self, ComboMap) and isinstance(self.maps[0], pair) ), "Mapping object must be an instance of a {0!s} class.".format(pair.__name__) def __mul__(self, val): if isinstance(val, IdentityMap): if ( not (self.shape[1] == "*" or val.shape[0] == "*") and not self.shape[1] == val.shape[0] ): raise ValueError( "Dimension mismatch in {0!s} and {1!s}.".format(str(self), str(val)) ) return ComboMap([self, val]) elif isinstance(val, np.ndarray): if not self.shape[1] == "*" and not self.shape[1] == val.shape[0]: raise ValueError( "Dimension mismatch in {0!s} and np.ndarray{1!s}.".format( str(self), str(val.shape) ) ) return self._transform(val) elif isinstance(val, Zero): return Zero() raise Exception( "Unrecognized data type to multiply. Try a map or a numpy.ndarray!" 
"You used a {} of type {}".format(val, type(val)) ) def dot(self, val): return self.__mul__(val) def __matmul__(self, val): return self.__mul__(val) __numpy_ufunc__ = True def __add__(self, map2): return SumMap([self, map2]) def __str__(self): return "{0!s}({1!s},{2!s})".format( self.__class__.__name__, self.shape[0], self.shape[1] ) def __len__(self): return 1 class ComboMap(IdentityMap): def __init__(self, maps, **kwargs): IdentityMap.__init__(self, None, **kwargs) self.maps = [] for ii, m in enumerate(maps): assert isinstance(m, IdentityMap), "Unrecognized data type, " if ( ii > 0 and not (self.shape[1] == "*" or m.shape[0] == "*") and not self.shape[1] == m.shape[0] ): prev = self.maps[-1] raise ValueError( "Dimension mismatch in map[{0!s}] ({1!s}, {2!s}) " "and map[{3!s}] ({4!s}, {5!s}).".format( prev.__class__.__name__, prev.shape[0], prev.shape[1], m.__class__.__name__, m.shape[0], m.shape[1], ) ) if np.any([isinstance(m, SumMap), isinstance(m, IdentityMap)]): self.maps += [m] elif isinstance(m, ComboMap): self.maps += m.maps else: raise ValueError("Map[{0!s}] not supported", m.__class__.__name__) @property def shape(self): return (self.maps[0].shape[0], self.maps[-1].shape[1]) @property def nP(self): return self.maps[-1].nP def _transform(self, m): for map_i in reversed(self.maps): m = map_i * m return m def deriv(self, m, v=None): if v is not None: deriv = v else: deriv = 1 mi = m for map_i in reversed(self.maps): deriv = map_i.deriv(mi) * deriv mi = map_i * mi return deriv def __str__(self): return "ComboMap[{0!s}]({1!s},{2!s})".format( " * ".join([m.__str__() for m in self.maps]), self.shape[0], self.shape[1] ) def __len__(self): return len(self.maps) class Projection(IdentityMap): def __init__(self, nP, index, **kwargs): assert isinstance( index, (np.ndarray, slice, list) ), "index must be a np.ndarray or slice, not {}".format(type(index)) super(Projection, self).__init__(nP=nP, **kwargs) if isinstance(index, slice): index = list(range(*index.indices(self.nP))) if isinstance(index, np.ndarray): if index.dtype is np.dtype("bool"): index = np.where(index)[0] self.index = index self._shape = nI, nP = len(self.index), self.nP assert max(index) < nP, "maximum index must be less than {}".format(nP) self.P = sp.csr_matrix((np.ones(nI), (range(nI), self.index)), shape=(nI, nP)) def _transform(self, m): return m[self.index] @property def shape(self): return self._shape def deriv(self, m, v=None): if v is not None: return self.P * v return self.P class SumMap(ComboMap): def __init__(self, maps, **kwargs): IdentityMap.__init__(self, None, **kwargs) self.maps = [] for ii, m in enumerate(maps): if not isinstance(m, IdentityMap): raise TypeError( "Unrecognized data type {}, inherit from an " "IdentityMap!".format(type(m)) ) if ( ii > 0 and not (self.shape == "*" or m.shape == "*") and not self.shape == m.shape ): raise ValueError( "Dimension mismatch in map[{0!s}] ({1!s}, {2!s}) " "and map[{3!s}] ({4!s}, {5!s}).".format( self.maps[0].__class__.__name__, self.maps[0].shape[0], self.maps[0].shape[1], m.__class__.__name__, m.shape[0], m.shape[1], ) ) self.maps += [m] @property def shape(self): return (self.maps[0].shape[0], self.maps[0].shape[1]) @property def nP(self): return self.maps[-1].shape[1] def _transform(self, m): for ii, map_i in enumerate(self.maps): m0 = m.copy() m0 = map_i * m0 if ii == 0: mout = m0 else: mout += m0 return mout def deriv(self, m, v=None): for ii, map_i in enumerate(self.maps): m0 = m.copy() if v is not None: deriv = v else: deriv = sp.eye(self.nP) deriv = 
map_i.deriv(m0, v=deriv) if ii == 0: sumDeriv = deriv else: sumDeriv += deriv return sumDeriv class SurjectUnits(IdentityMap): indices = properties.List( "list of indices for each unit to be surjected into", properties.Array( "indices for the unit to be mapped to", dtype=bool, shape=("*",) ), required=True, ) def __init__(self, indices, **kwargs): super(SurjectUnits, self).__init__(**kwargs) self.indices = indices @property def P(self): if getattr(self, "_P", None) is None: row = [] col = [] val = [] for ii, ind in enumerate(self.indices): col += [ii] * ind.sum() row += np.where(ind)[0].tolist() val += [1] * ind.sum() self._P = sp.csr_matrix( (val, (row, col)), shape=(len(self.indices[0]), self.nP) ) return self._P def _transform(self, m): return self.P * m @property def nP(self): return len(self.indices) @property def shape(self): return (len(self.indices[0]), self.nP) def deriv(self, m, v=None): if v is not None: return self.P * v return self.P class SphericalSystem(IdentityMap): def __init__(self, mesh=None, nP=None, **kwargs): super().__init__(mesh, nP, **kwargs) self.model = None def sphericalDeriv(self, model): if getattr(self, "model", None) is None: self.model = model if getattr(self, "_sphericalDeriv", None) is None or not all( self.model == model ): self.model = model m_xyz = mat_utils.spherical2cartesian(model.reshape((-1, 3), order="F")) m_atp = mat_utils.cartesian2spherical( m_xyz.reshape((-1, 3), order="F") ).reshape((-1, 3), order="F") nC = m_atp[:, 0].shape[0] dm_dx = sp.hstack( [ sp.diags(np.cos(m_atp[:, 1]) * np.cos(m_atp[:, 2]), 0), sp.diags( -m_atp[:, 0] * np.sin(m_atp[:, 1]) * np.cos(m_atp[:, 2]), 0 ), sp.diags( -m_atp[:, 0] * np.cos(m_atp[:, 1]) * np.sin(m_atp[:, 2]), 0 ), ] ) dm_dy = sp.hstack( [ sp.diags(np.cos(m_atp[:, 1]) * np.sin(m_atp[:, 2]), 0), sp.diags( -m_atp[:, 0] * np.sin(m_atp[:, 1]) * np.sin(m_atp[:, 2]), 0 ), sp.diags( m_atp[:, 0] * np.cos(m_atp[:, 1]) * np.cos(m_atp[:, 2]), 0 ), ] ) dm_dz = sp.hstack( [ sp.diags(np.sin(m_atp[:, 1]), 0), sp.diags(m_atp[:, 0] * np.cos(m_atp[:, 1]), 0), csr((nC, nC)), ] ) self._sphericalDeriv = sp.vstack([dm_dx, dm_dy, dm_dz]) return self._sphericalDeriv def _transform(self, model): return mat_utils.spherical2cartesian(model.reshape((-1, 3), order="F"))
MIT License
pythonpredictions/cobra
cobra/evaluation/evaluator.py
ClassificationEvaluator.plot_lift_curve
python
def plot_lift_curve(self, path: str=None, dim: tuple=(12, 8)): if self.lift_curve is None: msg = ("This {} instance is not fitted yet. Call 'fit' with " "appropriate arguments before using this method.") raise NotFittedError(msg.format(self.__class__.__name__)) x_labels, lifts, _ = self.lift_curve with plt.style.context("seaborn-ticks"): fig, ax = plt.subplots(figsize=dim) plt.bar(x_labels[::-1], lifts, align="center", color="cornflowerblue") plt.ylabel("lift", fontsize=16) plt.xlabel("decile", fontsize=16) ax.set_xticks(x_labels) ax.set_xticklabels(x_labels) plt.axhline(y=1, color="darkorange", linestyle="--", xmin=0.05, xmax=0.95, linewidth=3, label="Baseline") ax.legend(loc="upper right") sns.despine(ax=ax, right=True, left=True) ax.grid(False) ax.set_title("Cumulative Lift curve", fontsize=20) if path is not None: plt.savefig(path, format="png", dpi=300, bbox_inches="tight") plt.show()
Plot lift per decile. Parameters ---------- path : str, optional Path to store the figure. dim : tuple, optional Tuple with width and length of the plot.
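Typical usage, sketched with synthetic scores: fit the evaluator on labels and predicted probabilities, then plot. The import path is assumed from the module layout in the context.

# Usage sketch for ClassificationEvaluator.plot_lift_curve.
import numpy as np
from cobra.evaluation import ClassificationEvaluator  # path assumed

y_true = np.random.binomial(1, 0.1, size=5000)
y_pred = np.clip(0.1 + 0.3 * y_true + np.random.normal(0, 0.1, 5000), 0, 1)

evaluator = ClassificationEvaluator(n_bins=10)
evaluator.fit(y_true, y_pred)          # computes the lift curve, among others
evaluator.plot_lift_curve(dim=(12, 8))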
https://github.com/pythonpredictions/cobra/blob/a44a6934885482558b5edf6352ab5c3d75d785fe/cobra/evaluation/evaluator.py#L284-L331
import numpy as np import pandas as pd import matplotlib.pyplot as plt import matplotlib.ticker as mticker import seaborn as sns from numpy import sqrt from scipy.stats import norm from sklearn.metrics import precision_score from sklearn.metrics import recall_score from sklearn.metrics import f1_score from sklearn.metrics import accuracy_score from sklearn.metrics import roc_curve from sklearn.metrics import confusion_matrix from sklearn.metrics import roc_auc_score from sklearn.metrics import matthews_corrcoef from sklearn.exceptions import NotFittedError from sklearn.metrics import mean_absolute_error from sklearn.metrics import mean_squared_error from sklearn.metrics import r2_score class ClassificationEvaluator(): def __init__(self, probability_cutoff: float=None, lift_at: float=0.05, n_bins: int = 10): self.y_true = None self.y_pred = None self.lift_at = lift_at self.probability_cutoff = probability_cutoff self.n_bins = n_bins self.scalar_metrics = None self.roc_curve = None self.confusion_matrix = None self.lift_curve = None self.cumulative_gains = None def fit(self, y_true: np.ndarray, y_pred: np.ndarray): fpr, tpr, thresholds = roc_curve(y_true=y_true, y_score=y_pred) if not self.probability_cutoff: self.probability_cutoff = (ClassificationEvaluator. _compute_optimal_cutoff(fpr, tpr, thresholds)) y_pred_b = np.array([0 if pred <= self.probability_cutoff else 1 for pred in y_pred]) self.scalar_metrics = ClassificationEvaluator._compute_scalar_metrics( y_true, y_pred, y_pred_b, self.lift_at ) self.y_true = y_true self.y_pred = y_pred self.roc_curve = {"fpr": fpr, "tpr": tpr, "thresholds": thresholds} self.confusion_matrix = confusion_matrix(y_true, y_pred_b) self.lift_curve = ClassificationEvaluator._compute_lift_per_bin(y_true, y_pred, self.n_bins) self.cumulative_gains = ClassificationEvaluator._compute_cumulative_gains(y_true, y_pred) @staticmethod def _compute_scalar_metrics(y_true: np.ndarray, y_pred: np.ndarray, y_pred_b: np.ndarray, lift_at: float) -> pd.Series: return pd.Series({ "accuracy": accuracy_score(y_true, y_pred_b), "AUC": roc_auc_score(y_true, y_pred), "precision": precision_score(y_true, y_pred_b), "recall": recall_score(y_true, y_pred_b), "F1": f1_score(y_true, y_pred_b, average=None)[1], "matthews_corrcoef": matthews_corrcoef(y_true, y_pred_b), "lift at {}".format(lift_at): np.round(ClassificationEvaluator ._compute_lift(y_true=y_true, y_pred=y_pred, lift_at=lift_at), 2) }) def plot_roc_curve(self, path: str=None, dim: tuple=(12, 8)): if self.roc_curve is None: msg = ("This {} instance is not fitted yet. Call 'fit' with " "appropriate arguments before using this method.") raise NotFittedError(msg.format(self.__class__.__name__)) auc = float(self.scalar_metrics.loc["AUC"]) with plt.style.context("seaborn-whitegrid"): fig, ax = plt.subplots(figsize=dim) ax.plot(self.roc_curve["fpr"], self.roc_curve["tpr"], color="cornflowerblue", linewidth=3, label="ROC curve (area = {s:.3})".format(s=auc)) ax.plot([0, 1], [0, 1], color="darkorange", linewidth=3, linestyle="--") ax.set_xlabel("False Positive Rate", fontsize=15) ax.set_ylabel("True Positive Rate", fontsize=15) ax.legend(loc="lower right") ax.set_title("ROC curve", fontsize=20) if path: plt.savefig(path, format="png", dpi=300, bbox_inches="tight") plt.show() def plot_confusion_matrix(self, path: str=None, dim: tuple=(12, 8), labels: list=["0", "1"]): if self.confusion_matrix is None: msg = ("This {} instance is not fitted yet. 
Call 'fit' with " "appropriate arguments before using this method.") raise NotFittedError(msg.format(self.__class__.__name__)) fig, ax = plt.subplots(figsize=dim) ax = sns.heatmap(self.confusion_matrix, annot=self.confusion_matrix.astype(str), fmt="s", cmap="Blues", xticklabels=labels, yticklabels=labels) ax.set_title("Confusion matrix", fontsize=20) if path: plt.savefig(path, format="png", dpi=300, bbox_inches="tight") plt.show() def plot_cumulative_response_curve(self, path: str=None, dim: tuple=(12, 8)): if self.lift_curve is None: msg = ("This {} instance is not fitted yet. Call 'fit' with " "appropriate arguments before using this method.") raise NotFittedError(msg.format(self.__class__.__name__)) x_labels, lifts, inc_rate = self.lift_curve lifts = np.array(lifts)*inc_rate*100 with plt.style.context("seaborn-ticks"): fig, ax = plt.subplots(figsize=dim) plt.bar(x_labels[::-1], lifts, align="center", color="cornflowerblue") plt.ylabel("response (%)", fontsize=16) plt.xlabel("decile", fontsize=16) ax.set_xticks(x_labels) ax.set_xticklabels(x_labels) plt.axhline(y=inc_rate*100, color="darkorange", linestyle="--", xmin=0.05, xmax=0.95, linewidth=3, label="Incidence") ax.legend(loc="upper right") sns.despine(ax=ax, right=True, left=True) ax.grid(False) ax.set_title("Cumulative Response curve", fontsize=20) if path is not None: plt.savefig(path, format="png", dpi=300, bbox_inches="tight") plt.show()
MIT License
mkwiatkowski/pythoscope
lib2to3/pgen2/parse.py
Parser.setup
python
def setup(self, start=None): if start is None: start = self.grammar.start newnode = (start, None, None, []) stackentry = (self.grammar.dfas[start], 0, newnode) self.stack = [stackentry] self.rootnode = None self.used_names = set()
Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol.
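Because setup() resets all per-parse state, one Parser instance can parse any number of programs. A hedged sketch via the standard-library lib2to3 driver, which calls setup() internally before each parse; the vendored copy in this repo may differ slightly.

# Reusing one parser across several sources through the lib2to3 driver.
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
for source in ("x = 1\n", "def f():\n    return 2\n"):
    tree = d.parse_string(source)   # fresh setup() for each parse
    print(type(tree), str(tree).rstrip())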
https://github.com/mkwiatkowski/pythoscope/blob/b4b89b77b5184b25992893e320d58de32ed987f1/lib2to3/pgen2/parse.py#L107-L129
try: set except NameError: from sets import Set as set import token class ParseError(Exception): def __init__(self, msg, type, value, context): Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % (msg, type, value, context)) self.msg = msg self.type = type self.value = value self.context = context def __reduce__(self): return (ParseError, (self.msg, self.type, self.value, self.context)) class Parser(object): def __init__(self, grammar, convert=None): self.grammar = grammar self.convert = convert or (lambda grammar, node: node)
MIT License
avigad/boole
boole/elaboration/terms.py
print_box
python
def print_box(expr): if not conf.implicit: box_str = "cast({0!s},{1!s},{2!s})" .format(expr.conv, expr.expr, expr.type) else: box_str = color.purple + "cast" + color.reset + "({0!s}, {1!s})".format(expr.expr, expr.type) return box_str
Arguments: - `expr`: the Box (cast) expression to print
https://github.com/avigad/boole/blob/2a436c2967dbc968f6a5877c220b9757c3bc17c3/boole/elaboration/terms.py#L159-L171
from boole.core.info import * from boole.core.context import * from boole.core.expr import Const, Sub, Pair, Fst, Snd, Box, root_app, root_clause, root_pi, subst_expr from boole.elaboration.color import * import boole.core.expr as e import boole.core.typing as typing import elab as elab_tools from boole.elaboration.elab import app_expr, mvar_infer, sub_mvar import boole.core.tactics as tac import unif as u import boole.semantics.value as v from boole.semantics.value import Value import config as conf from config import current_ctxt, push_ctxt import boole.user_conf as user_conf class TermError(Exception): def __init__(self, mess): Exception.__init__(self, mess) def print_const(expr): if not (expr.info.unicode is None) and conf.print_unicode: return expr.info.unicode elif not (expr.info.sage_name is None) and user_conf.in_sage: return expr.info.sage_name else: return expr.name def print_app(expr): if conf.implicit: root, args = root_app_implicit(expr) if root.is_const() and root.info.print_iterable_app: return print_iterable_app(expr, root) elif root.is_const() and root.info.print_implies: return print_implies(expr) elif root.info.infix and len(args) == 2: return "({0!s} {1!s} {2!s})".format(args[0], root, args[1]) else: args_str = map(str, args) args_str = ", ".join(args_str) return "{0!s}({1!s})".format(root, args_str) else: root, args = root_app(expr) args_str = map(str, args) args_str = ", ".join(args_str) return "{0!s}({1!s})".format(root, args_str) def print_iterable_app(expr, op): args = dest_binop_left(expr, op) args_str = map(str, args) if op.info.infix: return '(' + (' ' + str(op) + ' ').join(args_str) + ')' else: return "{0!s}({1!s})".format(op, ', '.join(args_str)) def print_implies(expr): hyps, conc = dest_implies(expr) if len(hyps) == 1: return "{0!s}({1!s}, {2!s})".format(implies, hyps[0], conc) else: hyp_str = ", ".join(map(str, hyps)) return "{0!s}([{1!s}], {2!s})".format(implies, hyp_str, conc) def print_pair(expr): if not conf.implicit: pair_str = "pair({0!s}, {1!s}, {2!s})" .format(expr.fst, expr.snd, expr.type) else: pair_str = "pair({0!s}, {1!s})".format(expr.fst, expr.snd) return pair_str def print_fst(expr): return color.cyan + "fst" + color.reset + "({0!s})".format(expr.expr) def print_snd(expr): return color.cyan + "snd" + color.reset + "({0!s})".format(expr.expr)
Apache License 2.0
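For orientation, a sketch of the two output shapes `print_box` produces, using a stand-in object in place of a real `Box` expression (`_FakeBox` is invented here; `conf` and `color` are the real modules imported in the record's context):

class _FakeBox(object):
    conv, expr, type = "conv", "x", "Real"

conf.implicit = False
print(print_box(_FakeBox()))  # explicit form: cast(conv,x,Real)

conf.implicit = True          # implicit form drops the conversion argument
print(print_box(_FakeBox()))  # cast(x, Real), with 'cast' colorized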
jaantollander/crowddynamics
crowddynamics/core/sampling.py
random_sample_triangle
python
def random_sample_triangle(a, b, c):
    r1 = np.random.random()
    r2 = np.random.random()
    return (1 - np.sqrt(r1)) * a + \
        np.sqrt(r1) * (1 - r2) * b + \
        r2 * np.sqrt(r1) * c
r""" Generate uniform random sample from inside of a triangle defined by points :math:`a`, :math:`b` and :math:`c`. [1]_ .. math:: P = (1 - \sqrt{R_1}) \mathbf{a} + (\sqrt{R_1} (1 - R_2)) \mathbf{b} + (R_2 \sqrt{R_1}) \mathbf{c}, where uniformly distributed random variables are .. math:: R_1, R_2 \sim \mathcal{U}(0, 1) Does not work for triangles that have area of zero. Args: a (numpy.ndarray): Vertex of the triangle b (numpy.ndarray): Vertex of the triangle c (numpy.ndarray): Vertex of the triangle Returns: numpy.ndarray: Uniformly distributed random point P References: .. [1] http://math.stackexchange.com/questions/18686/uniform-random-point-in-triangle
https://github.com/jaantollander/crowddynamics/blob/a5858c02c06ed72f49b7bd6aaabd7cf16b3054c3/crowddynamics/core/sampling.py#L80-L112
import numba
import numpy as np
from numba import f8
from scipy.spatial.qhull import Delaunay

from crowddynamics.core.geom2D import polygon_area


@numba.jit(f8[:](f8[:, :]), nopython=True, nogil=True, cache=True)
def linestring_length_cumsum(vertices):
    n = len(vertices) - 1
    cumsum = np.zeros(n)
    for i in range(n):
        diff = vertices[i] - vertices[i + 1]
        cumsum[i] = np.hypot(diff[0], diff[1])
    return cumsum


@numba.jit(f8[:](f8[:], f8[:]), nopython=True, nogil=True, cache=True)
def random_sample_line(p0, p1):
    x = np.random.random()
    return x * (p1 - p0) + p0


def linestring_sample(vertices):
    assert len(vertices.shape) == 2
    assert vertices.shape[1] == 2
    weights = linestring_length_cumsum(vertices)
    weights /= weights[-1]
    while True:
        x = np.random.random()
        i = np.searchsorted(weights, x)
        yield random_sample_line(vertices[i], vertices[i+1])


@numba.jit([f8[:](f8[:, :, :])], nopython=True, nogil=True, cache=True)
def triangle_area_cumsum(trimesh):
    area = 0.0
    rows = trimesh.shape[0]
    cumsum = np.zeros(rows)
    for i in range(rows):
        area += polygon_area(trimesh[i, :, :])
        cumsum[i] = area
    return cumsum


@numba.jit(f8[:](f8[:], f8[:], f8[:]), nopython=True, nogil=True, cache=True)
MIT License
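The three barycentric coefficients in the formula are non-negative and sum to one, so every sample is a convex combination of the vertices and therefore lies inside the triangle. A quick check of that property, with arbitrarily chosen vertices:

import numpy as np

a, b, c = np.array([0.0, 0.0]), np.array([1.0, 0.0]), np.array([0.0, 1.0])
pts = np.stack([random_sample_triangle(a, b, c) for _ in range(1000)])
# Inside the unit right triangle means x >= 0, y >= 0 and x + y <= 1.
assert (pts >= 0.0).all() and (pts.sum(axis=1) <= 1.0 + 1e-12).all()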
mishal23/virtual-clinic
server/views.py
parse_session
python
def parse_session(request, template_data=None):
    server_ip1 = socket.gethostbyname(socket.getfqdn())
    client_ip = get_client_ip(request)
    if template_data is None:
        template_data = {}
    if request.session.has_key('alert_success'):
        template_data['alert_success'] = request.session.get('alert_success')
        del request.session['alert_success']
    if request.session.has_key('alert_danger'):
        template_data['alert_danger'] = request.session.get('alert_danger')
        del request.session['alert_danger']
    template_data['server_ip1'] = server_ip1
    template_data['client_ip'] = client_ip
    return template_data
Checks the session for any alert data. If there is alert data, it is added
to the given template data.
:param request: The request to check session data for
:param template_data: The dictionary to update
:return: The updated dictionary
https://github.com/mishal23/virtual-clinic/blob/3601c9a481dfa79b0253feaa71a19b29ef3dd5f5/server/views.py#L80-L100
from django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist import datetime from server.models import Account, Profile, Action, MedicalInfo, Prescription from server import logger from easy_pdf.views import PDFTemplateView from server.utils import render_to_pdf from django.views.generic import View from django.template.loader import get_template import socket class GeneratePdf(View): def get(self, request, *args, **kwargs): authentication_result = authentication_check( request, [Account.ACCOUNT_DOCTOR, Account.ACCOUNT_PATIENT] ) if authentication_result is not None: return authentication_result template = get_template('pdf.html') pk = request.GET['pk'] print(pk) prescription = Prescription.objects.get(pk=pk) print(prescription) data = { 'today': datetime.date.today(), 'date': prescription.date, 'patient_name': prescription.patient, 'doctor_name': prescription.doctor, 'medication': prescription.medication, 'instruction': prescription.instruction, 'strength': prescription.strength, 'order_id': prescription.pk } pdf = render_to_pdf('pdf.html', data) return HttpResponse(pdf, content_type='application/pdf') def authentication_check(request, required_roles=None, required_GET=None): if not request.user.is_authenticated: request.session['alert_danger'] = "You must be logged into VirtualClinic to view that page." return HttpResponseRedirect('/') try: request.user.account except ObjectDoesNotExist: request.session['alert_danger'] = "Your account was not properly created, please try a different account." return HttpResponseRedirect('/logout/') if required_roles and request.user.account.role not in required_roles: request.session['alert_danger'] = "You don't have permission to view that page." return HttpResponseRedirect('/error/denied/') if required_GET: for key in required_GET: if key not in request.GET: request.session['alert_danger'] = "Looks like you tried to use a malformed URL" return HttpResponseRedirect('/error/denied/') def get_client_ip(request): x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: ip = x_forwarded_for.split(',')[0] else: ip = request.META.get('REMOTE_ADDR') return ip
MIT License
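A hedged sketch of how `parse_session` is meant to be used from a view: one request stores an alert in the session (as `authentication_check` in the context does), and the next page drains it into its template data. The view name and template path here are invented:

from django.shortcuts import render

def home_view(request):
    # Pops any 'alert_success'/'alert_danger' left by a previous redirect
    # and records the server/client IPs for the template.
    template_data = parse_session(request, {'page_title': 'Home'})
    return render(request, 'home.html', template_data)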
qiskit/qiskit-aqua
qiskit/chemistry/transformations/fermionic_transformation.py
FermionicTransformation.interpret
python
def interpret(self, raw_result: Union[EigenstateResult, EigensolverResult,
                                      MinimumEigensolverResult]
              ) -> ElectronicStructureResult:
    eigenstate_result = None
    if isinstance(raw_result, EigenstateResult):
        eigenstate_result = raw_result
    elif isinstance(raw_result, EigensolverResult):
        eigenstate_result = EigenstateResult()
        eigenstate_result.raw_result = raw_result
        eigenstate_result.eigenenergies = raw_result.eigenvalues
        eigenstate_result.eigenstates = raw_result.eigenstates
        eigenstate_result.aux_operator_eigenvalues = raw_result.aux_operator_eigenvalues
    elif isinstance(raw_result, MinimumEigensolverResult):
        eigenstate_result = EigenstateResult()
        eigenstate_result.raw_result = raw_result
        eigenstate_result.eigenenergies = np.asarray([raw_result.eigenvalue])
        eigenstate_result.eigenstates = [raw_result.eigenstate]
        eigenstate_result.aux_operator_eigenvalues = [raw_result.aux_operator_eigenvalues]

    result = ElectronicStructureResult(eigenstate_result.data)
    result.computed_energies = np.asarray([e.real for e in eigenstate_result.eigenenergies])
    result.hartree_fock_energy = self._hf_energy
    result.nuclear_repulsion_energy = self._nuclear_repulsion_energy
    if self._nuclear_dipole_moment is not None:
        result.nuclear_dipole_moment = tuple(x for x in self._nuclear_dipole_moment)
    result.ph_extracted_energy = self._ph_energy_shift
    result.frozen_extracted_energy = self._energy_shift

    if result.aux_operator_eigenvalues is not None:
        result.num_particles = []
        result.total_angular_momentum = []
        result.magnetization = []
        result.computed_dipole_moment = []
        result.ph_extracted_dipole_moment = []
        result.frozen_extracted_dipole_moment = []

        if not isinstance(result.aux_operator_eigenvalues, list):
            aux_operator_eigenvalues = [result.aux_operator_eigenvalues]
        else:
            aux_operator_eigenvalues = result.aux_operator_eigenvalues

        for aux_op_eigenvalues in aux_operator_eigenvalues:
            if aux_op_eigenvalues is None:
                continue

            if aux_op_eigenvalues[0] is not None:
                result.num_particles.append(aux_op_eigenvalues[0][0].real)

            if aux_op_eigenvalues[1] is not None:
                result.total_angular_momentum.append(aux_op_eigenvalues[1][0].real)

            if aux_op_eigenvalues[2] is not None:
                result.magnetization.append(aux_op_eigenvalues[2][0].real)

            if len(aux_op_eigenvalues) >= 6 and self._has_dipole_moments:
                dipole_moment = []
                for moment in aux_op_eigenvalues[3:6]:
                    if moment is not None:
                        dipole_moment += [moment[0].real]
                    else:
                        dipole_moment += [None]

                result.reverse_dipole_sign = self._reverse_dipole_sign
                result.computed_dipole_moment.append(cast(DipoleTuple, tuple(dipole_moment)))
                result.ph_extracted_dipole_moment.append(
                    (self._ph_x_dipole_shift, self._ph_y_dipole_shift,
                     self._ph_z_dipole_shift))
                result.frozen_extracted_dipole_moment.append(
                    (self._x_dipole_shift, self._y_dipole_shift,
                     self._z_dipole_shift))

    return result
Interprets an EigenstateResult in the context of this transformation.

Args:
    raw_result: an eigenstate result object.

Returns:
    An electronic structure result.
https://github.com/qiskit/qiskit-aqua/blob/5ccf0e20129880e78a57f2f78c59b9a362ebb208/qiskit/chemistry/transformations/fermionic_transformation.py#L522-L602
from functools import partial from typing import Optional, List, Union, cast, Tuple, Dict, Any, Callable import logging from enum import Enum import numpy as np from qiskit.tools import parallel_map from qiskit.aqua import AquaError, aqua_globals from qiskit.aqua.operators import Z2Symmetries, WeightedPauliOperator, OperatorBase from qiskit.aqua.algorithms import EigensolverResult, MinimumEigensolverResult from qiskit.chemistry import QiskitChemistryError, QMolecule from qiskit.chemistry.fermionic_operator import FermionicOperator from qiskit.chemistry.drivers import BaseDriver from qiskit.chemistry.results import DipoleTuple, EigenstateResult, ElectronicStructureResult from qiskit.chemistry.components.variational_forms import UCCSD from .transformation import Transformation logger = logging.getLogger(__name__) class FermionicTransformationType(Enum): FULL = 'full' PARTICLE_HOLE = 'particle_hole' class FermionicQubitMappingType(Enum): JORDAN_WIGNER = 'jordan_wigner' PARITY = 'parity' BRAVYI_KITAEV = 'bravyi_kitaev' class FermionicTransformation(Transformation): def __init__(self, transformation: FermionicTransformationType = FermionicTransformationType.FULL, qubit_mapping: FermionicQubitMappingType = FermionicQubitMappingType.PARITY, two_qubit_reduction: bool = True, freeze_core: bool = False, orbital_reduction: Optional[List[int]] = None, z2symmetry_reduction: Optional[Union[str, List[int]]] = None) -> None: orbital_reduction = orbital_reduction if orbital_reduction is not None else [] super().__init__() self._transformation = transformation.value self._qubit_mapping = qubit_mapping.value self._two_qubit_reduction = two_qubit_reduction self._freeze_core = freeze_core self._orbital_reduction = orbital_reduction if z2symmetry_reduction is not None: if isinstance(z2symmetry_reduction, str): if z2symmetry_reduction != 'auto': raise QiskitChemistryError('Invalid z2symmetry_reduction value') self._z2symmetry_reduction = z2symmetry_reduction self._has_dipole_moments = False self._untapered_qubit_op = None self._hf_energy = None self._nuclear_repulsion_energy = None self._nuclear_dipole_moment = None self._reverse_dipole_sign = None self._energy_shift = 0.0 self._x_dipole_shift = 0.0 self._y_dipole_shift = 0.0 self._z_dipole_shift = 0.0 self._ph_energy_shift = 0.0 self._ph_x_dipole_shift = 0.0 self._ph_y_dipole_shift = 0.0 self._ph_z_dipole_shift = 0.0 self._molecule_info: Dict[str, Any] = {} @property def commutation_rule(self) -> bool: return False @property def molecule_info(self) -> Dict[str, Any]: return self._molecule_info @property def qubit_mapping(self) -> str: return self._qubit_mapping def transform(self, driver: BaseDriver, aux_operators: Optional[List[FermionicOperator]] = None ) -> Tuple[OperatorBase, List[OperatorBase]]: q_molecule = driver.run() ops, aux_ops = self._do_transform(q_molecule, aux_operators) ops = ops.to_opflow() if isinstance(ops, WeightedPauliOperator) else ops aux_ops = [a.to_opflow() if isinstance(a, WeightedPauliOperator) else a for a in aux_ops] return ops, aux_ops def _do_transform(self, qmolecule: QMolecule, aux_operators: Optional[List[FermionicOperator]] = None ) -> Tuple[WeightedPauliOperator, List[WeightedPauliOperator]]: logger.debug('Processing started...') self._hf_energy = qmolecule.hf_energy self._nuclear_repulsion_energy = qmolecule.nuclear_repulsion_energy self._nuclear_dipole_moment = qmolecule.nuclear_dipole_moment self._reverse_dipole_sign = qmolecule.reverse_dipole_sign core_list = qmolecule.core_orbitals if self._freeze_core else [] 
reduce_list = self._orbital_reduction if self._freeze_core: logger.info("Freeze_core specified. Core orbitals to be frozen: %s", core_list) if reduce_list: logger.info("Configured orbital reduction list: %s", reduce_list) reduce_list = [x + qmolecule.num_orbitals if x < 0 else x for x in reduce_list] freeze_list = [] remove_list = [] orb_list = list(set(core_list + reduce_list)) num_alpha = qmolecule.num_alpha num_beta = qmolecule.num_beta new_num_alpha = num_alpha new_num_beta = num_beta if orb_list: orbitals_list = np.array(orb_list) orbitals_list = orbitals_list[(cast(np.ndarray, orbitals_list) >= 0) & (orbitals_list < qmolecule.num_orbitals)] freeze_list_alpha = [i for i in orbitals_list if i < num_alpha] freeze_list_beta = [i for i in orbitals_list if i < num_beta] freeze_list = np.append(freeze_list_alpha, [i + qmolecule.num_orbitals for i in freeze_list_beta]) remove_list_alpha = [i for i in orbitals_list if i >= num_alpha] remove_list_beta = [i for i in orbitals_list if i >= num_beta] rla_adjust = -len(freeze_list_alpha) rlb_adjust = -len(freeze_list_alpha) - len(freeze_list_beta) + qmolecule.num_orbitals remove_list = np.append([i + rla_adjust for i in remove_list_alpha], [i + rlb_adjust for i in remove_list_beta]) logger.info("Combined orbital reduction list: %s", orbitals_list) logger.info(" converting to spin orbital reduction list: %s", np.append(np.array(orbitals_list), np.array(orbitals_list) + qmolecule.num_orbitals)) logger.info(" => freezing spin orbitals: %s", freeze_list) logger.info(" => removing spin orbitals: %s (indexes accounting for freeze %s)", np.append(remove_list_alpha, np.array(remove_list_beta) + qmolecule.num_orbitals), remove_list) new_num_alpha -= len(freeze_list_alpha) new_num_beta -= len(freeze_list_beta) new_nel = [new_num_alpha, new_num_beta] fer_op = FermionicOperator(h1=qmolecule.one_body_integrals, h2=qmolecule.two_body_integrals) fer_op, self._energy_shift, did_shift = FermionicTransformation._try_reduce_fermionic_operator(fer_op, freeze_list, remove_list) if aux_operators is not None: aux_operators = [ FermionicTransformation._try_reduce_fermionic_operator( op, freeze_list, remove_list)[0] for op in aux_operators ] if did_shift: logger.info("Frozen orbital energy shift: %s", self._energy_shift) if self._transformation == FermionicTransformationType.PARTICLE_HOLE.value: fer_op, ph_shift = fer_op.particle_hole_transformation(new_nel) self._ph_energy_shift = -ph_shift logger.info("Particle hole energy shift: %s", self._ph_energy_shift) if aux_operators is not None: aux_operators = [ op.particle_hole_transformation(new_nel)[0] for op in aux_operators ] logger.debug('Converting to qubit using %s mapping', self._qubit_mapping) qubit_op = FermionicTransformation._map_fermionic_operator_to_qubit( fer_op, self._qubit_mapping, new_nel, self._two_qubit_reduction ) qubit_op.name = 'Fermionic Operator' logger.debug(' num paulis: %s, num qubits: %s', len(qubit_op.paulis), qubit_op.num_qubits) aux_ops = [] def _add_aux_op(aux_op: FermionicOperator, name: str) -> None: aux_qop = FermionicTransformation._map_fermionic_operator_to_qubit( aux_op, self._qubit_mapping, new_nel, self._two_qubit_reduction ) aux_qop.name = name aux_ops.append(aux_qop) logger.debug(' num paulis: %s', aux_qop.paulis) logger.debug('Creating aux op for Number of Particles') _add_aux_op(fer_op.total_particle_number(), 'Number of Particles') logger.debug('Creating aux op for S^2') _add_aux_op(fer_op.total_angular_momentum(), 'S^2') logger.debug('Creating aux op for Magnetization') 
_add_aux_op(fer_op.total_magnetization(), 'Magnetization') if qmolecule.has_dipole_integrals(): self._has_dipole_moments = True def _dipole_op(dipole_integrals: np.ndarray, axis: str) -> Tuple[WeightedPauliOperator, float, float]: logger.debug('Creating aux op for dipole %s', axis) fer_op_ = FermionicOperator(h1=dipole_integrals) fer_op_, shift, did_shift_ = self._try_reduce_fermionic_operator(fer_op_, freeze_list, remove_list) if did_shift_: logger.info("Frozen orbital %s dipole shift: %s", axis, shift) ph_shift_ = 0.0 if self._transformation == FermionicTransformationType.PARTICLE_HOLE.value: fer_op_, ph_shift_ = fer_op_.particle_hole_transformation(new_nel) ph_shift_ = -ph_shift_ logger.info("Particle hole %s dipole shift: %s", axis, ph_shift_) qubit_op_ = self._map_fermionic_operator_to_qubit(fer_op_, self._qubit_mapping, new_nel, self._two_qubit_reduction) qubit_op_.name = 'Dipole ' + axis logger.debug(' num paulis: %s', len(qubit_op_.paulis)) return qubit_op_, shift, ph_shift_ op_dipole_x, self._x_dipole_shift, self._ph_x_dipole_shift = _dipole_op(qmolecule.x_dipole_integrals, 'x') op_dipole_y, self._y_dipole_shift, self._ph_y_dipole_shift = _dipole_op(qmolecule.y_dipole_integrals, 'y') op_dipole_z, self._z_dipole_shift, self._ph_z_dipole_shift = _dipole_op(qmolecule.z_dipole_integrals, 'z') aux_ops += [op_dipole_x, op_dipole_y, op_dipole_z] if aux_operators is not None: for aux_op in aux_operators: if hasattr(aux_op, 'name'): name = aux_op.name else: name = '' _add_aux_op(aux_op, name) logger.info('Molecule num electrons: %s, remaining for processing: %s', [num_alpha, num_beta], new_nel) nspinorbs = qmolecule.num_orbitals * 2 new_nspinorbs = nspinorbs - len(freeze_list) - len(remove_list) logger.info('Molecule num spin orbitals: %s, remaining for processing: %s', nspinorbs, new_nspinorbs) self._molecule_info['num_particles'] = (new_num_alpha, new_num_beta) self._molecule_info['num_orbitals'] = new_nspinorbs reduction = self._two_qubit_reduction if self._qubit_mapping == 'parity' else False self._molecule_info['two_qubit_reduction'] = reduction self._untapered_qubit_op = qubit_op z2symmetries = Z2Symmetries([], [], [], None) if self._z2symmetry_reduction is not None: logger.debug('Processing z2 symmetries') qubit_op, aux_ops, z2symmetries = self._process_z2symmetry_reduction(qubit_op, aux_ops) self._molecule_info['z2_symmetries'] = z2symmetries logger.debug('Processing complete ready to run algorithm') return qubit_op, aux_ops @property def untapered_qubit_op(self): return self._untapered_qubit_op def _process_z2symmetry_reduction(self, qubit_op: WeightedPauliOperator, aux_ops: List[WeightedPauliOperator]) -> Tuple: z2_symmetries = Z2Symmetries.find_Z2_symmetries(qubit_op) if z2_symmetries.is_empty(): logger.debug('No Z2 symmetries found') z2_qubit_op = qubit_op z2_aux_ops = aux_ops z2_symmetries = Z2Symmetries([], [], [], None) else: logger.debug('%s Z2 symmetries found: %s', len(z2_symmetries.symmetries), ','.join([symm.to_label() for symm in z2_symmetries.symmetries])) logger.debug('Checking operators commute with symmetry:') symmetry_ops = [] for symmetry in z2_symmetries.symmetries: symmetry_ops.append(WeightedPauliOperator(paulis=[[1.0, symmetry]])) commutes = FermionicTransformation._check_commutes(symmetry_ops, qubit_op) if not commutes: raise QiskitChemistryError('Z2 symmetry failure main operator must commute ' 'with symmetries found from it') for i, aux_op in enumerate(aux_ops): commutes = FermionicTransformation._check_commutes(symmetry_ops, aux_op) if not commutes: 
aux_ops[i] = None if self._z2symmetry_reduction == 'auto': from ..circuit.library.initial_states.hartree_fock import hartree_fock_bitstring hf_bitstr = hartree_fock_bitstring( num_orbitals=self._molecule_info['num_orbitals'], qubit_mapping=self._qubit_mapping, two_qubit_reduction=self._two_qubit_reduction, num_particles=self._molecule_info['num_particles'] ) z2_symmetries = FermionicTransformation._pick_sector(z2_symmetries, hf_bitstr) else: if len(self._z2symmetry_reduction) != len(z2_symmetries.symmetries): raise QiskitChemistryError('z2symmetry_reduction tapering values list has ' 'invalid length {} should be {}'. format(len(self._z2symmetry_reduction), len(z2_symmetries.symmetries))) valid = np.all(np.isin(self._z2symmetry_reduction, [-1, 1])) if not valid: raise QiskitChemistryError('z2symmetry_reduction tapering values list must ' 'contain -1\'s and/or 1\'s only was {}'. format(self._z2symmetry_reduction,)) z2_symmetries.tapering_values = self._z2symmetry_reduction logger.debug('Apply symmetry with tapering values %s', z2_symmetries.tapering_values) chop_to = 0.00000001 z2_qubit_op = z2_symmetries.taper(qubit_op).chop(chop_to) z2_aux_ops = [] for aux_op in aux_ops: if aux_op is None: z2_aux_ops += [None] else: z2_aux_ops += [z2_symmetries.taper(aux_op).chop(chop_to)] return z2_qubit_op, z2_aux_ops, z2_symmetries @staticmethod def _check_commutes(cliffords: List[WeightedPauliOperator], operator: WeightedPauliOperator) -> bool: commutes = [] for clifford in cliffords: commutes.append(operator.commute_with(clifford)) does_commute = np.all(commutes) logger.debug(' \'%s\' commutes: %s, %s', operator.name, does_commute, commutes) return cast(bool, does_commute) def get_default_filter_criterion(self) -> Optional[Callable[[Union[List, np.ndarray], float, Optional[List[float]]], bool]]: def filter_criterion(self, eigenstate, eigenvalue, aux_values): num_particles_aux = aux_values[0][0] total_angular_momentum_aux = aux_values[1][0] return np.isclose(sum(self.molecule_info['num_particles']), num_particles_aux) and np.isclose(0., total_angular_momentum_aux) return partial(filter_criterion, self) @staticmethod def _pick_sector(z2_symmetries: Z2Symmetries, hf_str: List[bool]) -> Z2Symmetries: taper_coef = [] for sym in z2_symmetries.symmetries: coef = -1 if np.logical_xor.reduce(np.logical_and(sym.z[::-1], hf_str)) else 1 taper_coef.append(coef) z2_symmetries.tapering_values = taper_coef return z2_symmetries
Apache License 2.0
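A hedged sketch of the call sequence `interpret` sits at the end of, assuming a configured `driver` (any `BaseDriver`) is already available and using Aqua's classical reference solver:

from qiskit.aqua.algorithms import NumPyMinimumEigensolver

transformation = FermionicTransformation()
qubit_op, aux_ops = transformation.transform(driver)  # driver: assumed given
raw = NumPyMinimumEigensolver().compute_minimum_eigenvalue(qubit_op, aux_ops)
# interpret() folds the frozen-core and particle-hole energy shifts back in
# and unpacks the aux operator values (particle number, S^2, dipoles, ...).
result = transformation.interpret(raw)
print(result.computed_energies)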
jpoppe/seedbank
seedbank/iso.py
Build.prepare
python
def prepare(self):
    utils.rmtree(self.work_path)
    utils.make_dirs(os.path.join(self.work_iso, 'seedbank/etc/runonce.d'))
    utils.make_dirs(self.work_initrd)
    utils.run('bsdtar -C "%s" -xf "%s"' % (self.work_iso, self.iso_file))
    utils.run('chmod -R u+w "%s"' % self.work_iso)
remove temporary files, create the directory structure and extract the ISO image into a writable work tree
https://github.com/jpoppe/seedbank/blob/0ec9cf48422a9dd058c6bfa5dc16a3db835c1997/seedbank/iso.py#L47-L53
__author__ = 'Jasper Poppe <jgpoppe@gmail.com>'
__copyright__ = 'Copyright (c) 2009-2015 Jasper Poppe'
__credits__ = ''
__license__ = 'Apache License, Version 2.0'
__version__ = '2.0.0rc7'
__maintainer__ = 'Jasper Poppe'
__email__ = 'jgpoppe@gmail.com'
__status__ = 'production'

import os
import sys

import manage
import utils


class Build:

    def __init__(self, cfg, iso_file, fqdn, dst):
        self.cfg = cfg
        work_path = os.path.join(cfg['paths']['temp'], 'seedbank', fqdn, 'iso')
        self.work_path = work_path
        self.work_initrd = os.path.join(work_path, 'initrd')
        self.work_iso = os.path.join(work_path, 'iso')
        self.iso_file = iso_file
        self.iso_dst = dst
        self.data = cfg['iso']
        self.data['architecture'] = None
Apache License 2.0
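For orientation, a sketch of the work tree `prepare()` leaves behind, per the paths set up in `__init__` above; the `cfg` dict, ISO path and FQDN here are invented:

build = Build(cfg, '/srv/isos/debian.iso', 'node01.example.org', '/srv/out')
build.prepare()
# <temp>/seedbank/node01.example.org/iso/iso/      extracted ISO, made writable
# <temp>/seedbank/node01.example.org/iso/iso/seedbank/etc/runonce.d/
# <temp>/seedbank/node01.example.org/iso/initrd/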
amz-driverless/rbb_core
rbb_client/src/rbb_client/models/tag.py
Tag.tag
python
def tag(self, tag):
    self._tag = tag
Sets the tag of this Tag.

:param tag: The tag of this Tag.
:type: str
https://github.com/amz-driverless/rbb_core/blob/618617270314af5335de30179072244e1f440c4c/rbb_client/src/rbb_client/models/tag.py#L64-L72
from pprint import pformat
from six import iteritems


class Tag(object):

    def __init__(self):
        self.swagger_types = {
            'tag': 'str',
            'color': 'str'
        }

        self.attribute_map = {
            'tag': 'tag',
            'color': 'color'
        }

        self._tag = None
        self._color = None

    @property
    def tag(self):
        return self._tag

    @tag.setter
MIT License
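A minimal sketch of the generated-model pattern this setter belongs to: attribute access is routed through the property pair, while `attribute_map` keeps the JSON wire names separate from the Python attribute names:

t = Tag()
t.tag = "raw"           # goes through the setter, stored on t._tag
assert t.tag == "raw"   # the getter reads it back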
pyglet/pyglet
tools/wraptypes/cparser.py
apply_specifiers
python
def apply_specifiers(specifiers, declaration):
    for s in specifiers:
        if type(s) == StorageClassSpecifier:
            if declaration.storage:
                p.parser.cparser.handle_error(
                    'Declaration has more than one storage class',
                    '???', p.lineno(1))
                return
            declaration.storage = s
        elif type(s) in (TypeSpecifier, StructTypeSpecifier, EnumSpecifier):
            declaration.type.specifiers.append(s)
        elif type(s) == TypeQualifier:
            declaration.type.qualifiers.append(s)
Apply specifiers to the declaration (declaration may be a Parameter instead).
https://github.com/pyglet/pyglet/blob/b9a63ea179735c8f252ac31d51751bdf8a741c9d/tools/wraptypes/cparser.py#L210-L224
from __future__ import print_function __docformat__ = 'restructuredtext' __version__ = '$Id$' import cPickle import operator import os.path import re import sys import time import warnings import preprocessor import yacc tokens = ( 'PP_IF', 'PP_IFDEF', 'PP_IFNDEF', 'PP_ELIF', 'PP_ELSE', 'PP_ENDIF', 'PP_INCLUDE', 'PP_DEFINE', 'PP_DEFINE_CONSTANT', 'PP_UNDEF', 'PP_LINE', 'PP_ERROR', 'PP_PRAGMA', 'IDENTIFIER', 'CONSTANT', 'CHARACTER_CONSTANT', 'STRING_LITERAL', 'SIZEOF', 'PTR_OP', 'INC_OP', 'DEC_OP', 'LEFT_OP', 'RIGHT_OP', 'LE_OP', 'GE_OP', 'EQ_OP', 'NE_OP', 'AND_OP', 'OR_OP', 'MUL_ASSIGN', 'DIV_ASSIGN', 'MOD_ASSIGN', 'ADD_ASSIGN', 'SUB_ASSIGN', 'LEFT_ASSIGN', 'RIGHT_ASSIGN', 'AND_ASSIGN', 'XOR_ASSIGN', 'OR_ASSIGN', 'HASH_HASH', 'PERIOD', 'TYPE_NAME', 'TYPEDEF', 'EXTERN', 'STATIC', 'AUTO', 'REGISTER', 'CHAR', 'SHORT', 'INT', 'LONG', 'SIGNED', 'UNSIGNED', 'FLOAT', 'DOUBLE', 'CONST', 'VOLATILE', 'VOID', 'STRUCT', 'UNION', 'ENUM', 'ELLIPSIS', 'CASE', 'DEFAULT', 'IF', 'ELSE', 'SWITCH', 'WHILE', 'DO', 'FOR', 'GOTO', 'CONTINUE', 'BREAK', 'RETURN', '__ASM__' ) keywords = [ 'auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do', 'double', 'else', 'enum', 'extern', 'float', 'for', 'goto', 'if', 'int', 'long', 'register', 'return', 'short', 'signed', 'sizeof', 'static', 'struct', 'switch', 'typedef', 'union', 'unsigned', 'void', 'volatile', 'while', '__asm__' ] class Declaration(object): def __init__(self): self.declarator = None self.type = Type() self.storage = None def __repr__(self): d = { 'declarator': self.declarator, 'type': self.type, } if self.storage: d['storage'] = self.storage l = ['%s=%r' % (k, v) for k, v in d.items()] return 'Declaration(%s)' % ', '.join(l) class Declarator(object): pointer = None def __init__(self): self.identifier = None self.initializer = None self.array = None self.parameters = None pointer = property(lambda self: None) def __repr__(self): s = self.identifier or '' if self.array: s += repr(self.array) if self.initializer: s += ' = %r' % self.initializer if self.parameters is not None: s += '(' + ', '.join([repr(p) for p in self.parameters]) + ')' return s class Pointer(Declarator): pointer = None def __init__(self): super(Pointer, self).__init__() self.qualifiers = [] def __repr__(self): q = '' if self.qualifiers: q = '<%s>' % ' '.join(self.qualifiers) return 'POINTER%s(%r)' % (q, self.pointer) + super(Pointer, self).__repr__() class Array(object): def __init__(self): self.size = None self.array = None def __repr__(self): if self.size: a = '[%r]' % self.size else: a = '[]' if self.array: return repr(self.array) + a else: return a class Parameter(object): def __init__(self): self.type = Type() self.storage = None self.declarator = None def __repr__(self): d = { 'type': self.type, } if self.declarator: d['declarator'] = self.declarator if self.storage: d['storage'] = self.storage l = ['%s=%r' % (k, v) for k, v in d.items()] return 'Parameter(%s)' % ', '.join(l) class Type(object): def __init__(self): self.qualifiers = [] self.specifiers = [] def __repr__(self): return ' '.join(self.qualifiers + [str(s) for s in self.specifiers]) class StorageClassSpecifier(str): pass class TypeSpecifier(str): pass class StructTypeSpecifier(object): def __init__(self, is_union, tag, declarations): self.is_union = is_union self.tag = tag self.declarations = declarations def __repr__(self): if self.is_union: s = 'union' else: s = 'struct' if self.tag: s += ' %s' % self.tag if self.declarations: s += ' {%s}' % '; '.join([repr(d) for d in self.declarations]) return s class 
EnumSpecifier(object): def __init__(self, tag, enumerators): self.tag = tag self.enumerators = enumerators def __repr__(self): s = 'enum' if self.tag: s += ' %s' % self.tag if self.enumerators: s += ' {%s}' % ', '.join([repr(e) for e in self.enumerators]) return s class Enumerator(object): def __init__(self, name, expression): self.name = name self.expression = expression def __repr__(self): s = self.name if self.expression: s += ' = %r' % self.expression return s class TypeQualifier(str): pass
BSD 3-Clause New or Revised License
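A sketch of the dispatch, building a declaration for `static const int x` by hand from the specifier classes in the record's context (the parser error path, which references the enclosing `p`, is not exercised here):

decl = Declaration()
apply_specifiers([StorageClassSpecifier('static'),
                  TypeQualifier('const'),
                  TypeSpecifier('int')], decl)
# Each specifier lands in its own slot on the declaration; the specifier
# classes subclass str, so plain string comparisons work.
assert decl.storage == 'static'
assert decl.type.qualifiers == ['const']
assert decl.type.specifiers == ['int']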
jahjajaka/afternoon_cleaner
slim/nets/nasnet/pnasnet.py
build_pnasnet_mobile
python
def build_pnasnet_mobile(images,
                         num_classes,
                         is_training=True,
                         final_endpoint=None,
                         config=None):
    hparams = copy.deepcopy(config) if config else mobile_imagenet_config()
    nasnet._update_hparams(hparams, is_training)

    if tf.test.is_gpu_available() and hparams.data_format == 'NHWC':
        tf.logging.info('A GPU is available on the machine, consider using NCHW '
                        'data format for increased speed on GPU.')

    if hparams.data_format == 'NCHW':
        images = tf.transpose(images, [0, 3, 1, 2])

    total_num_cells = hparams.num_cells + 2
    normal_cell = PNasNetNormalCell(hparams.num_conv_filters,
                                    hparams.drop_path_keep_prob,
                                    total_num_cells,
                                    hparams.total_training_steps,
                                    hparams.use_bounded_activation)
    with arg_scope(
        [slim.dropout, nasnet_utils.drop_path, slim.batch_norm],
        is_training=is_training):
        with arg_scope(
            [
                slim.avg_pool2d, slim.max_pool2d, slim.conv2d, slim.batch_norm,
                slim.separable_conv2d, nasnet_utils.factorized_reduction,
                nasnet_utils.global_avg_pool, nasnet_utils.get_channel_index,
                nasnet_utils.get_channel_dim
            ],
            data_format=hparams.data_format):
            return _build_pnasnet_base(
                images,
                normal_cell=normal_cell,
                num_classes=num_classes,
                hparams=hparams,
                is_training=is_training,
                final_endpoint=final_endpoint)
Build PNASNet Mobile model for the ImageNet Dataset.
https://github.com/jahjajaka/afternoon_cleaner/blob/590bdf58a216cbc6cfc47ef8f49d7af3df3703b7/slim/nets/nasnet/pnasnet.py#L212-L257
from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import tensorflow as tf from nets.nasnet import nasnet from nets.nasnet import nasnet_utils arg_scope = tf.contrib.framework.arg_scope slim = tf.contrib.slim def large_imagenet_config(): return tf.contrib.training.HParams( stem_multiplier=3.0, dense_dropout_keep_prob=0.5, num_cells=12, filter_scaling_rate=2.0, num_conv_filters=216, drop_path_keep_prob=0.6, use_aux_head=1, num_reduction_layers=2, data_format='NHWC', skip_reduction_layer_input=1, total_training_steps=250000, use_bounded_activation=False, ) def mobile_imagenet_config(): return tf.contrib.training.HParams( stem_multiplier=1.0, dense_dropout_keep_prob=0.5, num_cells=9, filter_scaling_rate=2.0, num_conv_filters=54, drop_path_keep_prob=1.0, use_aux_head=1, num_reduction_layers=2, data_format='NHWC', skip_reduction_layer_input=1, total_training_steps=250000, use_bounded_activation=False, ) def pnasnet_large_arg_scope(weight_decay=4e-5, batch_norm_decay=0.9997, batch_norm_epsilon=0.001): return nasnet.nasnet_large_arg_scope( weight_decay, batch_norm_decay, batch_norm_epsilon) def pnasnet_mobile_arg_scope(weight_decay=4e-5, batch_norm_decay=0.9997, batch_norm_epsilon=0.001): return nasnet.nasnet_mobile_arg_scope(weight_decay, batch_norm_decay, batch_norm_epsilon) def _build_pnasnet_base(images, normal_cell, num_classes, hparams, is_training, final_endpoint=None): end_points = {} def add_and_check_endpoint(endpoint_name, net): end_points[endpoint_name] = net return final_endpoint and (endpoint_name == final_endpoint) reduction_indices = nasnet_utils.calc_reduction_layers( hparams.num_cells, hparams.num_reduction_layers) stem = lambda: nasnet._imagenet_stem(images, hparams, normal_cell) net, cell_outputs = stem() if add_and_check_endpoint('Stem', net): return net, end_points aux_head_cell_idxes = [] if len(reduction_indices) >= 2: aux_head_cell_idxes.append(reduction_indices[1] - 1) filter_scaling = 1.0 true_cell_num = 2 activation_fn = tf.nn.relu6 if hparams.use_bounded_activation else tf.nn.relu for cell_num in range(hparams.num_cells): is_reduction = cell_num in reduction_indices stride = 2 if is_reduction else 1 if is_reduction: filter_scaling *= hparams.filter_scaling_rate if hparams.skip_reduction_layer_input or not is_reduction: prev_layer = cell_outputs[-2] net = normal_cell( net, scope='cell_{}'.format(cell_num), filter_scaling=filter_scaling, stride=stride, prev_layer=prev_layer, cell_num=true_cell_num) if add_and_check_endpoint('Cell_{}'.format(cell_num), net): return net, end_points true_cell_num += 1 cell_outputs.append(net) if (hparams.use_aux_head and cell_num in aux_head_cell_idxes and num_classes and is_training): aux_net = activation_fn(net) nasnet._build_aux_head(aux_net, end_points, num_classes, hparams, scope='aux_{}'.format(cell_num)) with tf.variable_scope('final_layer'): net = activation_fn(net) net = nasnet_utils.global_avg_pool(net) if add_and_check_endpoint('global_pool', net) or not num_classes: return net, end_points net = slim.dropout(net, hparams.dense_dropout_keep_prob, scope='dropout') logits = slim.fully_connected(net, num_classes) if add_and_check_endpoint('Logits', logits): return net, end_points predictions = tf.nn.softmax(logits, name='predictions') if add_and_check_endpoint('Predictions', predictions): return net, end_points return logits, end_points def build_pnasnet_large(images, num_classes, is_training=True, final_endpoint=None, config=None): hparams = copy.deepcopy(config) 
if config else large_imagenet_config() nasnet._update_hparams(hparams, is_training) if tf.test.is_gpu_available() and hparams.data_format == 'NHWC': tf.logging.info('A GPU is available on the machine, consider using NCHW ' 'data format for increased speed on GPU.') if hparams.data_format == 'NCHW': images = tf.transpose(images, [0, 3, 1, 2]) total_num_cells = hparams.num_cells + 2 normal_cell = PNasNetNormalCell(hparams.num_conv_filters, hparams.drop_path_keep_prob, total_num_cells, hparams.total_training_steps, hparams.use_bounded_activation) with arg_scope( [slim.dropout, nasnet_utils.drop_path, slim.batch_norm], is_training=is_training): with arg_scope([slim.avg_pool2d, slim.max_pool2d, slim.conv2d, slim.batch_norm, slim.separable_conv2d, nasnet_utils.factorized_reduction, nasnet_utils.global_avg_pool, nasnet_utils.get_channel_index, nasnet_utils.get_channel_dim], data_format=hparams.data_format): return _build_pnasnet_base( images, normal_cell=normal_cell, num_classes=num_classes, hparams=hparams, is_training=is_training, final_endpoint=final_endpoint) build_pnasnet_large.default_image_size = 331
MIT License
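A hedged usage sketch in TF1 graph mode, following the slim conventions this file uses (the 224x224 input size and 1001-class head, which includes a background class, are the customary mobile ImageNet settings and are assumptions here):

import tensorflow as tf

images = tf.placeholder(tf.float32, [None, 224, 224, 3])
with slim.arg_scope(pnasnet_mobile_arg_scope()):
    logits, end_points = build_pnasnet_mobile(
        images, num_classes=1001, is_training=False)
# end_points maps endpoint names ('Stem', 'Cell_0', ..., 'Predictions')
# to the corresponding tensors, per _build_pnasnet_base above.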
cartodb/bigmetadata
tasks/base_tasks.py
TableTask.version
python
def version(self):
    return 0
Must return a version control number, which is useful for forcing a re-run/overwrite without having to track down and delete output artifacts.
https://github.com/cartodb/bigmetadata/blob/a32325382500f23b8a607e4e02cc0ec111360869/tasks/base_tasks.py#L809-L815
import os import re import json import time import requests import subprocess import importlib import inspect import zipfile import gzip import csv import uuid import urllib.request from urllib.parse import quote_plus from collections import OrderedDict from datetime import date from luigi import (Task, Parameter, LocalTarget, BoolParameter, IntParameter, ListParameter, DateParameter, WrapperTask, Event, ExternalTask) from luigi.contrib.s3 import S3Target from sqlalchemy.dialects.postgresql import JSON from google.cloud import storage from lib.util import digest_file from lib.logger import get_logger from tasks.meta import (OBSColumn, OBSTable, metadata, current_session, session_commit, session_rollback, GEOM_REF) from tasks.targets import (ColumnTarget, TagTarget, CartoDBTarget, PostgresTarget, TableTarget, RepoTarget, URLTarget) from tasks.util import (classpath, query_cartodb, sql_to_cartodb_table, underscore_slugify, shell, create_temp_schema, unqualified_task_id, generate_tile_summary, uncompress_file, copyfile) from tasks.simplification import SIMPLIFIED_SUFFIX from tasks.simplify import Simplify LOGGER = get_logger(__name__) MAX_PG_IDENTIFIER_LENGTH = 63 class ColumnsTask(Task): def columns(self): raise NotImplementedError('Must return iterable of OBSColumns') def on_failure(self, ex): session_rollback(self, ex) super(ColumnsTask, self).on_failure(ex) def on_success(self): session_commit(self) def run(self): for _, coltarget in self.output().items(): coltarget.update_or_create() def version(self): return 0 def prefix(self): if self.task_namespace and self.task_namespace != '__not_user_specified': splitted_namespace = self.task_namespace.split('.') if splitted_namespace[0] == 'tasks': splitted_namespace.pop(0) return '.'.join(splitted_namespace) else: return self.task_namespace else: return classpath(self) def output(self): session = current_session() if hasattr(self, '_colids') and self.complete(): return OrderedDict([ (colkey, ColumnTarget(session.query(OBSColumn).get(cid), self)) for colkey, cid in self.colids.items() ]) already_in_session = [obj for obj in session] output = OrderedDict() input_ = self.input() for col_key, col in self.columns().items(): if not isinstance(col, OBSColumn): raise RuntimeError( 'Values in `.columns()` must be of type OBSColumn, but ' '"{col}" is type {type}'.format(col=col_key, type=type(col))) if not col.version: col.version = self.version() col.id = '.'.join([self.prefix(), col.id or col_key]) tags = self.tags(input_, col_key, col) if isinstance(tags, TagTarget): col.tags.append(tags) else: col.tags.extend(tags) output[col_key] = ColumnTarget(col, self) now_in_session = [obj for obj in session] for obj in now_in_session: if obj not in already_in_session: if obj in session: session.expunge(obj) return output @property def colids(self): if not hasattr(self, '_colids'): self._colids = OrderedDict([ (colkey, ct._id) for colkey, ct in self.output().items() ]) return self._colids def complete(self): deps = self.deps() if deps and not all([d.complete() for d in deps]): return False else: cnt = current_session().execute( ''' SELECT COUNT(*) FROM observatory.obs_column WHERE id IN ('{ids}') AND version = '{version}' '''.format( ids="', '".join(list(self.colids.values())), version=self.version() )).fetchone()[0] return cnt == len(list(self.colids.values())) def tags(self, input_, col_key, col): return [] class TagsTask(Task): def tags(self): raise NotImplementedError('Must return iterable of OBSTags') def on_failure(self, ex): session_rollback(self, ex) 
super(TagsTask, self).on_failure(ex) def on_success(self): session_commit(self) def run(self): for _, tagtarget in self.output().items(): tagtarget.update_or_create() def version(self): return 0 def output(self): output = {} for tag in self.tags(): orig_id = tag.id tag.id = '.'.join([classpath(self), orig_id]) if not tag.version: tag.version = self.version() output[orig_id] = TagTarget(tag, self) return output class TableToCartoViaImportAPI(Task): force = BoolParameter(default=False, significant=False) schema = Parameter(default='observatory') username = Parameter(default=None, significant=False) api_key = Parameter(default=None, significant=False) outname = Parameter(default=None, significant=False) table = Parameter() columns = ListParameter(default=[]) def run(self): carto_url = 'https://{}.carto.com'.format(self.username) if self.username else os.environ['CARTODB_URL'] api_key = self.api_key if self.api_key else os.environ['CARTODB_API_KEY'] try: os.makedirs(os.path.join('tmp', classpath(self))) except OSError: pass outname = self.outname or self.table tmp_file_path = os.path.join('tmp', classpath(self), outname + '.csv') if not self.columns: shell(r'''psql -c '\copy "{schema}".{tablename} TO '"'"{tmp_file_path}"'"' WITH CSV HEADER' '''.format( schema=self.schema, tablename=self.table, tmp_file_path=tmp_file_path, )) else: shell(r'''psql -c '\copy (SELECT {columns} FROM "{schema}".{tablename}) TO '"'"{tmp_file_path}"'"' WITH CSV HEADER' '''.format( schema=self.schema, tablename=self.table, tmp_file_path=tmp_file_path, columns=', '.join(self.columns), )) curl_resp = shell( 'curl -s -F privacy=public -F type_guessing=false ' ' -F file=@{tmp_file_path} "{url}/api/v1/imports/?api_key={api_key}"'.format( tmp_file_path=tmp_file_path, url=carto_url, api_key=api_key )) try: import_id = json.loads(curl_resp)["item_queue_id"] except ValueError: raise Exception(curl_resp) while True: resp = requests.get('{url}/api/v1/imports/{import_id}?api_key={api_key}'.format( url=carto_url, import_id=import_id, api_key=api_key )) if resp.json()['state'] == 'complete': LOGGER.info("Waiting for import %s for %s", import_id, outname) break elif resp.json()['state'] == 'failure': raise Exception('Import failed: {}'.format(resp.json())) print(resp.json()['state']) time.sleep(1) if resp.json()['table_name'] != outname: query_cartodb('ALTER TABLE {oldname} RENAME TO {newname}'.format( oldname=resp.json()['table_name'], newname=outname, carto_url=carto_url, api_key=api_key, )) assert resp.status_code == 200 try: session = current_session() resp = session.execute( ''' SELECT att.attname, pg_catalog.format_type(atttypid, NULL) AS display_type, att.attndims FROM pg_attribute att JOIN pg_class tbl ON tbl.oid = att.attrelid JOIN pg_namespace ns ON tbl.relnamespace = ns.oid WHERE tbl.relname = '{tablename}' AND pg_catalog.format_type(atttypid, NULL) NOT IN ('character varying', 'text', 'user-defined', 'geometry') AND att.attname IN (SELECT column_name from information_schema.columns WHERE table_schema='{schema}' AND table_name='{tablename}') AND ns.nspname = '{schema}'; '''.format(schema=self.schema, tablename=self.table.lower())).fetchall() alter = ', '.join([ " ALTER COLUMN {colname} SET DATA TYPE {data_type} " " USING NULLIF({colname}, '')::{data_type}".format( colname=colname, data_type=data_type ) for colname, data_type, _ in resp]) if alter: alter_stmt = 'ALTER TABLE {tablename} {alter}'.format( tablename=outname, alter=alter) LOGGER.info(alter_stmt) resp = query_cartodb(alter_stmt, api_key=api_key, 
carto_url=carto_url) if resp.status_code != 200: raise Exception('could not alter columns for "{tablename}":' '{err}'.format(tablename=outname, err=resp.text)) except Exception as err: self.output().remove(carto_url=carto_url, api_key=api_key) raise err def output(self): carto_url = 'https://{}.carto.com'.format(self.username) if self.username else os.environ['CARTODB_URL'] api_key = self.api_key if self.api_key else os.environ['CARTODB_API_KEY'] target = CartoDBTarget(self.outname or self.table, api_key=api_key, carto_url=carto_url) if self.force: target.remove(carto_url=carto_url, api_key=api_key) self.force = False return target class TableToCarto(Task): force = BoolParameter(default=False, significant=False) schema = Parameter(default='observatory') table = Parameter() outname = Parameter(default=None) def run(self): json_colnames = [] table = '.'.join([self.schema, self.table]) if table in metadata.tables: cols = metadata.tables[table].columns for colname, coldef in list(cols.items()): coltype = coldef.type if isinstance(coltype, JSON): json_colnames.append(colname) sql_to_cartodb_table(self.output().tablename, self.table, json_colnames, schema=self.schema) self.force = False def output(self): if self.schema != 'observatory': table = '.'.join([self.schema, self.table]) else: table = self.table if self.outname is None: self.outname = underscore_slugify(table) target = CartoDBTarget(self.outname) if self.force and target.exists(): target.remove() self.force = False return target class RepoFileUncompressTask(Task): def version(self): return 1 def get_url(self): raise NotImplementedError('RepoFileUncompressTask must define get_url()') def _copy_from_repo(self, repo_file): copyfile(repo_file.path, '{output}.{extension}'.format(output=self.output().path, extension=self.compressed_extension)) def run(self): repo_file = yield RepoFile(resource_id=self.task_id, version=self.version(), url=self.get_url()) os.makedirs(self.output().path) try: self._copy_from_repo(repo_file) self.uncompress() except: os.rmdir(self.output().path) raise def uncompress(self): raise NotImplementedError('RepoFileUncompressTask must define uncompress()') def output(self): return LocalTarget(os.path.join('tmp', classpath(self), self.task_id)) class RepoFileUnzipTask(RepoFileUncompressTask): compressed_extension = 'zip' def uncompress(self): uncompress_file(self.output().path) class RepoFileGUnzipTask(RepoFileUncompressTask): compressed_extension = 'gz' file_extension = Parameter(default='csv') def uncompress(self): gunzip = gzip.GzipFile('{output}.{extension}'.format(output=self.output().path, extension=self.compressed_extension), 'rb') with open(os.path.join(self.output().path, '{filename}.{extension}'.format( filename=self.task_id, extension=self.file_extension)), 'wb') as outfile: outfile.write(gunzip.read()) class TempTableTask(Task): force = BoolParameter(default=False, significant=False) def on_failure(self, ex): session_rollback(self, ex) super(TempTableTask, self).on_failure(ex) def on_success(self): session_commit(self) def target_tablename(self): return unqualified_task_id(self.task_id)[:MAX_PG_IDENTIFIER_LENGTH] def run(self): raise Exception('Must override `run`') def output(self): return PostgresTarget(classpath(self), self.target_tablename()) @TempTableTask.event_handler(Event.START) def clear_temp_table(task): create_temp_schema(task) target = task.output() if task.force or target.empty(): session = current_session() session.execute('DROP TABLE IF EXISTS "{schema}".{tablename}'.format( 
schema=classpath(task), tablename=unqualified_task_id(task.task_id))) session.flush() class SimplifiedTempTableTask(TempTableTask): def get_table_id(self): return '.'.join([self.input().schema, '_'.join(self.input().tablename.split('_')[:-1])]) def get_suffix(self): return SIMPLIFIED_SUFFIX def run(self): yield Simplify(schema=self.input().schema, table=self.input().tablename, table_id=self.get_table_id(), suffix=self.get_suffix()) def output(self): return PostgresTarget(self.input().schema, self.input().tablename + self.get_suffix()) class GdbFeatureClass2TempTableTask(TempTableTask): feature_class = Parameter() def input_gdb(self): raise NotImplementedError("Must define `input_gdb` method") def run(self): shell(''' PG_USE_COPY=yes ogr2ogr -f "PostgreSQL" PG:"dbname=$PGDATABASE \ active_schema={schema}" -t_srs "EPSG:4326" -nlt MultiPolygon \ -nln {tablename} {infile} '''.format(schema=self.output().schema, infile=self.input_gdb(), tablename=self.output().tablename)) class GeoFile2TempTableTask(TempTableTask): encoding = Parameter(default='latin1', significant=False) other_options = '' def input_files(self): raise NotImplementedError("Must specify `input_files` method") def run(self): if isinstance(self.input_files(), str): files = [self.input_files()] else: files = self.input_files() schema = self.output().schema tablename = self.output().tablename operation = '-overwrite -lco OVERWRITE=yes -lco SCHEMA={schema} -lco PRECISION=no'.format( schema=schema) for geofile in files: cmd = self.build_ogr_command(encoding='utf-8', schema=schema, tablename=tablename, input=geofile, operation=operation, other_options=self.other_options) output = shell(cmd) if self.utf8_error(output): cmd = self.build_ogr_command(encoding=self.encoding, schema=schema, tablename=tablename, input=geofile, operation=operation, other_options=self.other_options) shell(cmd) operation = '-append '.format(schema=schema) def utf8_error(self, cmd_output): regex = re.compile('invalid byte sequence for encoding \"UTF8\"', re.IGNORECASE) return re.search(regex, cmd_output) is not None def build_ogr_command(self, **args): return 'PG_USE_COPY=yes PGCLIENTENCODING=UTF8 ' 'ogr2ogr --config SHAPE_ENCODING {encoding} -f PostgreSQL PG:"dbname=$PGDATABASE ' 'active_schema={schema}" -t_srs "EPSG:4326" ' '{other_options} ' '-nlt MultiPolygon -nln {table} ' '{operation} \'{input}\' '.format(encoding=args['encoding'], schema=args['schema'], table=args['tablename'], input=args['input'], operation=args['operation'], other_options=args['other_options']) class CSV2TempTableTask(TempTableTask): delimiter = Parameter(default=',', significant=False) has_header = BoolParameter(default=True, significant=False) encoding = Parameter(default='utf8', significant=False) def input_csv(self): raise NotImplementedError("Must specify `input_csv` method") def coldef(self): if isinstance(self.input_csv(), str): csvfile = self.input_csv() else: raise NotImplementedError("Cannot automatically determine colnames " "if several input CSVs.") with open('{csv}'.format(csv=csvfile), 'r', encoding=self.encoding) as f: header_row = next(csv.reader(f, delimiter=self.delimiter)) return [(h, 'Text') for h in header_row] def read_method(self, fname): return 'cat "{input}"'.format(input=fname) def run(self): if isinstance(self.input_csv(), str): csvs = [self.input_csv()] else: csvs = self.input_csv() session = current_session() session.execute('CREATE TABLE {output} ({coldef})'.format( output=self.output().table, coldef=', '.join(['"{}" {}'.format(c[0], c[1]) for c in 
self.coldef()]) )) session.commit() options = [''' DELIMITER '"'{delimiter}'"' ENCODING '"'{encoding}'"' '''.format(delimiter=self.delimiter, encoding=self.encoding)] if self.has_header: options.append('CSV HEADER') try: for csvfile in csvs: shell(r'''{read_method} | psql -c '\copy {table} FROM STDIN {options}' '''.format( read_method=self.read_method(csvfile), table=self.output().table, options=' '.join(options) )) self.after_copy() except: session.rollback() session.execute('DROP TABLE IF EXISTS {output}'.format( output=self.output().table)) session.commit() raise def after_copy(self): pass class LoadPostgresFromURL(TempTableTask): def load_from_url(self, url): shell('curl {url} | gunzip -c | grep -v default_tablespace | psql'.format( url=url)) self.mark_done() def mark_done(self): session = current_session() session.execute('DROP TABLE IF EXISTS {table}'.format( table=self.output().table)) session.execute('CREATE TABLE {table} AS SELECT now() creation_time'.format( table=self.output().table)) class LoadPostgresFromZipFile(TempTableTask): def load_from_zipfile(self, zipfile): shell('gunzip {zipfile} -c | grep -v default_tablespace | psql'.format( zipfile=zipfile)) self.mark_done() def mark_done(self): session = current_session() session.execute('DROP TABLE IF EXISTS {table}'.format( table=self.output().table)) session.execute('CREATE TABLE {table} AS SELECT now() creation_time'.format( table=self.output().table)) class TableTask(Task): def _requires(self): reqs = super(TableTask, self)._requires() if self._testmode: return [r for r in reqs if isinstance(r, (TagsTask, TableTask, ColumnsTask))] else: return reqs
BSD 3-Clause New or Revised License
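A sketch of the override the docstring describes: bumping the number on a subclass is enough to force regeneration, per the pattern `ColumnsTask.complete()` uses above, where the stored version is compared against `version()` (the class name and value here are illustrative):

class MyTable(TableTask):
    def version(self):
        # Bumped from the default 0: completeness checks compare this value
        # against the stored one, so previously generated output is rebuilt.
        return 1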
khazhyk/dango.py
dango/extensions.py
WatchdogExtensionLoader.watch_spec
python
def watch_spec(self, plugin_spec):
    module_parts = plugin_spec.split('.')
    exts_to_load = []
    if module_parts[-1] == "*":
        plugin_spec = ".".join(module_parts[:-1])
        if plugin_spec in sys.modules:
            log.warning("%s is already loaded by some outside source, we "
                        "may not be able to unload it!", plugin_spec)
        self._register.set_reloadable(plugin_spec)
        mod = importlib.import_module(plugin_spec)
        if not mod.__spec__.submodule_search_locations:
            raise ValueError("There's no submodules to watch here...")
        watched_location = os.path.normpath(
            list(mod.__spec__.submodule_search_locations)[0])

        def _module_name(src_path):
            assert src_path.startswith(watched_location)
            if "__pycache__" in src_path:
                return
            subpath = src_path[len(watched_location)+1:]
            subparts = subpath.split(os.sep)
            if not subparts or not subparts[0]:
                return
            modname, ext = os.path.splitext(subparts[0])
            if ext == ".py":
                return ".".join([plugin_spec, modname])
            if os.path.exists(os.path.join(
                    watched_location, modname, '__init__.py')):
                return ".".join([plugin_spec, modname])

        for item in os.listdir(watched_location):
            lib = _module_name(os.path.join(watched_location, item))
            if lib:
                try:
                    self._register.load_extension(lib)
                except config.InvalidConfig:
                    log.error("Could not load %s due to invalid config!", lib)
        self._watches[watched_location] = ModuleDirWatchdog(
            self._register, lambda e: _module_name(e.src_path))
    else:
        if plugin_spec in sys.modules:
            log.warning("%s is already loaded by some outside source, we "
                        "may not be able to unload it!", plugin_spec)
        self._register.set_reloadable(plugin_spec)
        mod = importlib.import_module(plugin_spec)
        if mod.__spec__.submodule_search_locations:
            watched_location = mod.__spec__.submodule_search_locations[0]

            def module_name(event):
                return plugin_spec
        else:
            watched_file = mod.__spec__.origin

            def module_name(event):
                if event.src_path != watched_file:
                    return
                return plugin_spec
            watched_location = os.path.split(mod.__spec__.origin)[0]
        self._watches[watched_location] = ModuleDirWatchdog(
            self._register, module_name)
        try:
            self._register.load_extension(plugin_spec)
        except config.InvalidConfig:
            log.error("Could not load %s due to invalid config!", plugin_spec)
Watch the plugin spec for changes.

Arguments
---------
plugin_spec: str
    "some.module" - Load this module, watch changes to it and below.
    "module.*" - Load all submodules in this module, using a directory
        listing. Will only go one level (e.g. will load_ext
        path.to.module but not path.to.module.submodule)

Raises
------
ValueError if plugin_spec overlaps with existing spec.
https://github.com/khazhyk/dango.py/blob/1435beaa58b40aecd2d4381296a70da7946d1199/dango/extensions.py#L100-L185
import asyncio import collections import logging import os import time import types import threading import sys import importlib from discord.ext.commands import errors from watchdog import events from watchdog import observers from . import config log = logging.getLogger(__name__) def _is_submodule(parent, child): return parent == child or child.startswith(parent + ".") class ModuleDirWatchdog(events.FileSystemEventHandler): def __init__(self, register, module_lookup, loop=None): self._register = register self.module_lookup = module_lookup self.loop = loop or asyncio.get_event_loop() self.debounce = collections.defaultdict(int) super().__init__() def _call(self, coro): fut = asyncio.run_coroutine_threadsafe( coro, loop=self.loop) return fut.result() async def _unload(self, mod_to_unload): self._register.unload_extension(mod_to_unload) async def _try_reload(self, mod_to_reload): now = time.time() if self.debounce[mod_to_reload] + 2 > now: log.warning("Debouncing reloading of %s", mod_to_reload) return self.debounce[mod_to_reload] = now try: log.info("Reloading %s", mod_to_reload) self._register.reload_extension(mod_to_reload) except BaseException: log.exception("Failed to reload! %s", mod_to_reload) def on_created(self, event): mod_to_reload = self.module_lookup(event) if mod_to_reload: log.info("Detected creation of %s, loading...", event.src_path) self._call(self._try_reload(mod_to_reload)) def on_deleted(self, event): mod_to_reload = self.module_lookup(event) if mod_to_reload: log.info("Detected deletion of %s, unloading...", event.src_path) self._call(self._unload(mod_to_reload)) def on_modified(self, event): mod_to_reload = self.module_lookup(event) if mod_to_reload: log.info("Detected change to %s, reloading...", event.src_path) self._call(self._try_reload(mod_to_reload)) def on_moved(self, event): pass class WatchdogExtensionLoader: def __init__(self, bot): self.bot = bot self._register = ExtensionDependencyRegister(bot) self._watches = {} self._observer = None def start(self): self._observer = observers.Observer() for dir_, handler in self._watches.items(): self._observer.schedule(handler, dir_, recursive=True) self._observer.start() def close(self): if self._observer: self._observer.unschedule_all() self._observer = None
MIT License
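A hedged sketch of the wildcard form, using the loader's own lifecycle methods from the context above (`bot` and the package name `plugins` are assumptions):

loader = WatchdogExtensionLoader(bot)
loader.watch_spec("plugins.*")   # load every top-level submodule of `plugins`
loader.start()                   # begin watching the directory for changes
# ... run the bot; edits to plugin files trigger reloads ...
loader.close()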
pioneers/piecentral
runtime/runtime/ansible.py
TCPClass.receiver
python
def receiver(self, bad_things_queue, state_queue, _pipe):
    def unpackage(data):
        received_proto = notification_pb2.Notification()
        received_proto.ParseFromString(data)
        return received_proto
    try:
        while True:
            recv_data, _ = self.sock.recvfrom(2048)
            if not recv_data:
                bad_things_queue.put(
                    BadThing(
                        sys.exc_info(),
                        "restarting Ansible Processes due to disconnection",
                        event=BAD_EVENTS.DAWN_DISCONNECTED,
                        printStackTrace=False))
                break
            unpackaged_data = unpackage(recv_data)
            if unpackaged_data.header == notification_pb2.Notification.TIMESTAMP_DOWN:
                timestamps = list(unpackaged_data.timestamps)
                timestamps.append(time.perf_counter())
                state_queue.put([HIBIKE_COMMANDS.TIMESTAMP_DOWN, timestamps])
                continue
            if unpackaged_data.header == notification_pb2.Notification.STUDENT_SENT:
                state_queue.put([SM_COMMANDS.STUDENT_UPLOAD, []])
    except ConnectionResetError:
        bad_things_queue.put(
            BadThing(
                sys.exc_info(),
                "restarting Ansible Processes due to disconnection",
                event=BAD_EVENTS.DAWN_DISCONNECTED,
                printStackTrace=False))
    except Exception as e:
        bad_things_queue.put(
            BadThing(
                sys.exc_info(),
                "TCP receiver crashed with error: " + str(e),
                event=BAD_EVENTS.TCP_ERROR,
                printStackTrace=True))
Function run in its own thread which receives data from Dawn. The receiver can receive a command that Dawn is about to upload student code. This message is passed along to SM to ensure all appropriate processes are killed. The receiver detects disconnection from Dawn and restarts all Ansible processes by sending a BadThing to runtime.py.
https://github.com/pioneers/piecentral/blob/d6437d4ddfa386f44e6f70391c5211c723dde56d/runtime/runtime/ansible.py#L457-L506
import asyncio import os import socket import threading import time import sys import selectors import csv import aio_msgpack_rpc as rpc from . import runtime_pb2 from . import ansible_pb2 from . import notification_pb2 from .util import * UDP_SEND_PORT = 1235 UDP_RECV_PORT = 1236 TCP_PORT = 1234 TCP_HZ = 5.0 PACKAGER_HZ = 5.0 SOCKET_HZ = 5.0 @unique class ThreadNames(Enum): UDP_PACKAGER = "udpPackager" UDP_SENDER = "udpSender" UDP_RECEIVER = "udpReceiver" UDP_UNPACKAGER = "udpUnpackager" TCP_PACKAGER = "tcpPackager" TCP_SENDER = "tcpSender" TCP_RECEIVER = "tcpReceiver" TCP_UNPACKAGER = "tcpUnpackager" class TwoBuffer: def __init__(self): self.data = [0, 0] self.put_index = 0 self.get_index = 1 def replace(self, item): self.data[self.put_index] = item self.put_index = 1 - self.put_index self.get_index = 1 - self.get_index def get(self): return self.data[self.get_index] class AnsibleHandler: def __init__(self, packagerName, packagerThread, socketName, socketThread, badThingsQueue, stateQueue, pipe): self.packager_fn = packagerThread self.socket_fn = socketThread self.bad_things_queue = badThingsQueue self.state_queue = stateQueue self.pipe = pipe self.packager_name = packagerName self.socket_name = socketName def thread_maker(self, thread_target, thread_name): thread = threading.Thread( target=thread_target, name=thread_name, args=( self, self.bad_things_queue, self.state_queue, self.pipe)) thread.daemon = True return thread def start(self): packager_thread = self.thread_maker(self.packager_fn, self.packager_name) socket_thread = self.thread_maker(self.socket_fn, self.socket_name) packager_thread.start() socket_thread.start() packager_thread.join() socket_thread.join() class UDPSendClass(AnsibleHandler): def __init__(self, badThingsQueue, stateQueue, pipe): self.send_buffer = TwoBuffer() packager_name = ThreadNames.UDP_PACKAGER sock_send_name = ThreadNames.UDP_SENDER stateQueue.put([SM_COMMANDS.SEND_ADDR, [PROCESS_NAMES.UDP_SEND_PROCESS]]) self.dawn_ip = pipe.recv()[0] super().__init__( packager_name, UDPSendClass.package_data, sock_send_name, UDPSendClass.udp_sender, badThingsQueue, stateQueue, pipe) def package_data(self, bad_things_queue, state_queue, pipe): def package(state): try: proto_message = runtime_pb2.RuntimeData() proto_message.robot_state = state['studentCodeState'][0] for uid, values in state['hibike'][0]['devices'][0].items(): sensor = proto_message.sensor_data.add() sensor.uid = str(uid) sensor.device_type = SENSOR_TYPE[uid >> 72] for param, value in values[0].items(): if value[0] is None: continue param_value_pair = sensor.param_value.add() param_value_pair.param = param if isinstance(value[0], bool): param_value_pair.bool_value = value[0] elif isinstance(value[0], float): param_value_pair.float_value = value[0] elif isinstance(value[0], int): param_value_pair.int_value = value[0] return proto_message.SerializeToString() except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "UDP packager thread has crashed with error:" + str(e), event=BAD_EVENTS.UDP_SEND_ERROR, printStackTrace=True)) while True: try: next_call = time.time() state_queue.put([SM_COMMANDS.SEND_ANSIBLE, []]) raw_state = pipe.recv() pack_state = package(raw_state) self.send_buffer.replace(pack_state) next_call += 1.0 / PACKAGER_HZ time.sleep(max(next_call - time.time(), 0)) except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "UDP packager thread has crashed with error:" + str(e), event=BAD_EVENTS.UDP_SEND_ERROR, printStackTrace=True)) def udp_sender(self, bad_things_queue, 
_state_queue, _pipe): with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: while True: try: next_call = time.time() msg = self.send_buffer.get() if msg != 0 and msg is not None and self.dawn_ip is not None: sock.sendto(msg, (self.dawn_ip, UDP_SEND_PORT)) next_call += 1.0 / SOCKET_HZ time.sleep(max(next_call - time.time(), 0)) except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "UDP sender thread has crashed with error: " + str(e), event=BAD_EVENTS.UDP_SEND_ERROR, printStackTrace=True)) class UDPRecvClass(AnsibleHandler): def __init__(self, badThingsQueue, stateQueue, pipe): self.recv_buffer = TwoBuffer() packager_name = ThreadNames.UDP_UNPACKAGER sock_recv_name = ThreadNames.UDP_RECEIVER host = "" self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind((host, UDP_RECV_PORT)) self.socket.setblocking(False) self.curr_addr = None self.control_state = None self.sm_mapping = { ansible_pb2.DawnData.IDLE: SM_COMMANDS.ENTER_IDLE, ansible_pb2.DawnData.TELEOP: SM_COMMANDS.ENTER_TELEOP, ansible_pb2.DawnData.AUTONOMOUS: SM_COMMANDS.ENTER_AUTO, ansible_pb2.DawnData.ESTOP: SM_COMMANDS.EMERGENCY_STOP } self.team_color_mapping = { ansible_pb2.DawnData.BLUE: "blue", ansible_pb2.DawnData.GOLD: "yellow", } super().__init__( packager_name, UDPRecvClass.unpackage_data, sock_recv_name, UDPRecvClass.udp_receiver, badThingsQueue, stateQueue, pipe) def udp_receiver(self): try: while True: recv_data, addr = self.socket.recvfrom(2048) except BlockingIOError: self.recv_buffer.replace(recv_data) if self.curr_addr is None: self.curr_addr = addr self.state_queue.put([SM_COMMANDS.SET_ADDR, [addr]]) def unpackage_data(self): def unpackage(data): unpackaged_data = {} received_proto = ansible_pb2.DawnData() received_proto.ParseFromString(data) new_state = received_proto.student_code_status unpackaged_data["student_code_status"] = [new_state, time.time()] if self.pipe.poll(): self.control_state = self.pipe.recv() if self.control_state is None or new_state != self.control_state: self.control_state = received_proto.student_code_status sm_state_command = self.sm_mapping[new_state] self.state_queue.put([sm_state_command, []]) all_gamepad_dict = {} for gamepad in received_proto.gamepads: gamepad_dict = {} gamepad_dict["axes"] = dict(enumerate(gamepad.axes)) gamepad_dict["buttons"] = dict(enumerate(gamepad.buttons)) all_gamepad_dict[gamepad.index] = gamepad_dict unpackaged_data["gamepads"] = [all_gamepad_dict, time.time()] if received_proto.team_color != ansible_pb2.DawnData.NONE: self.state_queue.put([SM_COMMANDS.SET_TEAM, [self.team_color_mapping[received_proto.team_color]]]) return unpackaged_data unpackaged_data = unpackage(self.recv_buffer.get()) self.state_queue.put([SM_COMMANDS.RECV_ANSIBLE, [unpackaged_data]]) def start(self): sel = selectors.DefaultSelector() sel.register(self.socket, selectors.EVENT_READ) try: while True: sel.select() self.udp_receiver() self.unpackage_data() except Exception as e: self.bad_things_queue.put( BadThing( sys.exc_info(), "UDP receiver thread has crashed with error: " + str(e), event=BAD_EVENTS.UDP_RECV_ERROR, printStackTrace=True)) class TCPClass(AnsibleHandler): def __init__(self, badThingsQueue, stateQueue, pipe): self.send_buffer = TwoBuffer() self.recv_buffer = TwoBuffer() send_name = ThreadNames.TCP_SENDER recv_name = ThreadNames.TCP_RECEIVER super().__init__( send_name, TCPClass.sender, recv_name, TCPClass.receiver, badThingsQueue, stateQueue, pipe) stateQueue.put([SM_COMMANDS.SEND_ADDR, [PROCESS_NAMES.TCP_PROCESS]]) self.dawn_ip = 
pipe.recv()[0] self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.sock.connect((self.dawn_ip, TCP_PORT)) proto_message = notification_pb2.Notification() proto_message.header = notification_pb2.Notification.SENSOR_MAPPING peripherals_file = os.path.join(os.path.dirname(__file__), 'namedPeripherals.csv') with open(peripherals_file, 'r') as mapping_file: sensor_mappings = csv.reader(mapping_file) for row in sensor_mappings: pair = proto_message.sensor_mapping.add() pair.device_student_name = row[0] pair.device_uid = row[1] self.sock.sendall(proto_message.SerializeToString()) def sender(self, bad_things_queue, _state_queue, pipe): def package_message(data): try: proto_message = notification_pb2.Notification() proto_message.header = notification_pb2.Notification.CONSOLE_LOGGING proto_message.console_output = data return proto_message.SerializeToString() except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "TCP packager crashed with error: " + str(e), event=BAD_EVENTS.TCP_ERROR, printStackTrace=True)) def package_confirm(confirm): try: proto_message = notification_pb2.Notification() if confirm: proto_message.header = notification_pb2.Notification.STUDENT_RECEIVED else: proto_message.header = notification_pb2.Notification.STUDENT_NOT_RECEIVED return proto_message.SerializeToString() except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "TCP packager crashed with error: " + str(e), event=BAD_EVENTS.TCP_ERROR, printStackTrace=True)) def package_timestamp(timestamps): try: timestamp_message = notification_pb2.Notification() timestamp_message.header = notification_pb2.Notification.TIMESTAMP_UP timestamp_message.timestamps.extend(timestamps + [time.perf_counter()]) return timestamp_message.SerializeToString() except Exception as e: bad_things_queue.put( BadThing( sys.exc_info(), "TCP packager crashed with error: " + str(e), event=BAD_EVENTS.TCP_ERROR, printStackTrace=True)) while True: try: raw_message = pipe.recv() next_call = time.time() next_call += 1.0 / TCP_HZ data = raw_message[1] if raw_message[0] == ANSIBLE_COMMANDS.STUDENT_UPLOAD: packed_msg = package_confirm(data) elif raw_message[0] == ANSIBLE_COMMANDS.CONSOLE: packed_msg = package_message(data) elif raw_message[0] == ANSIBLE_COMMANDS.TIMESTAMP_UP: packed_msg = package_timestamp(data) else: continue if packed_msg is not None: self.sock.sendall(packed_msg) time.sleep(max(next_call - time.time(), 0)) except Exception as e: bad_things_queue.put(BadThing(sys.exc_info(), "TCP sender crashed with error: " + str(e), event=BAD_EVENTS.TCP_ERROR, printStackTrace=True))
Apache License 2.0
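The ansible.py context above hands data between threads through TwoBuffer, a two-slot double buffer: one slot is written while the other is read, and replace() swaps the roles. The class is small enough to reproduce verbatim as a runnable sketch:

class TwoBuffer:
    """Two-slot buffer: writers fill put_index while readers see get_index."""
    def __init__(self):
        self.data = [0, 0]
        self.put_index = 0
        self.get_index = 1

    def replace(self, item):
        self.data[self.put_index] = item
        # Swap roles: the slot just written becomes the readable one.
        self.put_index = 1 - self.put_index
        self.get_index = 1 - self.get_index

    def get(self):
        return self.data[self.get_index]

buf = TwoBuffer()
print(buf.get())       # 0, the initial placeholder
buf.replace(b"packet")
print(buf.get())       # b'packet'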
vincent-lg/tsunami
src/secondaires/navigation/__init__.py
Module.ajouter_fiche_matelot
python
def ajouter_fiche_matelot(self, fiche): self.fiches[fiche.cle] = fiche
Adds the matelot (sailor).
https://github.com/vincent-lg/tsunami/blob/36b3b974f6eefbf15cd5d5f099fc14630e66570b/src/secondaires/navigation/__init__.py#L584-L586
type(importeur).man_logs.creer_logger("navigation", "ordres", "ordres.log") type(importeur).man_logs.creer_logger("navigation", "monstres", "monstres.log") import os from random import randint from vector import * from abstraits.module import * from corps.fonctions import valider_cle from primaires.format.fonctions import format_nb from primaires.salle.chemin import Chemin from primaires.salle.salle import Salle from primaires.vehicule.vecteur import Vecteur from secondaires.navigation.config import CFG_TEXTE from .navire import Navire from .navire_automatique import NavireAutomatique from .elements import types as types_elements from .elements.base import BaseElement from .vent import Vent from .visible import Visible from . import cherchables from . import commandes from . import editeurs from . import masques from . import types from .modele import ModeleNavire from .constantes import * from .equipage.equipage import Equipage from .equipage.fiche import FicheMatelot from .monstre.prototype import PrototypeMonstreMarin, types_monstres from .chantier_naval import ChantierNaval from .navires_vente import NaviresVente from .matelots_vente import MatelotsVente from .repere import Repere from .trajet import Trajet from .prompt import PromptNavigation from .cale import TYPES as CALE_TYPES from .templates.tenir_gouvernail import TenirGouvernail from .templates.tenir_rames import TenirRames class Module(BaseModule): def __init__(self, importeur): BaseModule.__init__(self, importeur, "navigation", "secondaire") self.preparer_apres = ["salle"] self.commandes = [] self.cfg = None self.fichier_suivi = None self.modeles = {} self.nav_logger = type(self.importeur).man_logs.creer_logger( "navigation", "navires", "navires.log") self.navires = {} self.navires_automatiques = {} self.elements = {} self.types_elements = types_elements self.vents = {} self.vents_par_etendue = {} self.fiches = {} self.chantiers = {} self.trajets = {} self.reperes = {} self.matelots = {} self.types_monstres = types_monstres self.monstres = {} self.points_ovservables = { "cotes": Visible.trouver_cotes, "navires": Visible.trouver_navires, "reperes": Repere.trouver_reperes, } type(importeur).espace["navires"] = self.navires def config(self): self.cfg = type(self.importeur).anaconf.get_config("navigation", "navigation/navigation.cfg", "modele navigationt", CFG_TEXTE) self.fichier_suivi = self.cfg.fichier_suivi self.importeur.scripting.a_charger.append(self) his_voile = self.importeur.perso.ajouter_etat("hisser_voile") his_voile.msg_refus = "Vous êtes en train de hisser la voile" his_voile.msg_visible = "hisse une voile ici" his_voile.act_autorisees = ["regarder", "parler"] pli_voile = self.importeur.perso.ajouter_etat("plier_voile") pli_voile.msg_refus = "Vous êtes en train de replier la voile" pli_voile.msg_visible = "replie une voile ici" pli_voile.act_autorisees = ["regarder", "parler"] charger_canon = self.importeur.perso.ajouter_etat("charger_canon") charger_canon.msg_refus = "Vous êtes en train de charger le canon" charger_canon.msg_visible = "charge le canon ici" charger_canon.act_autorisees = ["parler"] self.importeur.perso.ajouter_etat("tenir_gouvernail", TenirGouvernail) u_loch = self.importeur.perso.ajouter_etat("utiliser_loch") u_loch.msg_refus = "Vous êtes en train de manipuler le loch" u_loch.msg_visible = "manipule le loch ici" u_loch.act_autorisees = ["regarder", "parler"] point = self.importeur.perso.ajouter_etat("faire_point") point.msg_refus = "Vous êtes en train de faire votre point" point.msg_visible = "fait 
le point ici" point.act_autorisees = ["parler"] ancre = self.importeur.perso.ajouter_etat("ancre") ancre.msg_refus = "Vous êtes occué avec l'ancre." ancre.msg_visible = "manipule l'ancre ici" ancre.act_autorisees = ["regarder", "parler"] self.importeur.perso.ajouter_etat("tenir_rames", TenirRames) importeur.perso.ajouter_niveau("navigation", "navigation") importeur.commerce.types_services["navire"] = NaviresVente() importeur.commerce.aides_types["navire"] = "Ce service permet la vente de navires. Vous devez tout " "simplement préciser la clé du modèle de navire. Attention " "cependant : pour que la vente de navires dans ce magasin " "puisse se faire, le magasin doit être relié à un chantier " "naval." importeur.commerce.types_services["matelot"] = MatelotsVente() importeur.commerce.aides_types["matelot"] = "Ce service permet la vente de matelots. Vous devez tout " "simplement préciser la clé du matelot à mettre en vente " "(sa clé de prototype de PNJ). La fiche du matelot " "correspondant à ce prototype doit avoir été définie au " "préalable." importeur.hook.ajouter_hook("navire:sombre", "Hook appelé quand un navire fait naufrage.") BaseModule.config(self) def init(self): self.importeur.scripting.valeurs["navire"] = self.navires importeur.perso.ajouter_prompt(PromptNavigation) self.importeur.hook["salle:regarder"].ajouter_evenement( self.navire_amarre) self.importeur.hook["salle:regarder"].ajouter_evenement( self.navire_accoste) self.importeur.hook["objet:peut_boire"].ajouter_evenement( Navire.peut_boire) self.importeur.interpreteur.categories["navire"] = "Commandes de navigation" self.importeur.hook["pnj:arrive"].ajouter_evenement( self.combat_matelot) self.importeur.hook["pnj:attaque"].ajouter_evenement( self.armer_matelot) self.importeur.hook["pnj:détruit"].ajouter_evenement( self.detruire_pnj) self.importeur.hook["pnj:meurt"].ajouter_evenement( self.meurt_PNJ) self.importeur.hook["pnj:nom"].ajouter_evenement( Equipage.get_nom_matelot) self.importeur.hook["salle:trouver_chemins_droits"].ajouter_evenement( self.trouver_chemins_droits) self.importeur.hook["stats:infos"].ajouter_evenement( self.stats_navigation) self.importeur.hook["personnage:deplacer"].ajouter_evenement( self.modifier_prompt) self.importeur.hook["objet:doit_garder"].ajouter_evenement( self.objets_en_cale) importeur.perso.ajouter_talent("calfeutrage", "calfeutrage", "navigation", 0.5) modeles = self.importeur.supenr.charger_groupe(ModeleNavire) for modele in modeles: self.modeles[modele.cle] = modele nb_modeles = len(modeles) self.nav_logger.info(format_nb(nb_modeles, "{nb} modèle{s} de navire récupéré{s}")) navires = self.importeur.supenr.charger_groupe(Navire) for navire in navires: self.ajouter_navire(navire) nb_navires = len(navires) self.nav_logger.info(format_nb(nb_navires, "{nb} navire{s} récupéré{s}")) fiches = self.importeur.supenr.charger_groupe(NavireAutomatique) for fiche in fiches: self.ajouter_navire_automatique(fiche) nb_autos = len(fiches) self.nav_logger.info(format_nb(nb_autos, "{nb} fiche{s} de navire{s} automatique{s} " "récupérée{s}", fem=True)) elements = self.importeur.supenr.charger_groupe(BaseElement) for element in elements: self.elements[element.cle] = element nb_elements = len(elements) self.nav_logger.info(format_nb(nb_elements, "{nb} élément{s} de navire récupéré{s}")) vents = self.importeur.supenr.charger_groupe(Vent) for vent in vents: self.ajouter_vent(vent) nb_vents = len(self.vents) self.nav_logger.info(format_nb(nb_vents, "{nb} vent{s} récupéré{s}")) fiches = 
self.importeur.supenr.charger_groupe(FicheMatelot) for fiche in fiches: self.ajouter_fiche_matelot(fiche) nb_mat = len(self.fiches) self.nav_logger.info(format_nb(nb_mat, "{nb} fiche{s} de matelot récupérée{s}", fem=True)) trajets = self.importeur.supenr.charger_groupe(Trajet) for trajet in trajets: self.ajouter_trajet(trajet) nb_trajets = len(self.trajets) self.nav_logger.info(format_nb(nb_trajets, "{nb} trajet{s} maritime{s} récupéré{s}")) reperes = self.importeur.supenr.charger_groupe(Repere) for repere in reperes: self.ajouter_repere(repere) nb_reperes = len(self.reperes) self.nav_logger.info(format_nb(nb_reperes, "{nb} repère{s} récupéré{s}")) chantiers = self.importeur.supenr.charger_groupe(ChantierNaval) for chantier in chantiers: self.ajouter_chantier_naval(chantier) nb_chantiers = len(chantiers) self.nav_logger.info(format_nb(nb_chantiers, "{nb} chantier{s} naval{s} récupéré{s}")) chemin = os.path.join(self.chemin, "monstre", "types") pychemin = "secondaires.navigation.monstre.types" for nom_fichier in os.listdir(chemin): if nom_fichier.startswith("_") or not nom_fichier.endswith(".py"): continue nom_fichier = pychemin + "." + nom_fichier[:-3] __import__(nom_fichier) self.importeur.diffact.ajouter_action("dep_navire", TPS_VIRT, self.avancer_navires) self.importeur.diffact.ajouter_action("vir_navire", 3, self.virer_navires) self.importeur.diffact.ajouter_action("nauffrages", 5, self.nauffrages) self.importeur.diffact.ajouter_action("tick_chantiers", 60, self.tick_chantiers) self.importeur.salle.salles_a_cartographier.append( self.get_navires_presents) importeur.evt.ajouter_evenement("sombre", "Un navire sombre", "Naufrage de {navire.cle}.", "navire:sombre") BaseModule.init(self) def ajouter_commandes(self): self.commandes = [ commandes.allure.CmdAllure(), commandes.amarre.CmdAmarre(), commandes.ancre.CmdAncre(), commandes.cale.CmdCale(), commandes.calfeutrer.CmdCalfeutrer(), commandes.canon.CmdCanon(), commandes.cap.CmdCap(), commandes.chantier.CmdChantier(), commandes.debarquer.CmdDebarquer(), commandes.detailler.CmdDetailler(), commandes.ecoper.CmdEcoper(), commandes.eltedit.CmdEltedit(), commandes.embarquer.CmdEmbarquer(), commandes.equipage.CmdEquipage(), commandes.gouvernail.CmdGouvernail(), commandes.loch.CmdLoch(), commandes.matelot.CmdMatelot(), commandes.navire.CmdNavire(), commandes.navire_automatique.CmdNavireAutomatique(), commandes.passerelle.CmdPasserelle(), commandes.pavillon.CmdPavillon(), commandes.point.CmdPoint(), commandes.rames.CmdRames(), commandes.saborder.CmdSaborder(), commandes.vent.CmdVent(), commandes.voile.CmdVoile(), ] for cmd in self.commandes: self.importeur.interpreteur.ajouter_commande(cmd) self.importeur.interpreteur.ajouter_editeur( editeurs.autonavire.EdtNaedit) self.importeur.interpreteur.ajouter_editeur( editeurs.matedit.EdtMatedit) self.importeur.interpreteur.ajouter_editeur( editeurs.eltedit.EdtEltedit) self.importeur.interpreteur.ajouter_editeur(editeurs.shedit.EdtShedit) def preparer(self): self.nav_logger.info("Mise à jour des navires...") for navire in self.navires.values(): for salle in navire.salles.values(): salle.illuminee = True for element in salle.elements: element.mettre_a_jour_attributs() rames = salle.rames if rames: rames.vitesse = "immobile" rames.centrer() if rames.tenu: rames.tenu.etats.retirer("tenir_rames") navire.construire_depuis_modele(False) if (len(navire.salles) ** 2 - len(navire.salles)) != len(navire.modele.graph): self.nav_logger.info("Calcul du graph du modèle de " "navire {}.".format(navire.modele.cle)) 
navire.modele.generer_graph() self.nav_logger.info("... mise à jour des navires terminée.") for navire in self.navires.values(): for matelot in navire.equipage.matelots.values(): if matelot.personnage: self.matelots[matelot.personnage.identifiant] = matelot else: matelot.detruire() continue if matelot.ordres: matelot.nettoyer_ordres() matelot.executer_ordres() navire.equipage.points_max = navire.equipage.points_actuels self.nav_logger.info("Mise à jour des matelots terminée.") Navire.obs_recif = ( self.importeur.salle.obstacles["récif"], self.importeur.salle.obstacles["rapide"], self.importeur.salle.obstacles["banc de sable"], self.importeur.salle.obstacles["corail"], ) for obstacle in Navire.obs_recif: obstacle.symbole = "!" for nom_type in importeur.objet.get_types_herites("matériau"): CALE_TYPES[nom_type] = "marchandises" for nom_type in importeur.objet.get_types_herites("outil"): CALE_TYPES[nom_type] = "outils" def creer_modele(self, cle): valider_cle(cle) if cle in self.modeles: raise KeyError("le modèle de navire {} existe déjà".format(cle)) modele = ModeleNavire(cle) self.ajouter_modele(modele) return modele def ajouter_modele(self, modele): self.modeles[modele.cle] = modele def supprimer_modele(self, cle): if cle not in self.modeles: raise KeyError("le modèle de navire de clé {} est inconnue".format( cle)) modele = self.modeles[cle] del self.modeles[cle] modele.detruire() def creer_navire(self, modele): navire = Navire(modele) self.ajouter_navire(navire) return navire def ajouter_navire(self, navire): cle = navire.cle self.navires[cle] = navire self.importeur.diffact.ajouter_action("tick_equipages_{}".format(cle), 1, self.tick_equipages, navire) self.importeur.diffact.ajouter_action("tick_vigies_{}".format(cle), randint(0, 20), self.tick_vigies, navire) self.importeur.diffact.ajouter_action("controle_equipages_{}".format( cle), randint(0, 5), self.controle_equipages, navire) self.importeur.diffact.ajouter_action("objectif_equipages_{}".format( cle), randint(0, 15), self.objectif_equipages, navire) def supprimer_navire(self, cle): if cle not in self.navires: raise KeyError("le navire de clé {} est introuvable".format(cle)) navire = self.navires[cle] self.importeur.diffact.retirer_action("tick_equipages_{}".format(cle), False) self.importeur.diffact.retirer_action("tick_vigies_{}".format(cle), False) self.importeur.diffact.retirer_action("controle_equipages_{}".format( cle), False) self.importeur.diffact.retirer_action("objectif_equipages_{}".format( cle), False) navire.detruire() del self.navires[cle] def creer_navire_automatique(self, cle): fiche = NavireAutomatique(cle) self.ajouter_navire_automatique(fiche) return fiche def ajouter_navire_automatique(self, fiche): self.navires_automatiques[fiche.cle] = fiche def supprimer_navire_automatique(self, cle): self.navires_automatiques.pop(cle).detruire() def creer_element(self, cle, type_elt): elt = type_elt(cle) self.ajouter_element(elt) return elt def ajouter_element(self, element): self.elements[element.cle] = element def supprimer_element(self, cle): if cle not in self.elements: raise KeyError("l'élément de clé {} est introuvable".format(cle)) element = self.elements[cle] element.detruire() del self.elements[cle] def objets_en_cale(self): objets = [] for navire in self.navires.values(): cale = navire.cale if cale: for objet in cale.conteneur: objets.append(objet) print("sauve", objets) return objets def get_vents_etendue(self, cle): return self.vents_par_etendue.get(cle, []) def creer_vent(self, etendue, x, y, z, vitesse=1, 
direction=0): vent = Vent(etendue, x, y, z, vitesse, direction) self.ajouter_vent(vent) return vent def ajouter_vent(self, vent): self.vents[vent.cle] = vent self.vents_par_etendue[vent.etendue.cle] = self.vents_par_etendue.get( vent.etendue.cle, []) + [vent] def supprimer_vent(self, cle): vent = self.vents[cle] self.vents_par_etendue[vent.etendue.cle].remove(vent) del self.vents[cle] vent.detruire() def creer_fiche_matelot(self, prototype): fiche = FicheMatelot(prototype) self.ajouter_fiche_matelot(fiche) return fiche
BSD 3-Clause New or Revised License
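ajouter_fiche_matelot is a one-line registry insert keyed on the fiche's cle attribute, the same pattern this module uses for modeles, navires, vents and trajets. A minimal sketch; the FicheMatelot stub below is illustrative, not the real class:

fiches = {}

class FicheMatelot:
    """Stub for secondaires.navigation.equipage.fiche.FicheMatelot."""
    def __init__(self, cle):
        self.cle = cle

def ajouter_fiche_matelot(fiche):
    fiches[fiche.cle] = fiche  # register under the fiche's key

ajouter_fiche_matelot(FicheMatelot("gabier"))
print(sorted(fiches))  # ['gabier']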
isc-projects/forge
tests/srv_msg.py
wait_for_message_count_in_log
python
def wait_for_message_count_in_log(count, line, timeout=4, log_file=None): log_file, count, line = test_define_value(log_file, count, line) for i in range(timeout + 1): result = multi_protocol_functions.get_line_count_in_log(line, log_file) if count <= result: return if i != timeout: forge_sleep(1, 'second') assert False, 'Timeout reached while waiting for {} x "{}"'.format(count, line)
Wait until a line appears a certain number of times in a log.
https://github.com/isc-projects/forge/blob/dfec8b41003d6b5a229f69ee93616e0e5cc6d71b/tests/srv_msg.py#L448-L460
import sys import json import importlib from forge_cfg import world, step from protosupport import dns, multi_protocol_functions from protosupport.multi_protocol_functions import test_define_value, substitute_vars from scapy.layers.dhcp6 import DUID_LLT, DUID_LL, DUID_EN class Dispatcher(object): def __getattr__(self, attr_name): mod = importlib.import_module("protosupport.%s.srv_msg" % world.f_cfg.proto) return getattr(mod, attr_name) dhcpmsg = Dispatcher() def get_interface(): return world.f_cfg.iface def get_proto_version(): return world.f_cfg.proto def get_server_interface(): return world.f_cfg.server_iface def get_server_path(): return world.f_cfg.software_install_path @step(r'Client requests option (\d+).') def client_requests_option(opt_type): dhcpmsg.client_requests_option(opt_type) @step(r'(Client|RelayAgent) sets (\w+) value to (\S+).') def client_sets_value(sender_type, value_name, new_value): if not isinstance(new_value, (DUID_LLT, DUID_LL, DUID_EN)): value_name, new_value = test_define_value(value_name, new_value) dhcpmsg.client_sets_value(value_name, new_value) @step(r'Through (\S+) interface to address (\S+) client sends (\w+) message.') def client_send_msg_via_interface(iface, addr, msgname): msgname, iface, addr = test_define_value(msgname, iface, addr) dhcpmsg.client_send_msg(msgname, iface, addr) @step(r'Client sends (\w+) message.') def client_send_msg(msgname): dhcpmsg.client_send_msg(msgname, None, None) @step(r'Send (\S+) with raw appending (.+)') def send_raw_message(msg_type="", raw_append=None): dhcpmsg.build_raw(msg=msg_type, append=raw_append) @step(r'Client adds to the message (\S+) with value (\S+).') def client_does_include_with_value(opt_type, value): opt_type, value = test_define_value(opt_type, value) dhcpmsg.client_does_include(None, opt_type, value) @step(r'(\S+) does (NOT )?include (\S+).') def client_does_include(sender_type, opt_type): dhcpmsg.client_does_include(str(sender_type), opt_type, None) @step(r'Relay-agent does include (\S+).') def relay_agent_does_include(opt_type): @step(r'Client chooses (GLOBAL)|(LINK_LOCAL) UNICAST address.') def unicast_addres(addr_type, addr_type2): dhcpmsg.unicast_addres(True if addr_type else False) @step(r'Generate new (\S+).') def generate_new(opt): dhcpmsg.generate_new(opt) @step(r'RelayAgent forwards message encapsulated in (\d+) level(s)?.') def create_relay_forward(level=1): dhcpmsg.create_relay_forward(level) @step(r'(Client|RelayAgent) adds suboption for vendor specific information with code: (\d+) and data: (\S+).') def add_vendor_suboption(sender_type, code, data): dhcpmsg.add_vendor_suboption(int(code), data) @step(r'Before sending a message set filed named (\S+) to (\S+) as type (\S+).') def change_message_filed(message_filed, value, value_type): message_filed, value, value_type = test_define_value(message_filed, value, value_type) dhcpmsg.change_message_field(message_filed, value, value_type) @step(r'Server MUST NOT respond.') def send_dont_wait_for_message(): dhcpmsg.send_wait_for_message("MUST", False, None) @step(r'Server (\S+) (NOT )?respond with (\w+) message.') def send_wait_for_message(server_type, message, expect_response=True): return dhcpmsg.send_wait_for_message(server_type, expect_response, message) @step(r'(Response|Relayed Message) MUST (NOT )?include option (\d+).') def response_check_include_option(opt_code, expect_include=True): dhcpmsg.response_check_include_option(expect_include, opt_code) @step(r'(Response|Relayed Message) MUST (NOT )?contain (\S+) (\S+).') def 
response_check_content(data_type, value, expected=True): dhcpmsg.response_check_content(expected, data_type, value) @step(r'(Response|Relayed Message) option (\d+) MUST (NOT )?contain (\S+) (\S+).') def response_check_option_content(opt_code, data_type, expected_value, expect_include=True): data_type, expected_value = test_define_value(data_type, expected_value) if data_type == "sub-option": dhcpmsg.response_check_include_suboption(opt_code, expect_include, expected_value) else: dhcpmsg.response_check_option_content(opt_code, expect_include, data_type, expected_value) @step(r'(Response|Relayed Message) sub-option (\d+) from option (\d+) MUST (NOT )?contain (\S+) (\S+).') def response_check_suboption_content(subopt_code, opt_code, data_type, value, expect_include=True): dhcpmsg.response_check_suboption_content(subopt_code, opt_code, expect_include, data_type, value) def get_suboption(opt_code, subopt_code): return dhcpmsg.get_suboption(opt_code, subopt_code) @step(r'Client for DNS Question Record uses address: (\S+) type (\S+) class (\S+).') def dns_question_record(addr, qtype, qclass): dns.dns_question_record(str(addr), qtype, qclass) @step(r'For DNS query client sets (\w+) value to (\S+).') def dns_query_set_value(variable_name, value): dns.set_val() @step(r'Client sends DNS query.') def client_send_dns_query(): dns.prepare_query() @step(r'DNS server (\S+) (NOT )?respond with DNS query.') def send_wait_for_query(type, expect_include=True): dns.send_wait_for_query(type, expect_include) @step(r'Received DNS query MUST (NOT )?contain (\S+) with value (\S+).') def dns_check(expect, data_type, expected_data_value): dns.check_dns_respond(expect, str(data_type), expected_data_value) @step(r'Received DNS query MUST include (NOT )?empty (QUESTION|ANSWER|AUTHORITATIVE_NAMESERVERS|ADDITIONAL_RECORDS) part.') def dns_option(part_name, expect_include=True): dns.check_dns_option(expect_include, str(part_name)) @step(r'Received DNS part (QUESTION|ANSWER|AUTHORITATIVE_NAMESERVERS|ADDITIONAL_RECORDS) MUST (NOT )?contain (\S+) with value (\S+).') def dns_option_content(part_name, value_name, value, expect_include=True): dns.dns_option_content(part_name, expect_include, str(value_name), str(value)) @step(r'Client copies (\S+) option from received message.') def client_copy_option(option_name): assert len(world.srvmsg), "No messages received, nothing to copy." dhcpmsg.client_copy_option(option_name) @step(r'Client saves (\S+) option from received message.') def client_save_option(option_name): assert len(world.srvmsg), "No messages received, nothing to save." dhcpmsg.client_save_option(option_name) @step(r'Client saves into set no. (\d+) (\S+) option from received message.') def client_save_option_count(count, option_name): assert len(world.srvmsg), "No messages received, nothing to save." dhcpmsg.client_save_option(option_name, count) @step(r'Client adds saved options. And (DONT )?Erase.') def client_add_saved_option(erase=False): assert len(world.savedmsg), "No options to add." dhcpmsg.client_add_saved_option(erase) @step(r'Client adds saved options in set no. (\d+). And (DONT )?Erase.') def client_add_saved_option_count(count, erase=False): assert len(world.savedmsg), "No options to add." 
dhcpmsg.client_add_saved_option(erase, count) @step(r'Save (\S+) value from (\d+) option.') def save_value_from_option(value_name, option_name): dhcpmsg.save_value_from_option(value_name, option_name) @step(r'Received (\S+) value in option (\d+) is the same as saved value.') def compare_values(value_name, option_name): dhcpmsg.compare_values(value_name, option_name) @step(r'Set network variable (\S+) with value (\S+).') def network_variable(value_name, value): value_name, value = test_define_value(value_name, value) multi_protocol_functions.change_network_variables(value_name, value) @step(r'File stored in (\S+) MUST (NOT )?contain line or phrase: (.+)') def file_contains_line(file_path, condition, line): file_path, line = test_define_value(file_path, line) multi_protocol_functions.regular_file_contain(file_path, condition, line) @step(r'DNS log MUST (NOT )?contain line: (.+)') def dns_log_contains(condition, line): line = test_define_value(line)[0] multi_protocol_functions.regular_file_contain(world.cfg["dns_log_file"], condition, line) def log_contains(line, log_file=None): line = test_define_value(line)[0] multi_protocol_functions.log_contains(line, None, log_file) def log_doesnt_contain(line, log_file=None): line = test_define_value(line)[0] multi_protocol_functions.log_contains(line, 'NOT', log_file) def lease_file_contains(line): line = test_define_value(line)[0] multi_protocol_functions.regular_file_contain(world.f_cfg.get_leases_path(), None, line) def lease_file_doesnt_contain(line): line = test_define_value(line)[0] multi_protocol_functions.regular_file_contain(world.f_cfg.get_leases_path(), True, line) @step(r'Remote (\S+) file stored in (\S+) MUST (NOT )?contain line or phrase: (.+)') def remote_log_includes_line(destination, file_path, condition, line): destination, file_path, line = test_define_value(destination, file_path, line) multi_protocol_functions.regular_file_contain(file_path, condition, line, destination=destination) @step(r'Table (\S+) in (\S+) database MUST (NOT )?contain line or phrase: (.+)') def table_contains_line(table_name, db_type, line, expect=True): table_name, db_type, line = test_define_value(table_name, db_type, line) multi_protocol_functions.db_table_contain(table_name, db_type, line=line, expect=expect) @step(r'Remove all records from table (\S+) in (\S+) database.') def remove_from_db_table(table_name, db_type): table_name, db_type = test_define_value(table_name, db_type) multi_protocol_functions.remove_from_db_table(table_name, db_type) @step(r'(\S+) log contains (\d+) of line: (.+)') def log_includes_count(server_type, count, line): count, line = test_define_value(count, line) multi_protocol_functions.log_contains_count(server_type, count, line) @step(r'(\S+) log contains (\d+) of line: (.+)')
ISC License
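wait_for_message_count_in_log polls once per second until the line has appeared count times, giving up with an assertion after timeout polls. A self-contained sketch of the same poll-until-count loop; get_count below is a generic stand-in for forge's get_line_count_in_log:

import time

def wait_for_count(get_count, expected, timeout=4, poll_interval=1.0):
    """Poll get_count() until it reaches expected, or fail after the timeout."""
    for i in range(timeout + 1):
        if get_count() >= expected:
            return
        if i != timeout:              # no sleep after the final check
            time.sleep(poll_interval)
    raise TimeoutError("expected count %d never reached" % expected)

counts = iter([0, 1, 3])
wait_for_count(lambda: next(counts), expected=3, poll_interval=0.01)
print("reached")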
geoffreynyaga/kenya-one-project
CORE/weissinger/wing.py
Wing.compute_coordinates
python
def compute_coordinates(self): self.yroot = 0.0 self.ytip = self.span / 2.0 self.xroot = [0.0, self.root] xrootc4 = self.root / 4.0 xtipc4 = xrootc4 + self.span / 2.0 * tan(self.sweep * pi / 180.0) self.xtip = [xtipc4 - 0.25 * self.tip, xtipc4 + 0.75 * self.tip]
Computes root and tip x and y coordinates
https://github.com/geoffreynyaga/kenya-one-project/blob/157cd7a3c3d9014e31ef21ca21de43f04d039997/CORE/weissinger/wing.py#L91-L99
from math import pi, tan def slope(y2, y1, x2, x1): return (y2 - y1) / (x2 - x1) class Wing: def __init__(self, span, root, tip, sweep, washout): self.span = span self.root = root self.tip = tip self.sweep = sweep self.washout = washout self.area = None self.aspect_ratio = None self.cbar = None self.xroot = [] self.yroot = None self.xtip = [] self.ytip = None self.compute_geometry() def compute_geometry(self): self.area = 0.5 * (self.root + self.tip) * self.span self.aspect_ratio = self.span ** 2.0 / self.area self.compute_mac() def compute_mac(self): if not self.yroot: self.compute_coordinates() mt = slope(self.xtip[0], self.xroot[0], self.ytip, self.yroot) mb = slope(self.xtip[1], self.xroot[1], self.ytip, self.yroot) bt = self.xroot[0] bb = self.xroot[1] self.cbar = ( 2.0 / self.area * ( 1.0 / 3.0 * self.ytip ** 3.0 * (mb - mt) ** 2.0 + self.ytip ** 2.0 * (mb - mt) * (bb - bt) + self.ytip * (bb - bt) ** 2.0 ) )
MIT License
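compute_coordinates puts the root chord at x in [0, root], sweeps the tip quarter-chord point back by (span/2)·tan(sweep), and places the tip chord a quarter ahead of and three quarters behind that point. The same arithmetic as a standalone function:

from math import pi, tan

def tip_coordinates(span, root, tip, sweep_deg):
    """Return (x_tip_leading, x_tip_trailing, y_tip) for a swept tapered wing."""
    y_tip = span / 2.0
    x_root_c4 = root / 4.0                                    # root quarter-chord
    x_tip_c4 = x_root_c4 + y_tip * tan(sweep_deg * pi / 180)  # swept tip quarter-chord
    return x_tip_c4 - 0.25 * tip, x_tip_c4 + 0.75 * tip, y_tip

print(tip_coordinates(span=10.0, root=2.0, tip=1.0, sweep_deg=15.0))
# ~ (1.59, 2.59, 5.0)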
mabuchilab/qnet
src/qnet/algebra/core/circuit_algebra.py
Component.args
python
def args(self): return ()
Empty tuple (no arguments). See also :attr:`~CircuitSymbol.sym_args`, which is a tuple of the keyword argument values.
https://github.com/mabuchilab/qnet/blob/cc20d26dad78691d34c67173e5cd67dcac94208a/src/qnet/algebra/core/circuit_algebra.py#L721-L728
import os import re from abc import ABCMeta, abstractmethod from collections import OrderedDict from functools import reduce import numpy as np import sympy from sympy import I from sympy import Matrix as SympyMatrix from sympy import symbols, sympify from .abstract_algebra import ( Expression, Operation, substitute, ) from .algebraic_properties import ( assoc, check_cdims, filter_neutral, filter_cid, match_replace, match_replace_binary) from .exceptions import ( AlgebraError, BasisNotSetError, CannotConvertToSLH, CannotEliminateAutomatically, CannotVisualize, IncompatibleBlockStructures, WrongCDimError) from .hilbert_space_algebra import LocalSpace, ProductSpace from .matrix_algebra import ( Matrix, block_matrix, identity_matrix, permutation_matrix, vstackm, zerosm) from .operator_algebra import ( IdentityOperator, LocalProjector, LocalSigma, Operator, OperatorPlus, OperatorSymbol, ScalarTimesOperator, ZeroOperator, adjoint, get_coeffs) from ...utils.permutations import ( BadPermutationError, block_perm_and_perms_within_blocks, check_permutation, full_block_perm, invert_permutation, permutation_to_block_permutations, ) from ...utils.singleton import Singleton, singleton_object __all__ = [ 'CPermutation', 'CircuitSymbol', 'Concatenation', 'Feedback', 'Circuit', 'SLH', 'SeriesInverse', 'SeriesProduct', 'FB', 'circuit_identity', 'eval_adiabatic_limit', 'extract_channel', 'getABCD', 'map_channels', 'move_drive_to_H', 'pad_with_identity', 'prepare_adiabatic_limit', 'try_adiabatic_elimination', 'CIdentity', 'CircuitZero', 'Component', ] __private__ = [] class Circuit(metaclass=ABCMeta): @property @abstractmethod def cdim(self) -> int: raise NotImplementedError(self.__class__.__name__) @property def block_structure(self) -> tuple: return self._block_structure @property def _block_structure(self) -> tuple: return tuple((self.cdim, )) def index_in_block(self, channel_index: int) -> int: if channel_index < 0 or channel_index >= self.cdim: raise ValueError() struct = self.block_structure if len(struct) == 1: return channel_index, 0 i = 1 while sum(struct[:i]) <= channel_index and i < self.cdim: i += 1 block_index = i - 1 index_in_block = channel_index - sum(struct[:block_index]) return index_in_block, block_index def get_blocks(self, block_structure=None): if block_structure is None: block_structure = self.block_structure try: return self._get_blocks(block_structure) except IncompatibleBlockStructures as e: raise e def _get_blocks(self, block_structure): if block_structure == self.block_structure: return (self, ) raise IncompatibleBlockStructures("Requested incompatible block " "structure %s" % (block_structure,)) def series_inverse(self) -> 'Circuit': return self._series_inverse() def _series_inverse(self) -> 'Circuit': return SeriesInverse.create(self) def feedback(self, *, out_port=None, in_port=None): if out_port is None: out_port = self.cdim - 1 if in_port is None: in_port = self.cdim - 1 return self._feedback(out_port=out_port, in_port=in_port) def _feedback(self, *, out_port: int, in_port: int) -> 'Circuit': return Feedback.create(self, out_port=out_port, in_port=in_port) def show(self): from IPython.display import Image, display fname = self.render() display(Image(filename=fname)) def render(self, fname=''): import qnet.visualization.circuit_pyx as circuit_visualization from tempfile import gettempdir from time import time, sleep if not fname: tmp_dir = gettempdir() fname = os.path.join(tmp_dir, "tmp_{}.png".format(hash(time))) if circuit_visualization.draw_circuit(self, fname): done = False 
for k in range(20): if os.path.exists(fname): done = True break else: sleep(.5) if done: return fname raise CannotVisualize() def creduce(self) -> 'Circuit': return self._creduce() @abstractmethod def _creduce(self) -> 'Circuit': return self def toSLH(self) -> 'SLH': return self._toSLH() @abstractmethod def _toSLH(self) -> 'SLH': raise NotImplementedError(self.__class__.__name__) def coherent_input(self, *input_amps) -> 'Circuit': return self._coherent_input(*input_amps) def _coherent_input(self, *input_amps) -> 'Circuit': n_inputs = len(input_amps) if n_inputs != self.cdim: raise WrongCDimError() from qnet.algebra.library.circuit_components import ( CoherentDriveCC as Displace_cc) if n_inputs == 1: concat_displacements = Displace_cc(displacement=input_amps[0]) else: displacements = [ Displace_cc(displacement=amp) if amp != 0 else circuit_identity(1) for amp in input_amps] concat_displacements = Concatenation(*displacements) return self << concat_displacements def __lshift__(self, other): if isinstance(other, Circuit): return SeriesProduct.create(self, other) return NotImplemented def __add__(self, other): if isinstance(other, Circuit): return Concatenation.create(self, other) return NotImplemented class SLH(Circuit, Expression): def __init__(self, S, L, H): if not isinstance(S, Matrix): S = Matrix(S) if not isinstance(L, Matrix): L = Matrix(L) if S.shape[0] != L.shape[0]: raise ValueError('S and L misaligned: S = {!r}, L = {!r}' .format(S, L)) if L.shape[1] != 1: raise ValueError(("L has wrong shape %s. L must be a column vector " "of operators (shape n × 1)") % str(L.shape)) if not all(isinstance(s, Operator) for s in S.matrix.ravel()): S = S * IdentityOperator if not all(isinstance(l, Operator) for l in L.matrix.ravel()): L = L * IdentityOperator if not isinstance(H, Operator): H = H * IdentityOperator self.S = S self.L = L self.H = H super().__init__(S, L, H) @property def args(self): return self.S, self.L, self.H @property def Ls(self): return list(self.L.matrix[:, 0]) @property def cdim(self): return self.S.shape[0] def _creduce(self): return self @property def space(self): args_spaces = (self.S.space, self.L.space, self.H.space) return ProductSpace.create(*args_spaces) @property def free_symbols(self): return set.union( self.S.free_symbols, self.L.free_symbols, self.H.free_symbols) def series_with_slh(self, other): new_S = self.S * other.S new_L = self.S * other.L + self.L def ImAdjoint(m): return (m.H - m) * (I / 2) delta = ImAdjoint(self.L.adjoint() * self.S * other.L) if isinstance(delta, Matrix): new_H = self.H + other.H + delta[0, 0] else: assert delta == 0 new_H = self.H + other.H return SLH(new_S, new_L, new_H) def concatenate_slh(self, other): selfS = self.S otherS = other.S new_S = block_matrix( selfS, zerosm((selfS.shape[0], otherS.shape[1]), dtype=int), zerosm((otherS.shape[0], selfS.shape[1]), dtype=int), otherS) new_L = vstackm((self.L, other.L)) new_H = self.H + other.H return SLH(new_S, new_L, new_H) def _toSLH(self): return self def expand(self): return SLH(self.S.expand(), self.L.expand(), self.H.expand()) def simplify_scalar(self, func=sympy.simplify): return SLH( self.S.simplify_scalar(func=func), self.L.simplify_scalar(func=func), self.H.simplify_scalar(func=func)) def _series_inverse(self): return SLH(self.S.adjoint(), - self.S.adjoint() * self.L, -self.H) def _feedback(self, *, out_port, in_port): if not isinstance(self.S, Matrix) or not isinstance(self.L, Matrix): return Feedback(self, out_port=out_port, in_port=in_port) from sympy.core.numbers import 
ComplexInfinity, Infinity sympyOne = sympify(1) n = self.cdim - 1 if out_port != n: return ( map_channels({out_port: n}, self.cdim).toSLH() << self ).feedback(in_port=in_port) elif in_port != n: return ( self << map_channels({n: in_port}, self.cdim).toSLH() ).feedback() S, L, H = self.S, self.L, self.H one_minus_Snn = sympyOne - S[n, n] if isinstance(one_minus_Snn, Operator): if one_minus_Snn is IdentityOperator: one_minus_Snn = 1 elif (isinstance(one_minus_Snn, ScalarTimesOperator) and one_minus_Snn.term is IdentityOperator): one_minus_Snn = one_minus_Snn.coeff else: raise AlgebraError('Inversion not implemented for general' ' operators: {}'.format(one_minus_Snn)) one_minus_Snn_inv = sympyOne / one_minus_Snn if one_minus_Snn_inv in [Infinity, ComplexInfinity]: raise AlgebraError( "Ill-posed network: singularity in feedback [%s]%d->%d" % (str(self), out_port, in_port)) new_S = S[:n, :n] + S[:n, n:] * one_minus_Snn_inv * S[n:, :n] new_L = L[:n] + S[:n, n] * one_minus_Snn_inv * L[n] def ImAdjoint(m): return (m.H - m) * (I / 2) delta_H = ImAdjoint( (L.adjoint() * S[:, n:]) * one_minus_Snn_inv * L[n, 0]) if isinstance(delta_H, Matrix): delta_H = delta_H[0, 0] new_H = H + delta_H return SLH(new_S, new_L, new_H) def symbolic_liouvillian(self): from qnet.algebra.core.super_operator_algebra import liouvillian return liouvillian(self.H, self.L) def symbolic_master_equation(self, rho=None): L, H = self.L, self.H if rho is None: rho = OperatorSymbol('rho', hs=self.space) return (-I * (H * rho - rho * H) + sum(Lk * rho * adjoint(Lk) - (adjoint(Lk) * Lk * rho + rho * adjoint(Lk) * Lk) / 2 for Lk in L.matrix.ravel())) def symbolic_heisenberg_eom( self, X=None, noises=None, expand_simplify=True): L, H = self.L, self.H if X is None: X = OperatorSymbol('X', hs=(L.space | H.space)) summands = [I * (H * X - X * H), ] for Lk in L.matrix.ravel(): summands.append(adjoint(Lk) * X * Lk) summands.append(-(adjoint(Lk) * Lk * X + X * adjoint(Lk) * Lk) / 2) if noises is not None: if not isinstance(noises, Matrix): noises = Matrix(noises) LambdaT = (noises.adjoint().transpose() * noises.transpose()).transpose() assert noises.shape == L.shape S = self.S summands.append((adjoint(noises) * S.adjoint() * (X * L - L * X)) .expand()[0, 0]) summand = (((L.adjoint() * X - X * L.adjoint()) * S * noises) .expand()[0, 0]) summands.append(summand) if len(S.space & X.space): comm = (S.adjoint() * X * S - X) summands.append((comm * LambdaT).expand().trace()) ret = OperatorPlus.create(*summands) if expand_simplify: ret = ret.expand().simplify_scalar() return ret def __iter__(self): return iter((self.S, self.L, self.H)) def _coherent_input(self, *input_amps): return super(SLH, self)._coherent_input(*input_amps).toSLH() class CircuitSymbol(Circuit, Expression): _rx_label = re.compile('^[A-Za-z][A-Za-z0-9]*(_[A-Za-z0-9().+-]+)?$') def __init__(self, label, *sym_args, cdim): label = str(label) cdim = int(cdim) self._label = label self._cdim = cdim self._sym_args = tuple(sym_args) if not self._rx_label.match(label): raise ValueError("label '%s' does not match pattern '%s'" % (self.label, self._rx_label.pattern)) super().__init__(label, *sym_args, cdim=cdim) @property def label(self): return self._label @property def args(self): return (self.label, ) + self._sym_args @property def kwargs(self): return {'cdim': self.cdim} @property def sym_args(self): return self._sym_args @property def cdim(self): return self._cdim def _toSLH(self): raise CannotConvertToSLH() def _creduce(self): return self class Component(CircuitSymbol, 
metaclass=ABCMeta): CDIM = 0 PORTSIN = () PORTSOUT = () ARGNAMES = () DEFAULTS = {} IDENTIFIER = '' def __init__(self, *, label=None, **kwargs): assert isinstance(self.CDIM, int) and self.CDIM > 0 assert len(self.PORTSIN) == self.CDIM assert all([isinstance(name, str) for name in self.PORTSIN]) assert len(self.PORTSOUT) == self.CDIM assert all([isinstance(name, str) for name in self.PORTSOUT]) assert len(self.DEFAULTS.keys()) == len(self.ARGNAMES) assert all([name in self.DEFAULTS.keys() for name in self.ARGNAMES]) assert isinstance(self.IDENTIFIER, str) and len(self.IDENTIFIER) > 0 assert self._has_properties_for_args, "must use properties_for_args class decorater" if label is None: label = self.IDENTIFIER else: label = str(label) for arg_name in kwargs: if arg_name not in self.ARGNAMES: raise TypeError( "%s got an unexpected keyword argument '%s'" % (self.__class__.__name__, arg_name)) self._kwargs = OrderedDict([('label', label)]) self._minimal_kwargs = OrderedDict() if label != self.IDENTIFIER: self._minimal_kwargs['label'] = label args = [] for arg_name in self.ARGNAMES: val = kwargs.get(arg_name, self.DEFAULTS[arg_name]) args.append(val) self.__dict__['_%s' % arg_name] = val self._kwargs[arg_name] = val if val != self.DEFAULTS[arg_name]: self._minimal_kwargs[arg_name] = val super().__init__(label, *args, cdim=self.CDIM) @property
MIT License
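In qnet's Expression machinery, args holds the positional values an expression can be rebuilt from; Component routes everything through keyword arguments, so its args is the empty tuple, whereas CircuitSymbol.args is (label, *sym_args). A toy sketch of that convention (the base class here is a simplified stand-in, not qnet's Expression):

class Expression:
    """Simplified stand-in: subclasses expose reconstruction data via args."""
    @property
    def args(self):
        raise NotImplementedError

class Symbol(Expression):
    def __init__(self, label, *sym_args):
        self.label, self.sym_args = label, sym_args

    @property
    def args(self):
        return (self.label,) + self.sym_args  # positional reconstruction data

class Component(Symbol):
    @property
    def args(self):
        return ()  # everything is carried by keyword arguments instead

print(Symbol("W", 1.0).args)           # ('W', 1.0)
print(Component("beamsplitter").args)  # ()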
luna-klatzer/openhiven.py
openhivenpy/types/user.py
LazyUser.application
python
def application(self) -> Optional[bool]: return super().application
Returns the application string passed. Currently client-limited.
https://github.com/luna-klatzer/openhiven.py/blob/9184d6a77bde0ee3847dcb9ea7d399217a36c95d/openhivenpy/types/user.py#L165-L167
from __future__ import annotations import logging from typing import Optional, Union from typing import TYPE_CHECKING from .hiven_type_schemas import UserSchema, get_compiled_validator, LazyUserSchema from ..base_types import BaseUser from ..utils import log_type_exception if TYPE_CHECKING: from .. import HivenClient logger = logging.getLogger(__name__) __all__ = ['LazyUser', 'User'] class LazyUser(BaseUser): _json_schema: dict = LazyUserSchema json_validator = get_compiled_validator(_json_schema) @log_type_exception('LazyUser') def __init__(self, data: dict, client: HivenClient): super().__init__() self._username = data.get('username') self._name = data.get('name') self._bio = data.get('bio') self._id = data.get('id') self._email_verified = data.get('email_verified') self._flags = data.get('flags') self._icon = data.get('icon') self._header = data.get('header') self._bot = data.get('bot', False) self._client = client def __repr__(self) -> str: info = [ ('username', self.username), ('name', self.name), ('id', self.id), ('icon', self.icon), ('header', self.header), ('bot', self.bot) ] return '<LazyUser {}>'.format(' '.join('%s=%s' % t for t in info)) def get_cached_data(self) -> Optional[dict]: return self._client.find_user(self.id) @classmethod def format_obj_data(cls, data: dict) -> dict: data = cls.validate(data) return data @property def username(self) -> Optional[str]: return super().username @property def name(self) -> Optional[str]: return super().name @property def id(self) -> Optional[str]: return super().id @property def bio(self) -> Optional[str]: return super().bio @property def email_verified(self) -> Optional[bool]: return super().email_verified @property def flags(self) -> Optional[Union[int, str]]: return super().flags @property def user_flags(self) -> Optional[Union[int, str]]: return self.flags @property def icon(self) -> Optional[str]: return super().icon @property def header(self) -> Optional[str]: return super().header @property def bot(self) -> Optional[bool]: return super().bot @property def account(self) -> Optional[str]: return super().account @property
MIT License
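Every LazyUser property above delegates to the BaseUser implementation with super(); inside a property getter, super().application runs the parent property's getter on the same instance. A minimal sketch of that delegation pattern (this BaseUser is a stub, not the openhiven.py class):

from typing import Optional

class BaseUser:
    def __init__(self, application: Optional[bool] = None):
        self._application = application

    @property
    def application(self) -> Optional[bool]:
        return self._application

class LazyUser(BaseUser):
    @property
    def application(self) -> Optional[bool]:
        # super() in a property getter resolves the parent class descriptor
        return super().application

print(LazyUser(application=True).application)  # True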
asahi417/lstmcell
lstm_cell/basic_lstm_cell.py
CustomLSTMCell.__init__
python
def __init__(self, num_units, forget_bias=1.0, activation=None, reuse=None, layer_norm: bool=False, norm_shift: float=0.0, norm_gain: float=1.0, dropout_keep_prob_in: float = 1.0, dropout_keep_prob_h: float=1.0, dropout_keep_prob_out: float=1.0, dropout_keep_prob_gate: float=1.0, dropout_keep_prob_forget: float=1.0, dropout_prob_seed: int=None, variational_dropout: bool=False, recurrent_dropout: bool=False ): super(CustomLSTMCell, self).__init__(_reuse=reuse) self._num_units = num_units self._forget_bias = forget_bias self._activation = activation or math_ops.tanh self._layer_norm = layer_norm self._g = norm_gain self._b = norm_shift self._recurrent_dropout = recurrent_dropout self._variational_dropout = variational_dropout self._seed = dropout_prob_seed self._keep_prob_i = dropout_keep_prob_in self._keep_prob_g = dropout_keep_prob_gate self._keep_prob_f = dropout_keep_prob_forget self._keep_prob_o = dropout_keep_prob_out self._keep_prob_h = dropout_keep_prob_h
Initialize the basic LSTM cell. Args: num_units: int, The number of units in the LSTM cell. forget_bias: float, The bias added to forget gates (see above). Must be set to `0.0` manually when restoring from CudnnLSTM-trained checkpoints. activation: Activation function of the inner states. Default: `tanh`. reuse: (optional) Python boolean describing whether to reuse variables in an existing scope. If not `True`, and the existing scope already has the given variables, an error is raised. layer_norm: (optional) If True, apply layer normalization. norm_shift: (optional) Shift parameter for layer normalization. norm_gain: (optional) Gain parameter for layer normalization. dropout_prob_seed: (optional) recurrent_dropout: (optional) dropout_keep_prob_in: (optional) keep probability of variational dropout for input dropout_keep_prob_out: (optional) keep probability of variational dropout for output dropout_keep_prob_gate: (optional) keep probability of variational dropout for gating cell dropout_keep_prob_forget: (optional) keep probability of variational dropout for forget cell dropout_keep_prob_h: (optional) keep probability of recurrent dropout for gated state
https://github.com/asahi417/lstmcell/blob/cb49d176a27f4069a7cbc9954af3022a09ba1ee0/lstm_cell/basic_lstm_cell.py#L38-L94
from tensorflow.python.ops import rnn_cell_impl from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.ops import init_ops from tensorflow.python.ops import nn_ops from tensorflow.python.ops import nn_impl _EPSILON = 10**-4 class CustomLSTMCell(rnn_cell_impl.RNNCell):
MIT License
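The layer_norm, norm_gain (g) and norm_shift (b) arguments map onto the standard layer-normalization transform y = g·(x − mean)/sqrt(var + eps) + b. A NumPy sketch of that transform under the cell's defaults g = 1.0, b = 0.0 and the module's _EPSILON; this illustrates the math, not the cell's exact TensorFlow graph:

import numpy as np

_EPSILON = 1e-4  # matches the module constant 10**-4 above

def layer_norm(x, g=1.0, b=0.0):
    """Normalize over the last axis, then apply gain g and shift b."""
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return g * (x - mean) / np.sqrt(var + _EPSILON) + b

x = np.array([[1.0, 2.0, 3.0, 4.0]])
print(layer_norm(x))  # zero-mean, roughly unit-variance row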
twisted/imaginary
src/imaginary/objects.py
Thing.moveTo
python
def moveTo(self, where, arrivalEventFactory=None): whereContainer = iimaginary.IContainer(where, None) if (whereContainer is iimaginary.IContainer(self.location, None)): return if whereContainer is None: whereThing = None else: whereThing = whereContainer.thing if whereThing is not None and whereThing.location is self: raise eimaginary.ActionFailure(events.ThatDoesntWork( actor=self, actorMessage=[ language.Noun(where.thing).definiteNounPhrase() .capitalizeConcept(), " won't fit inside itself."])) if not self.portable: raise eimaginary.CannotMove(self, where) oldLocation = self.location for restriction in self.powerupsFor(iimaginary.IMovementRestriction): restriction.movementImminent(self, where) if oldLocation is not None: events.DepartureEvent(oldLocation, self).broadcast() if where is not None: where = iimaginary.IContainer(where) where.add(self) if arrivalEventFactory is not None: arrivalEventFactory(self).broadcast() if oldLocation is not None: iimaginary.IContainer(oldLocation).remove(self)
Implement L{iimaginary.IThing.moveTo} to change the C{location} of this L{Thing} to a new L{Thing}, broadcasting an L{events.DepartureEvent} to note this object's departure from its current C{location}. Before moving it, invoke each L{IMovementRestriction} powerup on this L{Thing} to allow them to prevent this movement.
https://github.com/twisted/imaginary/blob/a162488a3166baf19f9ed3ae3b98afa18b8f294d/src/imaginary/objects.py#L265-L310
from __future__ import division import math import attr from zope.interface import implements, implementer from twisted.python import reflect, components from epsilon.remember import remembered from axiom import item, attributes from imaginary import iimaginary, eimaginary, text as T, events, language from imaginary.enhancement import Enhancement as _Enhancement from imaginary.language import Description from imaginary.idea import ( Idea, Link, Proximity, ProviderOf, AlsoKnownAs, CanSee, Vector) class Points(item.Item): max = attributes.integer(doc=""" Maximum number of points. """, allowNone=False) current = attributes.integer(doc=""" Current number of points. """, allowNone=False) def __init__(self, **kw): if 'max' in kw and 'current' not in kw: kw['current'] = kw['max'] super(Points, self).__init__(**kw) def __cmp__(self, other): return cmp(self.current, other) def __str__(self): return '%d/%d' % (self.current, self.max) def __repr__(self): d = {'class': reflect.qual(self.__class__), 'current': self.current, 'max': self.max} return '%(class)s(%(max)d, %(current)d)' % d def increase(self, amount): return self.modify(amount) def decrease(self, amount): return self.modify(-amount) def modify(self, amount): self.current = max(min(self.current + amount, self.max), 0) return self.current class Thing(item.Item): implements(iimaginary.IThing, iimaginary.IVisible, iimaginary.INameable, iimaginary.ILinkAnnotator, iimaginary.ILinkContributor) weight = attributes.integer(doc=""" Units of weight of this object. """, default=1, allowNone=False) location = attributes.reference(doc=""" Direct reference to the location of this object """) portable = attributes.boolean(doc=""" Whether this can be picked up, pushed around, relocated, etc """, default=True, allowNone=False) name = attributes.text(doc=""" The name of this object. """, allowNone=False) description = attributes.text(doc=""" What this object looks like. """, default=u"") gender = attributes.integer(doc=""" The grammatical gender of this thing. One of L{language.Gender.MALE}, L{language.Gender.FEMALE}, or L{language.Gender.NEUTER}. """, default=language.Gender.NEUTER, allowNone=False) proper = attributes.boolean(doc=""" Whether my name is a proper noun. """, default=False, allowNone=False) def destroy(self): if self.location is not None: iimaginary.IContainer(self.location).remove(self) self.deleteFromStore() def links(self): for pup in self.powerupsFor(iimaginary.ILinkContributor): for link in pup.links(): yield link if self.location is not None: l = Link(self.idea, self.location.idea) l.annotate([AlsoKnownAs('here')]) yield l def allAnnotators(self): loc = self while loc is not None: if loc is not None: for pup in loc.powerupsFor(iimaginary.ILocationLinkAnnotator): yield pup loc = loc.location for pup in self.powerupsFor(iimaginary.ILinkAnnotator): yield pup def annotationsFor(self, link, idea): for annotator in self.allAnnotators(): for annotation in annotator.annotationsFor(link, idea): yield annotation @remembered def idea(self): idea = Idea(self) idea.linkers.append(self) idea.annotators.append(self) return idea def obtainOrReportWhyNot(self, retriever): obt = self.idea.obtain(retriever) results = list(obt) if not results: reasons = list(obt.reasonsWhyNot) if reasons: raise eimaginary.ActionFailure(events.ThatDoesntWork( actor=self, actorMessage=reasons[0].tellMeWhyNot())) return results
MIT License
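Aside: the clamping arithmetic in Points.modify above is easy to check in isolation. The sketch below strips away the Axiom item machinery; the class name and constructor are mine, purely for illustration:

class PlainPoints:
    # Hypothetical stand-in for Points without the Axiom attributes.
    def __init__(self, maximum, current=None):
        self.max = maximum
        self.current = maximum if current is None else current

    def modify(self, amount):
        # Same clamp as Points.modify: keep current within [0, max].
        self.current = max(min(self.current + amount, self.max), 0)
        return self.current

hp = PlainPoints(10)
assert hp.modify(-3) == 7     # ordinary decrease
assert hp.modify(100) == 10   # increases saturate at max
assert hp.modify(-999) == 0   # decreases saturate at zero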
azure-samples/azure-samples-python-management
samples/communication/manage_communication.py
__setup_update_communication_service
python
def __setup_update_communication_service(subparsers, parent_parser): parser = subparsers.add_parser('update', help='Update a Communication Service') parser.add_argument('resource_group_name', type=str) parser.add_argument('resource_name', type=str) parser.add_argument("--keyvalue", action='append', type=lambda kv: kv.split("="), dest='keyvalues') parser.set_defaults(func=__update_communication_service)
Define the parser for the update command. Provide the tags like so: "--keyvalue foo1=bar1 --keyvalue foo2=bar2"
https://github.com/azure-samples/azure-samples-python-management/blob/bff69aec74fa7bc3cf0b68652d5c0a58119909ac/samples/communication/manage_communication.py#L295-L305
import os import argparse from random import * from azure.core.exceptions import HttpResponseError from azure.core.exceptions import ResourceNotFoundError from azure.identity import ClientSecretCredential from azure.mgmt.resource import ResourceManagementClient from azure.mgmt.communication import CommunicationServiceManagementClient from azure.mgmt.communication.models import CommunicationServiceResource from azure.mgmt.communication.models import KeyType from azure.mgmt.communication.models import TaggedResource from azure.mgmt.communication.models import RegenerateKeyParameters def __create_service_principal_credentials(): app_id = os.environ.get("AZURE_CLIENT_ID", None) client_secret = os.environ.get("AZURE_CLIENT_SECRET", None) tenant_id = os.environ.get("AZURE_TENANT_ID", None) if app_id is None or client_secret is None or tenant_id is None: return None return ClientSecretCredential(client_id=app_id, client_secret=client_secret, tenant_id=tenant_id) def __create_resource_management_client(): subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", None) if subscription_id is None: return None return ResourceManagementClient( credential=__create_service_principal_credentials(), subscription_id=subscription_id ) def __create_communication_management_client(credentials): subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", None) if subscription_id is None: return None return CommunicationServiceManagementClient(credentials, subscription_id) def __get_communication_management_client(): credential = __create_service_principal_credentials() if credential is None: raise Exception("Failed to create service principal credentials") client = __create_communication_management_client(credential) if client is None: raise Exception("Failed to create CommunicationServiceManagementClient") return client def __print_resource(resource): print("Name: " + resource.name) print("Provisioning State: " + resource.provisioning_state) print("Immutable Resource ID: " + resource.immutable_resource_id) print("Location: " + resource.location) print("Data Location: " + resource.data_location) print("Notification Hub ID: " + str(resource.notification_hub_id)) print("Tags: " + str(resource.tags)) def __create_communication_service(args): print("\nCreate...") acs_client = __get_communication_management_client() resource = CommunicationServiceResource(location="global", data_location = "UnitedStates") operation = acs_client.communication_service.begin_create_or_update(args.resource_group_name, args.resource_name, resource) print("Issued Create command. 
Waiting for response...") resource = operation.result(timeout=1) print("Resource Created: ") __print_resource(resource) def __get_communication_service(args): print("\nGet...") acs_client = __get_communication_management_client() try: resource = acs_client.communication_service.get(args.resource_group_name, args.resource_name) __print_resource(resource) except HttpResponseError: print("Resource was not found.") def __update_communication_service(args): print("\nUpdate...") acs_client = __get_communication_management_client() tags = {} if args.keyvalues is not None: tags = {"tags": dict(args.keyvalues)} resource = acs_client.communication_service.update(args.resource_group_name, args.resource_name, TaggedResource(**tags)) print("Resource Updated: ") __print_resource(resource) def __delete_communication_service(args): print("\nDelete...") acs_client = __get_communication_management_client() acs_client.communication_service.begin_delete(args.resource_group_name, args.resource_name) print("Resource Deleted") def __list_communication_service_by_subscription(args): print("\nList by subscription...") acs_client = __get_communication_management_client() resources = acs_client.communication_service.list_by_subscription() print("Found resources: ") for resource in resources: print("") __print_resource(resource) def __list_communication_service_by_resource_group(args): print("\nList by resource group...") acs_client = __get_communication_management_client() resources = acs_client.communication_service.list_by_resource_group(args.resource_group_name) print("Found resources: ") for resource in resources: print("") __print_resource(resource) def __list_keys(args): print("\nList keys...") acs_client = __get_communication_management_client() keys = acs_client.communication_service.list_keys(args.resource_group_name, args.resource_name) print(keys) def __regenerate_key(args): print("\nRegeneration key...") acs_client = __get_communication_management_client() key_type = {"key_type": args.type} key = acs_client.communication_service.regenerate_key(args.resource_group_name, args.resource_name, RegenerateKeyParameters(**key_type)) print(key) def __link_notification_hub(args): print("\nLink Notification Hub...") notification_hub_resource_id = os.environ.get("AZURE_NOTIFICATION_HUB_ID", None) notification_hub_connection_string = os.environ.get("AZURE_NOTIFICATION_HUB_CONNECTION_STRING", None) if notification_hub_resource_id is None or notification_hub_connection_string is None: return None acs_client = __get_communication_management_client() linked_notification_hub = acs_client.communication_service.link_notification_hub(args.resource_group_name, args.resource_name, { 'resource_id': notification_hub_resource_id, 'connection_string': notification_hub_connection_string }) print("Linked: ") print(linked_notification_hub) def __create_resource_group(args): resource_client = __create_resource_management_client() resource_client.resource_groups.create_or_update( args.resource_group_name, {"location": "westus"} ).result() def __delete_resource_group(args): resource_client = __create_resource_management_client() resource_client.resource_groups.begin_delete( args.resource_group_name ).result() def __resource_group_exists(args): resource_client = __create_resource_management_client() try: resource_client.resource_groups.get(args.resource_group_name) except ResourceNotFoundError: return False return True def __run_all(args): resource_group_exists = __resource_group_exists(args) if resource_group_exists is False: 
__create_resource_group(args) __create_communication_service(args) __get_communication_service(args) __update_communication_service(args) __list_communication_service_by_subscription(args) __list_communication_service_by_resource_group(args) __list_keys(args) __regenerate_key(args) __delete_communication_service(args) if resource_group_exists is False: __delete_resource_group(args) def __setup_create_communication_service(subparsers, parent_parser): parser = subparsers.add_parser('create', help='Create a Communication Service') parser.add_argument('resource_group_name', type=str) parser.add_argument('resource_name', type=str) parser.set_defaults(func=__create_communication_service) def __setup_get_communication_service(subparsers, parent_parser): parser = subparsers.add_parser('get', help='Fetch a Communication Service') parser.add_argument('resource_group_name', type=str) parser.add_argument('resource_name', type=str) parser.set_defaults(func=__get_communication_service)
MIT License
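The --keyvalue plumbing in the parser above is worth seeing end to end; this standalone sketch reuses the same argparse arguments and shows what __update_communication_service receives:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--keyvalue", action="append",
                    type=lambda kv: kv.split("="), dest="keyvalues")
args = parser.parse_args(["--keyvalue", "foo1=bar1", "--keyvalue", "foo2=bar2"])
# Each occurrence is split on "=", appended to a list, then turned into tags:
assert args.keyvalues == [["foo1", "bar1"], ["foo2", "bar2"]]
assert dict(args.keyvalues) == {"foo1": "bar1", "foo2": "bar2"}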
nrel/floris
floris/simulation/wake_velocity/gaussianModels/gaussian_model_base.py
GaussianModel.calc_VW
python
def calc_VW( self, coord, turbine, flow_field, x_locations, y_locations, z_locations ): D = turbine.rotor_diameter HH = turbine.hub_height yaw = turbine.yaw_angle Ct = turbine.Ct TSR = turbine.tsr aI = turbine.aI Uinf = np.mean(flow_field.wind_map.grid_wind_speed) scale = 1.0 vel_top = ( Uinf * ((HH + D / 2) / flow_field.specified_wind_height) ** flow_field.wind_shear ) / Uinf vel_bottom = ( Uinf * ((HH - D / 2) / flow_field.specified_wind_height) ** flow_field.wind_shear ) / Uinf Gamma_top = ( scale * (np.pi / 8) * D * vel_top * Uinf * Ct * sind(yaw) * cosd(yaw) ) Gamma_bottom = ( -scale * (np.pi / 8) * D * vel_bottom * Uinf * Ct * sind(yaw) * cosd(yaw) ) Gamma_wake_rotation = ( 0.25 * 2 * np.pi * D * (aI - aI ** 2) * turbine.average_velocity / TSR ) eps = self.eps_gain * D lmda = D / 8 kappa = 0.41 lm = kappa * z_locations / (1 + kappa * z_locations / lmda) z = np.linspace( z_locations.min(), z_locations.max(), flow_field.u_initial.shape[2] ) dudz_initial = np.gradient(flow_field.u_initial, z, axis=2) nu = lm ** 2 * np.abs(dudz_initial[0, :, :]) yLocs = y_locations + 0.01 - (coord.x2) zT = z_locations + 0.01 - (HH + D / 2) rT = yLocs ** 2 + zT ** 2 V1 = ( (zT * Gamma_top) / (2 * np.pi * rT) * (1 - np.exp(-rT / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W1 = ( (-yLocs * Gamma_top) / (2 * np.pi * rT) * (1 - np.exp(-rT / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) zB = z_locations + 0.01 - (HH - D / 2) rB = yLocs ** 2 + zB ** 2 V2 = ( (zB * Gamma_bottom) / (2 * np.pi * rB) * (1 - np.exp(-rB / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W2 = ( ((-yLocs * Gamma_bottom) / (2 * np.pi * rB)) * (1 - np.exp(-rB / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) yLocs = y_locations + 0.01 - (coord.x2) zLocs = z_locations + 0.01 + (HH + D / 2) V3 = ( ( ((zLocs * -Gamma_top) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2))) * (1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) + 0.0 ) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W3 = ( ((-yLocs * -Gamma_top) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2))) * (1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) yLocs = y_locations + 0.01 - (coord.x2) zLocs = z_locations + 0.01 + (HH - D / 2) V4 = ( ( ((zLocs * -Gamma_bottom) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2))) * (1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) + 0.0 ) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W4 = ( ((-yLocs * -Gamma_bottom) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2))) * (1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) zC = z_locations + 0.01 - (HH) rC = yLocs ** 2 + zC ** 2 V5 = ( (zC * Gamma_wake_rotation) / (2 * np.pi * rC) * (1 - np.exp(-rC / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W5 = ( (-yLocs * Gamma_wake_rotation) / (2 * np.pi * rC) * (1 - np.exp(-rC / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) yLocs = y_locations + 0.01 - coord.x2 zLocs = z_locations + 0.01 + HH V6 = ( ( ( (zLocs * -Gamma_wake_rotation) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2)) ) * (1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) + 0.0 ) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) W6 = ( ((-yLocs * -Gamma_wake_rotation) / (2 * np.pi * (yLocs ** 2 + zLocs ** 2))) * 
(1 - np.exp(-(yLocs ** 2 + zLocs ** 2) / (eps ** 2))) * eps ** 2 / (4 * nu * (x_locations - coord.x1) / Uinf + eps ** 2) ) V = V1 + V2 + V3 + V4 + V5 + V6 W = W1 + W2 + W3 + W4 + W5 + W6 V[ x_locations < coord.x1 - 1 ] = 0.0 W[ x_locations < coord.x1 - 1 ] = 0.0 W[W < 0] = 0 return V, W
This method calculates the V- and W-component velocities using methods developed in [1]. # TODO add reference to 1 Args: coord (:py:obj:`floris.simulation.turbine_map.TurbineMap.coords`): Spatial coordinates of wind turbine. turbine (:py:class:`floris.simulation.turbine.Turbine`): Turbine object. flow_field (:py:class:`floris.simulation.flow_field.FlowField`): Flow field object. x_locations (np.array): Streamwise locations in wake. y_locations (np.array): Spanwise locations in wake. z_locations (np.array): Vertical locations in wake. Returns: np.array, np.array: - V-component velocity deficits across the flow field. - W-component velocity deficits across the flow field.
https://github.com/nrel/floris/blob/ef4934ec7feb7afd2615772d364a1eaa28db93e9/floris/simulation/wake_velocity/gaussianModels/gaussian_model_base.py#L182-L383
import numpy as np from ....utilities import cosd, sind, tand from ..base_velocity_deficit import VelocityDeficit class GaussianModel(VelocityDeficit): def __init__(self, parameter_dictionary): super().__init__(parameter_dictionary) def yaw_added_turbulence_mixing( self, coord, turbine, flow_field, x_locations, y_locations, z_locations ): if self.use_yaw_added_recovery: V, W = self.calc_VW( coord, turbine, flow_field, x_locations, y_locations, z_locations ) v_prime = flow_field.v + V w_prime = flow_field.w + W u_prime = turbine.u_prime() idx = np.where( (np.abs(x_locations - coord.x1) <= turbine.rotor_diameter / 4) & (np.abs(y_locations - coord.x2) < turbine.rotor_diameter) ) TKE = (1 / 2) * ( u_prime ** 2 + np.mean(v_prime[idx]) ** 2 + np.mean(w_prime[idx]) ** 2 ) TI_total = turbine.TKE_to_TI(TKE) TI_mixing = np.array(TI_total) - turbine.current_turbulence_intensity else: TI_mixing = 0.0 return TI_mixing def calculate_VW( self, V, W, coord, turbine, flow_field, x_locations, y_locations, z_locations ): if self.use_yaw_added_recovery: if not self.calculate_VW_velocities: err_msg = ( "It appears that 'use_yaw_added_recovery' is set " + "to True and 'calculate_VW_velocities' is set to False. " + "This configuration is not valid. Please set " + "'calculate_VW_velocities' to True if you wish to use " + "yaw-added recovery." ) self.logger.error(err_msg, stack_info=True) raise ValueError(err_msg) if self.calculate_VW_velocities: V, W = self.calc_VW( coord, turbine, flow_field, x_locations, y_locations, z_locations ) return V, W def yaw_added_recovery_correction( self, U_local, U, W, x_locations, y_locations, turbine, turbine_coord ): U1 = U_local - U D = turbine.rotor_diameter xLocs = x_locations - turbine_coord.x1 ky = self.ka * turbine.current_turbulence_intensity + self.kb U2 = (np.mean(W) * xLocs) / ((ky * xLocs + D / 2)) U_total = U1 + np.nan_to_num(U2) U = U_local - U_total U[x_locations < turbine_coord.x1] = 0 return U
Apache License 2.0
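Each of the twelve V1..W6 terms in calc_VW is one evaluation of the same decaying vortex kernel, just with a different circulation and image position. A sketch of that kernel with numpy (function and argument names are mine, not the FLORIS API):

import numpy as np

def vortex_vw(y, z, gamma, eps, nu, dx, u_inf):
    # Lamb-Oseen-style core with a streamwise viscous decay factor,
    # mirroring the repeated expression in calc_VW above.
    # (The 0.01 offsets in calc_VW keep r2 away from zero.)
    r2 = y ** 2 + z ** 2
    core = (1 - np.exp(-r2 / eps ** 2)) / (2 * np.pi * r2)
    decay = eps ** 2 / (4 * nu * dx / u_inf + eps ** 2)
    return z * gamma * core * decay, -y * gamma * core * decay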
pelioniot/mbed-cloud-sdk-python
src/mbed_cloud/foundation/entities/accounts/account.py
Account.end_market
python
def end_market(self, value): self._end_market.set(value)
Set value of `end_market` :param value: value to set :type value: str
https://github.com/pelioniot/mbed-cloud-sdk-python/blob/71dc67fc2a8d1aff31e35ec781fb328e6a60639c/src/mbed_cloud/foundation/entities/accounts/account.py#L708-L715
from __future__ import unicode_literals from builtins import str from builtins import super from mbed_cloud.foundation.common.entity_base import Entity from mbed_cloud.foundation.common import fields from mbed_cloud.foundation import enums class Account(Entity): _api_fieldnames = [ "address_line1", "address_line2", "admin_email", "admin_full_name", "admin_id", "admin_key", "admin_name", "admin_password", "aliases", "city", "company", "contact", "contract_number", "country", "created_at", "custom_fields", "customer_number", "display_name", "email", "end_market", "expiration", "expiration_warning_threshold", "id", "idle_timeout", "limits", "mfa_status", "notification_emails", "parent_account", "parent_id", "password_policy", "password_recovery_expiration", "phone_number", "policies", "postal_code", "reason", "reference_note", "sales_contact", "state", "status", "template_id", "tier", "updated_at", "upgraded_at", ] _sdk_fieldnames = _api_fieldnames _renames = {} _renames_to_api = {} def __init__( self, _client=None, address_line1=None, address_line2=None, admin_email=None, admin_full_name=None, admin_id=None, admin_key=None, admin_name=None, admin_password=None, aliases=None, city=None, company=None, contact=None, contract_number=None, country=None, created_at=None, custom_fields=None, customer_number=None, display_name=None, email=None, end_market=None, expiration=None, expiration_warning_threshold=None, id=None, idle_timeout=None, limits=None, mfa_status=None, notification_emails=None, parent_account=None, parent_id=None, password_policy=None, password_recovery_expiration=None, phone_number=None, policies=None, postal_code=None, reason=None, reference_note=None, sales_contact=None, state=None, status=None, template_id=None, tier=None, updated_at=None, upgraded_at=None, ): super().__init__(_client=_client) from mbed_cloud.foundation.entities.accounts.parent_account import ParentAccount from mbed_cloud.foundation.entities.accounts.password_policy import PasswordPolicy from mbed_cloud.foundation.entities.accounts.policy import Policy self._address_line1 = fields.StringField(value=address_line1) self._address_line2 = fields.StringField(value=address_line2) self._admin_email = fields.StringField(value=admin_email) self._admin_full_name = fields.StringField(value=admin_full_name) self._admin_id = fields.StringField(value=admin_id) self._admin_key = fields.StringField(value=admin_key) self._admin_name = fields.StringField(value=admin_name) self._admin_password = fields.StringField(value=admin_password) self._aliases = fields.ListField(value=aliases) self._city = fields.StringField(value=city) self._company = fields.StringField(value=company) self._contact = fields.StringField(value=contact) self._contract_number = fields.StringField(value=contract_number) self._country = fields.StringField(value=country) self._created_at = fields.DateTimeField(value=created_at) self._custom_fields = fields.DictField(value=custom_fields) self._customer_number = fields.StringField(value=customer_number) self._display_name = fields.StringField(value=display_name) self._email = fields.StringField(value=email) self._end_market = fields.StringField(value=end_market) self._expiration = fields.DateTimeField(value=expiration) self._expiration_warning_threshold = fields.IntegerField(value=expiration_warning_threshold) self._id = fields.StringField(value=id) self._idle_timeout = fields.IntegerField(value=idle_timeout) self._limits = fields.DictField(value=limits) self._mfa_status = fields.StringField(value=mfa_status, 
enum=enums.AccountMfaStatusEnum) self._notification_emails = fields.ListField(value=notification_emails) self._parent_account = fields.DictField(value=parent_account, entity=ParentAccount) self._parent_id = fields.StringField(value=parent_id) self._password_policy = fields.DictField(value=password_policy, entity=PasswordPolicy) self._password_recovery_expiration = fields.IntegerField(value=password_recovery_expiration) self._phone_number = fields.StringField(value=phone_number) self._policies = fields.ListField(value=policies, entity=Policy) self._postal_code = fields.StringField(value=postal_code) self._reason = fields.StringField(value=reason) self._reference_note = fields.StringField(value=reference_note) self._sales_contact = fields.StringField(value=sales_contact) self._state = fields.StringField(value=state) self._status = fields.StringField(value=status, enum=enums.AccountStatusEnum) self._template_id = fields.StringField(value=template_id) self._tier = fields.StringField(value=tier) self._updated_at = fields.DateTimeField(value=updated_at) self._upgraded_at = fields.DateTimeField(value=upgraded_at) @property def address_line1(self): return self._address_line1.value @address_line1.setter def address_line1(self, value): self._address_line1.set(value) @property def address_line2(self): return self._address_line2.value @address_line2.setter def address_line2(self, value): self._address_line2.set(value) @property def admin_email(self): return self._admin_email.value @admin_email.setter def admin_email(self, value): self._admin_email.set(value) @property def admin_full_name(self): return self._admin_full_name.value @admin_full_name.setter def admin_full_name(self, value): self._admin_full_name.set(value) @property def admin_id(self): return self._admin_id.value @property def admin_key(self): return self._admin_key.value @property def admin_name(self): return self._admin_name.value @admin_name.setter def admin_name(self, value): self._admin_name.set(value) @property def admin_password(self): return self._admin_password.value @admin_password.setter def admin_password(self, value): self._admin_password.set(value) @property def aliases(self): return self._aliases.value @aliases.setter def aliases(self, value): self._aliases.set(value) @property def city(self): return self._city.value @city.setter def city(self, value): self._city.set(value) @property def company(self): return self._company.value @company.setter def company(self, value): self._company.set(value) @property def contact(self): return self._contact.value @contact.setter def contact(self, value): self._contact.set(value) @property def contract_number(self): return self._contract_number.value @contract_number.setter def contract_number(self, value): self._contract_number.set(value) @property def country(self): return self._country.value @country.setter def country(self, value): self._country.set(value) @property def created_at(self): return self._created_at.value @property def custom_fields(self): return self._custom_fields.value @custom_fields.setter def custom_fields(self, value): self._custom_fields.set(value) @property def customer_number(self): return self._customer_number.value @customer_number.setter def customer_number(self, value): self._customer_number.set(value) @property def display_name(self): return self._display_name.value @display_name.setter def display_name(self, value): self._display_name.set(value) @property def email(self): return self._email.value @email.setter def email(self, value): self._email.set(value) 
@property def end_market(self): return self._end_market.value @end_market.setter
Apache License 2.0
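Since Account is constructible with every field optional (see __init__ in the context), setting end_market locally is just the property round trip — a minimal sketch, assuming no API client is needed for local field access:

account = Account()
account.end_market = "Healthcare"   # delegates to self._end_market.set(value)
assert account.end_market == "Healthcare"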
derfies/panda3d-editor
src/pandaEditor/nodes/tests/testmixin.py
TestMixin.tearDown
python
def tearDown(self): for name in ('cam', 'camera', 'cam2d', 'camera2d'): np = getattr(self.base, name) np.removeNode() setattr(self.base, name, None) self.base.setupRender() render2d = pc.NodePath('render2d') aspect2d = render2d.attachNewNode(pc.PGTop('aspect2d')) ShowBaseGlobal.render2d = render2d ShowBaseGlobal.aspect2d = aspect2d self.base.setupRender2d() __builtins__['render'] = self.base.render __builtins__['render2d'] = self.base.render2d __builtins__['aspect2d'] = self.base.aspect2d __builtins__['pixel2d'] = self.base.pixel2d self.base.makeCamera(self.base.win) self.base.makeCamera2d(self.base.win) __builtins__['camera'] = self.base.camera self.base.render.setShaderAuto()
Remove all default nodes and recreate them.
https://github.com/derfies/panda3d-editor/blob/a50939bd4bfa5c22d27a9ddee090717e8d95f404/src/pandaEditor/nodes/tests/testmixin.py#L18-L50
import panda3d.core as pc from direct.showbase import ShowBaseGlobal from direct.showbase.PythonUtil import getBase as get_base from pandaEditor.scene import Scene from pandaEditor.game.showbase import ShowBase class TestMixin: def setUp(self): try: self.base = get_base() except NameError: self.base = ShowBase() self.base.scene = Scene()
MIT License
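A test class that relies on this mixin would look roughly like the following (test name and body are hypothetical); tearDown then tears out and rebuilds the default cameras and 2D scene graph after each test:

import unittest

class NodeLifecycleTest(TestMixin, unittest.TestCase):
    def test_default_scene_graph(self):
        # setUp gave us a ShowBase with a Scene attached.
        self.assertIsNotNone(self.base.render)
        self.assertIsNotNone(self.base.camera)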
hpe-container-platform-community/hpecp-python-library
hpecp/cli/k8scluster.py
K8sClusterProxy.k8smanifest
python
def k8smanifest(self): response = base.get_client().k8s_cluster.k8smanifest() print( yaml.dump( yaml.load( json.dumps(response), Loader=yaml.FullLoader, ) ) )
Retrieve the k8smanifest.
https://github.com/hpe-container-platform-community/hpecp-python-library/blob/625fb25c99698a2203b394ef39a253e2b4f0d7c9/hpecp/cli/k8scluster.py#L275-L285
from __future__ import print_function import base64 import json import six import sys import yaml from textwrap import dedent from hpecp.k8s_cluster import ( K8sCluster, K8sClusterStatus, K8sClusterHostConfig, ) from hpecp.cli import base class K8sClusterProxy(base.BaseProxy): def __dir__(self): return [ "add_addons", "admin_kube_config", "create", "dashboard_url", "dashboard_token", "delete", "examples", "get", "get_available_addons", "get_installed_addons", "import_cluster", "import_generic_cluster", "import_generic_cluster_with_json", "k8smanifest", "k8s_supported_versions", "list", "statuses", "upgrade_cluster", "wait_for_status", ] def __init__(self): super(K8sClusterProxy, self).new_instance("k8s_cluster", K8sCluster) @base.intercept_exception def create( self, name, k8shosts_config, description=None, k8s_version=None, pod_network_range="10.192.0.0/12", service_network_range="10.96.0.0/12", pod_dns_domain="cluster.local", persistent_storage_local=False, persistent_storage_nimble_csi=False, addons=[], external_identity_server={}, ext_id_svr_bind_pwd=None, ext_id_svr_user_attribute=None, ext_id_svr_bind_type=None, ext_id_svr_bind_dn=None, ext_id_svr_host=None, ext_id_svr_group_attribute=None, ext_id_svr_security_protocol=None, ext_id_svr_base_dn=None, ext_id_svr_verify_peer=None, ext_id_svr_type=None, ext_id_svr_port=None, external_groups=[], datafabric=False, datafabric_name=None, ): host_config = [ K8sClusterHostConfig.create_from_list(h.split(":")) for h in k8shosts_config.split(",") ] if external_identity_server: if not isinstance(external_identity_server, dict): print( ( "Could not parse 'external_identity_server' parameter" " - is it valid json?\n" "Received: " + external_identity_server + "\n" ), file=sys.stderr, ) sys.exit(1) else: external_identity_server = {} if ext_id_svr_bind_pwd is not None: external_identity_server["bind_pwd"] = ext_id_svr_bind_pwd if ext_id_svr_user_attribute is not None: external_identity_server[ "user_attribute" ] = ext_id_svr_user_attribute if ext_id_svr_bind_type is not None: external_identity_server["bind_type"] = ext_id_svr_bind_type if ext_id_svr_bind_dn is not None: external_identity_server["bind_dn"] = ext_id_svr_bind_dn if ext_id_svr_host is not None: external_identity_server["host"] = ext_id_svr_host if ext_id_svr_group_attribute is not None: external_identity_server[ "group_attribute" ] = ext_id_svr_group_attribute if ext_id_svr_security_protocol is not None: external_identity_server[ "security_protocol" ] = ext_id_svr_security_protocol if ext_id_svr_base_dn is not None: external_identity_server["base_dn"] = ext_id_svr_base_dn if ext_id_svr_verify_peer is not None: external_identity_server["verify_peer"] = json.loads( ext_id_svr_verify_peer.lower() ) if ext_id_svr_type is not None: external_identity_server["type"] = ext_id_svr_type if ext_id_svr_port is not None: external_identity_server["port"] = int(ext_id_svr_port) print( base.get_client().k8s_cluster.create( name=name, description=description, k8s_version=k8s_version, pod_network_range=pod_network_range, service_network_range=service_network_range, pod_dns_domain=pod_dns_domain, persistent_storage_local=persistent_storage_local, persistent_storage_nimble_csi=persistent_storage_nimble_csi, k8shosts_config=host_config, addons=addons, external_identity_server=external_identity_server, external_groups=external_groups, datafabric=datafabric, datafabric_name=datafabric_name, ) ) def admin_kube_config(self, id): print( base.get_client() .k8s_cluster.get(id) .admin_kube_config.replace( "\\n", 
"\n", ) ) def dashboard_url( self, id, ): url = ( base.get_client().k8s_cluster.get(id=id).dashboard_endpoint_access ) print(url) def dashboard_token( self, id, ): token = base.get_client().k8s_cluster.get(id=id).dashboard_token if six.PY2: print(base64.b64decode(token.encode())) else: print(base64.b64decode(token.encode()).decode("utf-8")) def examples(self): print( dedent( """\ # retrieve id of k8s cluster with name 'c1' $ hpecp k8scluster list --query "[?label.name == 'c1'] | [0] | [_links.self.href]" --output text /api/v2/k8scluster/1 """ ) ) @base.intercept_exception
MIT License
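The body of k8smanifest is a dict -> JSON -> YAML round trip used purely for pretty printing; the same trick in isolation, with a made-up response dict standing in for the API result:

import json
import yaml

response = {"apiVersion": "v1", "spec": {"replicas": 3}}  # stand-in payload
print(yaml.dump(yaml.load(json.dumps(response), Loader=yaml.FullLoader)))
# apiVersion: v1
# spec:
#   replicas: 3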
ucb-art/bag_framework
bag/math/__init__.py
si_string_to_float
python
def si_string_to_float(si_str): if si_str[-1] in si_pre: idx = si_pre.index(si_str[-1]) return float(si_str[:-1]) * 10**si_mag[idx] else: return float(si_str)
Converts the given string with SI prefix to float. Parameters ---------- si_str : str the string to convert Returns ------- ans : float the floating point value of the given string.
https://github.com/ucb-art/bag_framework/blob/8efa57ad719b2b02a005e234d87ad6f0e5e7a3de/bag/math/__init__.py#L48-L65
from typing import Iterable import numpy as np from . import interpolate __all__ = ['lcm', 'gcd', 'interpolate', 'float_to_si_string', 'si_string_to_float'] si_mag = [-18, -15, -12, -9, -6, -3, 0, 3, 6, 9, 12] si_pre = ['a', 'f', 'p', 'n', 'u', 'm', '', 'k', 'M', 'G', 'T'] def float_to_si_string(num, precision=6): if abs(num) < 1e-21: return '0' exp = np.log10(abs(num)) pre_idx = len(si_mag) - 1 for idx in range(len(si_mag)): if exp < si_mag[idx]: pre_idx = idx - 1 break fmt = '%%.%dg%%s' % precision res = 10.0 ** (si_mag[pre_idx]) return fmt % (num / res, si_pre[pre_idx])
BSD 3-Clause New or Revised License
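Expected behaviour of si_string_to_float, given the si_pre/si_mag tables defined in the context:

print(si_string_to_float("4.7k"))  # 4700.0   ('k' -> 10**3)
print(si_string_to_float("220n"))  # ~2.2e-07 ('n' -> 10**-9)
print(si_string_to_float("0.5"))   # 0.5      (no prefix, plain float())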
edx/edx-lint
edx_lint/pylint/annotations_check.py
FeatureToggleAnnotationChecker.check_annotation_group
python
def check_annotation_group(self, search, annotations, node): if not annotations: return target_removal_date = None temporary_use_case = False toggle_name = "" toggle_description = "" toggle_default = None line_number = None for annotation in annotations: if line_number is None: line_number = annotation["line_number"] self.current_module_annotation_group_line_numbers.append(line_number) if annotation["annotation_token"] == ".. toggle_name:": toggle_name = annotation["annotation_data"] self.current_module_annotated_toggle_names.add(toggle_name) elif annotation["annotation_token"] == ".. toggle_description:": toggle_description = annotation["annotation_data"].strip() elif annotation["annotation_token"] == ".. toggle_use_cases:": if "temporary" in annotation["annotation_data"]: temporary_use_case = True elif annotation["annotation_token"] == ".. toggle_target_removal_date:": target_removal_date = annotation["annotation_data"] elif annotation["annotation_token"] == ".. toggle_default:": toggle_default = annotation["annotation_data"] if not toggle_name: self.add_message( self.NO_NAME_MESSAGE_ID, node=node, line=line_number, ) if not toggle_description: self.add_message( self.EMPTY_DESCRIPTION_MESSAGE_ID, args=(toggle_name,), node=node, line=line_number, ) if temporary_use_case and not target_removal_date: self.add_message( self.MISSING_TARGET_REMOVAL_DATE_MESSAGE_ID, args=(toggle_name,), node=node, line=line_number, ) if toggle_default not in ["True", "False"]: self.add_message( self.NON_BOOLEAN_DEFAULT_VALUE, args=(toggle_name,), node=node, line=line_number, )
Perform checks on a single annotation group.
https://github.com/edx/edx-lint/blob/aad020dcf166f16d5bd7288cd837fb7f60ca269f/edx_lint/pylint/annotations_check.py#L417-L473
import os import re import pkg_resources from astroid.node_classes import Const, Name from code_annotations import annotation_errors from code_annotations.base import AnnotationConfig from code_annotations.find_static import StaticSearch from pylint.checkers import BaseChecker, utils from pylint.interfaces import IAstroidChecker from .common import BASE_ID, check_visitors def register_checkers(linter): linter.register_checker(FeatureToggleChecker(linter)) linter.register_checker(CodeAnnotationChecker(linter)) linter.register_checker(FeatureToggleAnnotationChecker(linter)) linter.register_checker(SettingAnnotationChecker(linter)) def check_all_messages(msgs): def store_messages(func): func.checks_msgs = [message[1] for message in msgs] return func return store_messages class AnnotationLines: _ANNOTATION_REGEX = re.compile(r"[\s]*#[\s]*\.\.[\s]*(toggle)") def __init__(self, module_node): module_as_binary = module_node.stream().read() file_encoding = module_node.file_encoding if file_encoding is None: file_encoding = "UTF-8" module_as_string = module_as_binary.decode(file_encoding) self._list_of_string_lines = module_as_string.split("\n") def is_line_annotated(self, line_number): if line_number < 1 or self._line_count() < line_number: return False return bool(self._ANNOTATION_REGEX.match(self._get_line_contents(line_number))) def _line_count(self): return len(self._list_of_string_lines) def _get_line_contents(self, line_number): return self._list_of_string_lines[line_number - 1] @check_visitors class FeatureToggleChecker(BaseChecker): __implements__ = (IAstroidChecker,) name = "feature-toggle-checker" TOGGLE_NOT_ANNOTATED_MESSAGE_ID = "feature-toggle-needs-doc" ILLEGAL_WAFFLE_MESSAGE_ID = "illegal-waffle-usage" _CHECK_CAPITAL_REGEX = re.compile(r"[A-Z]") _WAFFLE_TOGGLE_CLASSES = ("WaffleFlag", "WaffleSwitch", "CourseWaffleFlag") _ILLEGAL_WAFFLE_FUNCTIONS = ["flag_is_active", "switch_is_active"] msgs = { ("E%d40" % BASE_ID): ( "feature toggle (%s) is missing annotation", TOGGLE_NOT_ANNOTATED_MESSAGE_ID, "feature toggle is missing annotation", ), ("E%d41" % BASE_ID): ( "illegal waffle usage with (%s): use utility classes {}.".format( ", ".join(_WAFFLE_TOGGLE_CLASSES) ), ILLEGAL_WAFFLE_MESSAGE_ID, "illegal waffle usage: use utility classes {}.".format( ", ".join(_WAFFLE_TOGGLE_CLASSES) ), ), } def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._lines = None def visit_module(self, node): self._lines = AnnotationLines(node) def check_waffle_class_annotated(self, node): if not hasattr(node.func, "name"): return starts_with_capital = self._CHECK_CAPITAL_REGEX.match(node.func.name) if not starts_with_capital: return if not node.func.name.endswith(self._WAFFLE_TOGGLE_CLASSES): return if not self._lines.is_line_annotated(node.lineno - 1): feature_toggle_name = "UNKNOWN" if node.keywords is not None: for node_key in node.keywords: if node_key.arg == "flag_name": feature_toggle_name = node_key.value.value if feature_toggle_name == "UNKNOWN": if len(node.args) >= 2: feature_toggle_name = node.args[1].as_string() self.add_message( self.TOGGLE_NOT_ANNOTATED_MESSAGE_ID, args=(feature_toggle_name,), node=node, ) def check_configuration_model_annotated(self, node): if "ConfigurationModel" not in node.basenames: return if not self._lines.is_line_annotated(node.lineno - 1): config_model_subclass_name = node.name self.add_message( self.TOGGLE_NOT_ANNOTATED_MESSAGE_ID, args=(config_model_subclass_name,), node=node, ) def check_django_feature_flag_annotated(self, node): try: 
parent_target_name = node.parent.targets[0].name except AttributeError: return if parent_target_name == "FEATURES": for key, _ in node.items: if not self._lines.is_line_annotated(key.lineno - 1): django_feature_toggle_name = key.value self.add_message( self.TOGGLE_NOT_ANNOTATED_MESSAGE_ID, args=(django_feature_toggle_name,), node=node, ) def check_illegal_waffle_usage(self, node): if not hasattr(node.func, "name"): return if node.func.name in self._ILLEGAL_WAFFLE_FUNCTIONS: feature_toggle_name = "UNKNOWN" if len(node.args) >= 1: feature_toggle_name = node.args[0].as_string() self.add_message( self.ILLEGAL_WAFFLE_MESSAGE_ID, args=(feature_toggle_name,), node=node ) @utils.check_messages(TOGGLE_NOT_ANNOTATED_MESSAGE_ID, ILLEGAL_WAFFLE_MESSAGE_ID) def visit_call(self, node): self.check_waffle_class_annotated(node) self.check_illegal_waffle_usage(node) @utils.check_messages(TOGGLE_NOT_ANNOTATED_MESSAGE_ID) def visit_classdef(self, node): self.check_configuration_model_annotated(node) @utils.check_messages(TOGGLE_NOT_ANNOTATED_MESSAGE_ID) def visit_dict(self, node): self.check_django_feature_flag_annotated(node) @check_visitors class AnnotationBaseChecker(BaseChecker): CONFIG_FILENAMES = [] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.config_search = [] for config_filename in self.CONFIG_FILENAMES: config_path = pkg_resources.resource_filename( "code_annotations", os.path.join("contrib", "config", config_filename), ) config = AnnotationConfig(config_path, verbosity=-1) search = StaticSearch(config) self.config_search.append((config, search)) self.current_module_annotations = [] def check_module(self, node): for config, search in self.config_search: config.source_path = node.path[0] all_results = search.search() for _file_name, results in all_results.items(): for annotations_group in search.iter_groups(results): self.current_module_annotations.append(annotations_group) self.check_annotation_group(search, annotations_group, node) def leave_module(self, _node): self.current_module_annotations.clear() def check_annotation_group(self, search, annotations, node): raise NotImplementedError class CodeAnnotationChecker(AnnotationBaseChecker): CONFIG_FILENAMES = ["feature_toggle_annotations.yaml", "setting_annotations.yaml"] __implements__ = (IAstroidChecker,) name = "code-annotations" msgs = { ("E%d%d" % (BASE_ID, index + 50)): ( error_type.message, error_type.symbol, error_type.description, ) for index, error_type in enumerate(annotation_errors.TYPES) } @check_all_messages(msgs) def visit_module(self, node): self.check_module(node) def check_annotation_group(self, search, annotations, node): search.check_group(annotations) for (annotation, AnnotationErrorType, args) in search.annotation_errors: self.add_message( AnnotationErrorType.symbol, args=args, node=node, line=annotation["line_number"], ) search.annotation_errors.clear() class FeatureToggleAnnotationChecker(AnnotationBaseChecker): CONFIG_FILENAMES = ["feature_toggle_annotations.yaml"] __implements__ = (IAstroidChecker,) name = "toggle-annotations" NO_NAME_MESSAGE_ID = "toggle-no-name" EMPTY_DESCRIPTION_MESSAGE_ID = "toggle-empty-description" MISSING_TARGET_REMOVAL_DATE_MESSAGE_ID = "toggle-missing-target-removal-date" NON_BOOLEAN_DEFAULT_VALUE = "toggle-non-boolean-default-value" MISSING_ANNOTATION = "toggle-missing-annotation" INVALID_DJANGO_WAFFLE_IMPORT = "invalid-django-waffle-import" msgs = { ("E%d60" % BASE_ID): ( "feature toggle has no name", NO_NAME_MESSAGE_ID, "Feature toggle name must be present and be the 
first annotation", ), ("E%d61" % BASE_ID): ( "feature toggle (%s) does not have a description", EMPTY_DESCRIPTION_MESSAGE_ID, "Feature toggles must include a thorough description", ), ("E%d62" % BASE_ID): ( "temporary feature toggle (%s) has no target removal date", MISSING_TARGET_REMOVAL_DATE_MESSAGE_ID, "Temporary feature toggles must include a target removal date", ), ("E%d63" % BASE_ID): ( "feature toggle (%s) default value must be boolean ('True' or 'False')", NON_BOOLEAN_DEFAULT_VALUE, "Feature toggle default values must be boolean", ), ("E%d64" % BASE_ID): ( "missing feature toggle annotation", MISSING_ANNOTATION, ( "When a WaffleFlag/Switch object is created, a corresponding annotation must be present above in the" " same module and with a matching name", ) ), ("E%d65" % BASE_ID): ( "invalid Django Waffle import", INVALID_DJANGO_WAFFLE_IMPORT, ( "Do not directly access Django Waffle objects and methods. Instead, import from" " edx_toggles.toggles.", ) ), } LEGACY_TOGGLE_FUNC_NAMES = ["LegacyWaffleFlag", "LegacyWaffleSwitch"] TOGGLE_FUNC_NAMES = [ "WaffleFlag", "NonNamespacedWaffleFlag", "WaffleSwitch", "NonNamespacedWaffleSwitch", "CourseWaffleFlag", "ExperimentWaffleFlag", ] + LEGACY_TOGGLE_FUNC_NAMES def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.current_module_annotated_toggle_names = set() self.current_module_annotation_group_line_numbers = [] @check_all_messages(msgs) def visit_module(self, node): self.check_module(node) def leave_module(self, _node): self.current_module_annotated_toggle_names.clear() self.current_module_annotation_group_line_numbers.clear()
Apache License 2.0
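For reference, a feature-toggle annotation group that passes every branch of check_annotation_group above would read like this in source (token names taken from the code; the toggle itself is made up):

# .. toggle_name: my_app.enable_new_flow
# .. toggle_description: Enables the new checkout flow.
# .. toggle_use_cases: temporary
# .. toggle_target_removal_date: 2024-01-01
# .. toggle_default: False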
facelessuser/rummage
rummage/lib/gui/dialogs/settings_dialog.py
SettingsDialog.on_editor_change
python
def on_editor_change(self, event): self.editor = self.m_editor_text.GetValue() Settings.set_editor(self.editor) self.m_editor_text.SetValue(self.editor)
Update the editor setting when the editor text changes.
https://github.com/facelessuser/rummage/blob/17970fc3e4dfae23842782fedfae9d5c8ee3266e/rummage/lib/gui/dialogs/settings_dialog.py#L455-L460
import wx import re import json import functools from ..settings import Settings from .file_ext_dialog import FileExtDialog from .generic_dialogs import errormsg from ..localization import _ from .. import gui from .. import notify from ..notify.util import which from .. import data from ... import rumcore from .. import util from ..controls import webview EDITOR_HELP = _(""" Enter in the appropriate command to open files in your editor. Double quote paths with spaces and parameters that *may* contain spaces after substitution. Use the following variables for parameter substitution: Variable | Description --------- | ----------- `{$file}` | Insert the file path. `{$line}` | Insert the line number. `{$col}` | Insert the line column. `{$col0}` | Insert the line column offset by one so the first column is zero instead of one. !!! example "Example" ```bash "C:\\Program Files\\Sublime Text 3\\subl.exe" "{$file}:{$line}:{$col}" ``` """) BACKUP_VALIDATOR = re.compile(r'^[-_a-zA-Z\d.]+$') MINIMUM_COL_SIZE = 100 COLUMN_SAMPLE_SIZE = 100 class SettingsDialog(webview.WebViewMixin, gui.SettingsDialog): def __init__(self, parent): super().__init__(parent) self.setup_html(self.m_help_html) if util.platform() == "windows": self.m_general_panel.SetDoubleBuffered(True) self.m_search_panel.SetDoubleBuffered(True) self.m_editor_panel.SetDoubleBuffered(True) self.m_notify_panel.SetDoubleBuffered(True) self.m_history_panel.SetDoubleBuffered(True) self.m_backup_panel.SetDoubleBuffered(True) self.m_encoding_list.Bind(wx.EVT_LEFT_DCLICK, self.on_dclick) self.alert_player = False self.update_tab_page_colors() self.localize() self.set_keybindings( [(wx.ACCEL_CMD if util.platform() == "macos" else wx.ACCEL_CTRL, ord('A'), self.on_textctrl_selectall)] ) self.Bind(wx.EVT_SYS_COLOUR_CHANGED, self.on_color_change) self.history_types = [ "target", "regex_search", "literal_search", "regex_folder_exclude", "folder_exclude", "regex_file_search", "file_search", "replace_plugin" ] history_records = Settings.get_history_record_count(self.history_types) self.history_records_cleared = False self.editor = Settings.get_editor() if isinstance(self.editor, (tuple, list)): self.m_editor_text.SetValue(" ".join(self.editor) if len(self.editor) != 0 else "") else: self.m_editor_text.SetValue(self.editor if self.editor else "") self.m_single_checkbox.SetValue(Settings.get_single_instance()) self.m_time_output_checkbox.SetValue(Settings.get_international_time()) self.m_history_label.SetLabel(self.RECORDS % history_records) self.m_cache_textbox.SetValue(self.get_history()) self.m_history_clear_button.Enable(history_records > 0) mode = Settings.get_regex_mode() self.m_regex_radio.SetValue(mode in rumcore.REGEX_MODES) self.m_re_radio.SetValue(mode in rumcore.RE_MODES) self.m_regex_ver_choice.SetSelection(Settings.get_regex_version()) self.m_backrefs_checkbox.SetValue(mode in rumcore.BACKREFS_MODES) if Settings.is_regex_available(): self.m_regex_radio.Enable(True) self.m_regex_ver_choice.Enable(True) self.m_extmatch_checkbox.SetValue(Settings.get_extmatch()) self.m_brace_checkbox.SetValue(Settings.get_brace_expansion()) self.m_case_checkbox.SetValue(Settings.get_file_case_sensitive()) self.m_fullpath_checkbox.SetValue(Settings.get_full_exclude_path()) self.m_fullfile_checkbox.SetValue(Settings.get_full_file_path()) self.m_globstar_checkbox.SetValue(Settings.get_globstar()) self.m_matchbase_checkbox.SetValue(Settings.get_matchbase()) self.m_minusnegate_checkbox.SetValue(Settings.get_minusnegate()) 
self.m_visual_alert_checkbox.SetValue(Settings.get_notify()) self.m_audio_alert_checkbox.SetValue(Settings.get_alert()) self.alert_methods = Settings.get_platform_notify() self.m_notify_choice.Clear() for a in self.alert_methods: self.m_notify_choice.Append(a) self.m_notify_choice.SetStringSelection(Settings.get_notify_method()) self.m_lang_choice.Clear() for l in Settings.get_languages(): self.m_lang_choice.Append(l) locale = Settings.get_language() if locale is None: locale = "en_US" self.m_lang_choice.SetStringSelection(locale) self.m_term_note_picker.SetPath(Settings.get_term_notifier()) if util.platform() != "macos": self.m_term_note_label.Hide() self.m_term_note_picker.Hide() else: is_native = Settings.get_notify_method() == "default" self.m_term_note_label.Enable(is_native) self.m_term_note_picker.Enable(is_native) self.pattern_limit = str(Settings.get_pattern_limit()) self.backup_ext = Settings.get_backup_ext() self.backup_folder = Settings.get_backup_folder() self.m_pattern_limit_textbox.SetValue(self.pattern_limit) self.m_back_ext_textbox.SetValue(self.backup_ext) self.m_back_folder_textbox.SetValue(self.backup_folder) self.m_back2folder_checkbox.SetValue(bool(Settings.get_backup_type())) self.m_back_ext_button.Enable(False) self.m_back_folder_button.Enable(False) self.m_pattern_limit_button.Enable(False) self.m_update_checkbox.SetValue(bool(Settings.get_check_updates())) self.m_prerelease_checkbox.SetValue(bool(Settings.get_prerelease())) self.m_alt_row_checkbox.SetValue(bool(Settings.get_alt_list_color())) alert_choices = Settings.get_available_players() for x in alert_choices: self.m_sound_player_choice.Append(x) index = alert_choices.index(Settings.get_notify_player()) self.m_sound_player_choice.SetSelection(index) self.m_sound_picker.SetPath(Settings.get_notify_sound()) self.refresh_localization() self.finalize_size() self.call_later = wx.CallLater(100, functools.partial(self.on_loaded, resize=util.platform() == "linux")) self.call_later.Start() def on_color_change(self, event): self.update_tab_page_colors() if event: event.Skip() def update_tab_page_colors(self): for x in range(self.m_settings_notebook.GetPageCount()): page = self.m_settings_notebook.GetPage(x) if util.platform() == "linux": self.m_settings_notebook.SetBackgroundColour(wx.NullColour) bg = self.m_settings_notebook.GetBackgroundColour() else: page.SetBackgroundColour(wx.NullColour) bg = page.GetBackgroundColour() if util.platform() == "macos" and not util.MAC_OLD: factor = util.MAC_LIGHT if data.RGBA(util.to_rgb(bg.GetRGB())).get_luminance() > 127 else util.MAC_DARK bg = bg.ChangeLightness(factor) page.SetBackgroundColour(bg) if x == 0 and util.platform() != "linux": self.m_settings_notebook.SetBackgroundColour(wx.Colour(bg)) def on_loaded(self, resize=False, **kwargs): self.call_later.Stop() if resize: self.finalize_size() def finalize_size(self): self.m_general_panel.Fit() self.m_search_panel.Fit() self.m_encoding_panel.Fit() self.m_editor_panel.Fit() self.m_notify_panel.Fit() self.m_history_panel.Fit() self.m_backup_panel.Fit() self.m_settings_notebook.Fit() self.m_settings_panel.Fit() self.Fit() max_width = self.GetParent().GetMinSize()[0] min_width = 600 if self.GetSize()[0] < min_width: self.SetSize(wx.Size(min_width, self.GetSize()[1])) elif self.GetSize()[0] > max_width: self.SetSize(wx.Size(max_width, self.GetSize()[1])) self.SetMinSize(wx.Size(min_width, self.GetSize()[1])) self.Centre() self.alert_player = True def localize(self): self.TITLE = _("Preferences") self.GENERAL_TAB = _("General") 
self.SEARCH_TAB = _("Search") self.EDITOR_TAB = _("Editor") self.NOTIFICATIONS_TAB = _("Notifications") self.HISTORY_TAB = _("History") self.SINGLE_INSTANCE = _("Single Instance (applies to new instances)") self.INTERNATIONAL_TIME = _("International time format for file results") self.NOTIFY_TEST_TITLE = _("Rummage Test") self.NOTIFY_TEST_MSG = _("Test complete!") self.NOTIFY_POPUP = _("Notification popup") self.ALERT = _("Alert Sound") self.TERM_NOTIFY_PATH = _("Path to terminal-notifier") self.TEST = _("Test") self.LANGUAGE = _("Language (restart required)") self.EXTMATCH = _("Extended match") self.BRACES = _("Brace expansion") self.CASE = _("Case sensitive") self.FULL_PATH = _("Full path directory match") self.FULL_FILE = _("Full path file match") self.GLOBSTAR = _("Globstar (full path)") self.MATCHBASE = _("Match base (full path)") self.MINUSNEGATE = _("Exclude with '-' (instead of '!')") self.REGEX_GROUP = _("Regular Expressions") self.PATTERN_LIMIT = _("Pattern Limit") self.FILE_MATCH_GROUP = _("File/Folder Matching") self.RE = _("Use Re module") self.REGEX = _("Use Regex module") self.BACKREFS = _("Enable Backrefs") self.CLEAR = _("Clear") self.CLOSE = _("Close") self.SAVE = _("Save") self.RECORDS = _("%d Records") self.BACK_EXT = _("Backup extension") self.BACK_FOLDER = _("Backup folder") self.BACK_2_FOLDER = _("Backup to folder") self.ERR_INVALID_EXT = _( "Invalid extension! Please enter a valid extension.\n\n" "Extensions must be alphanumeric and can contain\n" "hypens, underscores, and dots." ) self.ERR_INVALID_FOLDER = _( "Invalid folder! Please enter a valid folder.\n\n" "Folders must be alphanumeric and can contain\n" "hypens, underscores, and dots." ) self.ERR_PLAYER = _( "Can't find the player '{}'!" ) self.ERR_INVALID_PATTERN_LIMIT = _( "'{}' is not a valid number" ) self.CHECK_UPDATES = _("Check updates daily") self.PRERELEASES = _("Include pre-releases") self.CHECK_NOW = _("Check now") self.ENCODING = _("Encoding") self.CHARDET_CHOICE = [ _("Fastest"), _("chardet (pure python)"), _("cchardet (C)") ] self.ALERT_PLAYER = _("Alert player") self.SPECIAL = _("Special file types:") self.EDITOR_HELP = EDITOR_HELP self.ALT_ROW_COLOR = _("Show alternate row colors in lists") def refresh_localization(self): self.SetTitle(self.TITLE) self.m_settings_notebook.SetPageText(0, self.GENERAL_TAB) self.m_settings_notebook.SetPageText(1, self.SEARCH_TAB) self.m_settings_notebook.SetPageText(2, self.ENCODING) self.m_settings_notebook.SetPageText(3, self.EDITOR_TAB) self.m_settings_notebook.SetPageText(4, self.NOTIFICATIONS_TAB) self.m_settings_notebook.SetPageText(5, self.HISTORY_TAB) self.m_search_panel.GetSizer().GetItem(0).GetSizer().GetStaticBox().SetLabel(self.REGEX_GROUP) self.m_search_panel.GetSizer().GetItem(1).GetSizer().GetStaticBox().SetLabel(self.FILE_MATCH_GROUP) self.m_single_checkbox.SetLabel(self.SINGLE_INSTANCE) self.m_time_output_checkbox.SetLabel(self.INTERNATIONAL_TIME) self.m_visual_alert_checkbox.SetLabel(self.NOTIFY_POPUP) self.m_audio_alert_checkbox.SetLabel(self.ALERT) self.m_sound_player_label.SetLabel(self.ALERT_PLAYER) self.m_term_note_label.SetLabel(self.TERM_NOTIFY_PATH) self.m_notify_test_button.SetLabel(self.TEST) self.m_language_label.SetLabel(self.LANGUAGE) self.m_re_radio.SetLabel(self.RE) self.m_regex_radio.SetLabel(self.REGEX) self.m_backrefs_checkbox.SetLabel(self.BACKREFS) self.m_extmatch_checkbox.SetLabel(self.EXTMATCH) self.m_brace_checkbox.SetLabel(self.BRACES) self.m_case_checkbox.SetLabel(self.CASE) 
self.m_fullpath_checkbox.SetLabel(self.FULL_PATH) self.m_fullfile_checkbox.SetLabel(self.FULL_FILE) self.m_globstar_checkbox.SetLabel(self.GLOBSTAR) self.m_matchbase_checkbox.SetLabel(self.MATCHBASE) self.m_minusnegate_checkbox.SetLabel(self.MINUSNEGATE) self.m_pattern_limit_label.SetLabel(self.PATTERN_LIMIT) self.m_editor_button.SetLabel(self.SAVE) self.m_history_clear_button.SetLabel(self.CLEAR) self.m_back_ext_label.SetLabel(self.BACK_EXT) self.m_back_folder_label.SetLabel(self.BACK_FOLDER) self.m_back2folder_checkbox.SetLabel(self.BACK_2_FOLDER) self.m_close_button.SetLabel(self.CLOSE) self.m_back_ext_button.SetLabel(self.SAVE) self.m_back_folder_button.SetLabel(self.SAVE) self.m_update_checkbox.SetLabel(self.CHECK_UPDATES) self.m_prerelease_checkbox.SetLabel(self.PRERELEASES) self.m_check_update_button.SetLabel(self.CHECK_NOW) self.m_filetype_label.SetLabel(self.SPECIAL) self.m_alt_row_checkbox.SetLabel(self.ALT_ROW_COLOR) self.load_help(self.EDITOR_HELP) encoding = Settings.get_chardet_mode() cchardet_available = Settings.is_cchardet_available() options = self.CHARDET_CHOICE if cchardet_available else self.CHARDET_CHOICE[:1] for x in options: self.m_encoding_choice.Append(x) self.m_encoding_choice.SetSelection(encoding) self.reload_list() self.Fit() def load_help(self, text): self.load_html(self.m_help_html, text, 'Editor Settings', webview.MARKDOWN_STRING) def set_keybindings(self, keybindings): tbl = [] for binding in keybindings: keyid = wx.NewId() self.Bind(wx.EVT_MENU, binding[2], id=keyid) tbl.append((binding[0], binding[1], keyid)) if len(keybindings): self.SetAcceleratorTable(wx.AcceleratorTable(tbl)) def on_textctrl_selectall(self, event): text = self.FindFocus() if isinstance(text, wx.TextCtrl): text.SelectAll() event.Skip() def reload_list(self): self.m_encoding_list.reset_list() encoding_ext = Settings.get_encoding_ext() keys = sorted(encoding_ext.keys()) for key in keys: self.m_encoding_list.set_item_map(key, key, ', '.join(encoding_ext[key])) self.m_encoding_list.load_list(True) def get_history(self): return json.dumps( Settings.get_history(self.history_types), sort_keys=True, indent=4, separators=(',', ': ') ) + '\n' def history_cleared(self): return self.history_records_cleared def on_chardet(self, event): Settings.set_chardet_mode(self.m_encoding_choice.GetCurrentSelection()) def on_check(self, event): self.GetParent().on_check_update(event) def on_pattern_limit_click(self, event): try: value = int(self.m_pattern_limit_textbox.GetValue()) except Exception: value = '' if isinstance(value, int): Settings.set_pattern_limit(value) self.pattern_limit = self.m_pattern_limit_textbox.GetValue() self.m_pattern_limit_button.Enable(False) else: errormsg(self.ERR_INVALID_PATTERN_LIMIT.format(self.m_pattern_limit_textbox.GetValue())) Settings.set_pattern_limit(self.m_pattern_limit_textbox.GetValue()) def on_pattern_limit_changed(self, event): self.m_pattern_limit_button.Enable(self.m_pattern_limit_textbox.GetValue() != self.pattern_limit) def on_editor_changed(self, event): self.m_editor_button.Enable(self.m_editor_text.GetValue() != self.editor)
MIT License
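The EDITOR_HELP text in the context documents {$file}/{$line}/{$col}/{$col0} substitution; a sketch of how such a template could expand (the helper is mine, not part of Rummage):

def expand_editor_template(template, path, line, col):
    # Substitute the variables documented in EDITOR_HELP.
    return (template.replace("{$col0}", str(col - 1))
                    .replace("{$col}", str(col))
                    .replace("{$line}", str(line))
                    .replace("{$file}", path))

print(expand_editor_template('subl "{$file}:{$line}:{$col}"', "/tmp/a.txt", 12, 4))
# subl "/tmp/a.txt:12:4"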
ulikoehler/uliengineering
UliEngineering/Electronics/Resistors.py
power_dissipated_in_resistor_by_voltage
python
def power_dissipated_in_resistor_by_voltage(resistor, voltage) -> Unit("W"): resistor = normalize_numeric(resistor) voltage = normalize_numeric(voltage) current = current_through_resistor(resistor, voltage) return np.abs(current * voltage)
Compute the power that is dissipated in a resistor using P=VI given its resistance and the voltage across it Parameters ---------- resistor : float or Engineer string The resistance in Ohms voltage : float or Engineer string The voltage across the resistor
https://github.com/ulikoehler/uliengineering/blob/58eadc8a848854b154a4be20ee2d2d1c614d802f/UliEngineering/Electronics/Resistors.py#L91-L107
import itertools import numpy as np from UliEngineering.EngineerIO import normalize_numeric from UliEngineering.Units import Unit __all__ = ["e96", "e48", "e24", "e12", "resistor_range", "standard_resistors", "parallel_resistors", "current_through_resistor", "power_dissipated_in_resistor_by_current", "power_dissipated_in_resistor_by_voltage", "voltage_across_resistor", "series_resistors", "nearest_resistor", "resistor_by_voltage_and_current"] e96 = np.asarray([ 1.00, 1.02, 1.05, 1.07, 1.10, 1.13, 1.15, 1.18, 1.21, 1.24, 1.27, 1.30, 1.33, 1.37, 1.40, 1.43, 1.47, 1.50, 1.54, 1.58, 1.62, 1.65, 1.69, 1.74, 1.78, 1.82, 1.87, 1.91, 1.96, 2.00, 2.05, 2.10, 2.15, 2.21, 2.26, 2.32, 2.37, 2.43, 2.49, 2.55, 2.61, 2.67, 2.74, 2.80, 2.87, 2.94, 3.01, 3.09, 3.16, 3.24, 3.32, 3.40, 3.48, 3.57, 3.65, 3.74, 3.83, 3.92, 4.02, 4.12, 4.22, 4.32, 4.42, 4.53, 4.64, 4.75, 4.87, 4.99, 5.11, 5.23, 5.36, 5.49, 5.62, 5.76, 5.90, 6.04, 6.19, 6.34, 6.49, 6.65, 6.81, 6.98, 7.15, 7.32, 7.50, 7.68, 7.87, 8.06, 8.25, 8.45, 8.66, 8.87, 9.09, 9.31, 9.53, 9.76]) e48 = np.asarray([ 1.00, 1.05, 1.10, 1.15, 1.21, 1.27, 1.33, 1.40, 1.47, 1.54, 1.62, 1.69, 1.78, 1.87, 1.96, 2.05, 2.15, 2.26, 2.37, 2.49, 2.61, 2.74, 2.87, 3.01, 3.16, 3.32, 3.48, 3.65, 3.83, 4.02, 4.22, 4.42, 4.64, 4.87, 5.11, 5.36, 5.62, 5.90, 6.19, 6.49, 6.81, 7.15, 7.50, 7.87, 8.25, 8.66, 9.09, 9.53]) e24 = np.asarray([1.0, 1.1, 1.2, 1.3, 1.5, 1.6, 1.8, 2.0, 2.2, 2.4, 2.7, 3.0, 3.3, 3.6, 3.9, 4.3, 4.7, 5.1, 5.6, 6.2, 6.8, 7.5, 8.2, 9.1]) e12 = np.asarray([1.0, 1.2, 1.5, 1.8, 2.2, 2.7, 3.3, 3.9, 4.7, 5.6, 6.8, 8.2]) def current_through_resistor(resistor, voltage) -> Unit("A"): resistor = normalize_numeric(resistor) voltage = normalize_numeric(voltage) return voltage / resistor def voltage_across_resistor(resistor, current) -> Unit("A"): resistor = normalize_numeric(resistor) current = normalize_numeric(current) return resistor * current def power_dissipated_in_resistor_by_current(resistor, current) -> Unit("W"): resistor = normalize_numeric(resistor) current = normalize_numeric(current) return np.abs(resistor * current * current)
Apache License 2.0
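A minimal usage sketch for the record above (illustrative, not part of the upstream repository; normalize_numeric is what lets engineer strings such as "1k" or "5V" be passed directly):

    from UliEngineering.Electronics.Resistors import power_dissipated_in_resistor_by_voltage

    # 5 V across a 1 kOhm resistor: P = V^2 / R = 25 mW
    print(power_dissipated_in_resistor_by_voltage("1k", "5V"))   # ~0.025
    print(power_dissipated_in_resistor_by_voltage(1000.0, 5.0))  # same result with plain floats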
oxford-quantum-group/discopy
test/test_drawing.py
spiral
python
def spiral(n_cups):
    x = Ty('x')
    unit, counit = Box('unit', Ty(), x), Box('counit', x, Ty())
    cup, cap = Box('cup', x @ x, Ty()), Box('cap', Ty(), x @ x)
    for box in [unit, counit, cup, cap]:
        box.draw_as_spider, box.color, box.drawing_name = True, "black", ""
    result = unit
    for i in range(n_cups):
        result = result >> Id(x ** i) @ cap @ Id(x ** (i + 1))
    result = result >> Id(x ** n_cups) @ counit @ Id(x ** n_cups)
    for i in range(n_cups):
        result = result >> Id(x ** (n_cups - i - 1)) @ cup @ Id(x ** (n_cups - i - 1))
    return result
Implements the asymptotic worst-case for normal_form, see arXiv:1804.07832.
https://github.com/oxford-quantum-group/discopy/blob/3b253e0ba4a7f077b523f39eaad7e9b1d39b3781/test/test_drawing.py#L49-L65
import os
from pytest import raises
from PIL import Image, ImageChops
from matplotlib import pyplot as plt
from matplotlib.testing.compare import compare_images
from discopy import *
from discopy.drawing import *

IMG_FOLDER, TIKZ_FOLDER, TOL = 'test/imgs/', 'test/tikz/', 10


def draw_and_compare(file, folder=IMG_FOLDER, tol=TOL, draw=Diagram.draw, **params):
    def decorator(func):
        def wrapper():
            true_path = os.path.join(folder, file)
            test_path = os.path.join(folder, '.' + file)
            draw(func(), path=test_path, show=False, **params)
            test = compare_images(true_path, test_path, tol)
            assert test is None
            os.remove(test_path)
        return wrapper
    return decorator


def tikz_and_compare(file, folder=TIKZ_FOLDER, draw=Diagram.draw, **params):
    def decorator(func):
        def wrapper():
            true_paths = [os.path.join(folder, file)]
            test_paths = [os.path.join(folder, '.' + file)]
            if params.get("use_tikzstyles", DEFAULT['use_tikzstyles']):
                true_paths.append(true_paths[0].replace('.tikz', '.tikzstyles'))
                test_paths.append(test_paths[0].replace('.tikz', '.tikzstyles'))
            draw(func(), path=test_paths[0], **dict(params, to_tikz=True))
            for true_path, test_path in zip(true_paths, test_paths):
                with open(true_path, "r") as true:
                    with open(test_path, "r") as test:
                        assert true.read() == test.read()
                os.remove(test_path)
        return wrapper
    return decorator
BSD 3-Clause New or Revised License
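A hedged usage sketch (assumes the discopy version pinned by this record; spiral itself lives in the test module, so it is taken as defined in the record rather than imported):

    from discopy import Ty, Box, Id  # same names the test module star-imports

    diagram = spiral(3)                       # 3 nested cups/caps
    diagram.draw(path='spiral.png', show=False)
    normal = diagram.normal_form()            # the worst case targeted by arXiv:1804.07832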
kultprok/pythonista-drafts-recipes
string_hashes/strings_hashes.py
parse_input
python
def parse_input(data):
    parser = argparse.ArgumentParser(description='input a string to hash.')
    parser.add_argument('inputstring', metavar='STRING', nargs='*',
                        help='the string to hash')
    parser.add_argument('-hs', '--hs', '-hash', '--hash',
                        metavar='HASH-NAME', default='sha1', dest='hash',
                        help='the hash function of hashlib to use. defaults to sha1')
    parser.add_argument('-u', '--u', '-url', '--url',
                        metavar='URL', default='drafts://', dest='url',
                        help='url scheme to call after hashing. use to call an app.')
    args = parser.parse_args(data)
    hash_data(' '.join(args.inputstring), args.hash, args.url)
Parse command-line-like input coming from Drafts.
https://github.com/kultprok/pythonista-drafts-recipes/blob/0d09256583db5facae4e8ea31b729d557d90f6b2/string_hashes/strings_hashes.py#L29-L55
import argparse
import clipboard
import hashlib
from sys import argv
import webbrowser


def hash_data(hashstring, hashfunction, url):
    if hasattr(hashlib, hashfunction):
        hash_method = getattr(hashlib, hashfunction)
    else:
        hash_method = hashlib.sha1
    if hashstring:
        clipboard.set(hash_method(hashstring).hexdigest())
    else:
        raise ValueError
    webbrowser.open(url)
MIT License
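An illustrative invocation (hedged: clipboard is a Pythonista-only module, and on Python 3 hashlib requires bytes, so hash_method(hashstring) would need hashstring.encode() there):

    # hash "hello world" with sha256, then jump back to the Drafts app
    parse_input(['hello', 'world', '--hash', 'sha256'])

    # the script is normally driven by the interpreter arguments:
    # parse_input(argv[1:])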
project-monai/monailabel
monailabel/interfaces/datastore.py
Datastore.get_labels_by_image_id
python
def get_labels_by_image_id(self, image_id: str) -> Dict[str, str]:
    pass
Retrieve all label ids for the given image id

:param image_id: the desired image's id
:return: label ids mapped to the appropriate `LabelTag` as Dict[LabelTag, str]
https://github.com/project-monai/monailabel/blob/f7eaeea08ea1ba7698668f1f93b568091e6d1111/monailabel/interfaces/datastore.py#L70-L77
from abc import ABCMeta, abstractmethod
from enum import Enum
from typing import Any, Dict, List


class DefaultLabelTag(str, Enum):
    ORIGINAL = "original"
    FINAL = "final"


class Datastore(metaclass=ABCMeta):

    @abstractmethod
    def name(self) -> str:
        pass

    @abstractmethod
    def set_name(self, name: str):
        pass

    @abstractmethod
    def description(self) -> str:
        pass

    @abstractmethod
    def set_description(self, description: str):
        pass

    @abstractmethod
    def datalist(self) -> List[Dict[str, str]]:
        pass

    @abstractmethod
Apache License 2.0
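A minimal sketch of one concrete implementation of this abstract method (the in-memory self._labels mapping is hypothetical, not MONAI Label's actual backend, and a real subclass must implement every abstract method of Datastore):

    class InMemoryDatastore(Datastore):
        # ... other abstract methods elided ...

        def get_labels_by_image_id(self, image_id: str) -> Dict[str, str]:
            # map each LabelTag (e.g. "original", "final") to its label id
            return dict(self._labels.get(image_id, {}))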
ali5h/rules_pip
third_party/py/setuptools/_distutils/cmd.py
Command.debug_print
python
def debug_print(self, msg):
    from distutils.debug import DEBUG
    if DEBUG:
        print(msg)
        sys.stdout.flush()
Print 'msg' to stdout if the global DEBUG (taken from the DISTUTILS_DEBUG environment variable) flag is true.
https://github.com/ali5h/rules_pip/blob/fb02cb7bf5c03bc8cd4269679e4aea2e1839b501/third_party/py/setuptools/_distutils/cmd.py#L184-L191
import sys, os, re

from distutils.errors import DistutilsOptionError
from distutils import util, dir_util, file_util, archive_util, dep_util
from distutils import log


class Command:

    sub_commands = []

    def __init__(self, dist):
        from distutils.dist import Distribution
        if not isinstance(dist, Distribution):
            raise TypeError("dist must be a Distribution instance")
        if self.__class__ is Command:
            raise RuntimeError("Command is an abstract class")
        self.distribution = dist
        self.initialize_options()
        self._dry_run = None
        self.verbose = dist.verbose
        self.force = None
        self.help = 0
        self.finalized = 0

    def __getattr__(self, attr):
        if attr == 'dry_run':
            myval = getattr(self, "_" + attr)
            if myval is None:
                return getattr(self.distribution, attr)
            else:
                return myval
        else:
            raise AttributeError(attr)

    def ensure_finalized(self):
        if not self.finalized:
            self.finalize_options()
        self.finalized = 1

    def initialize_options(self):
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def finalize_options(self):
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def dump_options(self, header=None, indent=""):
        from distutils.fancy_getopt import longopt_xlate
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        self.announce(indent + header, level=log.INFO)
        indent = indent + "  "
        for (option, _, _) in self.user_options:
            option = option.translate(longopt_xlate)
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            self.announce(indent + "%s = %s" % (option, value), level=log.INFO)

    def run(self):
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def announce(self, msg, level=1):
        log.log(level, msg)
MIT License
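Usage sketch: DEBUG is read from the DISTUTILS_DEBUG environment variable when distutils.debug is first imported, so it must be set beforehand (the 'build' command is just an example):

    import os
    os.environ['DISTUTILS_DEBUG'] = '1'  # must happen before any distutils import

    from distutils.dist import Distribution

    dist = Distribution()
    cmd = dist.get_command_obj('build')
    cmd.debug_print('only printed when DISTUTILS_DEBUG is set')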
monzop/bioblender
bin/pdb2pqr-1.6/src/hydrogens.py
Optimize.tryPositionsWithTwoBondsLP
python
def tryPositionsWithTwoBondsLP(self, acc, donor, newname, loc1, loc2):
    bestangle = 180.00
    bestcoords = []
    residue = acc.residue
    if not self.isHbond(donor, acc):
        return 0
    for donorhatom in donor.bonds:
        if donorhatom.isHydrogen() and \
           self.getHbondangle(acc, donor, donorhatom) < ANGLE_CUTOFF:
            break
    residue.createAtom(newname, loc1)
    newatom = residue.getAtom(newname)
    angle = abs(self.getHbondangle(donorhatom, acc, newatom))
    if angle < bestangle:
        bestangle = angle
        bestcoords = loc1
    newatom.x = loc2[0]
    newatom.y = loc2[1]
    newatom.z = loc2[2]
    angle = self.getHbondangle(donorhatom, acc, newatom)
    if angle < bestangle:
        bestcoords = loc2
    if bestangle > (ANGLE_CUTOFF * 2.0):
        residue.removeAtom(newname)
        return 0
    newatom.x = bestcoords[0]
    newatom.y = bestcoords[1]
    newatom.z = bestcoords[2]
    self.routines.cells.addCell(newatom)
    if newatom not in acc.bonds:
        acc.bonds.append(newatom)
    if acc not in newatom.bonds:
        newatom.bonds.append(acc)
    return 1
Try placing an LP on a tetrahedral geometry with two existing bonds.
If this isn't a hydrogen bond, return immediately; otherwise ensure
that the H(D)-A-LP angle is minimized.
https://github.com/monzop/bioblender/blob/57a6ed4dffaa8e43f39fcfa5481048b8f7cc369c/bin/pdb2pqr-1.6/src/hydrogens.py#L631-L691
import os import string import math from definitions import * from utilities import * from quatfit import * from routines import * import topology __date__ = "22 April 2009" __author__ = "Todd Dolinsky, Jens Erik Nielsen, Yong Huang" HDEBUG = 0 HYDPATH = "dat/HYDROGENS.xml" TOPOLOGYPATH = "dat/TOPOLOGY.xml" ANGLE_CUTOFF = 20.0 DIST_CUTOFF = 3.3 class HydrogenHandler(sax.ContentHandler): def __init__(self): self.curelement = "" self.curatom = None self.curobj = None self.curholder = None self.map = {} def startElement(self, name, attributes): if name == "class": obj = OptimizationHolder() self.curholder = obj self.curobj = obj elif name == "atom": obj = DefinitionAtom() self.curatom = obj self.curobj = obj else: self.curelement = name return def endElement(self, name): if name == "class": obj = self.curholder if not isinstance(obj, OptimizationHolder): raise ValueError, "Internal error parsing XML!" self.map[obj.name] = obj self.curholder = None self.curobj = None elif name == "atom": atom = self.curatom if not isinstance(atom, DefinitionAtom): raise ValueError, "Internal error parsing XML!" atomname = atom.name if atomname == "": raise ValueError, "Atom name not set in XML!" else: self.curholder.map[atomname] = atom self.curatom = None self.curobj = self.curholder else: self.curelement = "" return self.map def characters(self, text): if text.isspace(): return try: value = float(str(text)) except ValueError: value = str(text) setattr(self.curobj, self.curelement, value) class PotentialBond: def __init__(self, atom1, atom2, dist): self.atom1 = atom1 self.atom2 = atom2 self.dist = dist def __str__(self): txt = "%s %s" % (self.atom1.name, self.atom1.residue) txt += " to " txt += "%s %s" % (self.atom2.name, self.atom2.residue) txt += " (%.2f A)" % self.dist return txt class hydrogenAmbiguity: def __init__(self, residue, hdef, routines): self.residue = residue self.hdef = hdef self.routines = routines def __str__(self): text = "%s %i %s (%s)" % (self.residue.name, self.residue.resSeq, self.residue.chainID, self.hdef.opttype) return text class Optimize: def __init__(self): return def __str__(self): txt = "%s (%s)" % (self.residue, self.optinstance.opttype) return txt def debug(self, txt): if HDEBUG: print txt def getHbondangle(self, atom1, atom2, atom3): angle = 0.0 atom2Coords = atom2.getCoords() coords1 = subtract(atom3.getCoords(), atom2Coords) coords2 = subtract(atom1.getCoords(), atom2Coords) norm1 = normalize(coords1) norm2 = normalize(coords2) dotted = dot(norm1, norm2) if dotted > 1.0: dotted = 1.0 rad = abs(math.acos(dotted)) angle = rad*180.0/math.pi if angle > 180.0: angle = 360.0 - angle return angle def isHbond(self, donor, acc): for donorhatom in donor.bonds: if not donorhatom.isHydrogen(): continue dist = distance(donorhatom.getCoords(), acc.getCoords()) if dist > DIST_CUTOFF: continue flag = 1 for acchatom in acc.bonds: if not acchatom.isHydrogen(): continue flag = 0 hdist = distance(donorhatom.getCoords(), acchatom.getCoords()) if hdist < 1.5: continue angle = self.getHbondangle(donorhatom, acchatom, acc) if angle < 110.0: flag = 1 if flag == 0: continue angle = self.getHbondangle(acc, donor, donorhatom) if angle <= ANGLE_CUTOFF: self.debug("Found HBOND! 
%.4f %.4f" % (dist, angle)) return 1 return 0 def getPairEnergy(self, donor, acceptor): max_hbond_energy = -10.0 max_ele_energy = -1.0 maxangle = ANGLE_CUTOFF max_dha_dist = DIST_CUTOFF max_ele_dist = 5.0 energy = 0.0 donorhs = [] acceptorhs = [] if not (donor.hdonor and acceptor.hacceptor): return energy for bond in donor.bonds: if bond.isHydrogen(): donorhs.append(bond) for bond in acceptor.bonds: if bond.isHydrogen(): acceptorhs.append(bond) if donorhs == []: return energy for donorhatom in donorhs: dist = distance(donorhatom.getCoords(), acceptor.getCoords()) if dist > max_dha_dist and dist < max_ele_dist: energy += max_ele_energy/(dist*dist) continue if acceptorhs != []: for acceptorhatom in acceptorhs: hdist = distance(donorhatom.getCoords(), acceptorhatom.getCoords()) if hdist < 1.5: energy += -1 * max_hbond_energy continue angle1 = self.getHbondangle(acceptor, donor, donorhatom) if angle1 <= maxangle: angleterm = (maxangle - angle1)/maxangle angle2 = self.getHbondangle(donorhatom, acceptorhatom, acceptor) if angle2 < 110.0: angle2 = 1.0 else: angle2 = (110.0 - angle2)/110.0 energy += max_hbond_energy/pow(dist,3)*angleterm*angle2 else: angle1 = self.getHbondangle(acceptor, donor, donorhatom) if angle1 <= maxangle: angleterm = (maxangle - angle1)/maxangle energy += max_hbond_energy/pow(dist,2)*angleterm return energy def makeAtomWithNoBonds(self, atom, closeatom, addname): newcoords = [] residue = atom.residue vec = subtract(closeatom.getCoords(), atom.getCoords()) dist = distance(atom.getCoords(), closeatom.getCoords()) for i in range(3): newcoords.append(vec[i]/dist + atom.getCoords()[i]) residue.createAtom(addname, newcoords) newatom = residue.getAtom(addname) self.routines.cells.addCell(newatom) if newatom not in atom.bonds: atom.bonds.append(newatom) if atom not in newatom.bonds: newatom.bonds.append(atom) def makeWaterWithOneBond(self, atom, addname): residue = atom.residue nextatom = atom.bonds[0] coords = [atom.getCoords(), nextatom.getCoords()] refcoords = [residue.reference.map[atom.name].getCoords(), residue.reference.map["H1"].getCoords()] refatomcoords = residue.reference.map["H2"].getCoords() newcoords = findCoordinates(2, coords, refcoords, refatomcoords) residue.createAtom(addname, newcoords) newatom = residue.getAtom(addname) if newatom not in atom.bonds: atom.bonds.append(newatom) if atom not in newatom.bonds: newatom.bonds.append(atom) def makeAtomWithOneBondH(self, atom, addname): residue = atom.residue nextatom = atom.bonds[0] coords = [atom.getCoords(), nextatom.getCoords()] refcoords = [residue.reference.map[atom.name].getCoords(), residue.reference.map[nextatom.name].getCoords()] refatomcoords = residue.reference.map[addname].getCoords() newcoords = findCoordinates(2, coords, refcoords, refatomcoords) residue.createAtom(addname, newcoords) def makeAtomWithOneBondLP(self, atom, addname): residue = atom.residue for refname in atom.reference.bonds: if refname.startswith("H"): break nextatom = atom.bonds[0] coords = [atom.getCoords(), nextatom.getCoords()] refcoords = [residue.reference.map[atom.name].getCoords(), residue.reference.map[nextatom.name].getCoords()] refatomcoords = residue.reference.map[refname].getCoords() newcoords = findCoordinates(2, coords, refcoords, refatomcoords) residue.createAtom(addname, newcoords) newatom = residue.getAtom(addname) if newatom not in atom.bonds: atom.bonds.append(newatom) if atom not in newatom.bonds: newatom.bonds.append(atom) def trySingleAlcoholicH(self, donor, acc, newatom): besten = 999.99 bestcoords = [] residue = 
donor.residue pivot = donor.bonds[0] for i in range(72): residue.rotateTetrahedral(pivot, donor, 5.0) if self.isHbond(donor, acc): energy = self.getPairEnergy(donor, acc) if energy < besten: bestcoords = newatom.getCoords() besten = energy if bestcoords != []: newatom.x = bestcoords[0] newatom.y = bestcoords[1] newatom.z = bestcoords[2] self.routines.cells.addCell(newatom) return 1 else: residue.removeAtom(newatom.name) return 0 def trySingleAlcoholicLP(self, acc, donor, newatom): residue = acc.residue pivot = acc.bonds[0] bestangle = 180.00 bestcoords = [] if not self.isHbond(donor, acc): residue.removeAtom(newatom.name) return 0 for donorhatom in donor.bonds: if donorhatom.isHydrogen() and self.getHbondangle(acc, donor, donorhatom) < ANGLE_CUTOFF: break for i in range(72): residue.rotateTetrahedral(pivot, acc, 5.0) angle = abs(self.getHbondangle(donorhatom, acc, newatom)) if angle < bestangle: bestangle = angle bestcoords = newatom.getCoords() if bestangle > (ANGLE_CUTOFF * 2.0): self.debug("Removing due to geometry %.2f > %.2f" % (bestangle, ANGLE_CUTOFF*2.0)) residue.removeAtom(newatom.name) return 0 newatom.x = bestcoords[0] newatom.y = bestcoords[1] newatom.z = bestcoords[2] self.routines.cells.addCell(newatom) return 1 def getPositionsWithTwoBonds(self, atom): residue = atom.residue fixed = atom.bonds[0] rotate = atom.bonds[1] residue.rotateTetrahedral(fixed, atom, 120) loc1 = rotate.getCoords() residue.rotateTetrahedral(fixed, atom, 120) loc2 = rotate.getCoords() residue.rotateTetrahedral(fixed, atom, 120) return loc1, loc2 def tryPositionsWithTwoBondsH(self, donor, acc, newname, loc1, loc2): besten = 999.99 bestcoords = [] residue = donor.residue residue.createAtom(newname, loc1) if self.isHbond(donor, acc): besten = self.getPairEnergy(donor, acc) bestcoords = loc1 newatom = residue.getAtom(newname) newatom.x = loc2[0] newatom.y = loc2[1] newatom.z = loc2[2] if self.isHbond(donor, acc): energy = self.getPairEnergy(donor, acc) if energy < besten: bestcoords = loc2 if bestcoords != []: newatom.x = bestcoords[0] newatom.y = bestcoords[1] newatom.z = bestcoords[2] self.routines.cells.addCell(newatom) return 1 else: residue.removeAtom(newname) return 0
BSD 2-Clause Simplified License
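The placement test above hinges on getHbondangle (defined in the record's context). For intuition, a standalone numpy sketch of the same vector math (names are illustrative, not part of pdb2pqr):

    import numpy as np

    def hbond_angle(coords1, coords2, coords3):
        # angle at coords2 between the vectors toward coords3 and coords1, in degrees
        v1 = np.asarray(coords3) - np.asarray(coords2)
        v2 = np.asarray(coords1) - np.asarray(coords2)
        cosine = np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
        return np.degrees(np.arccos(np.clip(cosine, -1.0, 1.0)))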
botfront/rasa-for-botfront
rasa/core/channels/console.py
record_messages
python
async def record_messages(
    sender_id,
    server_url=DEFAULT_SERVER_URL,
    auth_token="",
    max_message_limit=None,
    use_response_stream=True,
) -> int:
    exit_text = INTENT_MESSAGE_PREFIX + "stop"
    rasa.shared.utils.cli.print_success(
        "Bot loaded. Type a message and press enter "
        "(use '{}' to exit): ".format(exit_text)
    )
    num_messages = 0
    previous_response = None
    await asyncio.sleep(0.5)
    while not utils.is_limit_reached(num_messages, max_message_limit):
        text = get_user_input(previous_response)
        if text == exit_text or text is None:
            break
        if use_response_stream:
            bot_responses = send_message_receive_stream(
                server_url, auth_token, sender_id, text
            )
            previous_response = None
            async for response in bot_responses:
                if previous_response is not None:
                    print_bot_output(previous_response)
                previous_response = response
        else:
            bot_responses = await send_message_receive_block(
                server_url, auth_token, sender_id, text
            )
            previous_response = None
            for response in bot_responses:
                if previous_response is not None:
                    print_bot_output(previous_response)
                previous_response = response
        num_messages += 1
        await asyncio.sleep(0)
    return num_messages
Read messages from the command line and print bot responses.
https://github.com/botfront/rasa-for-botfront/blob/6e0e48d0059e197b5f686df1e27935769c3641b7/rasa/core/channels/console.py#L152-L198
import asyncio import json import logging import os import aiohttp import questionary from aiohttp import ClientTimeout from prompt_toolkit.styles import Style from typing import Any from typing import Text, Optional, Dict, List import rasa.shared.utils.cli import rasa.shared.utils.io from rasa.cli import utils as cli_utils from rasa.core import utils from rasa.core.channels.rest import RestInput from rasa.core.constants import DEFAULT_SERVER_URL from rasa.shared.constants import INTENT_MESSAGE_PREFIX from rasa.shared.utils.io import DEFAULT_ENCODING logger = logging.getLogger(__name__) STREAM_READING_TIMEOUT_ENV = "RASA_SHELL_STREAM_READING_TIMEOUT_IN_SECONDS" DEFAULT_STREAM_READING_TIMEOUT_IN_SECONDS = 10 def print_buttons( message: Dict[Text, Any], is_latest_message: bool = False, color=rasa.shared.utils.io.bcolors.OKBLUE, ) -> Optional[questionary.Question]: if is_latest_message: choices = cli_utils.button_choices_from_message_data( message, allow_free_text_input=True ) question = questionary.select( message.get("text"), choices, style=Style([("qmark", "#6d91d3"), ("", "#6d91d3"), ("answer", "#b373d6")]), ) return question else: rasa.shared.utils.cli.print_color("Buttons:", color=color) for idx, button in enumerate(message.get("buttons")): rasa.shared.utils.cli.print_color( cli_utils.button_to_string(button, idx), color=color ) def print_bot_output( message: Dict[Text, Any], is_latest_message: bool = False, color=rasa.shared.utils.io.bcolors.OKBLUE, ) -> Optional[questionary.Question]: if "buttons" in message: question = print_buttons(message, is_latest_message, color) if question: return question if "text" in message: rasa.shared.utils.cli.print_color(message.get("text"), color=color) if "image" in message: rasa.shared.utils.cli.print_color("Image: " + message.get("image"), color=color) if "attachment" in message: rasa.shared.utils.cli.print_color( "Attachment: " + message.get("attachment"), color=color ) if "elements" in message: rasa.shared.utils.cli.print_color("Elements:", color=color) for idx, element in enumerate(message.get("elements")): rasa.shared.utils.cli.print_color( cli_utils.element_to_string(element, idx), color=color ) if "quick_replies" in message: rasa.shared.utils.cli.print_color("Quick Replies:", color=color) for idx, element in enumerate(message.get("quick_replies")): rasa.shared.utils.cli.print_color( cli_utils.button_to_string(element, idx), color=color ) if "custom" in message: rasa.shared.utils.cli.print_color("Custom json:", color=color) rasa.shared.utils.cli.print_color( json.dumps(message.get("custom"), indent=2), color=color ) def get_user_input(previous_response: Optional[Dict[str, Any]]) -> Optional[Text]: button_response = None if previous_response is not None: button_response = print_bot_output(previous_response, is_latest_message=True) if button_response is not None: response = cli_utils.payload_from_button_question(button_response) if response == cli_utils.FREE_TEXT_INPUT_PROMPT: response = get_user_input({}) else: response = questionary.text( "", qmark="Your input ->", style=Style([("qmark", "#b373d6"), ("", "#b373d6")]), ).ask() return response.strip() if response is not None else None async def send_message_receive_block( server_url, auth_token, sender_id, message ) -> List[Dict[Text, Any]]: payload = {"sender": sender_id, "message": message} url = f"{server_url}/webhooks/rest/webhook?token={auth_token}" async with aiohttp.ClientSession() as session: async with session.post(url, json=payload, raise_for_status=True) as resp: return await 
resp.json() async def send_message_receive_stream( server_url: Text, auth_token: Text, sender_id: Text, message: Text ): payload = {"sender": sender_id, "message": message} url = f"{server_url}/webhooks/rest/webhook?stream=true&token={auth_token}" timeout = _get_stream_reading_timeout() async with aiohttp.ClientSession(timeout=timeout) as session: async with session.post(url, json=payload, raise_for_status=True) as resp: async for line in resp.content: if line: yield json.loads(line.decode(DEFAULT_ENCODING)) def _get_stream_reading_timeout() -> ClientTimeout: timeout_in_seconds = int( os.environ.get( STREAM_READING_TIMEOUT_ENV, DEFAULT_STREAM_READING_TIMEOUT_IN_SECONDS ) ) return ClientTimeout(timeout_in_seconds)
Apache License 2.0
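A hedged driver sketch (assumes a Rasa server is already running at DEFAULT_SERVER_URL and that stdin is an interactive terminal, since get_user_input relies on questionary):

    import asyncio
    from rasa.core.channels.console import record_messages

    num = asyncio.run(record_messages(sender_id="test_user", max_message_limit=50))
    print(f"exchanged {num} messages")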
sherlock-project/sherlock
sherlock/sherlock.py
sherlock
python
def sherlock(username, site_data, query_notify,
             tor=False, unique_tor=False, proxy=None, timeout=None):
    query_notify.start(username)

    if tor or unique_tor:
        underlying_request = TorRequest()
        underlying_session = underlying_request.session
    else:
        underlying_session = requests.session()
        underlying_request = requests.Request()

    if len(site_data) >= 20:
        max_workers = 20
    else:
        max_workers = len(site_data)

    session = SherlockFuturesSession(max_workers=max_workers,
                                     session=underlying_session)

    results_total = {}

    for social_network, net_info in site_data.items():
        results_site = {}
        results_site['url_main'] = net_info.get("urlMain")

        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0',
        }
        if "headers" in net_info:
            headers.update(net_info["headers"])

        url = net_info["url"].format(username)

        regex_check = net_info.get("regexCheck")
        if regex_check and re.search(regex_check, username) is None:
            results_site['status'] = QueryResult(username, social_network, url,
                                                 QueryStatus.ILLEGAL)
            results_site["url_user"] = ""
            results_site['http_status'] = ""
            results_site['response_text'] = ""
            query_notify.update(results_site['status'])
        else:
            results_site["url_user"] = url
            url_probe = net_info.get("urlProbe")
            if url_probe is None:
                url_probe = url
            else:
                url_probe = url_probe.format(username)

            if (net_info["errorType"] == 'status_code' and
                    net_info.get("request_head_only", True) == True):
                request_method = session.head
            else:
                request_method = session.get

            if net_info["errorType"] == "response_url":
                allow_redirects = False
            else:
                allow_redirects = True

            if proxy is not None:
                proxies = {"http": proxy, "https": proxy}
                future = request_method(url=url_probe, headers=headers,
                                        proxies=proxies,
                                        allow_redirects=allow_redirects,
                                        timeout=timeout)
            else:
                future = request_method(url=url_probe, headers=headers,
                                        allow_redirects=allow_redirects,
                                        timeout=timeout)

            net_info["request_future"] = future

            if unique_tor:
                underlying_request.reset_identity()

        results_total[social_network] = results_site

    for social_network, net_info in site_data.items():
        results_site = results_total.get(social_network)
        url = results_site.get("url_user")
        status = results_site.get("status")
        if status is not None:
            continue

        error_type = net_info["errorType"]
        future = net_info["request_future"]
        r, error_text, expection_text = get_response(request_future=future,
                                                     error_type=error_type,
                                                     social_network=social_network)
        try:
            response_time = r.elapsed
        except AttributeError:
            response_time = None

        try:
            http_status = r.status_code
        except:
            http_status = "?"
        try:
            response_text = r.text.encode(r.encoding)
        except:
            response_text = ""

        if error_text is not None:
            result = QueryResult(username, social_network, url,
                                 QueryStatus.UNKNOWN,
                                 query_time=response_time, context=error_text)
        elif error_type == "message":
            error_flag = True
            errors = net_info.get("errorMsg")
            if isinstance(errors, str):
                if errors in r.text:
                    error_flag = False
            else:
                for error in errors:
                    if error in r.text:
                        error_flag = False
                        break
            if error_flag:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.CLAIMED,
                                     query_time=response_time)
            else:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.AVAILABLE,
                                     query_time=response_time)
        elif error_type == "status_code":
            if not r.status_code >= 300 or r.status_code < 200:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.CLAIMED,
                                     query_time=response_time)
            else:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.AVAILABLE,
                                     query_time=response_time)
        elif error_type == "response_url":
            if 200 <= r.status_code < 300:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.CLAIMED,
                                     query_time=response_time)
            else:
                result = QueryResult(username, social_network, url,
                                     QueryStatus.AVAILABLE,
                                     query_time=response_time)
        else:
            raise ValueError(f"Unknown Error Type '{error_type}' for "
                             f"site '{social_network}'")

        query_notify.update(result)
        results_site['status'] = result
        results_site['http_status'] = http_status
        results_site['response_text'] = response_text
        results_total[social_network] = results_site

    query_notify.finish()
    return results_total
Run Sherlock Analysis.

Checks for existence of username on various social media sites.

Keyword Arguments:
username     -- String indicating username that report should be
                created against.
site_data    -- Dictionary containing all of the site data.
query_notify -- Object with base type of QueryNotify(). This will be
                used to notify the caller about query results.
tor          -- Boolean indicating whether to use a tor circuit for
                the requests.
unique_tor   -- Boolean indicating whether to use a new tor circuit
                for each request.
proxy        -- String indicating the proxy URL
timeout      -- Time in seconds to wait before timing out request.
                Default is no timeout.

Return Value:
Dictionary containing results from report. Key of dictionary is the
name of the social network site, and the value is another dictionary
with the following keys:
    url_main:      URL of main site.
    url_user:      URL of user on site (if account exists).
    status:        QueryResult() object indicating results of test for
                   account existence.
    http_status:   HTTP status code of query which checked for
                   existence on site.
    response_text: Text that came back from request. May be None if
                   there was an HTTP error when checking for existence.
https://github.com/sherlock-project/sherlock/blob/7d42be795f837dcea7711203973a26a548d6d632/sherlock/sherlock.py#L128-L415
import csv
import os
import platform
import re
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from time import monotonic

import requests
from requests_futures.sessions import FuturesSession
from torrequest import TorRequest
from result import QueryStatus
from result import QueryResult
from notify import QueryNotifyPrint
from sites import SitesInformation

module_name = "Sherlock: Find Usernames Across Social Networks"
__version__ = "0.14.0"


class SherlockFuturesSession(FuturesSession):
    def request(self, method, url, hooks={}, *args, **kwargs):
        start = monotonic()

        def response_time(resp, *args, **kwargs):
            resp.elapsed = monotonic() - start
            return

        try:
            if isinstance(hooks['response'], list):
                hooks['response'].insert(0, response_time)
            elif isinstance(hooks['response'], tuple):
                hooks['response'] = list(hooks['response'])
                hooks['response'].insert(0, response_time)
            else:
                hooks['response'] = [response_time, hooks['response']]
        except KeyError:
            hooks['response'] = [response_time]

        return super(SherlockFuturesSession, self).request(method, url,
                                                           hooks=hooks,
                                                           *args, **kwargs)


def get_response(request_future, error_type, social_network):
    response = None
    error_context = "General Unknown Error"
    expection_text = None
    try:
        response = request_future.result()
        if response.status_code:
            error_context = None
    except requests.exceptions.HTTPError as errh:
        error_context = "HTTP Error"
        expection_text = str(errh)
    except requests.exceptions.ProxyError as errp:
        error_context = "Proxy Error"
        expection_text = str(errp)
    except requests.exceptions.ConnectionError as errc:
        error_context = "Error Connecting"
        expection_text = str(errc)
    except requests.exceptions.Timeout as errt:
        error_context = "Timeout Error"
        expection_text = str(errt)
    except requests.exceptions.RequestException as err:
        error_context = "Unknown Error"
        expection_text = str(err)
    return response, error_context, expection_text
MIT License
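A hedged driver sketch for the record above (constructor signatures match the v0.14-era modules shown in the record's context and may differ in other versions):

    from sites import SitesInformation
    from notify import QueryNotifyPrint
    from sherlock import sherlock

    sites = SitesInformation()  # loads the bundled site list by default
    site_data = {site.name: site.information for site in sites}
    results = sherlock("some_username", site_data, QueryNotifyPrint(), timeout=10)
    for site, info in results.items():
        print(site, info["status"])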
microsoft/sparsesc
src/SparseSC/utils/penalty_utils.py
get_max_v_pen
python
def get_max_v_pen(X, Y, w_pen=None, X_treat=None, Y_treat=None, **kwargs):
    try:
        X = np.float64(X)
    except ValueError:
        raise ValueError("X is not coercible to a matrix")
    try:
        Y = np.float64(Y)
    except ValueError:
        raise ValueError("Y is not coercible to a matrix")

    Y = np.asmatrix(Y)
    X = np.asmatrix(X)

    if (X_treat is None) != (Y_treat is None):
        raise ValueError(
            "parameters `X_treat` and `Y_treat` must both be Matrices or None"
        )
    if X.shape[1] == 0:
        raise ValueError("X.shape[1] == 0")
    if Y.shape[1] == 0:
        raise ValueError("Y.shape[1] == 0")
    if X.shape[0] != Y.shape[0]:
        raise ValueError(
            "X and Y have different number of rows (%s and %s)"
            % (X.shape[0], Y.shape[0])
        )

    if w_pen is None:
        w_pen = np.mean(np.var(X, axis=0))

    if X_treat is not None:
        if not isinstance(X_treat, np.matrix):
            raise TypeError("X_treat is not a matrix")
        if not isinstance(Y_treat, np.matrix):
            raise TypeError("Y_treat is not a matrix")
        if X_treat.shape[1] == 0:
            raise ValueError("X_treat.shape[1] == 0")
        if Y_treat.shape[1] == 0:
            raise ValueError("Y_treat.shape[1] == 0")
        if X_treat.shape[0] != Y_treat.shape[0]:
            raise ValueError(
                "X_treat and Y_treat have different number of rows (%s and %s)"
                % (X.shape[0], Y.shape[0])
            )

        control_units = np.arange(X.shape[0])
        treated_units = np.arange(X.shape[0], X.shape[0] + X_treat.shape[0])

        try:
            _v_pen = iter(w_pen)
        except TypeError:
            return ct_v_matrix(
                X=np.vstack((X, X_treat)), Y=np.vstack((Y, Y_treat)), w_pen=w_pen,
                control_units=control_units, treated_units=treated_units,
                return_max_v_pen=True, gradient_message=_GRADIENT_MESSAGE, **kwargs)
        else:
            return [
                ct_v_matrix(
                    X=np.vstack((X, X_treat)), Y=np.vstack((Y, Y_treat)),
                    control_units=control_units, treated_units=treated_units,
                    return_max_v_pen=True, gradient_message=_GRADIENT_MESSAGE,
                    w_pen=_w_pen, **kwargs)
                for _w_pen in w_pen
            ]
    else:
        try:
            _v_pen = iter(w_pen)
        except TypeError:
            if "grad_splits" in kwargs:
                return fold_v_matrix(
                    X=X, Y=Y, w_pen=w_pen, return_max_v_pen=True,
                    gradient_message=_GRADIENT_MESSAGE, **kwargs)
            try:
                return loo_v_matrix(
                    X=X, Y=Y, w_pen=w_pen, return_max_v_pen=True,
                    gradient_message=_GRADIENT_MESSAGE, **kwargs)
            except MemoryError:
                raise RuntimeError(
                    "MemoryError encountered. Try setting `grad_splits` "
                    "parameter to reduce memory requirements."
                )
        else:
            if "grad_splits" in kwargs:
                return [
                    fold_v_matrix(
                        X=X, Y=Y, w_pen=_w_pen, return_max_v_pen=True,
                        gradient_message=_GRADIENT_MESSAGE, **kwargs)
                    for _w_pen in w_pen
                ]
            try:
                return [
                    loo_v_matrix(
                        X=X, Y=Y, w_pen=_w_pen, return_max_v_pen=True,
                        gradient_message=_GRADIENT_MESSAGE, **kwargs)
                    for _w_pen in w_pen
                ]
            except MemoryError:
                raise RuntimeError(
                    "MemoryError encountered. Try setting `grad_splits` "
                    "parameter to reduce memory requirements."
                )
Calculates the maximum value of v_pen for which the elements of the
tensor matrix (V) are not all zero, conditional on the provided w_pen.
If w_pen is not provided, a guestimate is used.

Provides a unified wrapper to the various \*_v_matrix functions,
passing the parameter ``return_max_v_pen = True`` in order to obtain
the gradient instead of the matrix
https://github.com/microsoft/sparsesc/blob/4cf0a98858919d50c6127be782a145e49d96897e/src/SparseSC/utils/penalty_utils.py#L32-L186
from SparseSC.fit_loo import loo_v_matrix
from SparseSC.fit_ct import ct_v_matrix
from SparseSC.fit_fold import fold_v_matrix
import numpy as np

_GRADIENT_MESSAGE = "Calculating maximum covariate penalty (i.e. the gradient at zero)"


def w_pen_guestimate(X):
    return np.mean(np.var(X, axis=0))


def get_max_w_pen(X, Y, v_pen, **kwargs):
    return get_max_v_pen(X, Y, w_pen=1, **kwargs) / v_pen
MIT License
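A small illustrative call (random data; grad_splits is assumed to be forwarded through **kwargs to fold_v_matrix, as the branching above suggests):

    import numpy as np
    from SparseSC.utils.penalty_utils import get_max_v_pen

    np.random.seed(0)
    X = np.random.rand(40, 5)  # 40 units, 5 covariates
    Y = np.random.rand(40, 8)  # 8 outcome periods

    max_v_pen = get_max_v_pen(X, Y, grad_splits=5)  # w_pen defaults to the variance guestimate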
unified-moderation-network/salamander
src/extensions/rolemanagement/cog.py
RoleManagement.mrole_search
python
async def mrole_search(self, ctx: SalamanderContext, *, _query: ComplexSearchConverter):
    members = set(ctx.guild.members)
    query = _query.parsed
    members = await member_search_filter(members, query)

    if len(members) < 50 and not query["csv"]:

        def chunker(memberset, size=3):
            ret_str = ""
            for i, m in enumerate(memberset, 1):
                ret_str += m.mention
                ret_str += "\n" if i % size == 0 else " "
            return ret_str

        description = chunker(members)
        embed = discord.Embed(description=description)
        if ctx.guild:
            embed.color = ctx.guild.me.color
        await ctx.send(embed=embed, content=f"Search results for {ctx.author.mention}")
    else:
        await self.send_maybe_chunked_csv(ctx, list(members))
Searches for users with the specified role criteria

--has-all roles
--has-none roles
--has-any roles

--has-no-roles
--has-exactly-nroles number
--has-more-than-nroles number
--has-less-than-nroles number

--has-perm permissions
--any-perm permissions
--not-perm permissions

--above role
--below role

--only-humans
--only-bots
--everyone

--csv

csv output will be used if output would exceed embed limits, or if
flag is provided
https://github.com/unified-moderation-network/salamander/blob/a6e1210c40ee6e75b6bbc2a08636c5bbfceb0bbf/src/extensions/rolemanagement/cog.py#L168-L217
from __future__ import annotations import csv import io import logging import re from typing import Iterator, Optional, Union import discord from discord.ext import commands from ...bot import Salamander, SalamanderContext, UserFeedbackError from ...checks import admin_or_perms, mod_or_perms from ...utils import add_variation_selectors_to_emojis, resolve_as_roles, strip_variation_selectors from .converters import ( ComplexActionConverter, ComplexSearchConverter, EmojiRolePairConverter, RoleSettingsConverter, RoleSyntaxConverter, ) from .db_abstractions import NoSuchRecord, ReactionRoleRecord, RoleSettings, get_member_sticky, update_member_sticky from .member_search import search_filter as member_search_filter log = logging.getLogger("salamander.extensions.rolemanagement") EMOJI_REGEX: re.Pattern = re.compile(r"<(?:a?):(?:[a-zA-Z0-9_]{2,32}):([0-9]{15,20})>$") def normalize_emoji(s: str) -> str: if m := EMOJI_REGEX.match(s): return m.group(1) else: return strip_variation_selectors(s) class RoleManagement(commands.Cog): def __init__(self, bot: Salamander): self.bot: Salamander = bot async def all_are_valid_roles(self, ctx, *roles: discord.Role, detailed: bool = False) -> bool: author = ctx.author guild = ctx.guild if guild.owner != author: auth_top = author.top_role if not (all(auth_top > role for role in roles) or await ctx.bot.is_owner(ctx.author)): if detailed: raise UserFeedbackError( custom_message="You can't give away roles which are not below your top role." ) return False if not guild.me.guild_permissions.manage_roles: if detailed: raise UserFeedbackError(custom_message="I can't manage roles.") return False if guild.me != guild.owner: bot_top = guild.me.top_role if any(bot_top <= role for role in roles): if detailed: raise UserFeedbackError(custom_message="I can't give away roles which are not below my top role.") return False if any(role.managed for role in roles): if detailed: raise UserFeedbackError(custom_message="Managed roles can't be assigned by this.") return False return True async def update_roles_atomically( self, *, who: discord.Member, give: list[discord.Role] = None, remove: list[discord.Role] = None, ): me = who.guild.me give = give or [] remove = remove or [] hierarchy_testing = give + remove user_roles = who.roles roles = [r for r in user_roles if r not in remove] roles.extend([r for r in give if r not in roles]) if sorted(roles) == user_roles: return if any(r >= me.top_role for r in hierarchy_testing) or not me.guild_permissions.manage_roles: raise UserFeedbackError(custom_message="Can't do that.") await who.edit(roles=roles) @mod_or_perms(manage_roles=True) @commands.guild_only() @commands.group(name="massrole", aliases=["mrole"]) async def mrole(self, ctx: SalamanderContext): if ctx.invoked_subcommand is None: await ctx.send_help() @admin_or_perms(manage_roles=True) @commands.bot_has_guild_permissions(manage_roles=True) @mrole.command(name="user") async def mrole_user( self, ctx: SalamanderContext, users: commands.Greedy[discord.Member], *, _query: RoleSyntaxConverter, ) -> None: query = _query.parsed apply = query["add"] + query["remove"] if not await self.all_are_valid_roles(ctx, *apply, detailed=True): return for user in users: await self.update_roles_atomically(who=user, give=query["add"], remove=query["remove"]) await ctx.send("Done.") @mrole.command(name="search")
Apache License 2.0
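An illustrative chat invocation of the command above (the [p] prefix and role names are hypothetical):

    [p]mrole search --has-any Moderator Admin --only-humans --csv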
pypyr/pypyr
pypyr/steps/safeshell.py
run_step
python
def run_step(context):
    logger.debug("started")
    pypyr.steps.cmd.run_step(context)
    logger.debug("done")
Run command, program or executable.

Context is a dictionary or dictionary-like.
Context must contain the following keys:
    cmd: <<cmd string>> (command + args to execute.)

    OR, as a dict
    cmd:
        run: str. mandatory. <<cmd string>> command + args to execute.
        save: bool. defaults False. save output to cmdOut.

Will execute the command string in the shell as a sub-process.
Escape curly braces: if you want a literal curly brace, double it
like {{ or }}.

If save is True, will save the output to context as follows:
    cmdOut:
        returncode: 0
        stdout: 'stdout str here. None if empty.'
        stderr: 'stderr str here. None if empty.'

cmdOut.returncode is the exit status of the called process. Typically
0 means OK. A negative value -N indicates that the child was
terminated by signal N (POSIX only).

context['cmd'] will interpolate anything in curly braces for values
found in context. So if your context looks like this:
    key1: value1
    key2: value2
    cmd: mything --arg1 {key1}

The cmd passed to the shell will be "mything --arg1 value1"
https://github.com/pypyr/pypyr/blob/9de0476ebba114c26ff3bf38ae23ebb69bc3e087/pypyr/steps/safeshell.py#L16-L55
import logging
import pypyr.steps.cmd

logger = logging.getLogger(__name__)
Apache License 2.0
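A minimal programmatic sketch (pypyr steps are normally driven from pipeline YAML; calling run_step directly with a pypyr Context is shown only for illustration):

    from pypyr.context import Context
    from pypyr.steps.safeshell import run_step

    context = Context({'key1': 'value1',
                       'cmd': 'echo {key1}'})
    run_step(context)  # executes: echo value1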
fkie/multimaster_fkie
fkie_node_manager/src/fkie_node_manager/launch_list_model.py
LaunchListModel._set_new_list
python
def _set_new_list(self, root_path, items, add_history=True):
    _, path = nmdurl.split(root_path)
    self._current_path = root_path
    if not self._is_root(root_path):
        self._add_path(root_path, PathItem.ROOT, 0, 0, '')
    if path in ['', os.path.sep]:
        if add_history:
            self._add_history()
    for path, path_id, mtime, size, name in items:
        self._add_path(path, path_id, mtime, size, name)
Sets the list to the given path and inserts the items. If the root
path is not empty, the additional item '<-' to go back will be
inserted.

:see: :meth:`_listed_path`
:param str root_path: the root directory
:param items: the list with characterized items
:type items: list(tuple(item, path, id))
https://github.com/fkie/multimaster_fkie/blob/386ebf27f41bffdb1896bbcfdccb7c5290ac0eb4/fkie_node_manager/src/fkie_node_manager/launch_list_model.py#L711-L730
import rospy from python_qt_binding.QtCore import QMimeData, Qt, Signal try: from python_qt_binding.QtGui import QApplication, QInputDialog, QLineEdit except Exception: from python_qt_binding.QtWidgets import QApplication, QInputDialog, QLineEdit from python_qt_binding.QtGui import QIcon, QPixmap, QStandardItem, QStandardItemModel import os import fkie_node_manager as nm from fkie_master_discovery.common import masteruri_from_master from fkie_node_manager_daemon import url as nmdurl from fkie_node_manager_daemon.common import isstring, utf8 from fkie_node_manager_daemon.host import get_hostname from fkie_node_manager_daemon.file_item import FileItem from .common import package_name from .detailed_msg_box import MessageBox class PathItem(QStandardItem): ITEM_TYPE = QStandardItem.UserType + 40 NOT_FOUND = -1 NOTHING = 0 ROOT = 1 PROFILE = 5 RECENT_PROFILE = 6 RECENT_FILE = 7 LAUNCH_FILE = 11 CFG_FILE = 12 FILE = 13 PACKAGE = 20 STACK = 21 FOLDER = 22 REMOTE_DAEMON = 23 def __init__(self, path, path_id, mtime, size, name, isnew=False): self._path = path self.id = self._identify_path_on_ext(path) if path_id in [self.FILE] else path_id self._isnew = isnew pathname = name if pathname == 'src': pathname = '%s (src)' % os.path.basename(os.path.dirname(path)) self._name = pathname QStandardItem.__init__(self, self._name) self.mtime = mtime self.size = size if self.id == self.RECENT_FILE or self.id == self.RECENT_PROFILE: pname = package_name(path)[0] if pname is None: pname, _ = nmdurl.split(path, with_scheme=True) self.package_name = pname self._update_icon() def _identify_path_on_ext(self, path, default=10): _filename, file_extension = os.path.splitext(path) if not file_extension: return default if file_extension == '.launch' or path.find('.launch.') > 0: return PathItem.LAUNCH_FILE elif file_extension == '.nmprofile': return PathItem.PROFILE elif file_extension in nm.settings().launch_view_file_ext: return PathItem.CFG_FILE return default def _update_icon(self): if self.id in [self.NOTHING, self.NOT_FOUND]: return icon_pixmap = '' if self.id == self.FOLDER: icon_pixmap = nm.settings().pixmap('crystal_clear_folder.png') elif self.id == self.PACKAGE: icon_pixmap = nm.settings().pixmap('crystal_clear_package.png') elif self.id == self.LAUNCH_FILE: icon_pixmap = nm.settings().pixmap('crystal_clear_launch_file.png') elif self.id == self.RECENT_FILE: icon_pixmap = nm.settings().pixmap('crystal_clear_launch_file_recent.png') elif self.id == self.STACK: icon_pixmap = nm.settings().pixmap('crystal_clear_stack.png') elif self.id == self.PROFILE: icon_pixmap = nm.settings().pixmap('crystal_clear_profile.png') elif self.id == self.RECENT_PROFILE: icon_pixmap = nm.settings().pixmap('crystal_clear_profile_recent.png') elif self.id == self.REMOTE_DAEMON: icon_pixmap = nm.settings().pixmap('stock_connect.png') elif self.id == self.ROOT: icon_pixmap = nm.settings().pixmap('back.png') if icon_pixmap: self.setIcon(QIcon(icon_pixmap.scaled(16, 16))) @property def name(self): return self._name @name.setter def name(self, new_name): self._name = new_name self.setText(self._name) @property def path(self): return self._path def type(self): return PathItem.ITEM_TYPE def data(self, role): if role == Qt.DisplayRole: if self.id == PathItem.RECENT_FILE or self.id == PathItem.RECENT_PROFILE: return "%s [%s]" % (self.name, self.package_name) elif self.id == PathItem.REMOTE_DAEMON: return "//%s" % self.name else: return "%s" % self.name elif role == Qt.ToolTipRole: result = "%s" % self.path if self.id == PathItem.RECENT_FILE 
or self.id == PathItem.RECENT_PROFILE: result = "%s\nPress 'Delete' to remove the entry from the history list\nShift+'double click' goes to the file location" % self.path return result elif role == Qt.EditRole: return "%s" % self.name else: return QStandardItem.data(self, role) def setData(self, value, role=Qt.EditRole): if role == Qt.EditRole: if (self.name != value or self._isnew) and self.id in [self.RECENT_FILE, self.LAUNCH_FILE, self.RECENT_PROFILE, self.PROFILE, self.CFG_FILE, self.FOLDER]: if self.name != value: if self.model()._exists(value): result = MessageBox.question(self.model().viewobj, "File exists", "File '%s' exists. Override?" % value, buttons=MessageBox.Yes | MessageBox.No) if result == MessageBox.No: return QStandardItem.setData(self, value, role) if self.id not in [self.FOLDER]: _filename, file_extension = os.path.splitext(value) if file_extension not in nm.settings().launch_view_file_ext: result = MessageBox.question(self.model().viewobj, "Unknown extension", "New name has unknown extension '%s'. Rename anyway?" % file_extension, buttons=MessageBox.Yes | MessageBox.No) if result == MessageBox.No: return QStandardItem.setData(self, value, role) new_path = os.path.join(os.path.dirname(self.path), value) try: content = b'' new_id = self._identify_path_on_ext(new_path, self.id) if self._isnew: if new_id in [self.FOLDER]: nm.nmd().file.new(new_path, 1) elif new_id in [self.LAUNCH_FILE]: content = (b'<launch>\n' b' <arg name="robot_ns" default="my_robot"/>\n' b' <group ns="$(arg robot_ns)">\n' b' <node pkg="my_pkg" type="my_node" name="my_name" >\n' b' <param name="capability_group" value="MY_GROUP"/>\n' b' </node>\n' b' </group>\n' b'</launch>\n') nm.nmd().file.save_file(new_path, content, 0) else: nm.nmd().file.new(new_path, 0) self._isnew = False else: nm.nmd().file.rename(self.path, new_path) if new_id != self.id: self.id = new_id self._update_icon() if self.name != value and self.id in [self.RECENT_FILE, self.RECENT_PROFILE]: nm.settings().launch_history_add(new_path, replace=self.path) self.name = value self._path = new_path except Exception as err: import traceback rospy.logwarn("Error while save new file: %s" % traceback.format_exc()) MessageBox.warning(None, "Rename failed", utf8(err)) return QStandardItem.setData(self, value, role) @classmethod def create_row_items(self, path, path_id, mtime, size, name, isnew=False): items = [] item = PathItem(path, path_id, mtime, size, name, isnew) items.append(item) return items def is_launch_file(self): return self.path is not None and self.id in [self.LAUNCH_FILE, self.RECENT_FILE] and self.path.endswith('.launch') def is_config_file(self): return self.id == self.CFG_FILE def is_profile_file(self): return self.path is not None and self.id in [self.PROFILE, self.RECENT_PROFILE, self.RECENT_FILE] and self.path.endswith('.nmprofile') def is_file(self): return self.path is not None and self.id in [self.PROFILE, self.RECENT_PROFILE, self.RECENT_FILE, self.LAUNCH_FILE, self.CFG_FILE, self.FILE] def __eq__(self, item): if isstring(item): return self.path.lower() == item.lower() elif not (item is None): return self.path.lower() == item.path.lower() return False def __gt__(self, item): if isstring(item): return self.path.lower() > item.lower() elif not (item is None): return self.path.lower() > item.path.lower() return False class LaunchListModel(QStandardItemModel): pathlist_handled = Signal(str) error_on_path = Signal(str, Exception) header = [('Name', -1)] def __init__(self, parent=None, progress_queue=None, viewobj=None): 
QStandardItemModel.__init__(self, parent) self.viewobj = viewobj self.setColumnCount(len(LaunchListModel.header)) self.setHorizontalHeaderLabels([label for label, _width in LaunchListModel.header]) self.pyqt_workaround = dict() self.items = [] self._roots = {} self._current_path = nmdurl.nmduri() self._current_master = masteruri_from_master() self._current_master_name = '' self.ros_root_paths = {} self.ros_root_paths[self._current_path] = [os.path.normpath(p) for p in os.getenv("ROS_PACKAGE_PATH").split(':')] self._progress_queue = progress_queue nm.nmd().file.listed_path.connect(self._listed_path) nm.nmd().file.packages_available.connect(self._on_new_packages) nm.nmd().file.error.connect(self._nmd_error) @property def current_path(self): return self._current_path @property def current_grpc(self): netloc, _ = nmdurl.split(self._current_path, with_scheme=True) return netloc @property def current_masteruri(self): return self._current_master @property def is_in_root(self): return self._is_root(self._current_path) def _is_root(self, grpc_path): return grpc_path == nmdurl.nmduri() def _is_ros_root(self, grpc_path): url, path = nmdurl.split(grpc_path, with_scheme=True) if url in self.ros_root_paths and path in self.ros_root_paths[url]: return True return False def set_current_master(self, masteruri, mastername): self._current_master = masteruri.rstrip(os.path.sep) self._current_master_name = mastername if self._is_root(self._current_path): nm.nmd().file.list_path_threaded(self._current_path) if nmdurl.equal_uri(self._current_path, masteruri_from_master()): self._add_path(nmdurl.nmduri(self._current_master), PathItem.REMOTE_DAEMON, 0, 0, get_hostname(self._current_master_name)) def is_current_nmd(self, url): return nmdurl.equal_uri(nmdurl.masteruri(url), nmdurl.masteruri(self._current_path)) def _add_history(self): for hitem in nm.settings().launch_history: if not hitem.startswith(os.path.sep): hitem_uri, _ = nmdurl.split(hitem, with_scheme=True) current_uri = nmdurl.nmduri(self._current_path) if nmdurl.equal_uri(hitem_uri, current_uri): self._add_path(hitem, PathItem.RECENT_FILE, 0, 0, os.path.basename(hitem)) def _on_new_packages(self, grpc_url): self.reload_current_path() def _listed_path(self, url, path, result): if not self.is_current_nmd(url): return root = self.invisibleRootItem() while root.rowCount(): root.removeRow(0) self.pyqt_workaround.clear() isroot = path in ['', os.path.sep] if isroot: self.ros_root_paths[url] = [] result_list = [] for path_item in result: if isroot and path_item.type in [FileItem.DIR, FileItem.PACKAGE]: self.ros_root_paths[url].append(path_item.path) item = os.path.normpath(os.path.join(path, path_item.path)) gpath = nmdurl.join(url, item) path_id = PathItem.NOT_FOUND if FileItem.FILE == path_item.type: _, ext = os.path.splitext(path_item.path) if ext in nm.settings().launch_view_file_ext or path_item.path.find('.launch.') > 0: path_id = PathItem.FILE elif FileItem.DIR == path_item.type: path_id = PathItem.FOLDER elif FileItem.SYMLINK == path_item.type: pass elif FileItem.PACKAGE == path_item.type: path_id = PathItem.PACKAGE if path_id != PathItem.NOT_FOUND and not os.path.basename(path_item.path).startswith('.'): result_list.append((gpath, path_id, path_item.mtime, path_item.size, os.path.basename(path_item.path))) root_path = nmdurl.join(url, path) self._set_new_list(root_path, result_list) isroot = self._is_root(self._current_path) if isroot and not nmdurl.equal_uri(self._current_master, masteruri_from_master()): 
self._add_path(nmdurl.nmduri(self._current_master), PathItem.REMOTE_DAEMON, 0, 0, get_hostname(self._current_master_name)) self.pathlist_handled.emit(root_path) def _nmd_error(self, method, url, path, error): if method != 'list_path' or not self.is_current_nmd(url): return root = self.invisibleRootItem() while root.rowCount(): root.removeRow(0) self.pyqt_workaround.clear() self._add_path(self._current_path, PathItem.ROOT, 0, 0, '') detail_msg = utf8(error) if hasattr(error, 'details'): detail_msg = utf8(error.details()) path_item = PathItem.create_row_items(utf8("%s, please start node manager daemon" % detail_msg), PathItem.NOTHING, 0, 0, 'connecting to daemon...') root.appendRow(path_item) self.pyqt_workaround[path_item[0].name] = path_item[0] self.error_on_path.emit(nmdurl.join(url, path), error) def flags(self, index): if not index.isValid(): return Qt.NoItemFlags try: item = self.itemFromIndex(index) result = Qt.ItemIsSelectable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled if item.id in [PathItem.RECENT_FILE, PathItem.RECENT_PROFILE, PathItem.LAUNCH_FILE, PathItem.CFG_FILE, PathItem.FOLDER, PathItem.PROFILE]: result = result | Qt.ItemIsEditable | Qt.ItemIsDragEnabled return result except Exception: return Qt.ItemIsEnabled | Qt.ItemIsSelectable def mimeTypes(self): return ['text/plain'] def mimeData(self, indexes): mimeData = QMimeData() text = '' for index in indexes: if index.isValid(): item = self.itemFromIndex(index) prev = '%s\n' % text if text else '' text = '%s%s' % (prev, item.path) mimeData.setData('text/plain', text.encode('utf-8')) return mimeData def reload_current_path(self, clear_cache=False): self.expand_item(self._current_path, PathItem.FOLDER, clear_cache) if clear_cache: nm.nmd().clear_cache() nm.nmd().launch.reset_package_path_threaded(self._current_path) def expand_item(self, path, path_id, clear_cache=False): if path_id in [PathItem.NOTHING]: return None has_shift_mod = Qt.ShiftModifier & QApplication.keyboardModifiers() if path_id in [PathItem.LAUNCH_FILE, PathItem.CFG_FILE, PathItem.PROFILE, PathItem.FILE, PathItem.RECENT_FILE, PathItem.LAUNCH_FILE]: if not has_shift_mod: return path root = self.invisibleRootItem() while root.rowCount(): root.removeRow(0) self.pyqt_workaround.clear() if has_shift_mod: if path_id in [PathItem.LAUNCH_FILE, PathItem.CFG_FILE, PathItem.PROFILE, PathItem.FILE, PathItem.RECENT_FILE, PathItem.LAUNCH_FILE]: self._current_path = os.path.dirname(path) else: self._current_path = nmdurl.nmduri() else: if path_id in [PathItem.ROOT]: surl, spath = nmdurl.split(path, with_scheme=True) if self._is_root(path) or spath in ['', os.path.sep]: self._current_path = nmdurl.nmduri() elif self._is_ros_root(path): self._current_path = surl else: dir_path = os.path.dirname(spath) self._current_path = nmdurl.join(surl, dir_path) elif self._current_path != path: self._current_path = path self._add_path(self._current_path, PathItem.ROOT, 0, 0, 'loading...') nm.nmd().file.list_path_threaded(self._current_path, clear_cache) return None def set_path(self, path, path_id=PathItem.FOLDER): toset = path if not path.startswith('grpc://'): toset = nmdurl.join(self.current_grpc, path) self.expand_item(toset, path_id) def show_packages(self, pattern): try: root = self.invisibleRootItem() while root.rowCount(): root.removeRow(0) self.pyqt_workaround.clear() items = [] currurl = self.current_grpc for url, packages in nm.nmd().file.get_packages().items(): if url == currurl: for path, name in packages.items(): if pattern in name: items.append((path, PathItem.PACKAGE, 0, 0, 
name)) self._set_new_list(self._current_path, items, add_history=False) except Exception: import traceback print(traceback.format_exc(2)) def paste_from_clipboard(self): if QApplication.clipboard().mimeData().hasText() and self._current_path: text = QApplication.clipboard().mimeData().text() if self.current_path and text.startswith('grpc://'): basename = os.path.basename(text) dest_path = os.path.join(self._current_path, basename) try: if text == dest_path: dest_path = self._autorename(dest_path) rospy.logdebug("Autorename destination from %s to %s" % (text, dest_path)) rospy.logdebug("Copy %s to %s" % (text, dest_path)) nm.nmd().file.copy(text, dest_path) self.reload_current_path(clear_cache=True) except Exception as err: MessageBox.warning(None, "Copy failed", "Copy failed: %s" % utf8(err)) import traceback print(traceback.format_exc()) def copy_to_clipboard(self, indexes): mimeData = QMimeData() text = '' for index in indexes: if index.isValid(): item = self.itemFromIndex(index) prev = '%s\n' % text if text else '' text = '%s%s' % (prev, item.path) mimeData.setData('text/plain', text.encode('utf-8')) QApplication.clipboard().setMimeData(mimeData) def add_new_item(self, name='new', path_id=PathItem.LAUNCH_FILE): root = self.invisibleRootItem() new_name = self._autorename(name) try: path_item = PathItem.create_row_items(nmdurl.join(self._current_path, new_name), path_id, 0, 0, new_name, isnew=True) if root.rowCount() > 1: root.insertRow(1, path_item) else: root.appendRow(path_item) self.pyqt_workaround[path_item[0].name] = path_item[0] return path_item except Exception: import traceback rospy.logwarn("Error while add new item: %s" % traceback.format_exc()) return [] def _exists(self, name): root = self.invisibleRootItem() for row in range(root.rowCount()): item = root.child(row) if item.name == name: return True return False
BSD 3-Clause New or Revised License
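An illustrative shape for the items argument (values are hypothetical; each tuple follows the (path, path_id, mtime, size, name) layout consumed above, and model stands for an existing LaunchListModel instance):

    items = [
        ('grpc://localhost:12321/opt/ros/pkg', PathItem.PACKAGE, 0, 0, 'pkg'),
        ('grpc://localhost:12321/opt/ros/pkg/demo.launch', PathItem.LAUNCH_FILE, 1609459200, 2048, 'demo.launch'),
    ]
    model._set_new_list('grpc://localhost:12321/opt/ros/pkg', items)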
sofia-netsurv/python-netsurv
env/lib/python3.5/site-packages/astroid/rebuilder.py
TreeRebuilder.visit_return
python
def visit_return(self, node, parent):
    newnode = nodes.Return(node.lineno, node.col_offset, parent)
    if node.value is not None:
        newnode.postinit(self.visit(node.value, newnode))
    return newnode
Visit a Return node by returning a fresh instance of it.
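For illustration, a minimal sketch of how this visitor surfaces through astroid's public API; it assumes an astroid version exposing extract_node, and the inline expected values are assumptions rather than guaranteed output:

import astroid

# Parsing runs the TreeRebuilder, so the return statement below is
# rebuilt into an astroid nodes.Return by visit_return.
func = astroid.extract_node("def f():\n    return 42")
ret = func.body[0]
print(type(ret).__name__)  # expected: Return
print(ret.value.value)     # expected: 42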
https://github.com/sofia-netsurv/python-netsurv/blob/429fb07a2b06cc505fdd9350148266a6b4e23e64/env/lib/python3.5/site-packages/astroid/rebuilder.py#L799-L804
import sys import astroid from astroid._ast import _parse, _get_parser_module, parse_function_type_comment from astroid import nodes CONST_NAME_TRANSFORMS = {"None": None, "True": True, "False": False} REDIRECT = { "arguments": "Arguments", "comprehension": "Comprehension", "ListCompFor": "Comprehension", "GenExprFor": "Comprehension", "excepthandler": "ExceptHandler", "keyword": "Keyword", } PY3 = sys.version_info >= (3, 0) PY34 = sys.version_info >= (3, 4) PY37 = sys.version_info >= (3, 7) def _binary_operators_from_module(module): binary_operators = { module.Add: "+", module.BitAnd: "&", module.BitOr: "|", module.BitXor: "^", module.Div: "/", module.FloorDiv: "//", module.Mod: "%", module.Mult: "*", module.Pow: "**", module.Sub: "-", module.LShift: "<<", module.RShift: ">>", } if sys.version_info >= (3, 5): binary_operators[module.MatMult] = "@" return binary_operators def _bool_operators_from_module(module): return {module.And: "and", module.Or: "or"} def _unary_operators_from_module(module): return {module.UAdd: "+", module.USub: "-", module.Not: "not", module.Invert: "~"} def _compare_operators_from_module(module): return { module.Eq: "==", module.Gt: ">", module.GtE: ">=", module.In: "in", module.Is: "is", module.IsNot: "is not", module.Lt: "<", module.LtE: "<=", module.NotEq: "!=", module.NotIn: "not in", } def _contexts_from_module(module): return { module.Load: astroid.Load, module.Store: astroid.Store, module.Del: astroid.Del, module.Param: astroid.Store, } def _visit_or_none(node, attr, visitor, parent, visit="visit", **kws): value = getattr(node, attr, None) if value: return getattr(visitor, visit)(value, parent, **kws) return None class TreeRebuilder: def __init__(self, manager, parse_python_two: bool = False): self._manager = manager self._global_names = [] self._import_from_nodes = [] self._delayed_assattr = [] self._visit_meths = {} self._parser_module = _get_parser_module(parse_python_two=parse_python_two) self._unary_op_classes = _unary_operators_from_module(self._parser_module) self._cmp_op_classes = _compare_operators_from_module(self._parser_module) self._bool_op_classes = _bool_operators_from_module(self._parser_module) self._bin_op_classes = _binary_operators_from_module(self._parser_module) self._context_classes = _contexts_from_module(self._parser_module) def _get_doc(self, node): try: if PY37 and hasattr(node, "docstring"): doc = node.docstring return node, doc if ( node.body and isinstance(node.body[0], self._parser_module.Expr) and isinstance(node.body[0].value, self._parser_module.Str) ): doc = node.body[0].value.s node.body = node.body[1:] return node, doc except IndexError: pass return node, None def _get_context(self, node): return self._context_classes.get(type(node.ctx), astroid.Load) def visit_module(self, node, modname, modpath, package): node, doc = self._get_doc(node) newnode = nodes.Module( name=modname, doc=doc, file=modpath, path=[modpath], package=package, parent=None, ) newnode.postinit([self.visit(child, newnode) for child in node.body]) return newnode def visit(self, node, parent): cls = node.__class__ if cls in self._visit_meths: visit_method = self._visit_meths[cls] else: cls_name = cls.__name__ visit_name = "visit_" + REDIRECT.get(cls_name, cls_name).lower() visit_method = getattr(self, visit_name) self._visit_meths[cls] = visit_method return visit_method(node, parent) def _save_assignment(self, node, name=None): if self._global_names and node.name in self._global_names[-1]: node.root().set_local(node.name, node) else: 
node.parent.set_local(node.name, node) def visit_arguments(self, node, parent): vararg, kwarg = node.vararg, node.kwarg if PY34: newnode = nodes.Arguments( vararg.arg if vararg else None, kwarg.arg if kwarg else None, parent ) else: newnode = nodes.Arguments(vararg, kwarg, parent) args = [self.visit(child, newnode) for child in node.args] defaults = [self.visit(child, newnode) for child in node.defaults] varargannotation = None kwargannotation = None if vararg: if PY34: if node.vararg.annotation: varargannotation = self.visit(node.vararg.annotation, newnode) vararg = vararg.arg if kwarg: if PY34: if node.kwarg.annotation: kwargannotation = self.visit(node.kwarg.annotation, newnode) kwarg = kwarg.arg if PY3: kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs] kw_defaults = [ self.visit(child, newnode) if child else None for child in node.kw_defaults ] annotations = [ self.visit(arg.annotation, newnode) if arg.annotation else None for arg in node.args ] kwonlyargs_annotations = [ self.visit(arg.annotation, newnode) if arg.annotation else None for arg in node.kwonlyargs ] else: kwonlyargs = [] kw_defaults = [] annotations = [] kwonlyargs_annotations = [] newnode.postinit( args=args, defaults=defaults, kwonlyargs=kwonlyargs, kw_defaults=kw_defaults, annotations=annotations, kwonlyargs_annotations=kwonlyargs_annotations, varargannotation=varargannotation, kwargannotation=kwargannotation, ) if vararg: newnode.parent.set_local(vararg, newnode) if kwarg: newnode.parent.set_local(kwarg, newnode) return newnode def visit_assert(self, node, parent): newnode = nodes.Assert(node.lineno, node.col_offset, parent) if node.msg: msg = self.visit(node.msg, newnode) else: msg = None newnode.postinit(self.visit(node.test, newnode), msg) return newnode def check_type_comment(self, node): type_comment = getattr(node, "type_comment", None) if not type_comment: return None try: type_comment_ast = _parse(type_comment) except SyntaxError: return None type_object = self.visit(type_comment_ast.body[0], node) if not isinstance(type_object, nodes.Expr): return None return type_object.value def check_function_type_comment(self, node): type_comment = getattr(node, "type_comment", None) if not type_comment: return None try: type_comment_ast = parse_function_type_comment(type_comment) except SyntaxError: return None returns = None argtypes = [ self.visit(elem, node) for elem in (type_comment_ast.argtypes or []) ] if type_comment_ast.returns: returns = self.visit(type_comment_ast.returns, node) return returns, argtypes def visit_assign(self, node, parent): type_annotation = self.check_type_comment(node) newnode = nodes.Assign(node.lineno, node.col_offset, parent) newnode.postinit( targets=[self.visit(child, newnode) for child in node.targets], value=self.visit(node.value, newnode), type_annotation=type_annotation, ) return newnode def visit_assignname(self, node, parent, node_name=None): newnode = nodes.AssignName( node_name, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) self._save_assignment(newnode) return newnode def visit_augassign(self, node, parent): newnode = nodes.AugAssign( self._bin_op_classes[type(node.op)] + "=", node.lineno, node.col_offset, parent, ) newnode.postinit( self.visit(node.target, newnode), self.visit(node.value, newnode) ) return newnode def visit_repr(self, node, parent): newnode = nodes.Repr(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_binop(self, node, parent): newnode = 
nodes.BinOp( self._bin_op_classes[type(node.op)], node.lineno, node.col_offset, parent ) newnode.postinit( self.visit(node.left, newnode), self.visit(node.right, newnode) ) return newnode def visit_boolop(self, node, parent): newnode = nodes.BoolOp( self._bool_op_classes[type(node.op)], node.lineno, node.col_offset, parent ) newnode.postinit([self.visit(child, newnode) for child in node.values]) return newnode def visit_break(self, node, parent): return nodes.Break( getattr(node, "lineno", None), getattr(node, "col_offset", None), parent ) def visit_call(self, node, parent): newnode = nodes.Call(node.lineno, node.col_offset, parent) starargs = _visit_or_none(node, "starargs", self, newnode) kwargs = _visit_or_none(node, "kwargs", self, newnode) args = [self.visit(child, newnode) for child in node.args] if node.keywords: keywords = [self.visit(child, newnode) for child in node.keywords] else: keywords = None if starargs: new_starargs = nodes.Starred( col_offset=starargs.col_offset, lineno=starargs.lineno, parent=starargs.parent, ) new_starargs.postinit(value=starargs) args.append(new_starargs) if kwargs: new_kwargs = nodes.Keyword( arg=None, col_offset=kwargs.col_offset, lineno=kwargs.lineno, parent=kwargs.parent, ) new_kwargs.postinit(value=kwargs) if keywords: keywords.append(new_kwargs) else: keywords = [new_kwargs] newnode.postinit(self.visit(node.func, newnode), args, keywords) return newnode def visit_classdef(self, node, parent, newstyle=None): node, doc = self._get_doc(node) newnode = nodes.ClassDef(node.name, doc, node.lineno, node.col_offset, parent) metaclass = None if PY3: for keyword in node.keywords: if keyword.arg == "metaclass": metaclass = self.visit(keyword, newnode).value break if node.decorator_list: decorators = self.visit_decorators(node, newnode) else: decorators = None newnode.postinit( [self.visit(child, newnode) for child in node.bases], [self.visit(child, newnode) for child in node.body], decorators, newstyle, metaclass, [ self.visit(kwd, newnode) for kwd in node.keywords if kwd.arg != "metaclass" ] if PY3 else [], ) return newnode def visit_const(self, node, parent): return nodes.Const( node.value, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) def visit_continue(self, node, parent): return nodes.Continue( getattr(node, "lineno", None), getattr(node, "col_offset", None), parent ) def visit_compare(self, node, parent): newnode = nodes.Compare(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.left, newnode), [ (self._cmp_op_classes[op.__class__], self.visit(expr, newnode)) for (op, expr) in zip(node.ops, node.comparators) ], ) return newnode def visit_comprehension(self, node, parent): newnode = nodes.Comprehension(parent) newnode.postinit( self.visit(node.target, newnode), self.visit(node.iter, newnode), [self.visit(child, newnode) for child in node.ifs], getattr(node, "is_async", None), ) return newnode def visit_decorators(self, node, parent): newnode = nodes.Decorators(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.decorator_list]) return newnode def visit_delete(self, node, parent): newnode = nodes.Delete(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.targets]) return newnode def _visit_dict_items(self, node, parent, newnode): for key, value in zip(node.keys, node.values): rebuilt_value = self.visit(value, newnode) if not key: rebuilt_key = nodes.DictUnpack( rebuilt_value.lineno, 
rebuilt_value.col_offset, parent ) else: rebuilt_key = self.visit(key, newnode) yield rebuilt_key, rebuilt_value def visit_dict(self, node, parent): newnode = nodes.Dict(node.lineno, node.col_offset, parent) items = list(self._visit_dict_items(node, parent, newnode)) newnode.postinit(items) return newnode def visit_dictcomp(self, node, parent): newnode = nodes.DictComp(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.key, newnode), self.visit(node.value, newnode), [self.visit(child, newnode) for child in node.generators], ) return newnode def visit_expr(self, node, parent): newnode = nodes.Expr(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_ellipsis(self, node, parent): return nodes.Ellipsis( getattr(node, "lineno", None), getattr(node, "col_offset", None), parent ) def visit_emptynode(self, node, parent): return nodes.EmptyNode( getattr(node, "lineno", None), getattr(node, "col_offset", None), parent ) def visit_excepthandler(self, node, parent): newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent) newnode.postinit( _visit_or_none(node, "type", self, newnode), _visit_or_none(node, "name", self, newnode), [self.visit(child, newnode) for child in node.body], ) return newnode def visit_exec(self, node, parent): newnode = nodes.Exec(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.body, newnode), _visit_or_none(node, "globals", self, newnode), _visit_or_none(node, "locals", self, newnode), ) return newnode def visit_extslice(self, node, parent): newnode = nodes.ExtSlice(parent=parent) newnode.postinit([self.visit(dim, newnode) for dim in node.dims]) return newnode def _visit_for(self, cls, node, parent): newnode = cls(node.lineno, node.col_offset, parent) type_annotation = self.check_type_comment(node) newnode.postinit( target=self.visit(node.target, newnode), iter=self.visit(node.iter, newnode), body=[self.visit(child, newnode) for child in node.body], orelse=[self.visit(child, newnode) for child in node.orelse], type_annotation=type_annotation, ) return newnode def visit_for(self, node, parent): return self._visit_for(nodes.For, node, parent) def visit_importfrom(self, node, parent): names = [(alias.name, alias.asname) for alias in node.names] newnode = nodes.ImportFrom( node.module or "", names, node.level or None, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) self._import_from_nodes.append(newnode) return newnode def _visit_functiondef(self, cls, node, parent): self._global_names.append({}) node, doc = self._get_doc(node) newnode = cls(node.name, doc, node.lineno, node.col_offset, parent) if node.decorator_list: decorators = self.visit_decorators(node, newnode) else: decorators = None if PY3 and node.returns: returns = self.visit(node.returns, newnode) else: returns = None type_comment_args = type_comment_returns = None type_comment_annotation = self.check_function_type_comment(node) if type_comment_annotation: type_comment_returns, type_comment_args = type_comment_annotation newnode.postinit( args=self.visit(node.args, newnode), body=[self.visit(child, newnode) for child in node.body], decorators=decorators, returns=returns, type_comment_returns=type_comment_returns, type_comment_args=type_comment_args, ) self._global_names.pop() return newnode def visit_functiondef(self, node, parent): return self._visit_functiondef(nodes.FunctionDef, node, parent) def visit_generatorexp(self, node, parent): newnode = nodes.GeneratorExp(node.lineno, 
node.col_offset, parent) newnode.postinit( self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators], ) return newnode def visit_attribute(self, node, parent): context = self._get_context(node) if context == astroid.Del: newnode = nodes.DelAttr(node.attr, node.lineno, node.col_offset, parent) elif context == astroid.Store: newnode = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, parent) if not isinstance(parent, astroid.ExceptHandler): self._delayed_assattr.append(newnode) else: newnode = nodes.Attribute(node.attr, node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_global(self, node, parent): newnode = nodes.Global( node.names, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) if self._global_names: for name in node.names: self._global_names[-1].setdefault(name, []).append(newnode) return newnode def visit_if(self, node, parent): newnode = nodes.If(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.test, newnode), [self.visit(child, newnode) for child in node.body], [self.visit(child, newnode) for child in node.orelse], ) return newnode def visit_ifexp(self, node, parent): newnode = nodes.IfExp(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.test, newnode), self.visit(node.body, newnode), self.visit(node.orelse, newnode), ) return newnode def visit_import(self, node, parent): names = [(alias.name, alias.asname) for alias in node.names] newnode = nodes.Import( names, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) for (name, asname) in newnode.names: name = asname or name parent.set_local(name.split(".")[0], newnode) return newnode def visit_index(self, node, parent): newnode = nodes.Index(parent=parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_keyword(self, node, parent): newnode = nodes.Keyword(node.arg, parent=parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_lambda(self, node, parent): newnode = nodes.Lambda(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode)) return newnode def visit_list(self, node, parent): context = self._get_context(node) newnode = nodes.List( ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent ) newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode def visit_listcomp(self, node, parent): newnode = nodes.ListComp(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators], ) return newnode def visit_name(self, node, parent): context = self._get_context(node) if context == astroid.Del: newnode = nodes.DelName(node.id, node.lineno, node.col_offset, parent) elif context == astroid.Store: newnode = nodes.AssignName(node.id, node.lineno, node.col_offset, parent) elif node.id in CONST_NAME_TRANSFORMS: newnode = nodes.Const( CONST_NAME_TRANSFORMS[node.id], getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) return newnode else: newnode = nodes.Name(node.id, node.lineno, node.col_offset, parent) if context in (astroid.Del, astroid.Store): self._save_assignment(newnode) return newnode def visit_constant(self, node, parent): return nodes.Const( node.value, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) def visit_str(self, node, parent): return nodes.Const( 
node.s, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) visit_bytes = visit_str def visit_num(self, node, parent): return nodes.Const( node.n, getattr(node, "lineno", None), getattr(node, "col_offset", None), parent, ) def visit_pass(self, node, parent): return nodes.Pass(node.lineno, node.col_offset, parent) def visit_print(self, node, parent): newnode = nodes.Print(node.nl, node.lineno, node.col_offset, parent) newnode.postinit( _visit_or_none(node, "dest", self, newnode), [self.visit(child, newnode) for child in node.values], ) return newnode def visit_raise(self, node, parent): newnode = nodes.Raise(node.lineno, node.col_offset, parent) newnode.postinit( _visit_or_none(node, "type", self, newnode), _visit_or_none(node, "inst", self, newnode), _visit_or_none(node, "tback", self, newnode), ) return newnode
MIT License
brenns10/tswift
tswift.py
main
python
def main():
    parser = argparse.ArgumentParser(
        description='Search artists, lyrics, and songs!'
    )
    parser.add_argument(
        'artist',
        help='Specify an artist name (Default: Taylor Swift)',
        default='Taylor Swift',
        nargs='?',
    )
    parser.add_argument(
        '-s', '--song',
        help='Given artist name, specify a song name',
        required=False,
    )
    parser.add_argument(
        '-l', '--lyrics',
        help='Search for song by lyrics',
        required=False,
    )
    args = parser.parse_args()

    if args.lyrics:
        song = Song.find_song(args.lyrics)
    else:
        if args.song:
            song = Song(
                title=args.song,
                artist=args.artist,
            )
        else:
            artist = Artist(args.artist)
            if artist.songs:
                song = random.choice(artist.songs)
            else:
                print('Couldn\'t find any songs by artist {}!'
                      .format(args.artist))
                sys.exit(1)

    print(song.format())
Run the CLI.
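As a hedged usage sketch of the library surface this CLI wraps (the song and artist below are placeholders, and the lookup depends on MetroLyrics being reachable):

from tswift import Song

# Equivalent to: python tswift.py 'Taylor Swift' -s 'Blank Space'
song = Song(title='Blank Space', artist='Taylor Swift')
print(song.format())  # title, artist, a separator line, then the lyrics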
https://github.com/brenns10/tswift/blob/a391008a28d00a4b42d982af91d908e7fb97fad3/tswift.py#L170-L212
from lxml import html from googlesearch import search import argparse import requests import re import random import sys ARTIST_URL = "https://www.metrolyrics.com/{artist}-alpage-{n}.html" SONG_URL = "https://www.metrolyrics.com/{title}-lyrics-{artist}.html" SONG_RE = r'https?://www\.metrolyrics\.com/(.*)-lyrics-(.*)\.html' def slugify(string): return string.replace(' ', '-').lower() def deslugify(string): return string.replace('-', ' ').title() class TswiftError(Exception): class Song(object): def __init__(self, title=None, artist=None, url=None): self._lyrics = None if url is not None: self._url = url self.title, self.artist = re.match(SONG_RE, url).groups() elif title is not None and artist is not None: self.title = title self.artist = artist self._url = SONG_URL.format( title=slugify(title), artist=slugify(artist), ) else: raise ValueError('Must provide either title & artist or URL.') self.title = deslugify(self.title) self.artist = deslugify(self.artist) def load(self): page = requests.get(self._url) if page.status_code > 200: raise TswiftError("No lyrics available for requested song") page.encoding = 'utf-8' tree = html.fromstring(page.text) try: lyric_div = tree.get_element_by_id('lyrics-body-text') verses = [c.text_content() for c in lyric_div.find_class('verse')] except KeyError: raise TswiftError("No lyrics available for requested song") else: self._lyrics = '\n\n'.join(verses) return self @property def lyrics(self): if self._lyrics is None: self.load() return self._lyrics def format(self): return '%s\n%s\n%s\n\n%s' % ( self.title, self.artist, '-' * max(len(self.title), len(self.artist)), self.lyrics, ) def __repr__(self): return 'Song(title=%r, artist=%r)' % (self.title, self.artist) @staticmethod def find_song(lyrics): for url in search('site:www.metrolyrics.com ' + lyrics, stop=20): if re.match(SONG_RE, url): return Song(url=url) return None class Artist(object): def __init__(self, name): self._songs = None self.name = slugify(name) def load(self, verbose=False): self._songs = [] page_num = 1 total_pages = 1 while page_num <= total_pages: if verbose: print('retrieving page %d' % page_num) page = requests.get(ARTIST_URL.format(artist=self.name, n=page_num)) tree = html.fromstring(page.text) song_rows_xp = r'//*[@id="popular"]/div/table/tbody/tr' songlist_pagination_xp = r'//*[@id="main-content"]/div[1]/' 'div[2]/p/span/a' rows = tree.xpath(song_rows_xp) for row in rows: song_link = row.xpath(r'./td/a[contains(@class,"title")]') assert len(song_link) == 1 self._songs.append(Song(url=song_link[0].attrib['href'])) total_pages = len(tree.xpath(songlist_pagination_xp)) page_num += 1 return self @property def songs(self): if self._songs is None: self.load() return self._songs def __repr__(self): return 'Artist(%r)' % self.name
BSD 3-Clause New or Revised License
zzdang/cascade_rcnn_gluon
gluoncv/model_zoo/cascade_rcnn/vgg16_pruned.py
vgg16_pruned
python
def vgg16_pruned(**kwargs):
    return get_vgg(16, **kwargs)
r"""VGG-16 model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition" <https://arxiv.org/abs/1409.1556>`_ paper. Parameters ---------- pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters.
https://github.com/zzdang/cascade_rcnn_gluon/blob/b4018001719ec56a688be26c2aab18be664e4bdd/gluoncv/model_zoo/cascade_rcnn/vgg16_pruned.py#L115-L128
import mxnet as mx from mxnet.gluon import nn, HybridBlock from mxnet.initializer import Xavier import os __all__ = ['VGG','get_vgg', 'vgg16_pruned'] class VGG(HybridBlock): def __init__(self, layers, filters, classes=1000, batch_norm=False, **kwargs): super(VGG, self).__init__(**kwargs) assert len(layers) == len(filters) with self.name_scope(): self.features = self._make_features(layers, filters, batch_norm) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) self.features.add(nn.Dense(2048, activation='relu', weight_initializer='normal', bias_initializer='zeros')) self.features.add(nn.Dropout(rate=0.5)) def _make_features(self, layers, filters, batch_norm): featurizer = nn.HybridSequential(prefix='') for i, num in enumerate(layers): for _ in range(num): featurizer.add(nn.Conv2D(filters[i], kernel_size=3, padding=1, weight_initializer=Xavier(rnd_type='gaussian', factor_type='out', magnitude=2), bias_initializer='zeros', )) if batch_norm: featurizer.add(nn.BatchNorm()) featurizer.add(nn.Activation('relu')) featurizer.add(nn.MaxPool2D(strides=2)) return featurizer def hybrid_forward(self, F, x): x = self.features(x) return x vgg_spec = {11: ([1, 1, 2, 2, 2], [64, 128, 256, 512, 512]), 13: ([2, 2, 2, 2, 2], [64, 128, 256, 512, 512]), 16: ([2, 2, 3, 3, 3], [64, 128, 256, 512, 512]), 19: ([2, 2, 4, 4, 4], [64, 128, 256, 512, 512])} def get_vgg(num_layers, pretrained=False, ctx=mx.cpu(0), root=os.path.join('~', '.mxnet', 'models'), **kwargs): layers, filters = vgg_spec[num_layers] net = VGG(layers, filters, **kwargs) if pretrained: from ..model_store import get_model_file batch_norm_suffix = '_bn' if kwargs.get('batch_norm') else '' net.load_parameters('./models/VGG_16_fc2048_prune_cascade.params', ctx=ctx) for v in net.collect_params(select='init_scale|init_mean').values(): v.initialize(force_reinit=True, ctx=ctx) return net
Apache License 2.0
demisto/demisto-sdk
demisto_sdk/commands/generate_outputs/generate_context/generate_integration_context.py
generate_integration_context
python
def generate_integration_context(
    input_path: str,
    examples: Optional[str] = None,
    insecure: bool = False,
    verbose: bool = False,
):
    try:
        yml_data = get_yaml(input_path)

        # Collect per-command example outputs from the examples file.
        example_dict = generate_example_dict(examples, insecure)

        for command in example_dict:
            print_v(f'Building context for the {command} command...', verbose)
            _, _, outputs = example_dict.get(command)

            # Derive context output entries from the raw outputs string and
            # splice them into the matching command of the YAML data.
            output_with_contexts = dict_from_outputs_str(command, outputs,
                                                         verbose=verbose)
            output_contexts = output_with_contexts.get('outputs')
            yml_data = insert_outputs(yml_data, command, output_contexts)

        # Persist the updated commands back to the input YAML.
        print_success(f'Writing outputs to input file "{input_path}"...')
        write_yml(input_path, yml_data)
    except Exception as ex:
        if verbose:
            raise
        else:
            print_error(f'Error: {str(ex)}')
            return 1
    return 0
Generate integration command contexts in-place.

Args:
    input_path: Path to the integration YAML file.
    examples: Path to the command examples file.
    insecure: Whether to allow insecure (unverified) connections when running the examples.
    verbose: Enable verbose (debug) output.
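A hedged invocation sketch; both paths below are hypothetical placeholders, and the call rewrites the YAML in place, returning 0 on success and 1 on a handled error:

from demisto_sdk.commands.generate_outputs.generate_context.generate_integration_context import (
    generate_integration_context,
)

# Hypothetical paths -- point these at a real integration YAML and its
# command-examples file before running.
rc = generate_integration_context(
    input_path='Packs/MyPack/Integrations/MyIntegration/MyIntegration.yml',
    examples='Packs/MyPack/Integrations/MyIntegration/command_examples',
    verbose=True,
)
print(rc)  # 0 on success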
https://github.com/demisto/demisto-sdk/blob/8d8767c2dfec77b67c35f4e1022e30ed2893e864/demisto_sdk/commands/generate_outputs/generate_context/generate_integration_context.py#L64-L104
from typing import Dict, List, Optional from demisto_sdk.commands.common.tools import (get_yaml, print_error, print_success, print_v, write_yml) from demisto_sdk.commands.generate_docs.common import build_example_dict from demisto_sdk.commands.generate_docs.generate_integration_doc import get_command_examples from demisto_sdk.commands.generate_outputs.json_to_outputs.json_to_outputs import parse_json def dict_from_outputs_str(command: str, outputs: str, verbose=False): dict_output = parse_json(outputs, command, "", verbose, return_object=True) return dict_output def generate_example_dict(examples_file: Optional[str], insecure=False): command_examples = get_command_examples(examples_file, None) example_dict, build_errors = build_example_dict(command_examples, insecure) if build_errors: raise Exception( f'Command examples had errors: {build_errors}') return example_dict def insert_outputs(yml_data: Dict, command: str, output_with_contexts: List): commands = yml_data['script']['commands'] found = False for cmd in commands: if cmd.get('name') == command: cmd['outputs'] = output_with_contexts found = True break if not found: raise Exception( f'Input YML doesn\'t have the "{command}" command that exists in the examples file.') return yml_data
MIT License
kylebgorman/edittransducer
edit_transducer/edit_transducer.py
EditTransducer.check_wellformed_lattice
python
def check_wellformed_lattice(lattice):
    if lattice.start() == NO_STATE_ID:
        raise Error("Lattice is empty")
Raises an error if the lattice is empty.

Args:
    lattice: A lattice FST.

Raises:
    Error: Lattice is empty.
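A small sketch of the check, assuming the legacy pynini API this module already imports (pre-2.1 lowercase acceptor/compose):

import pynini
from edit_transducer.edit_transducer import EditTransducer, Error

# Composing acceptors over two disjoint strings yields an empty lattice.
empty = pynini.compose(pynini.acceptor("abc"), pynini.acceptor("xyz"))
try:
    EditTransducer.check_wellformed_lattice(empty)
except Error as err:
    print(err)  # Lattice is empty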
https://github.com/kylebgorman/edittransducer/blob/a288471961197e49bb31c4ce13fc9bf9a3567316/edit_transducer/edit_transducer.py#L114-L124
from __future__ import division from pynini import compose from pynini import invert from pynini import NO_STATE_ID from pynini import shortestdistance from pynini import shortestpath from pynini import string_map from pynini import transducer from pynini import union DEFAULT_INSERT_COST = 1 DEFAULT_DELETE_COST = 1 DEFAULT_SUBSTITUTE_COST = 1 class Error(Exception): pass class EditTransducer(object): DELETE = "<delete>" INSERT = "<insert>" SUBSTITUTE = "<substitute>" def __init__(self, alphabet, insert_cost=DEFAULT_INSERT_COST, delete_cost=DEFAULT_DELETE_COST, substitute_cost=DEFAULT_SUBSTITUTE_COST): match = union(*alphabet).optimize(True) i_insert = transducer("", "[{}]".format(self.INSERT), weight=insert_cost / 2).optimize(True) i_delete = transducer(match, "[{}]".format(self.DELETE), weight=delete_cost / 2).optimize(True) i_substitute = transducer(match, "[{}]".format(self.SUBSTITUTE), weight=substitute_cost / 2).optimize(True) i_ops = union(match, i_insert, i_delete, i_substitute).optimize(True) o_ops = invert(i_ops) syms = o_ops.input_symbols() insert_label = syms.find(self.INSERT) delete_label = syms.find(self.DELETE) o_ops.relabel_pairs(ipairs=((insert_label, delete_label), (delete_label, insert_label))) self._e_i = i_ops.closure().optimize(True) self._e_o = o_ops.closure().optimize(True) @staticmethod
MIT License