Dataset schema (column: type, value range):

repository_name: string, 7 to 107 characters
function_path: string, 4 to 190 characters
function_identifier: string, 1 to 236 characters
language: string, 1 distinct value
function: string, 9 to 647k characters
docstring: string, 5 to 488k characters
function_url: string, 71 to 285 characters
context: string, 0 to 2.51M characters
license: string, 5 distinct values
cartodb/crankshaft
release/python/0.6.0/crankshaft/crankshaft/regression/glm/base.py
LikelihoodModelResults.load
python
def load(cls, fname):
    from statsmodels.iolib.smpickle import load_pickle
    return load_pickle(fname)
load a pickle, (class method)

Parameters
----------
fname : string or filehandle
    fname can be a string to a file path or filename, or a filehandle.

Returns
-------
unpickled instance
https://github.com/cartodb/crankshaft/blob/494c047563328b092fe5fe0b8b4dd862edfee836/release/python/0.6.0/crankshaft/crankshaft/regression/glm/base.py#L900-L913
from __future__ import print_function import numpy as np from scipy import stats from utils import cache_readonly class Results(object): def __init__(self, model, params, **kwd): self.__dict__.update(kwd) self.initialize(model, params, **kwd) self._data_attr = [] def initialize(self, model, params, **kwd): self.params = params self.model = model if hasattr(model, 'k_constant'): self.k_constant = model.k_constant def predict(self, exog=None, transform=True, *args, **kwargs): if transform and hasattr(self.model, 'formula') and exog is not None: from patsy import dmatrix exog = dmatrix(self.model.data.design_info.builder, exog) if exog is not None: exog = np.asarray(exog) if exog.ndim == 1 and (self.model.exog.ndim == 1 or self.model.exog.shape[1] == 1): exog = exog[:, None] exog = np.atleast_2d(exog) return self.model.predict(self.params, exog, *args, **kwargs) class LikelihoodModelResults(Results): use_t = False def __init__(self, model, params, normalized_cov_params=None, scale=1., **kwargs): super(LikelihoodModelResults, self).__init__(model, params) self.normalized_cov_params = normalized_cov_params self.scale = scale if 'use_t' in kwargs: use_t = kwargs['use_t'] if use_t is not None: self.use_t = use_t if 'cov_type' in kwargs: cov_type = kwargs.get('cov_type', 'nonrobust') cov_kwds = kwargs.get('cov_kwds', {}) if cov_type == 'nonrobust': self.cov_type = 'nonrobust' self.cov_kwds = {'description' : 'Standard Errors assume that the ' + 'covariance matrix of the errors is correctly ' + 'specified.'} else: from statsmodels.base.covtype import get_robustcov_results if cov_kwds is None: cov_kwds = {} use_t = self.use_t get_robustcov_results(self, cov_type=cov_type, use_self=True, use_t=use_t, **cov_kwds) def normalized_cov_params(self): raise NotImplementedError def _get_robustcov_results(self, cov_type='nonrobust', use_self=True, use_t=None, **cov_kwds): from statsmodels.base.covtype import get_robustcov_results if cov_kwds is None: cov_kwds = {} if cov_type == 'nonrobust': self.cov_type = 'nonrobust' self.cov_kwds = {'description' : 'Standard Errors assume that the ' + 'covariance matrix of the errors is correctly ' + 'specified.'} else: get_robustcov_results(self, cov_type=cov_type, use_self=True, use_t=use_t, **cov_kwds) @cache_readonly def llf(self): return self.model.loglike(self.params) @cache_readonly def bse(self): return np.sqrt(np.diag(self.cov_params())) @cache_readonly def tvalues(self): return self.params / self.bse @cache_readonly def pvalues(self): if self.use_t: df_resid = getattr(self, 'df_resid_inference', self.df_resid) return stats.t.sf(np.abs(self.tvalues), df_resid)*2 else: return stats.norm.sf(np.abs(self.tvalues))*2 def cov_params(self, r_matrix=None, column=None, scale=None, cov_p=None, other=None): if (hasattr(self, 'mle_settings') and self.mle_settings['optimizer'] in ['l1', 'l1_cvxopt_cp']): dot_fun = nan_dot else: dot_fun = np.dot if (cov_p is None and self.normalized_cov_params is None and not hasattr(self, 'cov_params_default')): raise ValueError('need covariance of parameters for computing ' '(unnormalized) covariances') if column is not None and (r_matrix is not None or other is not None): raise ValueError('Column should be specified without other ' 'arguments.') if other is not None and r_matrix is None: raise ValueError('other can only be specified with r_matrix') if cov_p is None: if hasattr(self, 'cov_params_default'): cov_p = self.cov_params_default else: if scale is None: scale = self.scale cov_p = self.normalized_cov_params * scale if column is not 
None: column = np.asarray(column) if column.shape == (): return cov_p[column, column] else: return cov_p[column[:, None], column] elif r_matrix is not None: r_matrix = np.asarray(r_matrix) if r_matrix.shape == (): raise ValueError("r_matrix should be 1d or 2d") if other is None: other = r_matrix else: other = np.asarray(other) tmp = dot_fun(r_matrix, dot_fun(cov_p, np.transpose(other))) return tmp else: return cov_p def t_test(self, r_matrix, cov_p=None, scale=None, use_t=None): from patsy import DesignInfo names = self.model.data.param_names LC = DesignInfo(names).linear_constraint(r_matrix) r_matrix, q_matrix = LC.coefs, LC.constants num_ttests = r_matrix.shape[0] num_params = r_matrix.shape[1] if (cov_p is None and self.normalized_cov_params is None and not hasattr(self, 'cov_params_default')): raise ValueError('Need covariance of parameters for computing ' 'T statistics') if num_params != self.params.shape[0]: raise ValueError('r_matrix and params are not aligned') if q_matrix is None: q_matrix = np.zeros(num_ttests) else: q_matrix = np.asarray(q_matrix) q_matrix = q_matrix.squeeze() if q_matrix.size > 1: if q_matrix.shape[0] != num_ttests: raise ValueError("r_matrix and q_matrix must have the same " "number of rows") if use_t is None: use_t = (hasattr(self, 'use_t') and self.use_t) _t = _sd = None _effect = np.dot(r_matrix, self.params) if num_ttests > 1: _sd = np.sqrt(np.diag(self.cov_params( r_matrix=r_matrix, cov_p=cov_p))) else: _sd = np.sqrt(self.cov_params(r_matrix=r_matrix, cov_p=cov_p)) _t = (_effect - q_matrix) * recipr(_sd) df_resid = getattr(self, 'df_resid_inference', self.df_resid) if use_t: return ContrastResults(effect=_effect, t=_t, sd=_sd, df_denom=df_resid) else: return ContrastResults(effect=_effect, statistic=_t, sd=_sd, df_denom=df_resid, distribution='norm') def f_test(self, r_matrix, cov_p=None, scale=1.0, invcov=None): res = self.wald_test(r_matrix, cov_p=cov_p, scale=scale, invcov=invcov, use_f=True) return res def wald_test(self, r_matrix, cov_p=None, scale=1.0, invcov=None, use_f=None): if use_f is None: use_f = (hasattr(self, 'use_t') and self.use_t) from patsy import DesignInfo names = self.model.data.param_names LC = DesignInfo(names).linear_constraint(r_matrix) r_matrix, q_matrix = LC.coefs, LC.constants if (self.normalized_cov_params is None and cov_p is None and invcov is None and not hasattr(self, 'cov_params_default')): raise ValueError('need covariance of parameters for computing ' 'F statistics') cparams = np.dot(r_matrix, self.params[:, None]) J = float(r_matrix.shape[0]) if q_matrix is None: q_matrix = np.zeros(J) else: q_matrix = np.asarray(q_matrix) if q_matrix.ndim == 1: q_matrix = q_matrix[:, None] if q_matrix.shape[0] != J: raise ValueError("r_matrix and q_matrix must have the same " "number of rows") Rbq = cparams - q_matrix if invcov is None: cov_p = self.cov_params(r_matrix=r_matrix, cov_p=cov_p) if np.isnan(cov_p).max(): raise ValueError("r_matrix performs f_test for using " "dimensions that are asymptotically " "non-normal") invcov = np.linalg.inv(cov_p) if (hasattr(self, 'mle_settings') and self.mle_settings['optimizer'] in ['l1', 'l1_cvxopt_cp']): F = nan_dot(nan_dot(Rbq.T, invcov), Rbq) else: F = np.dot(np.dot(Rbq.T, invcov), Rbq) df_resid = getattr(self, 'df_resid_inference', self.df_resid) if use_f: F /= J return ContrastResults(F=F, df_denom=df_resid, df_num=invcov.shape[0]) else: return ContrastResults(chi2=F, df_denom=J, statistic=F, distribution='chi2', distargs=(J,)) def wald_test_terms(self, skip_single=False, 
extra_constraints=None, combine_terms=None): from collections import defaultdict result = self if extra_constraints is None: extra_constraints = [] if combine_terms is None: combine_terms = [] design_info = getattr(result.model.data.orig_exog, 'design_info', None) if design_info is None and extra_constraints is None: raise ValueError('no constraints, nothing to do') identity = np.eye(len(result.params)) constraints = [] combined = defaultdict(list) if design_info is not None: for term in design_info.terms: cols = design_info.slice(term) name = term.name() constraint_matrix = identity[cols] for cname in combine_terms: if cname in name: combined[cname].append(constraint_matrix) k_constraint = constraint_matrix.shape[0] if skip_single: if k_constraint == 1: continue constraints.append((name, constraint_matrix)) combined_constraints = [] for cname in combine_terms: combined_constraints.append((cname, np.vstack(combined[cname]))) else: for col, name in enumerate(result.model.exog_names): constraint_matrix = identity[col] for cname in combine_terms: if cname in name: combined[cname].append(constraint_matrix) if skip_single: continue constraints.append((name, constraint_matrix)) combined_constraints = [] for cname in combine_terms: combined_constraints.append((cname, np.vstack(combined[cname]))) use_t = result.use_t distribution = ['chi2', 'F'][use_t] res_wald = [] index = [] for name, constraint in constraints + combined_constraints + extra_constraints: wt = result.wald_test(constraint) row = [wt.statistic.item(), wt.pvalue, constraint.shape[0]] if use_t: row.append(wt.df_denom) res_wald.append(row) index.append(name) col_names = ['statistic', 'pvalue', 'df_constraint'] if use_t: col_names.append('df_denom') from pandas import DataFrame table = DataFrame(res_wald, index=index, columns=col_names) res = WaldTestResults(None, distribution, None, table=table) res.temp = constraints + combined_constraints + extra_constraints return res def conf_int(self, alpha=.05, cols=None, method='default'): bse = self.bse if self.use_t: dist = stats.t df_resid = getattr(self, 'df_resid_inference', self.df_resid) q = dist.ppf(1 - alpha / 2, df_resid) else: dist = stats.norm q = dist.ppf(1 - alpha / 2) if cols is None: lower = self.params - q * bse upper = self.params + q * bse else: cols = np.asarray(cols) lower = self.params[cols] - q * bse[cols] upper = self.params[cols] + q * bse[cols] return np.asarray(lzip(lower, upper)) def save(self, fname, remove_data=False): from statsmodels.iolib.smpickle import save_pickle if remove_data: self.remove_data() save_pickle(self, fname) @classmethod
BSD 3-Clause New or Revised License
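A hedged round-trip sketch using the save()/load() pair visible in the context above; it assumes `results` is an already-constructed LikelihoodModelResults instance, the working directory is writable, and the file name is made up:

# `results` is assumed to be an existing LikelihoodModelResults instance.
results.save("glm_results.pickle", remove_data=False)   # hypothetical file name

restored = LikelihoodModelResults.load("glm_results.pickle")
print(restored.params)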
polyaxon/polystores
polystores/stores/s3_store.py
S3Store.list_prefixes
python
def list_prefixes(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None):
    results = self.list(bucket_name=bucket_name,
                        prefix=prefix,
                        delimiter=delimiter,
                        page_size=page_size,
                        max_items=max_items,
                        keys=False,
                        prefixes=True)
    return results['prefixes']
Lists prefixes in a bucket under prefix

Args:
    bucket_name: `str`. the name of the bucket
    prefix: `str`. a key prefix
    delimiter: `str`. the delimiter marks key hierarchy.
    page_size: `int`. pagination size
    max_items: `int`. maximum items to return
https://github.com/polyaxon/polystores/blob/141789ef75622c80d1f3875cec6952ad3c2d5ec7/polystores/stores/s3_store.py#L273-L291
from __future__ import absolute_import, division, print_function import os from rhea import RheaError from rhea import parser as rhea_parser from six import BytesIO from botocore.exceptions import ClientError from polystores.clients import aws_client from polystores.exceptions import PolyaxonStoresException from polystores.logger import logger from polystores.stores.base_store import BaseStore from polystores.utils import ( append_basename, check_dirname_exists, force_bytes, get_files_in_current_directory ) class S3Store(BaseStore): STORE_TYPE = BaseStore._S3_STORE ENCRYPTION = "AES256" def __init__(self, client=None, resource=None, **kwargs): self._client = client self._resource = resource self._encoding = kwargs.get('encoding', 'utf-8') self._endpoint_url = (kwargs.get('endpoint_url') or kwargs.get('aws_endpoint_url') or kwargs.get('AWS_ENDPOINT_URL')) self._aws_access_key_id = (kwargs.get('access_key_id') or kwargs.get('aws_access_key_id') or kwargs.get('AWS_ACCESS_KEY_ID')) self._aws_secret_access_key = (kwargs.get('secret_access_key') or kwargs.get('aws_secret_access_key') or kwargs.get('AWS_SECRET_ACCESS_KEY')) self._aws_session_token = (kwargs.get('session_token') or kwargs.get('aws_session_token') or kwargs.get('AWS_SECURITY_TOKEN')) self._region_name = (kwargs.get('region') or kwargs.get('aws_region') or kwargs.get('AWS_REGION')) self._aws_verify_ssl = kwargs.get('verify_ssl', kwargs.get('aws_verify_ssl', kwargs.get('AWS_VERIFY_SSL', None))) self._aws_use_ssl = (kwargs.get('use_ssl') or kwargs.get('aws_use_ssl') or kwargs.get('AWS_USE_SSL')) self._aws_legacy_api = (kwargs.get('legacy_api') or kwargs.get('aws_legacy_api') or kwargs.get('AWS_LEGACY_API')) @property def client(self): if self._client is None: self.set_client(endpoint_url=self._endpoint_url, aws_access_key_id=self._aws_access_key_id, aws_secret_access_key=self._aws_secret_access_key, aws_session_token=self._aws_session_token, region_name=self._region_name, aws_use_ssl=self._aws_use_ssl, aws_verify_ssl=self._aws_verify_ssl) return self._client def set_env_vars(self): if self._endpoint_url: os.environ['AWS_ENDPOINT_URL'] = self._endpoint_url if self._aws_access_key_id: os.environ['AWS_ACCESS_KEY_ID'] = self._aws_access_key_id if self._aws_secret_access_key: os.environ['AWS_SECRET_ACCESS_KEY'] = self._aws_secret_access_key if self._aws_session_token: os.environ['AWS_SECURITY_TOKEN'] = self._aws_session_token if self._region_name: os.environ['AWS_REGION'] = self._region_name if self._aws_use_ssl is not None: os.environ['AWS_USE_SSL'] = self._aws_use_ssl if self._aws_verify_ssl is not None: os.environ['AWS_VERIFY_SSL'] = self._aws_verify_ssl if self._aws_legacy_api: os.environ['AWS_LEGACY_API'] = self._aws_legacy_api @property def resource(self): if self._resource is None: self.set_resource(endpoint_url=self._endpoint_url, aws_access_key_id=self._aws_access_key_id, aws_secret_access_key=self._aws_secret_access_key, aws_session_token=self._aws_session_token, region_name=self._region_name) return self._resource def set_client(self, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, aws_use_ssl=True, aws_verify_ssl=None): self._client = aws_client.get_aws_client( 's3', endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, aws_use_ssl=aws_use_ssl, aws_verify_ssl=aws_verify_ssl) def set_resource(self, endpoint_url=None, aws_access_key_id=None, 
aws_secret_access_key=None, aws_session_token=None, region_name=None): self._resource = aws_client.get_aws_resource( 's3', endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name) @staticmethod def parse_s3_url(s3_url): try: spec = rhea_parser.parse_s3_path(s3_url) return spec.bucket, spec.key except RheaError as e: raise PolyaxonStoresException(e) @staticmethod def check_prefix_format(prefix, delimiter): if not delimiter or not prefix: return prefix return prefix + delimiter if prefix[-1] != delimiter else prefix def check_bucket(self, bucket_name): try: self.client.head_bucket(Bucket=bucket_name) return True except ClientError as e: logger.info(e.response["Error"]["Message"]) return False def get_bucket(self, bucket_name): return self.resource.Bucket(bucket_name) def ls(self, path): (bucket_name, key) = self.parse_s3_url(path) results = self.list(bucket_name=bucket_name, prefix=key) return {'files': results['keys'], 'dirs': results['prefixes']} def list(self, bucket_name, prefix='', delimiter='/', page_size=None, max_items=None, keys=True, prefixes=True): config = { 'PageSize': page_size, 'MaxItems': max_items, } legacy_api = aws_client.get_legacy_api(legacy_api=self._aws_legacy_api) if legacy_api: paginator = self.client.get_paginator('list_objects') else: paginator = self.client.get_paginator('list_objects_v2') prefix = self.check_prefix_format(prefix=prefix, delimiter=delimiter) response = paginator.paginate(Bucket=bucket_name, Prefix=prefix, Delimiter=delimiter, PaginationConfig=config) def get_keys(contents): list_keys = [] for cont in contents: list_keys.append((cont['Key'][len(prefix):], cont.get('Size'))) return list_keys def get_prefixes(page_prefixes): list_prefixes = [] for pref in page_prefixes: list_prefixes.append(pref['Prefix'][len(prefix): -1]) return list_prefixes results = { 'keys': [], 'prefixes': [] } for page in response: if prefixes: results['prefixes'] += get_prefixes(page.get('CommonPrefixes', [])) if keys: results['keys'] += get_keys(page.get('Contents', [])) return results
MIT License
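A hedged usage sketch for list_prefixes above: the bucket name and prefix are placeholders, and it assumes AWS credentials are resolvable from the environment by the underlying boto3 client.

from polystores.stores.s3_store import S3Store

store = S3Store()  # credentials picked up from the environment by the underlying client

# List the top-level "sub-directories" under a hypothetical prefix.
prefixes = store.list_prefixes(bucket_name="my-bucket", prefix="datasets/", delimiter="/")
print(prefixes)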
cuemacro/findatapy
findatapy/timeseries/dataquality.py
DataQuality.percentage_nan_between_start_finish_dates
python
def percentage_nan_between_start_finish_dates(self, df, df_properties, asset_field, start_date_field, finish_date_field):
    percentage_nan = {}

    df_properties = df_properties.sort_values(asset_field)

    df_dates = pandas.DataFrame(index=df_properties[asset_field].values,
                                data=df_properties[[start_date_field, finish_date_field]].values,
                                columns=[start_date_field, finish_date_field])

    c_new = [x.split(".")[0] for x in df.columns]

    index = df_dates.index.searchsorted(c_new)

    start_date = df_dates[start_date_field][index]
    finish_date = df_dates[finish_date_field][index]

    for i in range(0, len(df.columns)):
        df_sub = df[df.columns[i]]
        percentage_nan[df.columns[i]] = self.percentage_nan(df_sub[start_date[i]:finish_date[i]])

    return percentage_nan
Calculates the percentage of NaN in a DataFrame in a customisable way. For each column it will
only check the NaNs between specific start and finish dates.

Parameters
----------
df : DataFrame
    Data to be checked for integrity
df_properties : DataFrame
    Record of each column and the start/finish dates that will be used for NaNs
asset_field : str
    The column in df_properties which contains the column names
start_date_field : str
    The column in df_properties which contains the start date
finish_date_field : str
    The column in df_properties which contains the finish date

Returns
-------
dict
    Contains column names and the associated percentage of NaNs
https://github.com/cuemacro/findatapy/blob/0c33429cd691f9ca1b3442b4004919d766d5fec2/findatapy/timeseries/dataquality.py#L92-L132
__author__ = 'saeedamen' import datetime import functools import math import numpy import pandas import pandas.tseries.offsets from findatapy.timeseries.filter import Filter, Calendar from pandas import compat class DataQuality(object): def percentage_nan(self, df, start_date = None): if df is None: return 100.0 if start_date is not None: df = df[df.index >= start_date] nan = float(df.isnull().sum().sum()) valid = float(df.count().sum()) total = nan + valid if total == 0: return 0 return round(100.0 * (nan / total), 1) def percentage_nan_by_columns(self, df, start_date = None): if start_date is not None: df = df[df.index >= start_date] nan_dict = {} for c in df.columns: nan_dict = self.percentage_nan(df[c]) return nan_dict
Apache License 2.0
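A small usage sketch for the method above. The column names, assets and dates are made up, and it assumes a pandas version compatible with the positional Series indexing that findatapy relies on.

import pandas

from findatapy.timeseries.dataquality import DataQuality

# Two made-up price columns with one NaN each.
df = pandas.DataFrame(
    {"EURUSD.close": [1.10, None, 1.12, 1.13], "GBPUSD.close": [1.30, 1.31, None, 1.33]},
    index=pandas.date_range("2021-01-01", periods=4),
)

# Per-asset date ranges over which NaNs should be counted.
df_properties = pandas.DataFrame(
    {"asset": ["EURUSD", "GBPUSD"],
     "start": ["2021-01-01", "2021-01-02"],
     "finish": ["2021-01-04", "2021-01-04"]}
)

dq = DataQuality()
print(dq.percentage_nan_between_start_finish_dates(df, df_properties, "asset", "start", "finish"))
# expected roughly: {'EURUSD.close': 25.0, 'GBPUSD.close': 33.3}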
kappa-dev/regraph
regraph/backends/neo4j/cypher_utils/query_analysis.py
execution_time
python
def execution_time(result):
    avail = result.summary().result_available_after
    cons = result.summary().result_consumed_after
    return avail + cons
Return the execution time of a query.
https://github.com/kappa-dev/regraph/blob/bb148a7cbd94e87f622443263e04c3fae2d4d00b/regraph/backends/neo4j/cypher_utils/query_analysis.py#L4-L8
MIT License
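A hedged usage sketch for execution_time above. The URI, credentials and query are placeholders, and it assumes a legacy (pre-4.x) neo4j Python driver whose query results expose a summary() object with the timing fields used above; newer drivers changed this API.

from neo4j import GraphDatabase  # assumes a pre-4.x driver version

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
with driver.session() as session:
    result = session.run("MATCH (n) RETURN count(n)")
    print(execution_time(result), "ms")  # server-reported available + consumed time
driver.close()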
wright-group/wrighttools
WrightTools/kit/_array.py
svd
python
def svd(a, i=None) -> tuple:
    u, s, v = np.linalg.svd(a, full_matrices=False, compute_uv=True)
    u = u.T
    if i is None:
        return u, v, s
    else:
        return u[i], v[i], s[i]
Singular Value Decomposition.

Factors the matrix `a` as ``u * np.diag(s) * v``, where `u` and `v`
are unitary and `s` is a 1D array of `a`'s singular values.

Parameters
----------
a : array_like
    Input array.
i : int or slice (optional)
    What singular value "slice" to return.
    Default is None which returns unitary 2D arrays.

Returns
-------
tuple
    Decomposed arrays in order `u`, `v`, `s`
https://github.com/wright-group/wrighttools/blob/7531965dec9a8f52557fbd3c60e12dcd3b6e000b/WrightTools/kit/_array.py#L294-L318
import numpy as np from .. import exceptions as wt_exceptions __all__ = [ "closest_pair", "diff", "fft", "joint_shape", "orthogonal", "remove_nans_1D", "share_nans", "smooth_1D", "svd", "unique", "valid_index", "mask_reduce", "enforce_mask_shape", ] def closest_pair(arr, give="indicies"): idxs = [idx for idx in np.ndindex(arr.shape)] outs = [] min_dist = arr.max() - arr.min() for idxa in idxs: for idxb in idxs: if idxa == idxb: continue dist = abs(arr[idxa] - arr[idxb]) if dist == min_dist: if not [idxb, idxa] in outs: outs.append([idxa, idxb]) elif dist < min_dist: min_dist = dist outs = [[idxa, idxb]] if give == "indicies": return outs elif give == "distance": return min_dist else: raise KeyError("give not recognized in closest_pair") def diff(xi, yi, order=1) -> np.ndarray: yi = np.array(yi).copy() flip = False if xi[-1] < xi[0]: xi = np.flipud(xi.copy()) yi = np.flipud(yi) flip = True midpoints = (xi[1:] + xi[:-1]) / 2 for _ in range(order): d = np.diff(yi) d /= np.diff(xi) yi = np.interp(xi, midpoints, d) if flip: yi = np.flipud(yi) return yi def fft(xi, yi, axis=0) -> tuple: if xi.ndim != 1: raise wt_exceptions.DimensionalityError(1, xi.ndim) spacing = np.diff(xi) if not np.allclose(spacing, spacing.mean()): raise RuntimeError("WrightTools.kit.fft: argument xi must be evenly spaced") yi = np.fft.fft(yi, axis=axis) d = (xi.max() - xi.min()) / (xi.size - 1) xi = np.fft.fftfreq(xi.size, d=d) xi = np.fft.fftshift(xi) yi = np.fft.fftshift(yi, axes=axis) return xi, yi def joint_shape(*args) -> tuple: if len(args) == 0: return () shape = [] shapes = [a.shape for a in args] ndim = args[0].ndim for i in range(ndim): shape.append(max([s[i] for s in shapes])) return tuple(shape) def orthogonal(*args) -> bool: for i, arg in enumerate(args): if hasattr(arg, "shape"): args[i] = arg.shape for s in zip(*args): if np.product(s) != max(s): return False return True def remove_nans_1D(*args) -> tuple: vals = np.isnan(args[0]) for a in args: vals |= np.isnan(a) return tuple(np.array(a)[~vals] for a in args) def share_nans(*arrs) -> tuple: nans = np.zeros(joint_shape(*arrs)) for arr in arrs: nans *= arr return tuple([a + nans for a in arrs]) def smooth_1D(arr, n=10, smooth_type="flat") -> np.ndarray: if arr.ndim != 1: raise wt_exceptions.DimensionalityError(1, arr.ndim) if arr.size < n: message = "Input array size must be larger than window size." raise wt_exceptions.ValueError(message) if n < 3: return arr if smooth_type == "flat": w = np.ones(n, dtype=arr.dtype) elif smooth_type == "hanning": w = np.hanning(n) elif smooth_type == "hamming": w = np.hamming(n) elif smooth_type == "bartlett": w = np.bartlett(n) elif smooth_type == "blackman": w = np.blackman(n) else: message = "Given smooth_type, {0}, not available.".format(str(smooth_type)) raise wt_exceptions.ValueError(message) out = np.convolve(w / w.sum(), arr, mode="same") return out
MIT License
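A small, self-contained sketch of the helper above in use. Note that it returns `u` transposed relative to numpy.linalg.svd, which the reconstruction check accounts for; importing it as wt.kit.svd is an assumption about the public namespace.

import numpy as np
import WrightTools as wt

a = np.random.random((5, 3))

# Full factor set: rows of u, rows of v (i.e. V^H), and the singular values.
u, v, s = wt.kit.svd(a)

# Only the leading singular triplet.
u0, v0, s0 = wt.kit.svd(a, i=0)

# Reconstruction check: undo the transpose applied to u before recombining.
assert np.allclose((u.T * s) @ v, a)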
mogui/pyorient
pyorient/otypes.py
OrientNode.__init__
python
def __init__(self, node_dict=None):
    self.name = None
    self.id = None
    self.started_on = None
    self.host = None
    self.port = None

    if node_dict is not None:
        self._parse_dict(node_dict)
Represent a server node in a multi-clustered configuration.

TODO: extend this object with different listeners if we're going to support
an abstraction of the HTTP protocol in the driver; for now we are not
interested in that.

:param node_dict: dict with starting configs (usually from a db_open,
    db_reload record response)
https://github.com/mogui/pyorient/blob/fb74c5da75c14b568c79949b219b98549d1c732a/pyorient/otypes.py#L245-L269
import sys import time from datetime import date, datetime from decimal import Decimal try: basestring except NameError: basestring = str class OrientRecord(object): oRecordData = property(lambda self: self.__o_storage) def __str__(self): rep = "" if self.__o_storage: rep = str( self.__o_storage ) if self.__o_class is not None: rep = "'@" + str(self.__o_class) + "':" + rep + "" if self.__version is not None: rep = rep + ",'version':" + str(self.__version) if self.__rid is not None: rep = rep + ",'rid':'" + str(self.__rid) + "'" return '{' + rep + '}' @staticmethod def addslashes(string): l = [ "\\", '"', "'", "\0", ] for i in l: if i in string: string = string.replace( i, '\\' + i ) return string def __init__(self, content=None): self.__rid = None self.__version = None self.__o_class = None self.__o_storage = {} if not content: content = {} for key in content.keys(): if key == '__rid': self.__rid = content[ key ] elif key == '__version': self.__version = content[key] elif key == '__o_class': self.__o_class = content[ key ] elif key[0:1] == '@': self.__o_class = key[1:] for _key, _value in content[key].items(): if isinstance(_value, basestring): self.__o_storage[_key] = self.addslashes( _value ) else: self.__o_storage[_key] = _value elif key == '__o_storage': self.__o_storage = content[key] else: self.__o_storage[key] = content[key] def _set_keys(self, content=dict): for key in content.keys(): self._set_keys( content[key] ) @property def _in(self): try: return self.__o_storage['in'] except KeyError: return None @property def _out(self): try: return self.__o_storage['out'] except KeyError: return None @property def _rid(self): return self.__rid @property def _version(self): return self.__version @property def _class(self): return self.__o_class def update(self, **kwargs): self.__rid = kwargs.get('__rid', None) self.__version = kwargs.get('__version', None) if self.__o_class is None: self.__o_class = kwargs.get('__o_class', None) """ This method is for backward compatibility when someone use 'getattr(record, a_key)' """ def __getattr__(self, item): try: return self.__o_storage[item] except KeyError: raise AttributeError( "'OrientRecord' object has no attribute " "'" + item + "'" ) class OrientRecordLink(object): def __init__(self, recordlink): cid, rpos = recordlink.split(":") self.__link = recordlink self.clusterID = cid self.recordPosition = rpos def __str__(self): return self.get_hash() def get(self): return self.__link def get_hash(self): return "#%s" % self.__link class OrientBinaryObject(object): def __init__(self, stri): self.b64 = stri def get_hash(self): return "_" + self.b64 + "_" def getBin(self): import base64 return base64.b64decode(self.b64) class OrientCluster(object): def __init__(self, name, cluster_id, cluster_type=None, segment=None): self.name = name self.id = cluster_id self.type = cluster_type self.segment = segment def __str__(self): return "%s: %d" % (self.name, self.id) def __eq__(self, other): return self.name == other.name and self.id == other.id def __ne__(self, other): return self.name != other.name or self.id != other.id class OrientVersion(object): def __init__(self, release): self.release = release self.major = None self.minor = None self.build = None self.subversion = None self._parse_version(release) def _parse_version( self, string_release ): import re if not isinstance(string_release, str): string_release = string_release.decode() try: version_info = string_release.split( "." 
) self.major = version_info[0] self.minor = version_info[1] self.build = version_info[2] except IndexError: pass regx = re.match('.*([0-9]+).*', self.major ) self.major = regx.group(1) try: _temp = self.minor.split( "-" ) self.minor = _temp[0] self.subversion = _temp[1] except IndexError: pass try: regx = re.match( '([0-9]+)[\.\- ]*(.*)', self.build ) self.build = regx.group(1) self.subversion = regx.group(2) except TypeError: pass self.major = int( self.major ) self.minor = int( self.minor ) self.build = 0 if self.build is None else int( self.build ) self.subversion = '' if self.subversion is None else str( self.subversion ) def __str__(self): return self.release class OrientNode(object):
Apache License 2.0
moutix/stepmania-server
smserver/plugins/hardcore/plugin.py
HardcoreStartControllerPlugin.handle
python
def handle(self):
    if not self.room:
        return

    if self.room.status != 2 or self.room.mode != "hardcore":
        return

    with self.conn.mutex:
        self.conn.songstats[0]["attack_metter"] = 0
        self.conn.songstats[1]["attack_metter"] = 0
Activate the hardcore mode if the room is in hardcore mode when a game starts.
https://github.com/moutix/stepmania-server/blob/cf20b363ed3d7bcb75101b17870e876a857ecd66/smserver/plugins/hardcore/plugin.py#L149-L163
import random from smserver import router from smserver import pluginmanager from smserver import chatplugin from smserver import stepmania_controller from smserver import models from smserver import ability from smserver.smutils.smpacket import smpacket from smserver.smutils.smpacket import smcommand from smserver.smutils import smattack class HardcoreChatPlugin(chatplugin.ChatPlugin): command = "hardcore" helper = "Change room mode to hardcore" room = True permission = ability.Permissions.change_room_settings def __call__(self, serv, message): if serv.room.mode == "hardcore": serv.room.mode = "normal" msg = "The room is now in normal mode" else: serv.room.mode = "hardcore" msg = "The room is now in hardcore mode" serv.session.commit() serv.send_message(msg) class HardcorePlugin(pluginmanager.StepmaniaPlugin): def __init__(self, server): pluginmanager.StepmaniaPlugin.__init__(self, server) self.config = server.config.plugins["hardcore"] if not self.config: self.config = {} self.conf_weight = server.config.score.get("percentWeight") if not self.conf_weight: self.conf_weight = { "not_held": 0, "miss": 0, "bad": 0, "good": 0, "held": 3, "hit_mine": -2, "great": 1, "perfect": 2, "flawless": 3 } def on_nscgsu(self, session, serv, packet): if not serv.room: return player_id = packet["player_id"] if player_id not in (0, 1): return if "attack_metter" not in serv.songstats[player_id]: return self.update_score(serv, player_id, packet["step_id"]) if serv.songstats[player_id]["attack_metter"] > self.config.get("max_metter", 100): self.send_attack(serv, player_id, session) def update_score(self, serv, player_id, stepid): step = models.SongStat.stepid.get(stepid) if not step: return with serv.mutex: serv.songstats[player_id]["attack_metter"] += self.conf_weight.get(step, 0) def send_attack(self, serv, player_id, session): with serv.mutex: serv.songstats[player_id]["attack_metter"] = 0 attack = random.choice(list(smattack.SMAttack)) packet = smpacket.SMPacketServerNSCAttack( time=self.config.get("attack_duration", 3000), attack=attack ) user = models.User.get_from_pos(serv.users, player_id, session) message = smpacket.SMPacketServerNSCSU( message="%s send an attack: %s" % (user.fullname(serv.room), attack.value) ) for conn in self.server.ingame_connections(serv.room): for player in (0, 1): if conn == serv and player_id == player: continue packet["player"] = player conn.send(packet) if self.config.get("notif_on_attack", False): conn.send(message) class HardcoreStartControllerPlugin(stepmania_controller.StepmaniaController): command = smcommand.SMClientCommand.NSCGSR require_login = True
MIT License
ivan-vasilev/atpy
atpy/backtesting/random_strategy.py
RandomStrategy.__init__
python
def __init__(self, listeners, bar_event_stream, portfolio_manager: PortfolioManager, max_buys_per_step=1, max_sells_per_step=1):
    self.listeners = listeners

    bar_event_stream += self.on_bar_event

    self.portfolio_manager = portfolio_manager
    self.max_buys_per_step = max_buys_per_step
    self.max_sells_per_step = max_sells_per_step
:param listeners: listeners environment
:param bar_event_stream: bar events
:param portfolio_manager: Portfolio manager
:param max_buys_per_step: maximum buy orders per time step (one bar)
:param max_sells_per_step: maximum sell orders per time step (one bar)
https://github.com/ivan-vasilev/atpy/blob/abe72832ae8cec818b0e67989892c25456e9e5f5/atpy/backtesting/random_strategy.py#L13-L26
import logging import random import pandas as pd from atpy.portfolio.portfolio_manager import PortfolioManager, MarketOrder, Type from pyevents.events import EventFilter class RandomStrategy:
MIT License
rapid7/vm-console-client-python
rapid7vmconsole/models/resources_sonar_query.py
ResourcesSonarQuery.links
python
def links(self, links):
    self._links = links
Sets the links of this ResourcesSonarQuery.

Hypermedia links to corresponding or related resources.  # noqa: E501

:param links: The links of this ResourcesSonarQuery.  # noqa: E501
:type: list[Link]
https://github.com/rapid7/vm-console-client-python/blob/55e1f573967bce27cc9a2d10c12a949b1142c2b3/rapid7vmconsole/models/resources_sonar_query.py#L65-L74
import pprint import re import six class ResourcesSonarQuery(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'links': 'list[Link]', 'resources': 'list[SonarQuery]' } attribute_map = { 'links': 'links', 'resources': 'resources' } def __init__(self, links=None, resources=None): self._links = None self._resources = None self.discriminator = None if links is not None: self.links = links if resources is not None: self.resources = resources @property def links(self): return self._links @links.setter
MIT License
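A minimal sketch of the setter above in use; an empty list stands in for real Link models here.

from rapid7vmconsole.models.resources_sonar_query import ResourcesSonarQuery

query = ResourcesSonarQuery()
query.links = []          # real code would assign a list of Link objects
assert query.links == []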
leonardobdes/bigrest
bigrest/bigiq.py
BIGIQ.task_completed
python
def task_completed(self, obj: RESTObject) -> bool:
    if self.request_token or self.refresh_token is not None:
        self._check_token()
    path = self._get_path(obj)
    url = self._get_url(path)
    response = self.session.get(url)
    if response.status_code != 200:
        raise RESTAPIError(response, self.debug)
    status = response.json()["status"]
    if status == "FAILED":
        raise RESTAPIError(response, self.debug)
    if status == "FINISHED":
        return True
    else:
        return False
Verifies if the task is completed.

Sends an HTTP GET request to the iControl REST API.

Arguments:
    obj: Object that represents the task.

Exceptions:
    RESTAPIError: Raised when iControl REST API returns an error.
https://github.com/leonardobdes/bigrest/blob/7b2d51b346d2602274bd78fc3945365bc0c995b7/bigrest/bigiq.py#L91-L117
from __future__ import annotations import time from .big import BIG from .common.exceptions import RESTAPIError from .common.restobject import RESTObject class BIGIQ(BIG): def task_start(self, path: str, data: dict) -> RESTObject: if self.request_token or self.refresh_token is not None: self._check_token() url = self._get_url(path) response = self.session.post(url, json=data) if response.status_code != 202: raise RESTAPIError(response, self.debug) return RESTObject(response.json()) def task_wait(self, obj: RESTObject, interval: int = 10) -> None: if self.request_token or self.refresh_token is not None: self._check_token() path = self._get_path(obj) url = self._get_url(path) while True: if self.request_token or self.refresh_token is not None: self._check_token() response = self.session.get(url) if response.status_code != 200: raise RESTAPIError(response, self.debug) status = response.json()["status"] if status == "FAILED": raise RESTAPIError(response, self.debug) if status == "FINISHED": return else: time.sleep(interval)
MIT License
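A hedged polling sketch built from the task helpers shown in the context above. The hostname, credentials, endpoint path and payload are placeholders, and the constructor keyword names are an assumption about the BIG base class.

import time

from bigrest.bigiq import BIGIQ

# Placeholder connection details; keyword names assumed from the BIG base class.
device = BIGIQ(device="bigiq.example.com", username="admin", password="secret")

# Hypothetical asynchronous task endpoint and payload.
task = device.task_start("/mgmt/cm/hypothetical/tasks/example", {"name": "demo"})

while not device.task_completed(task):   # returns True once the task status is FINISHED
    time.sleep(10)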
byceps/byceps
byceps/services/news/channel_service.py
delete_channel
python
def delete_channel(channel_id: ChannelID) -> None:
    db.session.query(DbChannel) \
        .filter_by(id=channel_id) \
        .delete()

    db.session.commit()
Delete a news channel.
https://github.com/byceps/byceps/blob/138f928e98fd1e3d79943e1a8744ea04cef465b5/byceps/services/news/channel_service.py#L37-L43
from __future__ import annotations from typing import Optional, Sequence from ...database import db from ...typing import BrandID from ..brand import service as brand_service from .dbmodels.channel import Channel as DbChannel from .transfer.models import Channel, ChannelID def create_channel( brand_id: BrandID, channel_id: ChannelID, url_prefix: str ) -> Channel: brand = brand_service.find_brand(brand_id) if brand is None: raise ValueError(f'Unknown brand ID "{brand_id}"') channel = DbChannel(channel_id, brand.id, url_prefix) db.session.add(channel) db.session.commit() return _db_entity_to_channel(channel)
BSD 3-Clause New or Revised License
luckydonald/pytgbot
pytgbot/bot/base.py
BotBase._delete_webhook__make_request
python
def _delete_webhook__make_request(self, drop_pending_updates=None):
    assert_type_or_raise(drop_pending_updates, None, bool, parameter_name="drop_pending_updates")
    return self.do("deleteWebhook", drop_pending_updates=drop_pending_updates)
Internal function for making the request to the API's deleteWebhook endpoint.

Optional keyword parameters:

:param drop_pending_updates: Pass True to drop all pending updates
:type drop_pending_updates: bool

:return: the decoded json
:rtype: dict|list|bool
https://github.com/luckydonald/pytgbot/blob/e29a0b5f8f8331bd347c8e2b8e75af19b12d1bc5/pytgbot/bot/base.py#L460-L475
import json import re from abc import abstractmethod from warnings import warn from datetime import timedelta, datetime from urllib.parse import urlparse, urlunparse from luckydonaldUtils.logger import logging from luckydonaldUtils.encoding import unicode_type, to_unicode as u, to_native as n from luckydonaldUtils.exceptions import assert_type_or_raise from ..exceptions import TgApiServerException, TgApiParseException from ..exceptions import TgApiTypeError, TgApiResponseException from ..api_types.sendable.inline import InlineQueryResult from ..api_types.receivable.peer import User from ..api_types import from_array_list, as_array from ..api_types.sendable.files import InputFile from ..api_types.sendable import Sendable __author__ = 'luckydonald' __all__ = ["BotBase"] logger = logging.getLogger(__name__) DEFAULT_BASE_URL = "https://api.telegram.org/bot{api_key}/{command}" DEFAULT_DOWNLOAD_URL = "https://api.telegram.org/file/bot{api_key}/{file}" DEFAULT_TIMEOUT = 60.0 class BotBase(object): def __init__(self, api_key, return_python_objects=True, base_url=None, download_url=None, default_timeout=None): if api_key is None or not api_key: raise ValueError("No api_key given.") self.api_key = api_key self.return_python_objects = return_python_objects self._last_update = datetime.now() self._base_url = DEFAULT_BASE_URL if base_url is None else base_url self._download_url = self.calculate_download_url(self._base_url, download_url) self._default_timeout = DEFAULT_TIMEOUT if default_timeout is None else default_timeout self._me = None @classmethod def calculate_download_url(cls, base_url, download_url): if base_url == DEFAULT_BASE_URL: if download_url is None: return DEFAULT_DOWNLOAD_URL else: return download_url else: if download_url is None: parsed_base_url = urlparse(base_url) parsed_base_url = list(parsed_base_url[:]) parsed_base_url[2] = '/file' + (parsed_base_url[2] or '') download_url = urlunparse(parsed_base_url) download_url = download_url.format(api_key="{api_key}", command="{file}") warn( "Custom server `base_url` set ({base_url!r}), but no custom `download_url`. 
".format(base_url=base_url) + "Tried to guess it as {download_url!r}.".format(download_url=download_url) ) return download_url else: return download_url def _prepare_request(self, command, query): params = {} files = {} for key in query.keys(): element = query[key] if element is not None: if isinstance(element, (str, int, float, bool)): params[key] = element elif isinstance(element, InputFile): params[key], file_info = element.get_input_media_referenced_files(key) if file_info is not None: files.update(file_info) else: params[key] = json.dumps(as_array(element)) url = self._base_url.format(api_key=n(self.api_key), command=n(command)) return url, params, files def _postprocess_request(self, request, response, json): from DictObject import DictObject try: logger.debug(json) res = DictObject.objectify(json) except Exception as e: raise TgApiResponseException('Parsing answer as json failed.', response, e) if self.return_python_objects: if res.ok is not True: raise TgApiServerException( error_code=res.error_code if "error_code" in res or hasattr(res, "error_code") else None, response=response, description=res.description if "description" in res or hasattr(res, "description") else None, request=request ) if "result" not in res: raise TgApiParseException('Key "result" is missing.') return res.result return res def _do_fileupload(self, file_param_name, value, _command=None, _file_is_optional=False, **kwargs): from ..api_types.sendable.files import InputFile from luckydonaldUtils.encoding import unicode_type from luckydonaldUtils.encoding import to_native as n if value is None and _file_is_optional: pass elif isinstance(value, str): kwargs[file_param_name] = str(value) elif isinstance(value, unicode_type): kwargs[file_param_name] = n(value) elif isinstance(value, InputFile): files = value.get_request_files(file_param_name) if "files" in kwargs and kwargs["files"]: assert isinstance(kwargs["files"], dict), 'The files should be of type dict, but are of type {}.'.format(type(kwargs["files"])) for key in files.keys(): assert key not in kwargs["files"], '{key} would be overwritten!' 
kwargs["files"][key] = files[key] else: kwargs["files"] = files else: raise TgApiTypeError("Parameter {key} is not type (str, {text_type}, {input_file_type}), but type {type}".format( key=file_param_name, type=type(value), input_file_type=InputFile, text_type=unicode_type)) if not _command: command = re.sub(r'(?!^)_([a-zA-Z])', lambda m: m.group(1).upper(), "send_" + file_param_name) else: command = _command return self.do(command, **kwargs) def get_download_url(self, file): from ..api_types.receivable.media import File assert_type_or_raise(file, File, str, parameter_name='file') if isinstance(file, File): file_path = file.file_path else: file_path = file return self._download_url.format(api_key=n(self.api_key), file=n(file_path)) @abstractmethod def _load_info(self): raise NotImplementedError('subclass needs to overwrite this.') @property def me(self): if not self._me: self._load_info() return self._me @property def username(self): return self.me.username @property def id(self): return self.me.id def __str__(self): return "{s.__class__.__name__}(username={s.username!r}, id={s.id!r})".format(s=self) @abstractmethod def get_updates(self, offset=None, limit=100, poll_timeout=0, allowed_updates=None, request_timeout=None, delta=timedelta(milliseconds=100), error_as_empty=False): raise NotImplementedError('subclass needs to overwrite this.') @abstractmethod def do(self, command, files=None, use_long_polling=False, request_timeout=None, **query): raise NotImplementedError('subclass needs to overwrite this.') def _get_updates__make_request(self, offset=None, limit=None, timeout=None, allowed_updates=None): assert_type_or_raise(offset, None, int, parameter_name="offset") assert_type_or_raise(limit, None, int, parameter_name="limit") assert_type_or_raise(timeout, None, int, parameter_name="timeout") assert_type_or_raise(allowed_updates, None, list, parameter_name="allowed_updates") return self.do("getUpdates", offset=offset, limit=limit, timeout=timeout, allowed_updates=allowed_updates) def _get_updates__process_result(self, result): if not self.return_python_objects: return result logger.debug("Trying to parse {data}".format(data=repr(result))) from pytgbot.api_types.receivable.updates import Update try: return Update.from_array_list(result, list_level=1) except TgApiParseException: logger.debug("Failed parsing as api_type Update", exc_info=True) raise TgApiParseException("Could not parse result.") return result def _set_webhook__make_request(self, url, certificate=None, ip_address=None, max_connections=None, allowed_updates=None, drop_pending_updates=None): from pytgbot.api_types.sendable.files import InputFile assert_type_or_raise(url, unicode_type, parameter_name="url") assert_type_or_raise(certificate, None, InputFile, parameter_name="certificate") assert_type_or_raise(ip_address, None, unicode_type, parameter_name="ip_address") assert_type_or_raise(max_connections, None, int, parameter_name="max_connections") assert_type_or_raise(allowed_updates, None, list, parameter_name="allowed_updates") assert_type_or_raise(drop_pending_updates, None, bool, parameter_name="drop_pending_updates") return self.do("setWebhook", url=url, certificate=certificate, ip_address=ip_address, max_connections=max_connections, allowed_updates=allowed_updates, drop_pending_updates=drop_pending_updates) def _set_webhook__process_result(self, result): if not self.return_python_objects: return result logger.debug("Trying to parse {data}".format(data=repr(result))) try: return from_array_list(bool, result, list_level=0, 
is_builtin=True) except TgApiParseException: logger.debug("Failed parsing as primitive bool", exc_info=True) raise TgApiParseException("Could not parse result.") return result
MIT License
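A hedged sketch of the same call through the public API of a concrete synchronous bot, which routes into the internal helper above; the token is a placeholder and the import of Bot from the package root is an assumption about this pytgbot version.

from pytgbot import Bot  # concrete sync bot; assumed public import path

bot = Bot("123456:PLACEHOLDER-TOKEN")          # hypothetical token
bot.delete_webhook(drop_pending_updates=True)  # ends up calling _delete_webhook__make_request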
centerforopenscience/osf.io
api/meetings/serializers.py
MeetingSubmissionSerializer.get_author_name
python
def get_author_name(self, obj):
    if getattr(obj, 'author_name', None):
        return obj.author_name
    else:
        author = self.get_author(obj)
        if author:
            return author.family_name if author.family_name else author.fullname
        return None
Return the first bibliographic contributor's family_name if it exists; otherwise, return their fullname.
https://github.com/centerforopenscience/osf.io/blob/6552a01fe250997cd3eb67cf72fc7157d9bc5af6/api/meetings/serializers.py#L112-L124
from rest_framework import serializers as ser from addons.osfstorage.models import OsfStorageFile from api.base.serializers import ( IDField, JSONAPISerializer, LinksField, RelationshipField, TypeField, VersionedDateTimeField, ) from api.base.utils import absolute_reverse from api.files.serializers import get_file_download_link from api.nodes.serializers import NodeSerializer class MeetingSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'location', ]) id = IDField(source='endpoint', read_only=True) type = TypeField() name = ser.CharField(read_only=True) location = ser.CharField(read_only=True) start_date = VersionedDateTimeField(read_only=True) end_date = VersionedDateTimeField(read_only=True) info_url = ser.URLField(read_only=True) logo_url = ser.URLField(read_only=True) field_names = ser.DictField(read_only=True) submissions_count = ser.SerializerMethodField() active = ser.BooleanField(read_only=True) type_one_submission_email = ser.SerializerMethodField() type_two_submission_email = ser.SerializerMethodField() is_accepting_type_one = ser.BooleanField(source='poster', read_only=True) is_accepting_type_two = ser.BooleanField(source='talk', read_only=True) submissions = RelationshipField( related_view='meetings:meeting-submissions', related_view_kwargs={'meeting_id': '<endpoint>'}, related_meta={'count': 'get_submissions_count'}, ) links = LinksField({ 'self': 'get_absolute_url', 'html': 'get_absolute_html_url', }) def format_submission_email(self, obj, submission_field): if obj.active: return '{}-{}@osf.io'.format(obj.endpoint, obj.field_names.get(submission_field)) return '' def get_type_one_submission_email(self, obj): return self.format_submission_email(obj, 'submission1') def get_type_two_submission_email(self, obj): return self.format_submission_email(obj, 'submission2') def get_absolute_url(self, obj): return absolute_reverse('meetings:meeting-detail', kwargs={'meeting_id': obj.endpoint}) def get_submissions_count(self, obj): if getattr(obj, 'submissions_count', None): return obj.submissions_count else: return obj.valid_submissions.count() class Meta: type_ = 'meetings' class MeetingSubmissionSerializer(NodeSerializer): filterable_fields = frozenset([ 'title', 'meeting_category', 'author_name', ]) author_name = ser.SerializerMethodField() download_count = ser.SerializerMethodField() meeting_category = ser.SerializerMethodField() author = RelationshipField( related_view='users:user-detail', related_view_kwargs={'user_id': 'get_author_id'}, read_only=True, ) links = LinksField({ 'self': 'get_absolute_url', 'html': 'get_absolute_html_url', 'download': 'get_download_link', }) def get_author(self, obj): contrib_queryset = obj.contributor_set.filter(visible=True).order_by('_order') if contrib_queryset: return contrib_queryset.first().user return None def get_author_id(self, obj): if getattr(obj, 'author_id', None): return obj.author_id else: author = self.get_author(obj) return author._id if author else None
Apache License 2.0
deloittedigitaluk/jira-agile-metrics
jira_agile_metrics/calculator.py
Calculator.run
python
def run(self):
Run the calculator and return its results. These will be automatically saved
https://github.com/deloittedigitaluk/jira-agile-metrics/blob/55a4b1a68c767b65aa03036e481a993e1a233da4/jira_agile_metrics/calculator.py#L28-L31
import logging logger = logging.getLogger(__name__) class Calculator(object): def __init__(self, query_manager, settings, results): self.query_manager = query_manager self.settings = settings self._results = results def get_result(self, calculator=None, default=None): return self._results.get(calculator or self.__class__, default)
MIT License
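A hypothetical subclass sketch showing where run() fits. The class name and returned dict are made up; the only base-class API used is get_result(), which appears in the context above.

from jira_agile_metrics.calculator import Calculator

class ExampleCalculator(Calculator):
    """Illustrative calculator that derives a trivial result."""

    def run(self):
        # Reuse another calculator's output via the base-class helper (defaults to None).
        previous = self.get_result(default=None)
        return {"has_previous_result": previous is not None}

calc = ExampleCalculator(query_manager=None, settings={}, results={})
print(calc.run())   # {'has_previous_result': False}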
rail-berkeley/rlkit
rlkit/torch/smac/agent.py
SmacAgent.latent_posterior
python
def latent_posterior(self, context, squeeze=False, for_reward_prediction=False):
    if isinstance(context, np.ndarray):
        context = ptu.from_numpy(context)
    if self._debug_use_ground_truth_context:
        if squeeze:
            context = context.squeeze(dim=0)
        return Delta(context)
    if for_reward_prediction:
        context_encoder = self.context_encoder_rp
    else:
        context_encoder = self.context_encoder
    params = context_encoder(context)
    params = params.view(context.size(0), -1, context_encoder.output_size)
    mu = params[..., :self.latent_dim]
    sigma_squared = F.softplus(params[..., self.latent_dim:])
    z_params = [_product_of_gaussians(m, s) for m, s in zip(torch.unbind(mu), torch.unbind(sigma_squared))]
    z_means = torch.stack([p[0] for p in z_params])
    z_vars = torch.stack([p[1] for p in z_params])
    if squeeze:
        z_means = z_means.squeeze(dim=0)
        z_vars = z_vars.squeeze(dim=0)
    if self._debug_do_not_sqrt:
        return torch.distributions.Normal(z_means, z_vars)
    else:
        return torch.distributions.Normal(z_means, torch.sqrt(z_vars))
compute q(z|c) as a function of input context and sample new z from it
https://github.com/rail-berkeley/rlkit/blob/60bdfcd09f48f73a450da139b2ba7910b8cede53/rlkit/torch/smac/agent.py#L176-L201
import copy import numpy as np import torch import torch.nn.functional as F from rlkit.util.wrapper import Wrapper from torch import nn as nn import rlkit.torch.pytorch_util as ptu from rlkit.policies.base import Policy from rlkit.torch.distributions import ( Delta, ) from rlkit.torch.sac.policies import MakeDeterministic def _product_of_gaussians(mus, sigmas_squared): sigmas_squared = torch.clamp(sigmas_squared, min=1e-7) sigma_squared = 1. / torch.sum(torch.reciprocal(sigmas_squared), dim=0) mu = sigma_squared * torch.sum(mus / sigmas_squared, dim=0) return mu, sigma_squared def _mean_of_gaussians(mus, sigmas_squared): mu = torch.mean(mus, dim=0) sigma_squared = torch.mean(sigmas_squared, dim=0) return mu, sigma_squared def _natural_to_canonical(n1, n2): mu = -0.5 * n1 / n2 sigma_squared = -0.5 * 1 / n2 return mu, sigma_squared def _canonical_to_natural(mu, sigma_squared): n1 = mu / sigma_squared n2 = -0.5 * 1 / sigma_squared return n1, n2 class SmacAgent(nn.Module): def __init__(self, latent_dim, context_encoder, policy, reward_predictor, use_next_obs_in_context=False, _debug_ignore_context=False, _debug_do_not_sqrt=False, _debug_use_ground_truth_context=False ): super().__init__() self.latent_dim = latent_dim self.context_encoder = context_encoder self.policy = policy self.reward_predictor = reward_predictor self.deterministic_policy = MakeDeterministic(self.policy) self._debug_ignore_context = _debug_ignore_context self._debug_use_ground_truth_context = _debug_use_ground_truth_context self.use_next_obs_in_context = use_next_obs_in_context self.register_buffer('z', torch.zeros(1, latent_dim)) self.register_buffer('z_means', torch.zeros(1, latent_dim)) self.register_buffer('z_vars', torch.zeros(1, latent_dim)) self.z_means = None self.z_vars = None self.context = None self.z = None self.z_means_rp = None self.z_vars_rp = None self.z_rp = None self.context_encoder_rp = context_encoder self._use_context_encoder_snapshot_for_reward_pred = False self.latent_prior = torch.distributions.Normal( ptu.zeros(self.latent_dim), ptu.ones(self.latent_dim) ) self._debug_do_not_sqrt = _debug_do_not_sqrt def clear_z(self, num_tasks=1): mu = ptu.zeros(num_tasks, self.latent_dim) var = ptu.ones(num_tasks, self.latent_dim) self.z_means = mu self.z_vars = var @property def use_context_encoder_snapshot_for_reward_pred(self): return self._use_context_encoder_snapshot_for_reward_pred @use_context_encoder_snapshot_for_reward_pred.setter def use_context_encoder_snapshot_for_reward_pred(self, value): if value and not self.use_context_encoder_snapshot_for_reward_pred: self.context_encoder_rp = copy.deepcopy(self.context_encoder) self.context_encoder_rp.to(ptu.device) self.reward_predictor = copy.deepcopy(self.reward_predictor) self.reward_predictor.to(ptu.device) self._use_context_encoder_snapshot_for_reward_pred = value def detach_z(self): self.z = self.z.detach() if self.recurrent: self.context_encoder.hidden = self.context_encoder.hidden.detach() self.z_rp = self.z_rp.detach() if self.recurrent: self.context_encoder_rp.hidden = self.context_encoder_rp.hidden.detach() def update_context(self, context, inputs): if self._debug_use_ground_truth_context: return context o, a, r, no, d, info = inputs o = ptu.from_numpy(o[None, None, ...]) a = ptu.from_numpy(a[None, None, ...]) r = ptu.from_numpy(np.array([r])[None, None, ...]) no = ptu.from_numpy(no[None, None, ...]) if self.use_next_obs_in_context: data = torch.cat([o, a, r, no], dim=2) else: data = torch.cat([o, a, r], dim=2) if context is None: context = data else: 
try: context = torch.cat([context, data], dim=1) except Exception as e: import ipdb; ipdb.set_trace() return context def compute_kl_div(self): prior = torch.distributions.Normal(ptu.zeros(self.latent_dim), ptu.ones(self.latent_dim)) posteriors = [torch.distributions.Normal(mu, torch.sqrt(var)) for mu, var in zip(torch.unbind(self.z_means), torch.unbind(self.z_vars))] kl_divs = [torch.distributions.kl.kl_divergence(post, prior) for post in posteriors] kl_div_sum = torch.sum(torch.stack(kl_divs)) return kl_div_sum def batched_latent_prior(self, batch_size): return torch.distributions.Normal( ptu.zeros(batch_size, self.latent_dim), ptu.ones(batch_size, self.latent_dim) )
MIT License
amossys/fragscapy
fragscapy/modifications/mod.py
Mod.is_deterministic
python
def is_deterministic(self):
    return True
Is the modification deterministic (no random).
https://github.com/amossys/fragscapy/blob/3ee7f5c73fc6c7eb64858e197c0b8d2b313734e0/fragscapy/modifications/mod.py#L46-L48
import abc class Mod(abc.ABC): name = None doc = None _nb_args = -1 def __init__(self, *args): self.check_args(*args) self.parse_args(*args)
MIT License
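A hypothetical subclass sketch: a modification whose behaviour involves randomness would override the hook above to return False. The class name and docstring are made up, and the remaining Mod hooks from the full base class are omitted, so the class is shown without being instantiated.

from fragscapy.modifications.mod import Mod

class RandomDelay(Mod):
    """Illustrative mod that would delay packets by a random amount."""

    name = "random_delay"
    doc = "Delay each packet by a random amount (illustrative only)"

    def is_deterministic(self):
        # Randomised behaviour, so the mod is not deterministic.
        return False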
madoshakalaka/pipenv-setup
pipenv_setup/setup_updater.py
update_setup
python
def update_setup( dependency_arguments, filename, dev=False ): with open(str(filename), "rb") as setup_file: setup_bytes = setup_file.read() setup_text = setup_bytes.decode(encoding="utf-8") root_node = ast.parse(setup_text) setup_lines = setup_text.splitlines() setup_call_node = get_setup_call_node(root_node) if setup_call_node is None: raise ValueError("No setup() call found in setup.py") setup_call_lineno, setup_call_col_offset = ( setup_call_node.lineno, setup_call_node.col_offset, ) install_requires_lineno = -1 install_requires_col_offset = -1 dependency_links_lineno = -1 dependency_links_col_offset = -1 extras_require_lineno = -1 extras_require_col_offset = -1 for kw in ["install_requires", "dependency_links"]: setup_bytes, setup_lines = clear_kw_list(kw, setup_bytes, setup_lines) if dev: setup_bytes, setup_lines = clear_dev_value(setup_bytes, setup_lines) root_node = ast.parse("\n".join(setup_lines)) node = get_kw_list_node(root_node, "install_requires") if node is not None: install_requires_lineno = node.lineno install_requires_col_offset = node.col_offset node = get_kw_list_node(root_node, "dependency_links") if node is not None: dependency_links_lineno = node.lineno dependency_links_col_offset = node.col_offset extras_require_node = setup_parser.get_extras_require_dict_node(root_node) if extras_require_node is not None: extras_require_lineno = extras_require_node.lineno extras_require_col_offset = extras_require_node.col_offset if install_requires_lineno != -1: insert_at_lineno_col_offset( setup_lines, install_requires_lineno, install_requires_col_offset + 1, str(dependency_arguments["install_requires"])[1:-1], ) elif len(dependency_arguments["install_requires"]) > 0: insert_at_lineno_col_offset( setup_lines, setup_call_lineno, setup_call_col_offset + len("setup("), "install_requires=" + str(dependency_arguments["install_requires"]) + ",", ) if dependency_links_lineno != -1: insert_at_lineno_col_offset( setup_lines, dependency_links_lineno, dependency_links_col_offset + 1, str(dependency_arguments["dependency_links"])[1:-1], ) elif len(dependency_arguments["dependency_links"]) > 0: insert_at_lineno_col_offset( setup_lines, setup_call_lineno, setup_call_col_offset + len("setup("), "dependency_links=" + str(dependency_arguments["dependency_links"]) + ",", ) root_node = ast.parse("\n".join(setup_lines)) if len(dependency_arguments["extras_require"]) > 0 and dev: if extras_require_lineno == -1: insert_at_lineno_col_offset( setup_lines, setup_call_lineno, setup_call_col_offset + len("setup("), 'extras_require = {"dev": []},', ) extras_require_lineno = setup_call_lineno extras_require_col_offset = setup_call_col_offset + len("setup(") root_node = ast.parse("\n".join(setup_lines)) dev_list_node = setup_parser.get_extras_require_dev_list_node(root_node) if dev_list_node is None: insert_at_lineno_col_offset( setup_lines, extras_require_lineno, extras_require_col_offset + 1, '"dev": [],', ) root_node = ast.parse("\n".join(setup_lines)) dev_list_node = setup_parser.get_extras_require_dev_list_node(root_node) assert dev_list_node is not None insert_at_lineno_col_offset( setup_lines, dev_list_node.lineno, dev_list_node.col_offset + 1, str(dependency_arguments["extras_require"])[1:-1] + ",", ) f = codecs.open("setup.py", encoding="utf-8", mode="w") f.write("\n".join(setup_lines)) f.close() format_file(Path("setup.py"))
Clear the install_requires and dependency_links arguments and fill in new ones. Format the code. :param dependency_arguments: :param filename: :param dev: whether to update extras_require :raise ValueError: when setup.py is not recognized (malformed)
https://github.com/madoshakalaka/pipenv-setup/blob/2b66c23dbb4657a3b969332fafabba61649fd189/pipenv_setup/setup_updater.py#L16-L140
import ast import codecs import sys import tokenize from io import BytesIO from subprocess import Popen, PIPE from tokenize import OP from typing import Tuple, List, Any from vistir.compat import Path from pipenv_setup import setup_parser from pipenv_setup.setup_parser import get_setup_call_node, get_kw_list_node
MIT License
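A minimal usage sketch for update_setup above, assuming pipenv-setup is installed and a setup.py with a setup() call exists in the working directory; the package pins are hypothetical:

from pipenv_setup.setup_updater import update_setup

dependency_arguments = {
    "install_requires": ["requests~=2.25.1", "colorama~=0.4.4"],
    "dependency_links": [],
    "extras_require": ["pytest~=6.2.2"],  # written into extras_require["dev"] because dev=True
}
update_setup(dependency_arguments, "setup.py", dev=True)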
ecdavis/pants
pants/stream.py
Stream.on_ssl_error
python
def on_ssl_error(self, exception): log.exception(exception) self.close(flush=False)
Placeholder. Called when an error occurs in the underlying SSL implementation. By default, logs the exception and closes the channel. Argument: exception -- The exception that was raised.
https://github.com/ecdavis/pants/blob/88129d24020e95b71e8d0260a111dc0b457b0676/pants/stream.py#L778-L792
import errno import functools import os import re import socket import ssl import struct from pants._channel import _Channel, HAS_IPV6, sock_type from pants.engine import Engine try: from netstruct import NetStruct as _NetStruct except ImportError: class _NetStruct(object): def __init__(self, *a, **kw): raise NotImplementedError RegexType = type(re.compile("")) Struct = struct.Struct import logging log = logging.getLogger("pants") class Stream(_Channel): SEND_STRING = 0 SEND_FILE = 1 SEND_SSL_HANDSHAKE = 2 def __init__(self, **kwargs): sock = kwargs.get("socket", None) if sock and sock_type(sock) != socket.SOCK_STREAM: raise TypeError("Cannot create a %s with a socket type other than SOCK_STREAM." % self.__class__.__name__) _Channel.__init__(self, **kwargs) self._remote_address = None self._local_address = None self._read_delimiter = None self._recv_buffer = "" self._recv_buffer_size_limit = self._buffer_size self._send_buffer = [] self.connected = False self.connecting = False self._closing = False self.ssl_enabled = False self._ssl_enabling = False self._ssl_socket_wrapped = False self._ssl_handshake_done = False self._ssl_call_on_connect = False if isinstance(kwargs.get("socket", None), ssl.SSLSocket): self._ssl_socket_wrapped = True self.startSSL() elif kwargs.get("ssl_options", None) is not None: self.startSSL(kwargs["ssl_options"]) @property def remote_address(self): if self._remote_address is not None: return self._remote_address elif self._socket: try: return self._socket.getpeername() except socket.error: return None else: return None @remote_address.setter def remote_address(self, val): self._remote_address = val @remote_address.deleter def remote_address(self): self._remote_address = None @property def local_address(self): if self._local_address is not None: return self._local_address elif self._socket: try: return self._socket.getsockname() except socket.error: return None else: return None @local_address.setter def local_address(self, val): self._local_address = val @local_address.deleter def local_address(self): self._local_address = None @property def read_delimiter(self): return self._read_delimiter @read_delimiter.setter def read_delimiter(self, value): if value is None or isinstance(value, basestring) or isinstance(value, RegexType): self._read_delimiter = value self._recv_buffer_size_limit = self._buffer_size elif isinstance(value, (int, long)): self._read_delimiter = value self._recv_buffer_size_limit = max(self._buffer_size, value) elif isinstance(value, Struct): self._read_delimiter = value self._recv_buffer_size_limit = max(self._buffer_size, value.size) elif isinstance(value, _NetStruct): self._read_delimiter = value self._recv_buffer_size_limit = max(self._buffer_size, value.minimum_size) else: raise TypeError("Attempted to set read_delimiter to a value with an invalid type.") self._netstruct_iter = None self._netstruct_needed = None regex_search = True _buffer_size = 2 ** 16 @property def buffer_size(self): return self._buffer_size @buffer_size.setter def buffer_size(self, value): if not isinstance(value, (long, int)): raise TypeError("buffer_size must be an int or a long") self._buffer_size = value if isinstance(self._read_delimiter, (int, long)): self._recv_buffer_size_limit = max(value, self._read_delimiter) elif isinstance(self._read_delimiter, Struct): self._recv_buffer_size_limit = max(value, self._read_delimiter.size) elif isinstance(self._read_delimiter, _NetStruct): self._recv_buffer_size_limit = max(value, self._read_delimiter.minimum_size) else: 
self._recv_buffer_size_limit = value def startSSL(self, ssl_options={}): if self.ssl_enabled or self._ssl_enabling: raise RuntimeError("startSSL() called on SSL-enabled %r" % self) if self._closed or self._closing: raise RuntimeError("startSSL() called on closed %r" % self) if ssl_options.setdefault("do_handshake_on_connect", False) is not False: raise ValueError("SSL option 'do_handshake_on_connect' must be False.") self._ssl_enabling = True self._send_buffer.append((Stream.SEND_SSL_HANDSHAKE, ssl_options)) if self.connected: self._process_send_buffer() return self def connect(self, address): if self.connected or self.connecting: raise RuntimeError("connect() called on active %r." % self) if self._closed or self._closing: raise RuntimeError("connect() called on closed %r." % self) self.connecting = True address, family, resolved = self._format_address(address) if resolved: self._do_connect(address, family) else: try: result = socket.getaddrinfo(address[0], address[1], family) except socket.error as err: self.close(flush=False) e = StreamConnectError(err.errno, err.strerror) self._safely_call(self.on_connect_error, e) return self result = result[0] self._do_connect(result[-1], result[0]) return self def close(self, flush=True): if self._closed: return if flush and self._send_buffer: self._closing = True return self.read_delimiter = None self._recv_buffer = "" self._send_buffer = [] self.connected = False self.connecting = False self.ssl_enabled = False self._ssl_enabling = False self._ssl_socket_wrapped = False self._ssl_handshake_done = False self._ssl_call_on_connect = False self._safely_call(self.on_close) self._remote_address = None self._local_address = None _Channel.close(self) self._closing = False def write(self, data, flush=False): if self._closed or self._closing: raise RuntimeError("write() called on closed %r." % self) if not self.connected: raise RuntimeError("write() called on disconnected %r." % self) if self._send_buffer and self._send_buffer[-1][0] == Stream.SEND_STRING: data_type, existing_data = self._send_buffer.pop(-1) data = existing_data + data self._send_buffer.append((Stream.SEND_STRING, data)) if flush: self._process_send_buffer() else: self._start_waiting_for_write_event() def write_file(self, sfile, nbytes=0, offset=0, flush=False): if self._closed or self._closing: raise RuntimeError("write_file() called on closed %r." % self) if not self.connected: raise RuntimeError("write_file() called on disconnected %r." % self) self._send_buffer.append((Stream.SEND_FILE, (sfile, offset, nbytes))) if flush: self._process_send_buffer() else: self._start_waiting_for_write_event() def write_packed(self, *data, **kwargs): format = kwargs.get("format") if format: self.write(struct.pack(format, *data), kwargs.get("flush", False)) elif not isinstance(self._read_delimiter, (Struct, _NetStruct)): raise ValueError("No format is available for writing packed data.") else: self.write(self._read_delimiter.pack(*data), kwargs.get("flush", False)) def flush(self): if self._closed or self._closing: raise RuntimeError("flush() called on closed %r." % self) if not self.connected: raise RuntimeError("flush() called on disconnected %r." % self) if not self._send_buffer: return self._stop_waiting_for_write_event() self._process_send_buffer() def on_ssl_handshake(self): pass def on_ssl_handshake_error(self, exception): log.exception(exception) self.close(flush=False)
Apache License 2.0
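A hedged sketch of overriding the hook above in a Stream subclass (pants assumed importable); the default behaviour shown in the record is log-and-close:

from pants.stream import Stream

class QuietSSLStream(Stream):
    def on_ssl_error(self, exception):
        # Report the failure somewhere custom, then close without flushing,
        # mirroring the default handler above.
        print("SSL error on %r: %s" % (self, exception))
        self.close(flush=False)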
carla-simulator/traffic-generation-editor
osc_generator/add_maneuvers.py
AddManeuversDockWidget.refresh_entity
python
def refresh_entity(self): self.entity_selection.clear() self.long_ref_entity.clear() self.lateral_ref_entity.clear() self.start_entity_ref_entity.clear() self.stop_entity_ref_entity.clear() entities = [] if QgsProject.instance().mapLayersByName("Vehicles - Ego"): layer = QgsProject.instance().mapLayersByName("Vehicles - Ego")[0] for feature in layer.getFeatures(): veh_id = "Ego_" + str(feature["id"]) entities.append(veh_id) if QgsProject.instance().mapLayersByName("Vehicles"): layer = QgsProject.instance().mapLayersByName("Vehicles")[0] for feature in layer.getFeatures(): veh_id = "Vehicle_" + str(feature["id"]) entities.append(veh_id) if QgsProject.instance().mapLayersByName("Pedestrians"): layer = QgsProject.instance().mapLayersByName("Pedestrians")[0] for feature in layer.getFeatures(): ped_id = "Pedestrian_" + str(feature["id"]) entities.append(ped_id) self.entity_selection.addItems(entities) self.long_ref_entity.addItems(entities) self.lateral_ref_entity.addItems(entities) self.start_entity_ref_entity.addItems(entities) self.stop_entity_ref_entity.addItems(entities)
Gets the list of entities spawned on the map and populates the drop-down menus
https://github.com/carla-simulator/traffic-generation-editor/blob/1f69dfe1a0f3c1e5157ed021e04027f005a6b38e/osc_generator/add_maneuvers.py#L82-L115
import os import math from qgis.PyQt import QtWidgets, uic from qgis.PyQt.QtCore import Qt, pyqtSignal from qgis.gui import QgsMapTool from qgis.utils import iface from qgis.core import (QgsProject, QgsFeature, QgsGeometry, QgsPalLayerSettings, QgsVectorLayerSimpleLabeling, QgsTextFormat, QgsTextBackgroundSettings) from PyQt5.QtGui import QColor from PyQt5.QtWidgets import QInputDialog import ad_map_access as ad from .helper_functions import (layer_setup_maneuvers_waypoint, layer_setup_maneuvers_and_triggers, layer_setup_maneuvers_longitudinal, layer_setup_maneuvers_lateral, verify_parameters, is_float, display_message, get_geo_point) FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'add_maneuvers_widget.ui')) class AddManeuversDockWidget(QtWidgets.QDockWidget, FORM_CLASS): closingPlugin = pyqtSignal() def __init__(self, parent=None): super(AddManeuversDockWidget, self).__init__(parent) self.setupUi(self) self.refresh_entity_button.pressed.connect(self.refresh_entity) self.entity_selection.currentTextChanged.connect(self.update_ref_entity) self.add_maneuver_button.pressed.connect(self.add_maneuvers) self.entity_maneuver_type.currentTextChanged.connect(self.change_maneuver) self.waypoint_orientation_use_lane.stateChanged.connect(self.override_orientation) self.lateral_type.currentTextChanged.connect(self.change_lateral_type) self.long_type.currentTextChanged.connect(self.change_longitudinal_type) self.long_speed_target.currentTextChanged.connect(self.change_longitudinal_speed_target) self.start_condition_type.currentTextChanged.connect(self.update_start_trigger_condition) self.start_value_cond.currentTextChanged.connect(self.update_start_value_cond_parameters) self.start_entity_cond.currentTextChanged.connect(self.update_start_entity_cond_parameters) self.stop_condition_type.currentTextChanged.connect(self.update_stop_trigger_condition) self.stop_value_cond.currentTextChanged.connect(self.update_stop_value_cond_parameters) self.stop_entity_cond.currentTextChanged.connect(self.update_stop_entity_cond_parameters) self.toggle_traffic_light_labels_button.pressed.connect(self.toggle_traffic_light_labels) self.refresh_traffic_light_ids_button.pressed.connect(self.refresh_traffic_lights) self.start_entity_choose_position_button.pressed.connect(self.get_world_position) self.stop_entity_choose_position_button.pressed.connect(self.get_world_position) layer_setup_maneuvers_waypoint() layer_setup_maneuvers_and_triggers() layer_setup_maneuvers_longitudinal() layer_setup_maneuvers_lateral() self._waypoint_layer = QgsProject.instance().mapLayersByName("Waypoint Maneuvers")[0] self._maneuver_layer = QgsProject.instance().mapLayersByName("Maneuvers")[0] self._long_man_layer = QgsProject.instance().mapLayersByName("Longitudinal Maneuvers")[0] self._lat_man_layer = QgsProject.instance().mapLayersByName("Lateral Maneuvers")[0] self._man_id = None self._traffic_labels_on = False self._traffic_labels_setup = False self._traffic_lights_layer = None self.refresh_entity() self.refresh_traffic_lights()
MIT License
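A standalone illustration of the entity-ID convention refresh_entity builds from the three layers (no QGIS needed; the layer names come from the record, the feature ids are made up):

prefixes = {"Vehicles - Ego": "Ego", "Vehicles": "Vehicle", "Pedestrians": "Pedestrian"}
features = {"Vehicles - Ego": [1], "Vehicles": [2, 3], "Pedestrians": [7]}

entities = [
    "%s_%s" % (prefixes[layer], fid)
    for layer, ids in features.items()
    for fid in ids
]
print(entities)  # ['Ego_1', 'Vehicle_2', 'Vehicle_3', 'Pedestrian_7']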
sonycslparis/cae-invar
complex_auto/motives_extractor/utils.py
save_results_raw
python
def save_results_raw(csv_patterns, outfile="results.txt"): f = open(outfile, "w") P = 1 for pattern in csv_patterns: f.write("pattern%d\n" % P) for occ in pattern[1:]: to_write = str(np.array(occ)[:2])[1:-1] + "->" + str(np.array( occ)[2:])[1:-1] + "\n" f.write(to_write) P += 1 f.close() print(f"Patterns written to {outfile}.")
Saves the raw results into the output file
https://github.com/sonycslparis/cae-invar/blob/bd78adb12de30f0311345eb1669cf0bb42ea9517/complex_auto/motives_extractor/utils.py#L355-L367
import pickle as cPickle import csv import numpy as np import os import pylab as plt from scipy import spatial import logging CSV_ONTIME = 0 CSV_MIDI = 1 CSV_HEIGHT = 2 CSV_DUR = 3 CSV_STAFF = 4 def ensure_dir(dir): if not os.path.exists(dir): os.makedirs(dir) def plot_matrix(X): plt.imshow(X, interpolation="nearest", aspect="auto") plt.show() def read_cPickle(file): f = open(file, "r") x = cPickle.load(f) f.close() return x def write_cPickle(file, data): f = open(file, "w") cPickle.dump(data, f, protocol=1) f.close() def compute_ssm(X, h, dist="euclidean"): L = int(1. / h) if L % 2 == 0: L += 1 X = median_filter(X, L=L) S = spatial.distance.pdist(X, dist) S = spatial.distance.squareform(S) S /= S.max() S = 1 - S return S def compute_key_inv_ssm(X, h, dist="euclidean"): P = X.shape[1] L = int(1. / h) if L % 2 == 0: L += 1 if L <= 1: L = 9 X = median_filter(X, L=L) N = X.shape[0] SS = np.zeros((P, N, N)) dist = "euclidean" for i in range(P): SS[i] = spatial.distance.cdist(X, np.roll(X, i), dist) S = np.min(SS, axis=0) """ S = spatial.distance.pdist(X, metric=dist) S = spatial.distance.squareform(S) """ S /= S.max() S = 1 - S return S def chroma_to_tonnetz(C): N = C.shape[0] T = np.zeros((N, 6)) r1 = 1 r2 = 1 r3 = 0.5 phi = np.zeros((6, 12)) for i in range(6): for j in range(12): if i % 2 == 0: fun = np.sin else: fun = np.cos if i < 2: phi[i, j] = r1 * fun(j * 7 * np.pi / 6.) elif i >= 2 and i < 4: phi[i, j] = r2 * fun(j * 3 * np.pi / 2.) else: phi[i, j] = r3 * fun(j * 2 * np.pi / 3.) for i in range(N): for d in range(6): denom = float(C[i, :].sum()) if denom == 0: T[i, d] = 0 else: T[i, d] = 1 / denom * (phi[d, :] * C[i, :]).sum() return T def get_smaller_dur_csv(score, thres=False): min_dur = np.min(score[:, CSV_DUR]) if thres: if min_dur < 0.25: min_dur = .25 return min_dur def get_total_dur_csv(score): max_onsets = np.argwhere(score[:, CSV_ONTIME] == np.max(score[:, CSV_ONTIME])) max_dur = np.max(score[max_onsets, CSV_DUR]) min_onset = get_offset(score) if min_onset > 0: min_onset = 0 total_dur = score[max_onsets[0], CSV_ONTIME] + max_dur + np.abs(min_onset) return total_dur def get_number_of_staves(score): return int(np.max(score[:, CSV_STAFF])) + 1 def get_offset(score): return np.min(score[:, CSV_ONTIME]) def midi_to_chroma(pitch): return ((pitch % 12) + 3) % 12 def csv_to_chromagram(score): h = get_smaller_dur_csv(score, thres=True) total_dur = get_total_dur_csv(score) N = np.ceil(total_dur / float(h)) C = np.zeros((N, 12)) offset = np.abs(int(get_offset(score) / float(h))) for row in score: pitch = midi_to_chroma(row[CSV_MIDI]) start = int(row[CSV_ONTIME] / float(h)) + offset end = start + int(row[CSV_DUR] / float(h)) C[start:end, pitch] = 1 return C, h def median_filter(X, L=9): Y = np.ones(X.shape) * X.min() Lh = (L - 1) / 2 for i in np.arange(Lh, X.shape[0] - Lh): Y[i, :] = np.median(X[i - Lh:i + Lh, :], axis=0) return Y def mean_filter(X, L=9): Y = np.ones(X.shape) * X.min() Lh = (L - 1) / 2 for i in np.arange(Lh, X.shape[0] - Lh): Y[i, :] = np.mean(X[i - Lh:i + Lh, :], axis=0) return Y def is_square(X, start_i, start_j, M, th): try: subX = X[start_i:start_i + M, start_j:start_j + M] rho = 1 if subX.trace(offset=rho) >= (M - rho * 2) * th or subX.trace(offset=-rho) >= (M - rho * 2) * th: return True else: return False except: return False def split_patterns(patterns, max_diff, min_dur): s_patterns = [] N = len(patterns) splitted = np.zeros(N) for i in range(N): o1 = patterns[i][0] for j in range(N): if i == j: continue if splitted[j]: continue o2 = patterns[j][0] if o1[0] > 
o2[0] and o1[1] < o2[1] and ((o2[1] - o1[1]) - (o1[1] - o1[0]) > max_diff): new_p = [] for p in patterns[i]: new_p.append(p) for k, p in enumerate(patterns[j]): if k == 0: continue start_j = p[2] + (o1[0] - o2[0]) end_j = p[3] - (o2[1] - o1[1]) new_p.append([o1[0], o1[1], start_j, end_j]) s_patterns.append(new_p) if o1[0] - o2[0] > min_dur: first_new_p = [] for p in patterns[j]: end_j = p[2] + (o1[0] - p[0]) first_new_p.append([p[0], o1[0], p[2], end_j]) s_patterns.append(first_new_p) if o2[1] - o1[1] > min_dur: last_new_p = [] for p in patterns[j]: start_j = p[3] - (p[1] - o1[1]) last_new_p.append([o1[1], p[1], start_j, p[3]]) s_patterns.append(last_new_p) splitted[i] = 1 splitted[j] = 1 for i in range(N): if splitted[i] == 0: new_p = [] for p in patterns[i]: new_p.append(p) s_patterns.append(new_p) return s_patterns def save_results(csv_patterns, outfile="results.txt"): f = open(outfile, "w") P = 1 for pattern in csv_patterns: f.write("pattern%d\n" % P) O = 1 for occ in pattern: f.write("occurrence%d\n" % O) for row in occ: f.write("%f, %f\n" % (row[0], row[1])) O += 1 P += 1 f.close()
MIT License
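A toy call for save_results_raw above, assuming the module from the record is importable; reading the slicing in the function, each occurrence after a pattern's first entry is taken to be (start_i, end_i, start_j, end_j):

from complex_auto.motives_extractor.utils import save_results_raw

patterns = [
    [None,                           # first element of each pattern is skipped by the writer
     [10.0, 14.5, 32.0, 36.5],
     [10.0, 14.5, 60.0, 64.5]],
]
save_results_raw(patterns, outfile="results_raw.txt")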
soppeng29/simplesb
akad/TalkService.py
Iface.acquireCallTicket
python
def acquireCallTicket(self, to): pass
Parameters: - to
https://github.com/soppeng29/simplesb/blob/ee394235c617f8b7aea78c9d2e59ee131108f73d/akad/TalkService.py#L51-L56
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException import sys import logging from .ttypes import * from thrift.Thrift import TProcessor from thrift.transport import TTransport class Iface(object): def acceptGroupInvitation(self, reqSeq, groupId): pass def acceptGroupInvitationByTicket(self, reqSeq, groupId, ticketId): pass def acceptProximityMatches(self, sessionId, ids): pass def acquireCallRoute(self, to): pass
BSD 3-Clause New or Revised License
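The method above is an abstract Thrift service stub; a hedged sketch of implementing it in a subclass (the returned ticket value is made up):

from akad.TalkService import Iface

class MyTalkService(Iface):
    def acquireCallTicket(self, to):
        # 'to' is the target id; a real implementation would issue a ticket here.
        return "call-ticket-for-%s" % to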
tao12345666333/tornado-zh
tornado/util.py
GzipDecompressor.decompress
python
def decompress(self, value, max_length=None): return self.decompressobj.decompress(value, max_length)
Decompress a chunk, returning newly-available data. Some data may be buffered for later processing; `flush` must be called when there is no more input data to ensure that all data was processed. If ``max_length`` is given, some input data may be left over in ``unconsumed_tail``; you must retrieve this value and pass it back to a future call to `decompress` if it is not empty.
https://github.com/tao12345666333/tornado-zh/blob/e9e8519beb147d9e1290f6a4fa7d61123d1ecb1c/tornado/util.py#L59-L70
from __future__ import absolute_import, division, print_function, with_statement import array import os import sys import zlib try: xrange except NameError: xrange = range try: from inspect import getfullargspec as getargspec except ImportError: from inspect import getargspec class ObjectDict(dict): def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): self[name] = value class GzipDecompressor(object): def __init__(self): self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
MIT License
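A chunked-decompression sketch for the method above; it relies on the unconsumed_tail attribute and flush() described in the docstring (assumed to be exposed on the class):

import gzip
from tornado.util import GzipDecompressor

payload = gzip.compress(b"hello tornado " * 200)
decomp = GzipDecompressor()
out, data = b"", payload
while data:
    out += decomp.decompress(data, max_length=1024)
    data = decomp.unconsumed_tail      # leftover compressed input, if any
out += decomp.flush()                  # drain whatever is still buffered
assert out == b"hello tornado " * 200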
wordnik/serapis
serapis/readability.py
Readability.fleisch_reading_ease
python
def fleisch_reading_ease(self): if self._invalid: return 0 return 0.39 * self.words_per_sentence + 11.8 * self.syllable_count / self.word_count - 15.59
Fleisch Reading Ease. https://en.wikipedia.org/wiki/Flesch%E2%80%93Kincaid_readability_tests Returns: float -- number between 0.0 (hardest to read) and 100 (easiest to read)
https://github.com/wordnik/serapis/blob/6f8214fb226337ef2fd6820b07f9976c5e18b79b/serapis/readability.py#L80-L90
from __future__ import unicode_literals from __future__ import absolute_import __author__ = "Manuel Ebert" __copyright__ = "Copyright 2016, summer.ai" __date__ = "2016-01-12" __email__ = "manuel@summer.ai" import math from unidecode import unidecode from nltk import sent_tokenize, word_tokenize class Readability(object): def __init__(self, doc): self.doc = unidecode(doc) self.sentence_count = len(sent_tokenize(doc)) words = word_tokenize(doc) syllables = [self._count_syllables(word) for word in words] self.char_count = sum(len(word) for word in words) self.syllable_count = sum(syllables) self._invalid = not self.sentence_count or not self.char_count self.complex_word_count = len(filter(lambda s: s >= 4, syllables)) self.word_count = len(words) self.words_per_sentence = 1.0 * self.word_count / self.sentence_count if not self._invalid else 0 def _count_syllables(self, word): vowels = "aeiou" on_vowel = False in_diphthong = False minsyl = 0 maxsyl = 0 lastchar = None word = word.lower() for c in word: is_vowel = c in vowels if on_vowel is None: on_vowel = is_vowel if is_vowel or c == 'y': if not on_vowel: minsyl += 1 maxsyl += 1 elif on_vowel and not in_diphthong and c != lastchar: in_diphthong = True maxsyl += 1 on_vowel = is_vowel lastchar = c if word[-1] == 'e': minsyl -= 1 if word[-1] == 'y' and not on_vowel: maxsyl += 1 return minsyl + maxsyl / 2.0
MIT License
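A worked example of the arithmetic in the method above, with hypothetical counts (20 words in a single sentence, 30 syllables):

words_per_sentence = 20.0
syllable_count, word_count = 30, 20
score = 0.39 * words_per_sentence + 11.8 * syllable_count / word_count - 15.59
print(round(score, 2))  # 7.8 + 17.7 - 15.59 = 9.91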
hunch/hunch-gift-app
django/contrib/gis/gdal/feature.py
Feature.__del__
python
def __del__(self): if self._ptr: capi.destroy_feature(self._ptr)
Releases a reference to this object.
https://github.com/hunch/hunch-gift-app/blob/8c7cad24cc0d9900deb4175e6b768c64a3d7adcf/django/contrib/gis/gdal/feature.py#L26-L28
from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import OGRException, OGRIndexError from django.contrib.gis.gdal.field import Field from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType from django.contrib.gis.gdal.srs import SpatialReference from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api class Feature(GDALBase): def __init__(self, feat, fdefn): if not feat or not fdefn: raise OGRException('Cannot create OGR Feature, invalid pointer given.') self.ptr = feat self._fdefn = fdefn
MIT License
usyd-blockchain/vandal
tools/creationiser/convert.py
lenhexbytify
python
def lenhexbytify(string): out_str = hexify(len(string) // 2) return ("0" if len(out_str) % 2 else "") + out_str
Length of the input hex string, in bytes (half its character count), returned as a hex string with a leading zero if the hex string's length is odd.
https://github.com/usyd-blockchain/vandal/blob/d2b004326fee33920c313e64d0970410b1933990/tools/creationiser/convert.py#L42-L45
import sys def hexify(number): return hex(number)[2:]
BSD 3-Clause New or Revised License
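A self-contained worked example of the helper above (function bodies copied from the record; the inputs are hypothetical hex strings):

def hexify(number):
    return hex(number)[2:]

def lenhexbytify(string):
    out_str = hexify(len(string) // 2)
    return ("0" if len(out_str) % 2 else "") + out_str

print(lenhexbytify("60806040"))  # 4 bytes  -> '04'
print(lenhexbytify("ff" * 300))  # 300 bytes -> '012c'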
aio-libs/aioredis
aioredis/connection.py
Connection.connect
python
async def connect(self): if self.is_connected: return try: await self._connect() except asyncio.CancelledError: raise except (socket.timeout, asyncio.TimeoutError): raise TimeoutError("Timeout connecting to server") except OSError as e: raise ConnectionError(self._error_message(e)) except Exception as exc: raise ConnectionError(exc) from exc try: await self.on_connect() except RedisError: await self.disconnect() raise for callback in self._connect_callbacks: task = callback(self) if task and inspect.isawaitable(task): await task
Connects to the Redis server if not already connected
https://github.com/aio-libs/aioredis/blob/53d8103cd69668e4a7ab500a9576abab1b359dad/aioredis/connection.py#L647-L674
import asyncio import errno import inspect import io import os import socket import ssl import threading import warnings from distutils.version import StrictVersion from itertools import chain from typing import ( Any, Iterable, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, ) from urllib.parse import ParseResult, parse_qs, unquote, urlparse import async_timeout from .compat import Protocol, TypedDict from .exceptions import ( AuthenticationError, AuthenticationWrongNumberOfArgsError, BusyLoadingError, ChildDeadlockedError, ConnectionError, DataError, ExecAbortError, InvalidResponse, ModuleError, NoPermissionError, NoScriptError, ReadOnlyError, RedisError, ResponseError, TimeoutError, ) from .utils import str_if_bytes NONBLOCKING_EXCEPTION_ERROR_NUMBERS = { BlockingIOError: errno.EWOULDBLOCK, ssl.SSLWantReadError: 2, ssl.SSLWantWriteError: 2, ssl.SSLError: 2, } NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys()) try: import hiredis except (ImportError, ModuleNotFoundError): HIREDIS_AVAILABLE = False else: HIREDIS_AVAILABLE = True hiredis_version = StrictVersion(hiredis.__version__) if hiredis_version < StrictVersion("1.0.0"): warnings.warn( "aioredis supports hiredis @ 1.0.0 or higher. " f"You have hiredis @ {hiredis.__version__}. " "Pure-python parser will be used instead." ) HIREDIS_AVAILABLE = False SYM_STAR = b"*" SYM_DOLLAR = b"$" SYM_CRLF = b"\r\n" SYM_LF = b"\n" SYM_EMPTY = b"" SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server." SENTINEL = object() MODULE_LOAD_ERROR = "Error loading the extension. Please check the server logs." NO_SUCH_MODULE_ERROR = "Error unloading module: no such module with that name" MODULE_UNLOAD_NOT_POSSIBLE_ERROR = "Error unloading module: operation not possible." MODULE_EXPORTS_DATA_TYPES_ERROR = ( "Error unloading module: the module " "exports one or more module-side data " "types, can't unload" ) EncodedT = Union[bytes, memoryview] DecodedT = Union[str, int, float] EncodableT = Union[EncodedT, DecodedT] class Encoder: __slots__ = "encoding", "encoding_errors", "decode_responses" def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool): self.encoding = encoding self.encoding_errors = encoding_errors self.decode_responses = decode_responses def encode(self, value: EncodableT) -> EncodedT: if isinstance(value, (bytes, memoryview)): return value if isinstance(value, bool): raise DataError( "Invalid input of type: 'bool'. " "Convert to a bytes, string, int or float first." ) if isinstance(value, (int, float)): return repr(value).encode() if not isinstance(value, str): typename = value.__class__.__name__ raise DataError( f"Invalid input of type: {typename!r}. " "Convert to a bytes, string, int or float first." 
) if isinstance(value, str): return value.encode(self.encoding, self.encoding_errors) return value def decode(self, value: EncodableT, force=False) -> EncodableT: if self.decode_responses or force: if isinstance(value, memoryview): return value.tobytes().decode(self.encoding, self.encoding_errors) if isinstance(value, bytes): return value.decode(self.encoding, self.encoding_errors) return value ExceptionMappingT = Mapping[str, Union[Type[Exception], Mapping[str, Type[Exception]]]] class BaseParser: __slots__ = "_stream", "_buffer", "_read_size" EXCEPTION_CLASSES: ExceptionMappingT = { "ERR": { "max number of clients reached": ConnectionError, "Client sent AUTH, but no password is set": AuthenticationError, "invalid password": AuthenticationError, "wrong number of arguments for 'auth' command": AuthenticationWrongNumberOfArgsError, "wrong number of arguments for 'AUTH' command": AuthenticationWrongNumberOfArgsError, MODULE_LOAD_ERROR: ModuleError, MODULE_EXPORTS_DATA_TYPES_ERROR: ModuleError, NO_SUCH_MODULE_ERROR: ModuleError, MODULE_UNLOAD_NOT_POSSIBLE_ERROR: ModuleError, }, "EXECABORT": ExecAbortError, "LOADING": BusyLoadingError, "NOSCRIPT": NoScriptError, "READONLY": ReadOnlyError, "NOAUTH": AuthenticationError, "NOPERM": NoPermissionError, } def __init__(self, socket_read_size: int): self._stream: Optional[asyncio.StreamReader] = None self._buffer: Optional[SocketBuffer] = None self._read_size = socket_read_size def __del__(self): try: self.on_disconnect() except Exception: pass def parse_error(self, response: str) -> ResponseError: error_code = response.split(" ")[0] if error_code in self.EXCEPTION_CLASSES: response = response[len(error_code) + 1 :] exception_class = self.EXCEPTION_CLASSES[error_code] if isinstance(exception_class, dict): exception_class = exception_class.get(response, ResponseError) return exception_class(response) return ResponseError(response) def on_disconnect(self): raise NotImplementedError() def on_connect(self, connection: "Connection"): raise NotImplementedError() async def can_read(self, timeout: float) -> bool: raise NotImplementedError() async def read_response(self) -> Union[EncodableT, ResponseError, None]: raise NotImplementedError() class SocketBuffer: def __init__( self, stream_reader: asyncio.StreamReader, socket_read_size: int, socket_timeout: float, ): self._stream = stream_reader self.socket_read_size = socket_read_size self.socket_timeout = socket_timeout self._buffer = io.BytesIO() self.bytes_written = 0 self.bytes_read = 0 @property def length(self): return self.bytes_written - self.bytes_read async def _read_from_socket( self, length: Optional[int] = None, timeout: Optional[float] = SENTINEL, raise_on_timeout: bool = True, ) -> bool: buf = self._buffer buf.seek(self.bytes_written) marker = 0 timeout = timeout if timeout is not SENTINEL else self.socket_timeout try: while True: async with async_timeout.timeout(timeout): data = await self._stream.read(self.socket_read_size) if isinstance(data, bytes) and len(data) == 0: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) buf.write(data) data_length = len(data) self.bytes_written += data_length marker += data_length if length is not None and length > marker: continue return True except (socket.timeout, asyncio.TimeoutError): if raise_on_timeout: raise TimeoutError("Timeout reading from socket") return False except NONBLOCKING_EXCEPTIONS as ex: allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1) if not raise_on_timeout and ex.errno == allowed: return False raise 
ConnectionError(f"Error while reading from socket: {ex.args}") async def can_read(self, timeout: float) -> bool: return bool(self.length) or await self._read_from_socket( timeout=timeout, raise_on_timeout=False ) async def read(self, length: int) -> bytes: length = length + 2 if length > self.length: await self._read_from_socket(length - self.length) self._buffer.seek(self.bytes_read) data = self._buffer.read(length) self.bytes_read += len(data) if self.bytes_read == self.bytes_written: self.purge() return data[:-2] async def readline(self) -> bytes: buf = self._buffer buf.seek(self.bytes_read) data = buf.readline() while not data.endswith(SYM_CRLF): await self._read_from_socket() buf.seek(self.bytes_read) data = buf.readline() self.bytes_read += len(data) if self.bytes_read == self.bytes_written: self.purge() return data[:-2] def purge(self): self._buffer.seek(0) self._buffer.truncate() self.bytes_written = 0 self.bytes_read = 0 def close(self): try: self.purge() self._buffer.close() except Exception: pass self._buffer = None self._stream = None class PythonParser(BaseParser): __slots__ = BaseParser.__slots__ + ("encoder",) def __init__(self, socket_read_size: int): super().__init__(socket_read_size) self.encoder: Optional[Encoder] = None def on_connect(self, connection: "Connection"): self._stream = connection._reader self._buffer = SocketBuffer( self._stream, self._read_size, connection.socket_timeout ) self.encoder = connection.encoder def on_disconnect(self): if self._stream is not None: self._stream = None if self._buffer is not None: self._buffer.close() self._buffer = None self.encoder = None async def can_read(self, timeout: float): return self._buffer and bool(await self._buffer.can_read(timeout)) async def read_response(self) -> Union[EncodableT, ResponseError, None]: if not self._buffer: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) raw = await self._buffer.readline() if not raw: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) response: Any byte, response = raw[:1], raw[1:] if byte not in (b"-", b"+", b":", b"$", b"*"): raise InvalidResponse(f"Protocol Error: {raw!r}") if byte == b"-": response = response.decode("utf-8", errors="replace") error = self.parse_error(response) if isinstance(error, ConnectionError): raise error return error elif byte == b"+": pass elif byte == b":": response = int(response) elif byte == b"$": length = int(response) if length == -1: return None response = await self._buffer.read(length) elif byte == b"*": length = int(response) if length == -1: return None response = [(await self.read_response()) for _ in range(length)] if isinstance(response, bytes): response = self.encoder.decode(response) return response class HiredisParser(BaseParser): __slots__ = BaseParser.__slots__ + ("_next_response", "_reader", "_socket_timeout") def __init__(self, socket_read_size: int): if not HIREDIS_AVAILABLE: raise RedisError("Hiredis is not available.") super().__init__(socket_read_size=socket_read_size) self._next_response = ... 
self._reader: Optional[hiredis.Reader] = None self._socket_timeout: Optional[float] = None def on_connect(self, connection: "Connection"): self._stream = connection._reader kwargs = { "protocolError": InvalidResponse, "replyError": self.parse_error, } if connection.encoder.decode_responses: kwargs.update( encoding=connection.encoder.encoding, errors=connection.encoder.encoding_errors, ) self._reader = hiredis.Reader(**kwargs) self._next_response = False self._socket_timeout = connection.socket_timeout def on_disconnect(self): self._stream = None self._reader = None self._next_response = False async def can_read(self, timeout: float): if not self._reader: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) if self._next_response is False: self._next_response = self._reader.gets() if self._next_response is False: return await self.read_from_socket(timeout=timeout, raise_on_timeout=False) return True async def read_from_socket( self, timeout: Optional[float] = SENTINEL, raise_on_timeout: bool = True ): timeout = self._socket_timeout if timeout is SENTINEL else timeout try: async with async_timeout.timeout(timeout): buffer = await self._stream.read(self._read_size) if not isinstance(buffer, bytes) or len(buffer) == 0: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None self._reader.feed(buffer) return True except asyncio.CancelledError: raise except (socket.timeout, asyncio.TimeoutError): if raise_on_timeout: raise TimeoutError("Timeout reading from socket") from None return False except NONBLOCKING_EXCEPTIONS as ex: allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1) if not raise_on_timeout and ex.errno == allowed: return False raise ConnectionError(f"Error while reading from socket: {ex.args}") async def read_response(self) -> EncodableT: if not self._stream or not self._reader: self.on_disconnect() raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None if self._next_response is not False: response = self._next_response self._next_response = False return response response = self._reader.gets() while response is False: await self.read_from_socket() response = self._reader.gets() if isinstance(response, ConnectionError): raise response elif ( isinstance(response, list) and response and isinstance(response[0], ConnectionError) ): raise response[0] return response DefaultParser: Type[Union[PythonParser, HiredisParser]] if HIREDIS_AVAILABLE: DefaultParser = HiredisParser else: DefaultParser = PythonParser class ConnectCallbackProtocol(Protocol): def __call__(self, connection: "Connection"): ... class AsyncConnectCallbackProtocol(Protocol): async def __call__(self, connection: "Connection"): ... 
ConnectCallbackT = Union[ConnectCallbackProtocol, AsyncConnectCallbackProtocol] class Connection: __slots__ = ( "pid", "host", "port", "db", "username", "client_name", "password", "socket_timeout", "socket_connect_timeout", "socket_keepalive", "socket_keepalive_options", "socket_type", "retry_on_timeout", "health_check_interval", "next_health_check", "last_active_at", "encoder", "ssl_context", "_reader", "_writer", "_parser", "_connect_callbacks", "_buffer_cutoff", "_lock", "__dict__", ) def __init__( self, *, host: str = "localhost", port: Union[str, int] = 6379, db: Union[str, int] = 0, password: Optional[str] = None, socket_timeout: Optional[float] = None, socket_connect_timeout: Optional[float] = None, socket_keepalive: bool = False, socket_keepalive_options: Optional[dict] = None, socket_type: int = 0, retry_on_timeout: bool = False, encoding: str = "utf-8", encoding_errors: str = "strict", decode_responses: bool = False, parser_class: Type[BaseParser] = DefaultParser, socket_read_size: int = 65536, health_check_interval: int = 0, client_name: Optional[str] = None, username: Optional[str] = None, encoder_class: Type[Encoder] = Encoder, ): self.pid = os.getpid() self.host = host self.port = int(port) self.db = db self.username = username self.client_name = client_name self.password = password self.socket_timeout = socket_timeout self.socket_connect_timeout = socket_connect_timeout or socket_timeout or None self.socket_keepalive = socket_keepalive self.socket_keepalive_options = socket_keepalive_options or {} self.socket_type = socket_type self.retry_on_timeout = retry_on_timeout self.health_check_interval = health_check_interval self.next_health_check = -1 self.ssl_context: Optional[RedisSSLContext] = None self.encoder = encoder_class(encoding, encoding_errors, decode_responses) self._reader: Optional[asyncio.StreamReader] = None self._writer: Optional[asyncio.StreamWriter] = None self._parser = parser_class( socket_read_size=socket_read_size, ) self._connect_callbacks: List[ConnectCallbackT] = [] self._buffer_cutoff = 6000 self._lock = asyncio.Lock() def __repr__(self): repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces())) return f"{self.__class__.__name__}<{repr_args}>" def repr_pieces(self): pieces = [("host", self.host), ("port", self.port), ("db", self.db)] if self.client_name: pieces.append(("client_name", self.client_name)) return pieces def __del__(self): try: if self.is_connected: loop = asyncio.get_event_loop() coro = self.disconnect() if loop.is_running(): loop.create_task(coro) else: loop.run_until_complete(coro) except Exception: pass @property def is_connected(self): return bool(self._reader and self._writer) def register_connect_callback(self, callback): self._connect_callbacks.append(callback) def clear_connect_callbacks(self): self._connect_callbacks = []
MIT License
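A minimal connect/disconnect sketch for the class above (assumes a Redis server listening on localhost:6379):

import asyncio
from aioredis.connection import Connection

async def main():
    conn = Connection(host="localhost", port=6379)
    await conn.connect()        # no-op if already connected
    print(conn.is_connected)    # True
    await conn.disconnect()

asyncio.run(main())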
googleapis/python-kms
google/cloud/kms_v1/services/key_management_service/transports/grpc.py
KeyManagementServiceGrpcTransport.update_crypto_key_primary_version
python
def update_crypto_key_primary_version( self, ) -> Callable[[service.UpdateCryptoKeyPrimaryVersionRequest], resources.CryptoKey]: if "update_crypto_key_primary_version" not in self._stubs: self._stubs[ "update_crypto_key_primary_version" ] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyPrimaryVersion", request_serializer=service.UpdateCryptoKeyPrimaryVersionRequest.serialize, response_deserializer=resources.CryptoKey.deserialize, ) return self._stubs["update_crypto_key_primary_version"]
r"""Return a callable for the update crypto key primary version method over gRPC. Update the version of a [CryptoKey][google.cloud.kms.v1.CryptoKey] that will be used in [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. Returns an error if called on a key whose purpose is not [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT]. Returns: Callable[[~.UpdateCryptoKeyPrimaryVersionRequest], ~.CryptoKey]: A function that, when called, will call the underlying RPC on the server.
https://github.com/googleapis/python-kms/blob/412bf4f5f939ac14ad217734c21049c67f7b5b3a/google/cloud/kms_v1/services/key_management_service/transports/grpc.py#L710-L741
import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers from google.api_core import gapic_v1 import google.auth from google.auth import credentials as ga_credentials from google.auth.transport.grpc import SslCredentials import grpc from google.cloud.kms_v1.types import resources from google.cloud.kms_v1.types import service from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from .base import KeyManagementServiceTransport, DEFAULT_CLIENT_INFO class KeyManagementServiceGrpcTransport(KeyManagementServiceTransport): _stubs: Dict[str, Callable] def __init__( self, *, host: str = "cloudkms.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, ) -> None: self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: credentials = False self._grpc_channel = channel self._ssl_channel_credentials = None else: if api_mtls_endpoint: host = api_mtls_endpoint if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._prep_wrapped_messages(client_info) @classmethod def create_channel( cls, host: str = "cloudkms.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, ) -> grpc.Channel: return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property def list_key_rings( self, ) -> Callable[[service.ListKeyRingsRequest], service.ListKeyRingsResponse]: if "list_key_rings" not in self._stubs: self._stubs["list_key_rings"] = self.grpc_channel.unary_unary( 
"/google.cloud.kms.v1.KeyManagementService/ListKeyRings", request_serializer=service.ListKeyRingsRequest.serialize, response_deserializer=service.ListKeyRingsResponse.deserialize, ) return self._stubs["list_key_rings"] @property def list_crypto_keys( self, ) -> Callable[[service.ListCryptoKeysRequest], service.ListCryptoKeysResponse]: if "list_crypto_keys" not in self._stubs: self._stubs["list_crypto_keys"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/ListCryptoKeys", request_serializer=service.ListCryptoKeysRequest.serialize, response_deserializer=service.ListCryptoKeysResponse.deserialize, ) return self._stubs["list_crypto_keys"] @property def list_crypto_key_versions( self, ) -> Callable[ [service.ListCryptoKeyVersionsRequest], service.ListCryptoKeyVersionsResponse ]: if "list_crypto_key_versions" not in self._stubs: self._stubs["list_crypto_key_versions"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/ListCryptoKeyVersions", request_serializer=service.ListCryptoKeyVersionsRequest.serialize, response_deserializer=service.ListCryptoKeyVersionsResponse.deserialize, ) return self._stubs["list_crypto_key_versions"] @property def list_import_jobs( self, ) -> Callable[[service.ListImportJobsRequest], service.ListImportJobsResponse]: if "list_import_jobs" not in self._stubs: self._stubs["list_import_jobs"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/ListImportJobs", request_serializer=service.ListImportJobsRequest.serialize, response_deserializer=service.ListImportJobsResponse.deserialize, ) return self._stubs["list_import_jobs"] @property def get_key_ring(self) -> Callable[[service.GetKeyRingRequest], resources.KeyRing]: if "get_key_ring" not in self._stubs: self._stubs["get_key_ring"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/GetKeyRing", request_serializer=service.GetKeyRingRequest.serialize, response_deserializer=resources.KeyRing.deserialize, ) return self._stubs["get_key_ring"] @property def get_crypto_key( self, ) -> Callable[[service.GetCryptoKeyRequest], resources.CryptoKey]: if "get_crypto_key" not in self._stubs: self._stubs["get_crypto_key"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/GetCryptoKey", request_serializer=service.GetCryptoKeyRequest.serialize, response_deserializer=resources.CryptoKey.deserialize, ) return self._stubs["get_crypto_key"] @property def get_crypto_key_version( self, ) -> Callable[[service.GetCryptoKeyVersionRequest], resources.CryptoKeyVersion]: if "get_crypto_key_version" not in self._stubs: self._stubs["get_crypto_key_version"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/GetCryptoKeyVersion", request_serializer=service.GetCryptoKeyVersionRequest.serialize, response_deserializer=resources.CryptoKeyVersion.deserialize, ) return self._stubs["get_crypto_key_version"] @property def get_public_key( self, ) -> Callable[[service.GetPublicKeyRequest], resources.PublicKey]: if "get_public_key" not in self._stubs: self._stubs["get_public_key"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/GetPublicKey", request_serializer=service.GetPublicKeyRequest.serialize, response_deserializer=resources.PublicKey.deserialize, ) return self._stubs["get_public_key"] @property def get_import_job( self, ) -> Callable[[service.GetImportJobRequest], resources.ImportJob]: if "get_import_job" not in self._stubs: self._stubs["get_import_job"] = 
self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/GetImportJob", request_serializer=service.GetImportJobRequest.serialize, response_deserializer=resources.ImportJob.deserialize, ) return self._stubs["get_import_job"] @property def create_key_ring( self, ) -> Callable[[service.CreateKeyRingRequest], resources.KeyRing]: if "create_key_ring" not in self._stubs: self._stubs["create_key_ring"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/CreateKeyRing", request_serializer=service.CreateKeyRingRequest.serialize, response_deserializer=resources.KeyRing.deserialize, ) return self._stubs["create_key_ring"] @property def create_crypto_key( self, ) -> Callable[[service.CreateCryptoKeyRequest], resources.CryptoKey]: if "create_crypto_key" not in self._stubs: self._stubs["create_crypto_key"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/CreateCryptoKey", request_serializer=service.CreateCryptoKeyRequest.serialize, response_deserializer=resources.CryptoKey.deserialize, ) return self._stubs["create_crypto_key"] @property def create_crypto_key_version( self, ) -> Callable[[service.CreateCryptoKeyVersionRequest], resources.CryptoKeyVersion]: if "create_crypto_key_version" not in self._stubs: self._stubs["create_crypto_key_version"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/CreateCryptoKeyVersion", request_serializer=service.CreateCryptoKeyVersionRequest.serialize, response_deserializer=resources.CryptoKeyVersion.deserialize, ) return self._stubs["create_crypto_key_version"] @property def import_crypto_key_version( self, ) -> Callable[[service.ImportCryptoKeyVersionRequest], resources.CryptoKeyVersion]: if "import_crypto_key_version" not in self._stubs: self._stubs["import_crypto_key_version"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/ImportCryptoKeyVersion", request_serializer=service.ImportCryptoKeyVersionRequest.serialize, response_deserializer=resources.CryptoKeyVersion.deserialize, ) return self._stubs["import_crypto_key_version"] @property def create_import_job( self, ) -> Callable[[service.CreateImportJobRequest], resources.ImportJob]: if "create_import_job" not in self._stubs: self._stubs["create_import_job"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/CreateImportJob", request_serializer=service.CreateImportJobRequest.serialize, response_deserializer=resources.ImportJob.deserialize, ) return self._stubs["create_import_job"] @property def update_crypto_key( self, ) -> Callable[[service.UpdateCryptoKeyRequest], resources.CryptoKey]: if "update_crypto_key" not in self._stubs: self._stubs["update_crypto_key"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKey", request_serializer=service.UpdateCryptoKeyRequest.serialize, response_deserializer=resources.CryptoKey.deserialize, ) return self._stubs["update_crypto_key"] @property def update_crypto_key_version( self, ) -> Callable[[service.UpdateCryptoKeyVersionRequest], resources.CryptoKeyVersion]: if "update_crypto_key_version" not in self._stubs: self._stubs["update_crypto_key_version"] = self.grpc_channel.unary_unary( "/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyVersion", request_serializer=service.UpdateCryptoKeyVersionRequest.serialize, response_deserializer=resources.CryptoKeyVersion.deserialize, ) return self._stubs["update_crypto_key_version"] @property
Apache License 2.0
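A hedged sketch of how the transport stub above is usually reached through the generated client rather than directly (the project, key ring, and version id are hypothetical):

from google.cloud import kms_v1

client = kms_v1.KeyManagementServiceClient()
key_name = client.crypto_key_path("my-project", "global", "my-key-ring", "my-key")
updated = client.update_crypto_key_primary_version(
    request={"name": key_name, "crypto_key_version_id": "2"}
)
print(updated.primary.name)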
criteo/deepr
deepr/examples/movielens/jobs/evaluate.py
precision_recall_f1
python
def precision_recall_f1(true: np.ndarray, pred: np.ndarray, k: int): num_predicted = np.unique(pred).size num_intersect = np.intersect1d(pred, true).size num_observed = np.unique(true).size p = num_intersect / min(num_predicted, k) r = num_intersect / min(num_observed, k) f1 = 2 * p * r / (p + r) if p != 0 or r != 0 else 0 return p, r, f1
Compute precision, recall and f1_score.
https://github.com/criteo/deepr/blob/ca36e99dff193efc23fd5329040ccd9a52c361c2/deepr/examples/movielens/jobs/evaluate.py#L99-L107
import logging from dataclasses import dataclass from typing import List, Union, Optional import numpy as np import deepr from deepr.utils import mlflow try: import faiss except ImportError as e: print(f"Faiss needs to be installed for MovieLens {e}") LOGGER = logging.getLogger(__name__) @dataclass class Evaluate(deepr.jobs.Job): path_predictions: str path_embeddings: str path_biases: Optional[str] = None k: Union[int, List[int]] = 50 use_mlflow: bool = False num_queries: int = 1000 def run(self): with deepr.io.ParquetDataset(self.path_predictions).open() as ds: predictions = ds.read_pandas().to_pandas() users = np.stack(predictions["user"]) if deepr.io.Path(self.path_embeddings).suffix == ".npz": with deepr.io.Path(self.path_embeddings).open("rb") as file: embeddings = np.load(file) embeddings = embeddings.astype(np.float32) else: with deepr.io.ParquetDataset(self.path_embeddings).open() as ds: embeddings = ds.read_pandas().to_pandas() embeddings = embeddings.to_numpy() if self.path_biases is not None: with deepr.io.ParquetDataset(self.path_biases).open() as ds: biases = ds.read_pandas().to_pandas() biases = biases.to_numpy() embeddings = np.concatenate([embeddings, biases], axis=-1) ones = np.ones([users.shape[0], 1], np.float32) users = np.concatenate([users, ones], axis=-1) LOGGER.info(f"Shapes, embeddings={embeddings.shape}, users={users.shape}") index = faiss.IndexFlatIP(embeddings.shape[-1]) index.add(np.ascontiguousarray(embeddings)) _, indices = index.search(users, k=self.num_queries) k_values = [self.k] if isinstance(self.k, int) else self.k for k in k_values: precision, recall, f1, ndcg = compute_metrics(predictions["input"], predictions["target"], indices, k=k) LOGGER.info( f"precision@{k} = {precision}\n" f"recall@{k} = {recall}\n" f"f1@{k} = {f1}\n" f"NDCG@{k} = {ndcg}" ) if self.use_mlflow: mlflow.log_metric(key=f"precision_at_{k}", value=precision) mlflow.log_metric(key=f"recall_at_{k}", value=recall) mlflow.log_metric(key=f"f1_at_{k}", value=f1) mlflow.log_metric(key=f"ndcg_at_{k}", value=ndcg) def compute_metrics(inputs: List[np.ndarray], targets: List[np.ndarray], predictions: List[np.ndarray], k: int): recalls = [] precisions = [] f1s = [] ndcgs = [] for inp, tgt, pred in zip(inputs, targets, predictions): pred = [idx for idx in pred if idx not in inp][:k] p, r, f1 = precision_recall_f1(tgt, pred, k=k) ndcg = ndcg_score(tgt, pred, k=k) recalls.append(r) precisions.append(p) f1s.append(f1) ndcgs.append(ndcg) return np.mean(precisions), np.mean(recalls), np.mean(f1s), np.mean(ndcgs)
Apache License 2.0
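A small worked example for the metric helper above (the item ids are made up; the import path follows the record):

import numpy as np
from deepr.examples.movielens.jobs.evaluate import precision_recall_f1

true = np.array([3, 7, 9, 21])
pred = np.array([7, 9, 50, 61, 70])
p, r, f1 = precision_recall_f1(true, pred, k=5)
# intersection = {7, 9}: p = 2/min(5, 5) = 0.4, r = 2/min(4, 5) = 0.5, f1 ≈ 0.444
print(p, r, round(f1, 3))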
btimby/fulltext
fulltext/util.py
BaseBackend.check
python
def check(self, title): pass
May be overridden by a subclass. This is called before text extraction. If the overriding method raises an exception, a warning is printed and the bin backend is used.
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/util.py#L367-L372
from __future__ import print_function import contextlib import atexit import errno import logging import os import subprocess import warnings import sys import functools import tempfile import shutil from os.path import join as pathjoin import six from six import PY3 try: import exiftool except ImportError: exiftool = None from fulltext.compat import which LOGGER = logging.getLogger(__file__) LOGGER.addHandler(logging.NullHandler()) TEMPDIR = os.environ.get('FULLTEXT_TEMP', tempfile.gettempdir()) HERE = os.path.abspath(os.path.dirname(__file__)) class BackendError(AssertionError): pass class CommandLineError(Exception): def render(self, msg): return msg % vars(self) class MissingCommandException(CommandLineError): def __init__(self, cmd, msg=""): self.cmd = cmd self.msg = msg def __str__(self): if self.msg: return self.msg else: return "%r CLI tool is not installed" % self.cmd class ShellError(CommandLineError): def __init__(self, command, exit_code, stdout, stderr): self.command = command self.exit_code = exit_code self.stdout = stdout self.stderr = stderr self.executable = self.command.split()[0] def failed_message(self): return ( "The command `%(command)s` failed with exit code %(exit_code)d\n" "------------- stdout -------------\n" "%(stdout)s" "------------- stderr -------------\n" "%(stderr)s" ) % vars(self) def __str__(self): return self.failed_message() def run(*cmd, **kwargs): stdin = kwargs.get('stdin', None) try: pipe = subprocess.Popen( cmd, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) except IOError as e: if e.errno == errno.ENOENT: raise MissingCommandException(cmd[0]) raise except OSError as e: if e.errno == errno.ENOENT: raise MissingCommandException(cmd[0]) try: stdout, stderr = pipe.communicate() if stderr: if PY3: warn(stderr.decode(sys.getfilesystemencoding(), "ignore")) else: warn(stderr) if pipe.returncode != 0: raise ShellError(' '.join(cmd), pipe.returncode, stdout, stderr) return stdout finally: if pipe.stdout: pipe.stdout.close() if pipe.stderr: pipe.stderr.close() try: if pipe.stdin: pipe.stdin.close() finally: pipe.wait() def warn(msg): warnings.warn(msg, UserWarning, stacklevel=2) LOGGER.warning(msg) def is_windows(): return os.name == 'nt' def is_windows64(): return is_windows() and 'PROGRAMFILES(X86)' in os.environ def get_data_dir(): if hasattr(sys, '_MEIPASS'): path = pathjoin(sys._MEIPASS, 'fulltext', 'data') if not os.path.isdir(path): print(">>> WARN: assuming you're using pyinstaller from duster", file=sys.stderr) path = pathjoin(sys._MEIPASS, 'duster', 'data') else: path = pathjoin(HERE, 'data') assert os.path.isdir(path), path return path def assert_cmd_exists(cmd): if not which(cmd): raise MissingCommandException(cmd) if not is_windows(): import magic else: def _set_binpath(): bindir = 'bin64' if is_windows64() else 'bin32' path = pathjoin(get_data_dir(), bindir) os.environ['PATH'] += os.pathsep + path assert_cmd_exists("pdftotext") assert_cmd_exists("unrtf") assert_cmd_exists("exiftool") assert_cmd_exists("unrar") _set_binpath() def _import_magic(): from magic import Magic as _Magic class Magic(_Magic): def from_file(self, filename, mime=True): return _Magic.from_file(self, filename) def from_buffer(self, buf, mime=True): return _Magic.from_buffer(self, buf) path = pathjoin(get_data_dir(), 'magic') assert os.path.isfile(path), path return Magic(mime=True, magic_file=path) magic = _import_magic() def memoize(fun): @functools.wraps(fun) def wrapper(*args, **kwargs): key = (args, frozenset(sorted(kwargs.items()))) try: return 
cache[key] except KeyError: ret = cache[key] = fun(*args, **kwargs) return ret def cache_clear(): cache.clear() cache = {} wrapper.cache_clear = cache_clear return wrapper @memoize def term_supports_colors(): try: import curses assert sys.stderr.isatty() curses.setupterm() assert curses.tigetnum("colors") > 0 except Exception: return False else: return True def hilite(s, ok=True, bold=False): if not term_supports_colors(): return s attr = [] if ok is None: pass elif ok: attr.append('32') else: attr.append('31') if bold: attr.append('1') return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s) def is_file_path(obj): return isinstance(obj, six.string_types) or isinstance(obj, bytes) def memoize(fun): @functools.wraps(fun) def wrapper(*args, **kwargs): key = (args, frozenset(sorted(kwargs.items()))) try: return cache[key] except KeyError: ret = cache[key] = fun(*args, **kwargs) return ret def cache_clear(): cache.clear() cache = {} wrapper.cache_clear = cache_clear return wrapper @contextlib.contextmanager def fobj_to_tempfile(f, suffix=''): with tempfile.NamedTemporaryFile( dir=TEMPDIR, suffix=suffix, delete=False) as t: shutil.copyfileobj(f, t) try: yield t.name finally: os.remove(t.name) if exiftool is not None: _et = exiftool.ExifTool() _et.start() @atexit.register def _close_et(): LOGGER.debug("terminating exiftool subprocess") _et.terminate() def exiftool_title(path, encoding, encoding_error): if is_file_path(path): title = (_et.get_tag("title", path) or "").strip() if title: if hasattr(title, "decode"): return title.decode(encoding, encoding_error) else: return title else: def exiftool_title(*a, **kw): return None class BaseBackend(object): def __init__(self, encoding, encoding_errors, kwargs): self.encoding = encoding self.encoding_errors = encoding_errors self.kwargs = kwargs def setup(self): pass def teardown(self): pass
MIT License
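A minimal usage sketch for the run() helper defined in the fulltext/util.py context above; it assumes the fulltext package (and its libmagic data) is installed, and the pdftotext command and file name are purely illustrative.

from fulltext.util import run, MissingCommandException, ShellError

try:
    # run() returns the command's stdout as bytes and raises on a non-zero exit code
    out = run("pdftotext", "example.pdf", "-")
except MissingCommandException as err:
    print(err)            # the CLI tool is not installed
except ShellError as err:
    print(err.exit_code)  # the command ran but failed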
tensorflow/tensor2tensor
tensor2tensor/rl/ppo.py
_distributional_to_value
python
def _distributional_to_value(value_d, size, subscale, threshold): half = size // 2 value_range = (tf.to_float(tf.range(-half, half)) + 0.5) * subscale probs = tf.nn.softmax(value_d) if threshold == 0.0: return tf.reduce_sum(probs * value_range, axis=-1) accumulated_probs = tf.cumsum(probs, axis=-1) probs = tf.where(accumulated_probs < threshold, tf.zeros_like(probs), probs) probs /= tf.reduce_sum(probs, axis=-1, keepdims=True) return tf.reduce_sum(probs * value_range, axis=-1)
Get a scalar value out of a value distribution in distributional RL.
https://github.com/tensorflow/tensor2tensor/blob/c22a226704e5887862bf9edd9f269892c9016ad4/tensor2tensor/rl/ppo.py#L98-L113
from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensor2tensor.layers import common_layers from tensor2tensor.models.research.rl import get_policy from tensor2tensor.utils import learning_rate from tensor2tensor.utils import optimize import tensorflow.compat.v1 as tf import tensorflow_probability as tfp def define_ppo_step(data_points, hparams, action_space, lr, epoch=-1, distributional_size=1, distributional_subscale=0.04): del distributional_subscale (observation, action, discounted_reward, discounted_reward_probs, norm_advantage, old_pdf) = data_points obs_shape = common_layers.shape_list(observation) observation = tf.reshape( observation, [obs_shape[0] * obs_shape[1]] + obs_shape[2:] ) (logits, new_value) = get_policy(observation, hparams, action_space, epoch=epoch, distributional_size=distributional_size) logits = tf.reshape(logits, obs_shape[:2] + [action_space.n]) new_policy_dist = tfp.distributions.Categorical(logits=logits) new_pdf = new_policy_dist.prob(action) ratio = new_pdf / old_pdf clipped_ratio = tf.clip_by_value(ratio, 1 - hparams.clipping_coef, 1 + hparams.clipping_coef) surrogate_objective = tf.minimum(clipped_ratio * norm_advantage, ratio * norm_advantage) policy_loss = -tf.reduce_mean(surrogate_objective) if distributional_size > 1: new_value = tf.reshape(new_value, obs_shape[:2] + [distributional_size]) new_value = tf.nn.log_softmax(new_value, axis=-1) value_shape = common_layers.shape_list(new_value) new_value_shifted = tf.concat([new_value[1:], new_value[-1:]], axis=0) new_value_mean = (new_value + new_value_shifted) / 2 new_value = tf.concat([tf.expand_dims(new_value, axis=-1), tf.expand_dims(new_value_mean, axis=-1)], -1) new_value = tf.reshape(new_value, value_shape[:-1] + [2 * value_shape[-1]]) discounted_reward = tf.cast(discounted_reward, tf.int32) value_loss = tf.batch_gather(new_value, discounted_reward) discounted_reward_probs = tf.expand_dims(discounted_reward_probs, axis=1) value_loss = - tf.reduce_sum(value_loss * discounted_reward_probs, axis=-1) value_loss = hparams.value_loss_coef * tf.reduce_mean(value_loss) else: new_value = tf.reshape(new_value, obs_shape[:2]) value_error = new_value - discounted_reward value_loss = hparams.value_loss_coef * tf.reduce_mean(value_error ** 2) entropy = new_policy_dist.entropy() entropy_loss = -hparams.entropy_loss_coef * tf.reduce_mean(entropy) losses = [policy_loss, value_loss, entropy_loss] loss = sum(losses) variables = tf.global_variables(hparams.policy_network + "/.*") train_op = optimize.optimize(loss, lr, hparams, variables=variables) with tf.control_dependencies([train_op]): return [tf.identity(x) for x in losses]
Apache License 2.0
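A small sketch of calling _distributional_to_value on a batch of value logits; the batch size, bucket count, and subscale used here are illustrative assumptions, not values from the record.

import tensorflow.compat.v1 as tf
from tensor2tensor.rl.ppo import _distributional_to_value

size = 64                                  # number of buckets in the value distribution
value_d = tf.random.normal([8, size])      # [batch, buckets] value logits
# threshold=0.0 reduces to the plain expectation over the bucket midpoints
value = _distributional_to_value(value_d, size, subscale=0.04, threshold=0.0)
print(value.shape)                         # (8,)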
dask/dask
dask/array/einsumfuncs.py
parse_einsum_input
python
def parse_einsum_input(operands): if len(operands) == 0: raise ValueError("No input operands") if isinstance(operands[0], basestring): subscripts = operands[0].replace(" ", "") operands = [asarray(o) for o in operands[1:]] for s in subscripts: if s in ".,->": continue if s not in einsum_symbols_set: raise ValueError("Character %s is not a valid symbol." % s) else: tmp_operands = list(operands) operand_list = [] subscript_list = [] for p in range(len(operands) // 2): operand_list.append(tmp_operands.pop(0)) subscript_list.append(tmp_operands.pop(0)) output_list = tmp_operands[-1] if len(tmp_operands) else None operands = [asarray(v) for v in operand_list] subscripts = "" last = len(subscript_list) - 1 for num, sub in enumerate(subscript_list): for s in sub: if s is Ellipsis: subscripts += "..." elif isinstance(s, int): subscripts += einsum_symbols[s] else: raise TypeError( "For this input type lists must contain " "either int or Ellipsis" ) if num != last: subscripts += "," if output_list is not None: subscripts += "->" for s in output_list: if s is Ellipsis: subscripts += "..." elif isinstance(s, int): subscripts += einsum_symbols[s] else: raise TypeError( "For this input type lists must contain " "either int or Ellipsis" ) if ("-" in subscripts) or (">" in subscripts): invalid = (subscripts.count("-") > 1) or (subscripts.count(">") > 1) if invalid or (subscripts.count("->") != 1): raise ValueError("Subscripts can only contain one '->'.") if "." in subscripts: used = subscripts.replace(".", "").replace(",", "").replace("->", "") unused = list(einsum_symbols_set - set(used)) ellipse_inds = "".join(unused) longest = 0 if "->" in subscripts: input_tmp, output_sub = subscripts.split("->") split_subscripts = input_tmp.split(",") out_sub = True else: split_subscripts = subscripts.split(",") out_sub = False for num, sub in enumerate(split_subscripts): if "." in sub: if (sub.count(".") != 3) or (sub.count("...") != 1): raise ValueError("Invalid Ellipses.") if operands[num].shape == (): ellipse_count = 0 else: ellipse_count = max(operands[num].ndim, 1) ellipse_count -= len(sub) - 3 if ellipse_count > longest: longest = ellipse_count if ellipse_count < 0: raise ValueError("Ellipses lengths do not match.") elif ellipse_count == 0: split_subscripts[num] = sub.replace("...", "") else: rep_inds = ellipse_inds[-ellipse_count:] split_subscripts[num] = sub.replace("...", rep_inds) subscripts = ",".join(split_subscripts) if longest == 0: out_ellipse = "" else: out_ellipse = ellipse_inds[-longest:] if out_sub: subscripts += "->" + output_sub.replace("...", out_ellipse) else: output_subscript = "" tmp_subscripts = subscripts.replace(",", "") for s in sorted(set(tmp_subscripts)): if s not in einsum_symbols_set: raise ValueError("Character %s is not a valid symbol." % s) if tmp_subscripts.count(s) == 1: output_subscript += s normal_inds = "".join(sorted(set(output_subscript) - set(out_ellipse))) subscripts += "->" + out_ellipse + normal_inds if "->" in subscripts: input_subscripts, output_subscript = subscripts.split("->") else: input_subscripts = subscripts tmp_subscripts = subscripts.replace(",", "") output_subscript = "" for s in sorted(set(tmp_subscripts)): if s not in einsum_symbols_set: raise ValueError("Character %s is not a valid symbol." 
% s) if tmp_subscripts.count(s) == 1: output_subscript += s for char in output_subscript: if char not in input_subscripts: raise ValueError("Output character %s did not appear in the input" % char) if len(input_subscripts.split(",")) != len(operands): raise ValueError( "Number of einsum subscripts must be equal to the number of operands." ) return (input_subscripts, output_subscript, operands)
A reproduction of numpy's _parse_einsum_input(), which is itself a reproduction of the C-side einsum parsing, in Python. Returns ------- input_strings : str Parsed input strings output_string : str Parsed output string operands : list of array_like The operands to use in the numpy contraction Examples -------- The operand list is simplified to reduce printing: >> a = np.random.rand(4, 4) >> b = np.random.rand(4, 4, 4) >> __parse_einsum_input(('...a,...a->...', a, b)) ('za,xza', 'xz', [a, b]) >> __parse_einsum_input((a, [Ellipsis, 0], b, [Ellipsis, 0])) ('za,xza', 'xz', [a, b])
https://github.com/dask/dask/blob/e05d1cf32899a0379f38c9e2a971b11465f470a4/dask/array/einsumfuncs.py#L26-L193
import numpy as np from numpy.compat import basestring from ..utils import derived_from from .core import asarray, blockwise, einsum_lookup einsum_symbols = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" einsum_symbols_set = set(einsum_symbols) def chunk_einsum(*operands, **kwargs): subscripts = kwargs.pop("subscripts") ncontract_inds = kwargs.pop("ncontract_inds") dtype = kwargs.pop("kernel_dtype") einsum = einsum_lookup.dispatch(type(operands[0])) chunk = einsum(subscripts, *operands, dtype=dtype, **kwargs) return chunk.reshape(chunk.shape + (1,) * ncontract_inds)
BSD 3-Clause New or Revised License
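A quick illustration of parse_einsum_input in its string-subscripts form; the array shapes and subscripts are arbitrary.

import dask.array as da
from dask.array.einsumfuncs import parse_einsum_input

a = da.ones((4, 4), chunks=2)
b = da.ones((4, 4, 4), chunks=2)
inputs, output, operands = parse_einsum_input(("ij,jkl->il", a, b))
print(inputs, "->", output)   # ij,jkl -> il
print(len(operands))          # 2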
sbg/mitty
mitty/empirical/gc.py
gc_and_coverage_for_chromosome
python
def gc_and_coverage_for_chromosome(bam_fname, fasta_fname, chrom_idx, block_len=10000): bam_fp = pysam.AlignmentFile(bam_fname, mode='rb') region_start, region_end = 1, bam_fp.header['SQ'][chrom_idx]['LN'] logger.debug('Processing {}:{}-{}'.format(bam_fp.header['SQ'][chrom_idx]['SN'], region_start, region_end)) fasta_fp = pysam.FastaFile(fasta_fname) return np.array([ gc_and_coverage_for_region(bam_fp, fasta_fp, region='{}:{}-{}'.format(bam_fp.header['SQ'][chrom_idx]['SN'], r, r + block_len)) for r in range(region_start, region_end, block_len) ], dtype=[('gc', float), ('coverage', float)])
:param bam_fname: BAM file name (file names rather than file objects are passed to allow parallelization) :param fasta_fname: reference FASTA file name :param chrom_idx: 0, 1, 2 ... referring to chroms in the bam header :param block_len: how many bp to chunk by :return: a structured array of gc and coverage values, one entry per block
https://github.com/sbg/mitty/blob/e299649f71b78da036b25a96cec3440764095c87/mitty/empirical/gc.py#L43-L59
from multiprocessing import Pool import time import pickle import logging import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import pysam logger = logging.getLogger(__name__) def gc_and_coverage_for_region(bam_fp, fasta_fp, region): seq = fasta_fp.fetch(region=region) if seq.count('N') / float(len(seq)) > 0.1: return None, None gc = float(seq.count('G') + seq.count('C')) / len(seq) cov_l = [b.n for b in bam_fp.pileup(region=region)] cov = float(sum(cov_l)) / max(len(cov_l), 1) return gc, cov
Apache License 2.0
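A hedged usage sketch of gc_and_coverage_for_chromosome; the file names are placeholders, and the BAM is assumed to be coordinate-sorted and indexed so pysam can pile up reads over each block.

from mitty.empirical.gc import gc_and_coverage_for_chromosome

# chrom_idx is the 0-based index of the chromosome in the BAM header ('SQ' entries)
gc_cov = gc_and_coverage_for_chromosome("sample.bam", "ref.fa", chrom_idx=0, block_len=10000)
print(gc_cov["gc"][:5], gc_cov["coverage"][:5])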
tramac/awesome-semantic-segmentation-pytorch
core/utils/score.py
intersectionAndUnion
python
def intersectionAndUnion(imPred, imLab, numClass): imPred = imPred * (imLab >= 0) intersection = imPred * (imPred == imLab) (area_intersection, _) = np.histogram(intersection, bins=numClass, range=(1, numClass)) (area_pred, _) = np.histogram(imPred, bins=numClass, range=(1, numClass)) (area_lab, _) = np.histogram(imLab, bins=numClass, range=(1, numClass)) area_union = area_pred + area_lab - area_intersection return (area_intersection, area_union)
This function takes the prediction and label of a single image and returns the intersection and union areas for each class. To compute over many images do: for i in range(Nimages): (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i], numClass) IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)
https://github.com/tramac/awesome-semantic-segmentation-pytorch/blob/5843f75215dadc5d734155a238b425a753a665d9/core/utils/score.py#L119-L140
import torch import numpy as np __all__ = ['SegmentationMetric', 'batch_pix_accuracy', 'batch_intersection_union', 'pixelAccuracy', 'intersectionAndUnion', 'hist_info', 'compute_score'] class SegmentationMetric(object): def __init__(self, nclass): super(SegmentationMetric, self).__init__() self.nclass = nclass self.reset() def update(self, preds, labels): def evaluate_worker(self, pred, label): correct, labeled = batch_pix_accuracy(pred, label) inter, union = batch_intersection_union(pred, label, self.nclass) self.total_correct += correct self.total_label += labeled if self.total_inter.device != inter.device: self.total_inter = self.total_inter.to(inter.device) self.total_union = self.total_union.to(union.device) self.total_inter += inter self.total_union += union if isinstance(preds, torch.Tensor): evaluate_worker(self, preds, labels) elif isinstance(preds, (list, tuple)): for (pred, label) in zip(preds, labels): evaluate_worker(self, pred, label) def get(self): pixAcc = 1.0 * self.total_correct / (2.220446049250313e-16 + self.total_label) IoU = 1.0 * self.total_inter / (2.220446049250313e-16 + self.total_union) mIoU = IoU.mean().item() return pixAcc, mIoU def reset(self): self.total_inter = torch.zeros(self.nclass) self.total_union = torch.zeros(self.nclass) self.total_correct = 0 self.total_label = 0 def batch_pix_accuracy(output, target): predict = torch.argmax(output.long(), 1) + 1 target = target.long() + 1 pixel_labeled = torch.sum(target > 0).item() pixel_correct = torch.sum((predict == target) * (target > 0)).item() assert pixel_correct <= pixel_labeled, "Correct area should be smaller than Labeled" return pixel_correct, pixel_labeled def batch_intersection_union(output, target, nclass): mini = 1 maxi = nclass nbins = nclass predict = torch.argmax(output, 1) + 1 target = target.float() + 1 predict = predict.float() * (target > 0).float() intersection = predict * (predict == target).float() area_inter = torch.histc(intersection.cpu(), bins=nbins, min=mini, max=maxi) area_pred = torch.histc(predict.cpu(), bins=nbins, min=mini, max=maxi) area_lab = torch.histc(target.cpu(), bins=nbins, min=mini, max=maxi) area_union = area_pred + area_lab - area_inter assert torch.sum(area_inter > area_union).item() == 0, "Intersection area should be smaller than Union area" return area_inter.float(), area_union.float() def pixelAccuracy(imPred, imLab): pixel_labeled = np.sum(imLab >= 0) pixel_correct = np.sum((imPred == imLab) * (imLab >= 0)) pixel_accuracy = 1.0 * pixel_correct / pixel_labeled return (pixel_accuracy, pixel_correct, pixel_labeled)
Apache License 2.0
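A small sketch following the per-image pattern described in the docstring; the random label maps are fabricated purely for illustration, and the import assumes the repository root is on the Python path.

import numpy as np
from core.utils.score import intersectionAndUnion

numClass = 21
imPred = np.random.randint(0, numClass, size=(256, 256))
imLab = np.random.randint(0, numClass, size=(256, 256))
area_inter, area_union = intersectionAndUnion(imPred, imLab, numClass)
IoU = 1.0 * area_inter / (np.spacing(1) + area_union)
print(IoU.mean())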
simon-bc/bcex
bcex/examples/candles_strategy.py
ReversalCandleStrategy.__init__
python
def __init__( self, n_candles_before_reversal=3, ignore_none_candles=True, balance_fraction=0.1, **kwargs, ): super().__init__(**kwargs) self.n_candles_before_reversal = n_candles_before_reversal self.ignore_none_candles = ignore_none_candles self.balance_fraction = balance_fraction
Initialise Strategy Parameters ---------- n_candles_before_reversal : int number of candles of the opposite color required before a change is classed as a reversal ignore_none_candles : bool whether to ignore candles where the price does not change balance_fraction : float fraction of the available balance in the given currency to place on each order kwargs : additional keyword arguments forwarded to CandlesStrategy
https://github.com/simon-bc/bcex/blob/e0578631805472c8bfe421d2729b2e2af4f9e438/bcex/examples/candles_strategy.py#L319-L341
import logging from datetime import datetime, timedelta import pandas as pd import pytz from bcex.core.orders import OrderSide, OrderType from bcex.core.utils import datetime2unixepoch, unixepoch2datetime from bcex.core.websocket_client import Environment, Channel from bcex.examples.trader import BaseTrader from requests import get class CandlesStrategy(BaseTrader): CHANNELS = Channel.PRIVATE + [Channel.TICKER, Channel.SYMBOLS, Channel.PRICES] def __init__( self, symbol, start_date, heikin_ashi=False, granularity=3600, refresh_rate=60, env=Environment.PROD, **kwargs, ): channel_kwargs = {"prices": {"granularity": granularity}} super().__init__( symbol, refresh_rate=refresh_rate, env=env, channels_kwargs=channel_kwargs, **kwargs, ) self.heikin_ashi = heikin_ashi self.granularity = granularity self._historical_candles = None self.start_date = start_date self.latest_timestamp = None def get_historical_candles(self): end_date = datetime.now(pytz.UTC) payload = { "symbol": self.symbol, "start": datetime2unixepoch(self.start_date), "end": datetime2unixepoch(end_date), "granularity": self.granularity, } prices_url = "https://api.blockchain.com/nabu-gateway/markets/exchange/prices?" r = get(prices_url, params=payload) res = r.json() df_res = pd.DataFrame( { unixepoch2datetime(rec[0]): { "open": rec[1], "high": rec[2], "low": rec[3], "close": rec[4], } for rec in res["prices"] } ).T return df_res.sort_index() @property def historical_candles(self): if self._historical_candles is None: self._historical_candles = self.get_historical_candles() return self._historical_candles def _check_candle_is_finished(self, rec): return unixepoch2datetime(rec[0]) + timedelta( seconds=self.granularity ) < datetime.now(pytz.UTC) def get_latest_candles(self): res = self.exchange.get_candles(self.symbol) if res: df_res = pd.DataFrame( { unixepoch2datetime(rec[0]): { "open": rec[1], "high": rec[2], "low": rec[3], "close": rec[4], } for rec in res if self._check_candle_is_finished(rec) } ).T return df_res.sort_index() return pd.DataFrame() @property def live_candles(self): return self.get_latest_candles() def get_candle_df(self): live_candles = self.live_candles if live_candles.empty: return self.historical_candles.copy() min_time = live_candles.iloc[0].name historical_candles = self.historical_candles[ self.historical_candles.index < min_time ] df = pd.concat([historical_candles, live_candles]).sort_index() return df def is_new_candle(self, candles): last_timestamp = candles.iloc[-1].name if last_timestamp > self.latest_timestamp: return True return False def make_candles_heikin_ashi(self, candles_df): candles_df["ha_close"] = ( candles_df["open"] + candles_df["close"] + candles_df["high"] + candles_df["low"] ) / 4.0 candles_df["ha_open"] = ( (candles_df["open"] + candles_df["close"]) / 2.0 ).shift(1) candles_df.loc[candles_df.ha_open.isna(), "ha_open"] = ( candles_df["open"] + candles_df["close"] ) / 2 candles_df["ha_high"] = candles_df[["high", "ha_close", "ha_open"]].max(axis=1) candles_df["ha_low"] = candles_df[["low", "ha_close", "ha_open"]].min(axis=1) candles_df.drop(["high", "low", "open", "close"], axis=1, inplace=True) candles_df.rename( { "ha_close": "close", "ha_open": "open", "ha_high": "high", "ha_low": "low", }, axis=1, inplace=True, ) return candles_df def act_on_new_candle(self, candles_df): if self.heikin_ashi: candles_df = self.make_candles_heikin_ashi(candles_df) self.order_decision_from_candles(candles_df) self.latest_timestamp = candles_df.iloc[-1].name def order_decision_from_candles(self, 
candles_df): raise NotImplementedError def handle_orders(self): candles = self.get_candle_df() if self.latest_timestamp is not None: if self.is_new_candle(candles): logging.info("New Candle") self.act_on_new_candle(candles) else: logging.info("No New Candle") else: self.act_on_new_candle(candles) class MovingAverageStrategy(CandlesStrategy): CHANNELS = Channel.PRIVATE + [ Channel.TICKER, Channel.SYMBOLS, Channel.PRICES, Channel.L2, ] def __init__(self, rolling_window=30, balance_fraction=0.1, **kwargs): super().__init__(**kwargs,) self.rolling_window = rolling_window self.balance_fraction = balance_fraction def order_decision_from_candles(self, df): self.exchange.cancel_all_orders() df["closing_prices_rolling_average"] = df.close.rolling( self.rolling_window ).mean() df["close_over_rolling_average"] = df.close > df.closing_prices_rolling_average last_row = df.iloc[-1] last_side_over = last_row.close_over_rolling_average moving_average = last_row.closing_prices_rolling_average if last_side_over: bid_price = self.exchange.get_bid_price(self.symbol) balance = self.exchange.get_available_balance(self.symbol.split("-")[0]) logging.info( f"Moving Average {moving_average} current bid {bid_price}, placing sell limit order" ) self.exchange.place_order( symbol=self.symbol, order_type=OrderType.LIMIT, quantity=balance * self.balance_fraction, price=moving_average, side=OrderSide.SELL, check_balance=True, ) else: ask_price = self.exchange.get_ask_price(self.symbol) balance = self.exchange.get_available_balance(self.symbol.split("-")[1]) logging.info( f"Moving Average {moving_average} current ask {ask_price}, placing buy limit order" ) self.exchange.place_order( symbol=self.symbol, order_type=OrderType.LIMIT, quantity=(balance * self.balance_fraction) / moving_average, price=moving_average, side=OrderSide.BUY, check_balance=True, ) class ReversalCandleStrategy(CandlesStrategy):
MIT License
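A hedged construction sketch for ReversalCandleStrategy; symbol, start_date and granularity are consumed by the parent CandlesStrategy shown in the context, the instrument name is assumed, and any exchange credentials required further up the BaseTrader hierarchy are omitted here.

from datetime import datetime, timedelta
import pytz
from bcex.examples.candles_strategy import ReversalCandleStrategy

strategy = ReversalCandleStrategy(
    n_candles_before_reversal=3,
    ignore_none_candles=True,
    balance_fraction=0.1,
    symbol="BTC-USD",                                     # assumed instrument name
    start_date=datetime.now(pytz.UTC) - timedelta(days=7),
    granularity=3600,
)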
machine-learning-exchange/mlx
api/client/swagger_client/models/api_pipeline.py
ApiPipeline.created_at
python
def created_at(self): return self._created_at
Gets the created_at of this ApiPipeline. # noqa: E501 :return: The created_at of this ApiPipeline. # noqa: E501 :rtype: datetime
https://github.com/machine-learning-exchange/mlx/blob/be1503c45538dac1a8188560fbec4a07b2a367bf/api/client/swagger_client/models/api_pipeline.py#L112-L119
import pprint import re import six from swagger_client.models.api_parameter import ApiParameter class ApiPipeline(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'id': 'str', 'created_at': 'datetime', 'name': 'str', 'description': 'str', 'parameters': 'list[ApiParameter]', 'status': 'str', 'default_version_id': 'str', 'namespace': 'str' } attribute_map = { 'id': 'id', 'created_at': 'created_at', 'name': 'name', 'description': 'description', 'parameters': 'parameters', 'status': 'status', 'default_version_id': 'default_version_id', 'namespace': 'namespace' } def __init__(self, id=None, created_at=None, name=None, description=None, parameters=None, status=None, default_version_id=None, namespace=None): self._id = None self._created_at = None self._name = None self._description = None self._parameters = None self._status = None self._default_version_id = None self._namespace = None self.discriminator = None if id is not None: self.id = id if created_at is not None: self.created_at = created_at if name is not None: self.name = name if description is not None: self.description = description if parameters is not None: self.parameters = parameters if status is not None: self.status = status if default_version_id is not None: self.default_version_id = default_version_id if namespace is not None: self.namespace = namespace @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property
Apache License 2.0
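A trivial sketch of the generated created_at accessor; the field values are arbitrary.

from datetime import datetime
from swagger_client.models.api_pipeline import ApiPipeline

pipeline = ApiPipeline(id="pipe-1", name="demo", created_at=datetime(2021, 6, 1))
print(pipeline.created_at)   # the property simply returns the stored _created_at value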
truckersmp-cli/truckersmp-cli
truckersmp_cli/args.py
create_arg_parser
python
def create_arg_parser(): desc = """ A simple launcher for TruckersMP to play ATS or ETS2 in multiplayer. truckersmp-cli allows to download TruckersMP and handles starting TruckersMP through Wine while supporting the Windows versions of American Truck Simulator and Euro Truck Simulator 2. The Windows version of Steam should already be able to run in the same Wine prefix. The Windows versions of ATS and ETS2 can be installed and updated via SteamCMD while all running Steam processes will be stopped to prevent Steam from loosing connection. Your Steam password and guard code are required by SteamCMD once for this to work. On Linux it's possible to start TruckersMP through Proton. A working native Steam installation is needed for this which has the desired game installed or with an update pending. SteamCMD can use your saved credentials for convenience. """ store_actions = [] parser = argparse.ArgumentParser( description=desc, epilog=gen_proton_appid_list(), formatter_class=argparse.RawDescriptionHelpFormatter) store_actions.append(parser.add_argument( "-a", "--ats", help="**DEPRECATED** use American Truck Simulator", action="store_true")) store_actions.append(parser.add_argument( "-b", "--beta", metavar="VERSION", help="""set game version to VERSION, useful for downgrading (e.g. "temporary_1_35")""")) store_actions.append(parser.add_argument( "-c", "--configfile", metavar="FILE", default=File.default_configfile, help="""use alternative configuration file [Default: $XDG_CONFIG_HOME/truckersmp-cli/truckersmp-cli.conf]""")) store_actions.append(parser.add_argument( "-d", "--enable-d3d11", help="**DEPRECATED** use Direct3D 11 instead of OpenGL", action="store_true")) store_actions.append(parser.add_argument( "-e", "--ets2", help="""**DEPRECATED** use Euro Truck Simulator 2 [Default if neither ATS or ETS2 are specified] """, action="store_true")) store_actions.append(parser.add_argument( "-g", "--gamedir", metavar="DIR", help="""choose a different directory for the game files [Default: $XDG_DATA_HOME/truckersmp-cli/(Game name)/data]""")) store_actions.append(parser.add_argument( "-i", "--proton-appid", metavar="APPID", type=int, default=AppId.proton[AppId.proton["default"]], help=f"""choose a different AppID for Proton (Needs an update for changes) [Default: {AppId.proton[AppId.proton["default"]]}]""")) store_actions.append(parser.add_argument( "-l", "--logfile", metavar="LOG", default="", help="""write log into LOG, "-vv" option is recommended [Default: Empty string (only stderr)] Note: Messages from Steam/SteamCMD won't be written, only from this script (Game logs are written into "My Documents/{ETS2,ATS}MP/logs/client_*.log")""")) store_actions.append(parser.add_argument( "-m", "--moddir", metavar="DIR", help="""choose a different directory for the mod files [Default: $XDG_DATA_HOME/truckersmp-cli/TruckersMP, Fallback: ./truckersmp]""")) store_actions.append(parser.add_argument( "-n", "--account", metavar="NAME", help="""steam account name to use (This account should own the game and ideally is logged in with saved credentials)""")) store_actions.append(parser.add_argument( "-o", "--protondir", metavar="DIR", default=Dir.default_protondir, help="""choose a different Proton directory [Default: $XDG_DATA_HOME/truckersmp-cli/Proton] While updating any previous version in this folder gets changed to the given (-i) or default Proton version""")) store_actions.append(parser.add_argument( "-p", "--proton", help="""start the game with Proton [Default on Linux if neither Proton or Wine are specified] 
""", action="store_true")) store_actions.append(parser.add_argument( "-r", "--rendering-backend", choices=("auto", "dx11", "gl"), default="auto", help="""choose a rendering backend [Default: auto (OpenGL is used when "rendering-backend = " is not specified for the game in the configuration file)]""")) store_actions.append(parser.add_argument( "-s", "--start", help="""**DEPRECATED** start the game [Default if neither start or update are specified]""", action="store_true")) store_actions.append(parser.add_argument( "--steamruntimedir", metavar="DIR", default=Dir.default_steamruntimedir, help="""choose a different Steam Runtime directory for Proton 5.13 or newer [Default: $XDG_DATA_HOME/truckersmp-cli/SteamRuntime]""")) store_actions.append(parser.add_argument( "-u", "--update", help="""**DEPRECATED** update the game [Default if neither start or update are specified]""", action="store_true")) store_actions.append(parser.add_argument( "-v", "--verbose", help="verbose output (none:error, once:info, twice or more:debug)", action="count")) store_actions.append(parser.add_argument( "-w", "--wine", help="""start the game with Wine [Default on other systems if neither Proton or Wine are specified]""", action="store_true")) store_actions.append(parser.add_argument( "-x", "--prefixdir", metavar="DIR", help="""choose a different directory for the prefix [Default: $XDG_DATA_HOME/truckersmp-cli/(Game name)/prefix]""")) store_actions.append(parser.add_argument( "--activate-native-d3dcompiler-47", help="""activate native 64-bit d3dcompiler_47.dll when starting (Needed for D3D11 renderer)""", action="store_true")) store_actions.append(parser.add_argument( "--check-windows-steam", help="""check for the Windows Steam version on updating when using Proton""", action="store_true")) store_actions.append(parser.add_argument( "--disable-proton-overlay", help="disable Steam Overlay when using Proton", action="store_true")) store_actions.append(parser.add_argument( "--downgrade", help="""**DEPRECATED** downgrade to the latest version supported by TruckersMP Note: This option implies "--update" option and is ignored if "--beta" ("-b") option is specified""", action="store_true")) store_actions.append(parser.add_argument( "--game-options", metavar="OPTIONS", help="""specify ATS/ETS2 options Note: If specifying one option, use "--game-options=-option" format [Default: "-nointro -64bit"]""")) store_actions.append(parser.add_argument( "--native-steam-dir", metavar="DIR", default="auto", help="""choose native Steam installation, useful only if your Steam directory is not detected automatically [Default: "auto"]""")) store_actions.append(parser.add_argument( "--self-update", help="""update files to the latest release and quit Note: Python package users should use pip instead""", action="store_true")) store_actions.append(parser.add_argument( "--singleplayer", help="""**DEPRECATED** start singleplayer game, useful for save editing, using/testing DXVK in singleplayer, etc.""", action="store_true")) store_actions.append(parser.add_argument( "--skip-update-proton", help="""skip updating already-installed Proton and Steam Runtime when updating game with Proton enabled""", action="store_true")) store_actions.append(parser.add_argument( "--use-wined3d", help="use OpenGL-based D3D11 instead of DXVK when using Proton", action="store_true")) store_actions.append(parser.add_argument( "--wine-desktop", metavar="SIZE", help="""use Wine desktop, work around missing TruckerMP overlay after tabbing out using DXVK, mouse clicking won't 
work in other GUI apps while the game is running, SIZE must be 'WIDTHxHEIGHT' format (e.g. 1920x1080)""")) store_actions.append(parser.add_argument( "--wine-steam-dir", metavar="DIR", help="""choose a directory for Windows version of Steam [Default: "C:\\Program Files (x86)\\Steam" in the prefix]""")) store_actions.append(parser.add_argument( "--without-steam-runtime", help="don't use Steam Runtime even when using Proton 5.13 or newer", action="store_true")) store_actions.append(parser.add_argument( "--without-wine-discord-ipc-bridge", help="don't use wine-discord-ipc-bridge for Discord Rich Presence", action="store_true")) store_actions.append(parser.add_argument( "--version", help="""print version information and quit""", action="store_true")) group_action_desc = "choose an action" for name, desc in ACTIONS: group_action_desc += f"\n {name:17} : {desc}" group_action = parser.add_argument_group("action", group_action_desc) group_action.add_argument( "action", choices=[act[0] for act in ACTIONS] + ["none", ], default="none", nargs="?") group_game_desc = "choose a game" for name, desc in GAMES: group_game_desc += f"\n {name:6} : {desc}" group_game = parser.add_argument_group("game", group_game_desc) group_game.add_argument( "game", choices=[game[0] for game in GAMES] + ["none", ], default="none", nargs="?") return parser, store_actions
Create an ArgumentParser object. This function returns a 2-element tuple: * The 1st element is the new ArgumentParser object (used only in the "truckersmp-cli" program) * The 2nd element is a list of _StoreAction objects (used only in the "gen_completions" program)
https://github.com/truckersmp-cli/truckersmp-cli/blob/98b0828ca2edca4b5d6cd7788bfb621e00a53882/truckersmp_cli/args.py#L136-L362
import argparse import logging import os import platform import sys from .utils import VDF_IS_AVAILABLE, get_current_steam_user from .variables import AppId, Args, Dir, File ACTIONS = ( ("start", "start game"), ("update", "update/install latest game"), ("downgrade", 'downgrade game (install game from "temporary_X_Y" branch)'), ("updateandstart", '"update" and "start"'), ("ustart", 'same as "updateandstart" ("update" and "start")'), ("downgradeandstart", '"downgrade" and "start"'), ("dstart", 'same as "downgradeandstart" ("downgrade" and "start")'), ) GAMES = ( ("ets2mp", "ETS2 multiplayer"), ("ets2", "ETS2 singleplayer"), ("atsmp", "ATS multiplayer"), ("ats", "ATS singleplayer"), ) def check_args_errors(): if Args.downgrade: Args.update = True if not Args.update and not Args.start: logging.info("--update/--start not specified, doing both.") Args.start = True Args.update = True if Args.ats and Args.ets2: sys.exit("It's only possible to use one game at a time.") elif not Args.ats and not Args.ets2: logging.info("--ats/--ets2 not specified, choosing ETS2.") Args.ets2 = True game = "ats" if Args.ats else "ets2" Args.steamid = str(AppId.game[game]) if not Args.prefixdir: Args.prefixdir = Dir.default_prefixdir[game] if not Args.gamedir: Args.gamedir = Dir.default_gamedir[game] if Args.proton and Args.wine: sys.exit("Start/Update with Proton (-p) or Wine (-w)?") elif not Args.proton and not Args.wine: if platform.system() == "Linux": logging.info("Platform is Linux, using Proton") Args.proton = True else: logging.info("Platform is not Linux, using Wine") Args.wine = True if Args.wine: if Args.prefixdir in ( Dir.default_prefixdir["ats"], Dir.default_prefixdir["ets2"]): logging.debug("""Prefix directory is the default while using Wine, making sure it's the same directory as Proton""") Args.prefixdir = os.path.join(Args.prefixdir, "pfx") Args.check_windows_steam = True if not Args.wine_steam_dir: Args.wine_steam_dir = os.path.join( Args.prefixdir, "" if Args.wine else "pfx", "dosdevices/c:/Program Files (x86)/Steam") if Args.start and not Args.update: if (not os.path.isfile( os.path.join(Args.gamedir, "bin/win_x64/eurotrucks2.exe")) and not os.path.isfile( os.path.join(Args.gamedir, "bin/win_x64/amtrucks.exe"))): sys.exit(f"Game not found in {Args.gamedir}\n" "Need to download (-u) the game?") if Args.update and not Args.account: if VDF_IS_AVAILABLE: Args.account = get_current_steam_user() if not Args.account: logging.info("Unable to find logged in steam user automatically.") sys.exit("Need the steam account name (-n name) to update.") if Args.wine_desktop: split_size = Args.wine_desktop.split("x") if len(split_size) != 2: sys.exit(f'Desktop size ({Args.wine_desktop}) must be "WIDTHxHEIGHT" format') try: if int(split_size[0]) < 1024 or int(split_size[1]) < 768: logging.info( "Desktop size (%s) is too small, setting size to 1024x768.", Args.wine_desktop, ) Args.wine_desktop = "1024x768" except ValueError: sys.exit(f"Invalid desktop width or height ({Args.wine_desktop})") logging.info("AppID/GameID: %s (%s)", Args.steamid, game) logging.info("Game directory: %s", Args.gamedir) logging.info("Prefix: %s", Args.prefixdir) if Args.proton: logging.info("Proton directory: %s", Args.protondir) logging.info("Steam Runtime directory: %s", Args.steamruntimedir)
MIT License
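A small sketch of consuming the parser returned by create_arg_parser; the command line shown is illustrative.

from truckersmp_cli.args import create_arg_parser

parser, store_actions = create_arg_parser()
args = parser.parse_args(["ustart", "ets2mp", "-v"])
print(args.action, args.game, args.verbose)   # ustart ets2mp 1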
mosdef-hub/foyer
foyer/topology_graph.py
TopologyGraph.from_gmso_topology
python
def from_gmso_topology(cls, gmso_topology): from foyer.utils.io import import_ gmso = import_("gmso") if not isinstance(gmso_topology, gmso.Topology): raise TypeError( f"Expected `openff_topology` to be of type {gmso.Topology}. " f"Got {type(gmso_topology).__name__} instead" ) top_graph = cls() for atom in gmso_topology.sites: if isinstance(atom, gmso.Atom): if atom.name.startswith("_"): top_graph.add_atom( name=atom.name, index=gmso_topology.get_index(atom), atomic_number=None, element=atom.name, ) else: top_graph.add_atom( name=atom.name, index=gmso_topology.get_index(atom), atomic_number=atom.element.atomic_number, element=atom.element.symbol, ) for top_bond in gmso_topology.bonds: atoms_indices = [ gmso_topology.get_index(atom) for atom in top_bond.connection_members ] top_graph.add_bond(atoms_indices[0], atoms_indices[1]) return top_graph
Return a TopologyGraph with relevant attributes from a GMSO topology. Parameters ---------- gmso_topology: gmso.Topology The GMSO Topology Returns ------- TopologyGraph The equivalent TopologyGraph of the GMSO Topology `gmso_topology`
https://github.com/mosdef-hub/foyer/blob/5679d7068e99832b93b96c306a110d9377359339/foyer/topology_graph.py#L195-L244
import networkx as nx from parmed import Structure from parmed import periodic_table as pt from foyer.exceptions import FoyerError class AtomData: def __init__(self, index, name, atomic_number=None, element=None, **kwargs): self.index = index self.name = name self.atomic_number = atomic_number self.element = element for key, value in kwargs.items(): setattr(self, key, value) class TopologyGraph(nx.Graph): def __init__(self, *args, **kwargs): super(TopologyGraph, self).__init__(*args, **kwargs) def add_atom(self, index, name, atomic_number=None, element=None, **kwargs): if not name.startswith("_") and not (atomic_number and element): raise FoyerError( "For atoms representing an element, please include " "both the atomic_number or element symbol for the atom" ) atom_data = AtomData(index, name, atomic_number, element, **kwargs) self.add_node(index, atom_data=atom_data) def add_bond(self, atom_1_index, atom_2_index): self.add_edge(atom_1_index, atom_2_index) def atoms(self, data=False): if data: for idx, data in self.nodes(data=data): yield idx, data["atom_data"] else: for idx in self.nodes(data=data): yield idx def add_bond_partners(self): for atom_idx, data in self.nodes(data=True): data["bond_partners"] = list(self.neighbors(atom_idx)) @classmethod def from_parmed(cls, structure: Structure): if not isinstance(structure, Structure): raise TypeError( f"Expected `structure` to be of type {Structure}. " f"Got {type(structure).__name__} instead" ) topology_graph = cls() for atom in structure.atoms: if atom.name.startswith("_"): atomic_number = None element = None else: atomic_number = atom.atomic_number element = atom.element_name topology_graph.add_atom( name=atom.name, index=atom.idx, atomic_number=atomic_number, element=element, ) for bond in structure.bonds: topology_graph.add_bond(bond.atom1.idx, bond.atom2.idx) return topology_graph @classmethod def from_openff_topology(cls, openff_topology): from foyer.utils.io import import_ openff_toolkit = import_( "openff.toolkit" ) if not isinstance(openff_topology, openff_toolkit.topology.Topology): raise TypeError( f"Expected `openff_topology` to be of type {openff_toolkit.topology.Topology}. " f"Got {type(openff_topology).__name__} instead" ) top_graph = cls() for top_atom in openff_topology.topology_atoms: atom = top_atom.atom element = pt.Element[atom.atomic_number] top_graph.add_atom( name=atom.name, index=top_atom.topology_atom_index, atomic_number=atom.atomic_number, element=element, ) for top_bond in openff_topology.topology_bonds: atoms_indices = [ atom.topology_atom_index for atom in top_bond.atoms ] top_graph.add_bond(atoms_indices[0], atoms_indices[1]) return top_graph @classmethod
MIT License
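A hedged sketch of from_gmso_topology; building the gmso.Topology via mBuild's SMILES loader and gmso.external.from_mbuild is only one convenient route and is an assumption on top of this record, requiring mBuild with a SMILES backend and gmso installed.

import mbuild as mb
from gmso.external import from_mbuild
from foyer.topology_graph import TopologyGraph

ethane = mb.load("CC", smiles=True)       # assumes an mBuild SMILES backend is available
gmso_top = from_mbuild(ethane)
graph = TopologyGraph.from_gmso_topology(gmso_top)
print(graph.number_of_nodes(), graph.number_of_edges())   # 8 atoms, 7 bonds for ethane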
openstack/neutron-lib
neutron_lib/api/validators/allowedaddresspairs.py
_validate_allowed_address_pairs
python
def _validate_allowed_address_pairs(address_pairs, valid_values=None): unique_check = {} if not isinstance(address_pairs, list): raise exc.HTTPBadRequest( _("Allowed address pairs must be a list.")) if len(address_pairs) > cfg.CONF.max_allowed_address_pair: raise exceptions.AllowedAddressPairExhausted( quota=cfg.CONF.max_allowed_address_pair) for address_pair in address_pairs: msg = validators.validate_dict(address_pair) if msg: return msg if 'mac_address' in address_pair: msg = validators.validate_mac_address(address_pair['mac_address']) if msg: raise exc.HTTPBadRequest(msg) if 'ip_address' not in address_pair: raise exceptions.AllowedAddressPairsMissingIP() mac = address_pair.get('mac_address') ip_address = address_pair['ip_address'] if (mac, ip_address) not in unique_check: unique_check[(mac, ip_address)] = None else: raise exceptions.DuplicateAddressPairInRequest( mac_address=mac, ip_address=ip_address) invalid_attrs = set(address_pair.keys()) - set(['mac_address', 'ip_address']) if invalid_attrs: msg = (_("Unrecognized attribute(s) '%s'") % ', '.join(set(address_pair.keys()) - set(['mac_address', 'ip_address']))) raise exc.HTTPBadRequest(msg) if '/' in ip_address: msg = validators.validate_subnet(ip_address) else: msg = validators.validate_ip_address(ip_address) if msg: raise exc.HTTPBadRequest(msg)
Validates a list of allowed address pair dicts. Validation herein requires the caller to have registered the max_allowed_address_pair oslo config option in the global CONF prior to having this validator used. :param address_pairs: A list of address pair dicts to validate. :param valid_values: Not used. :returns: None :raises: AllowedAddressPairExhausted if the number of address pairs requested exceeds cfg.CONF.max_allowed_address_pair. AllowedAddressPairsMissingIP if any address pair dict is missing an IP address. DuplicateAddressPairInRequest if duplicated IPs are in the list of address pair dicts. Otherwise a HTTPBadRequest is raised if any of the address pairs are invalid.
https://github.com/openstack/neutron-lib/blob/3ac99500361b2be58028cf7385f2ba592eae97ee/neutron_lib/api/validators/allowedaddresspairs.py#L23-L81
from oslo_config import cfg from webob import exc from neutron_lib._i18n import _ from neutron_lib.api import validators from neutron_lib.exceptions import allowedaddresspairs as exceptions
Apache License 2.0
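A minimal sketch of exercising the validator; per the docstring, max_allowed_address_pair must already be registered on the global CONF, so the register_opt call below is only needed when no other neutron code has registered it.

from oslo_config import cfg
from neutron_lib.api.validators.allowedaddresspairs import _validate_allowed_address_pairs

cfg.CONF.register_opt(cfg.IntOpt("max_allowed_address_pair", default=10))

pairs = [{"ip_address": "192.0.2.10", "mac_address": "fa:16:3e:00:00:01"}]
# returns None for valid input; raises HTTPBadRequest or a neutron-lib exception otherwise
print(_validate_allowed_address_pairs(pairs))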
happyleavesaoc/python-snapcast
snapcast/control/server.py
Snapserver.group_mute
python
def group_mute(self, identifier, status): return self._request(GROUP_SETMUTE, identifier, 'mute', status)
Set group mute.
https://github.com/happyleavesaoc/python-snapcast/blob/891ebb6001ad98f668668a77716fdeccb9a8cca1/snapcast/control/server.py#L174-L176
import asyncio import logging from packaging import version from snapcast.control.client import Snapclient from snapcast.control.group import Snapgroup from snapcast.control.protocol import SERVER_ONDISCONNECT, SnapcastProtocol from snapcast.control.stream import Snapstream _LOGGER = logging.getLogger(__name__) CONTROL_PORT = 1705 SERVER_GETSTATUS = 'Server.GetStatus' SERVER_GETRPCVERSION = 'Server.GetRPCVersion' SERVER_DELETECLIENT = 'Server.DeleteClient' SERVER_ONUPDATE = 'Server.OnUpdate' CLIENT_GETSTATUS = 'Client.GetStatus' CLIENT_SETNAME = 'Client.SetName' CLIENT_SETLATENCY = 'Client.SetLatency' CLIENT_SETSTREAM = 'Client.SetStream' CLIENT_SETVOLUME = 'Client.SetVolume' CLIENT_ONCONNECT = 'Client.OnConnect' CLIENT_ONDISCONNECT = 'Client.OnDisconnect' CLIENT_ONVOLUMECHANGED = 'Client.OnVolumeChanged' CLIENT_ONLATENCYCHANGED = 'Client.OnLatencyChanged' CLIENT_ONNAMECHANGED = 'Client.OnNameChanged' GROUP_GETSTATUS = 'Group.GetStatus' GROUP_SETMUTE = 'Group.SetMute' GROUP_SETSTREAM = 'Group.SetStream' GROUP_SETCLIENTS = 'Group.SetClients' GROUP_SETNAME = 'Group.SetName' GROUP_ONMUTE = 'Group.OnMute' GROUP_ONSTREAMCHANGED = 'Group.OnStreamChanged' STREAM_SETMETA = 'Stream.SetMeta' STREAM_ONUPDATE = 'Stream.OnUpdate' STREAM_ONMETA = 'Stream.OnMetadata' SERVER_RECONNECT_DELAY = 5 _EVENTS = [SERVER_ONUPDATE, CLIENT_ONVOLUMECHANGED, CLIENT_ONLATENCYCHANGED, CLIENT_ONNAMECHANGED, CLIENT_ONCONNECT, CLIENT_ONDISCONNECT, GROUP_ONMUTE, GROUP_ONSTREAMCHANGED, STREAM_ONUPDATE, STREAM_ONMETA] _METHODS = [SERVER_GETSTATUS, SERVER_GETRPCVERSION, SERVER_DELETECLIENT, SERVER_DELETECLIENT, CLIENT_GETSTATUS, CLIENT_SETNAME, CLIENT_SETLATENCY, CLIENT_SETSTREAM, CLIENT_SETVOLUME, GROUP_GETSTATUS, GROUP_SETMUTE, GROUP_SETSTREAM, GROUP_SETCLIENTS, GROUP_SETNAME, STREAM_SETMETA] _VERSIONS = { GROUP_SETNAME: '0.16.0', } class ServerVersionError(NotImplementedError): pass class Snapserver(object): def __init__(self, loop, host, port=CONTROL_PORT, reconnect=False): self._loop = loop self._port = port self._reconnect = reconnect self._clients = {} self._streams = {} self._groups = {} self._host = host self._version = None self._protocol = None self._callbacks = { CLIENT_ONCONNECT: self._on_client_connect, CLIENT_ONDISCONNECT: self._on_client_disconnect, CLIENT_ONVOLUMECHANGED: self._on_client_volume_changed, CLIENT_ONNAMECHANGED: self._on_client_name_changed, CLIENT_ONLATENCYCHANGED: self._on_client_latency_changed, GROUP_ONMUTE: self._on_group_mute, GROUP_ONSTREAMCHANGED: self._on_group_stream_changed, STREAM_ONMETA: self._on_stream_meta, STREAM_ONUPDATE: self._on_stream_update, SERVER_ONDISCONNECT: self._on_server_disconnect, SERVER_ONUPDATE: self._on_server_update } self._on_update_callback_func = None self._on_connect_callback_func = None self._on_disconnect_callback_func = None self._new_client_callback_func = None @asyncio.coroutine def start(self): yield from self._do_connect() _LOGGER.info('connected to snapserver on %s:%s', self._host, self._port) status = yield from self.status() self.synchronize(status) self._on_server_connect() @asyncio.coroutine def _do_connect(self): _, self._protocol = yield from self._loop.create_connection( lambda: SnapcastProtocol(self._callbacks), self._host, self._port) def _reconnect_cb(self): @asyncio.coroutine def try_reconnect(): try: yield from self._do_connect() except IOError: self._loop.call_later(SERVER_RECONNECT_DELAY, self._reconnect_cb) asyncio.ensure_future(try_reconnect()) @asyncio.coroutine def _transact(self, method, params=None): result = yield from 
self._protocol.request(method, params) return result @property def version(self): return self._version @asyncio.coroutine def status(self): result = yield from self._transact(SERVER_GETSTATUS) return result def rpc_version(self): return self._transact(SERVER_GETRPCVERSION) @asyncio.coroutine def delete_client(self, identifier): params = {'id': identifier} response = yield from self._transact(SERVER_DELETECLIENT, params) self.synchronize(response) def client_name(self, identifier, name): return self._request(CLIENT_SETNAME, identifier, 'name', name) def client_latency(self, identifier, latency): return self._request(CLIENT_SETLATENCY, identifier, 'latency', latency) def client_volume(self, identifier, volume): return self._request(CLIENT_SETVOLUME, identifier, 'volume', volume) def client_status(self, identifier): return self._request(CLIENT_GETSTATUS, identifier, 'client') def group_status(self, identifier): return self._request(GROUP_GETSTATUS, identifier, 'group')
MIT License
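A hedged sketch of muting a group; it assumes group_mute's underlying _request helper returns an awaitable, as the other Snapserver request methods shown in the context do, and that the group identifier has been read from the server status beforehand.

import asyncio
from snapcast.control.server import Snapserver

loop = asyncio.get_event_loop()

async def mute(group_id):
    server = Snapserver(loop, "127.0.0.1")
    await server.start()
    await server.group_mute(group_id, True)   # True mutes, False unmutes

loop.run_until_complete(mute("group-id-from-status"))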
airshipit/drydock
python/drydock_provisioner/drivers/node/maasdriver/models/interface.py
Interface.responds_to_ip
python
def responds_to_ip(self, ip_address): for link in getattr(self, 'links', []): if link.get('ip_address', None) == ip_address: return True return False
Check if this interface will respond to connections for an IP. :param ip_address: string of the IP address we are checking :return: true if this interface should respond to the IP, false otherwise
https://github.com/airshipit/drydock/blob/c90fa60e2a156953563ea9fd9d9aa848171c8325/python/drydock_provisioner/drivers/node/maasdriver/models/interface.py#L225-L236
import logging import drydock_provisioner.drivers.node.maasdriver.models.base as model_base import drydock_provisioner.drivers.node.maasdriver.models.fabric as maas_fabric import drydock_provisioner.drivers.node.maasdriver.models.subnet as maas_subnet import drydock_provisioner.drivers.node.maasdriver.models.vlan as maas_vlan import drydock_provisioner.error as errors class Interface(model_base.ResourceBase): resource_url = 'nodes/{system_id}/interfaces/{resource_id}/' fields = [ 'resource_id', 'system_id', 'name', 'type', 'mac_address', 'vlan', 'links', 'effective_mtu', 'fabric_id', 'mtu', 'parents', ] json_fields = [ 'name', 'type', 'mac_address', 'vlan', 'mtu', ] def __init__(self, api_client, **kwargs): super(Interface, self).__init__(api_client, **kwargs) self.logger = logging.getLogger('drydock.nodedriver.maasdriver') def attach_fabric(self, fabric_id=None, fabric_name=None): fabric = None fabrics = maas_fabric.Fabrics(self.api_client) fabrics.refresh() if fabric_id is not None: fabric = fabrics.select(fabric_id) elif fabric_name is not None: fabric = fabrics.singleton({'name': fabric_name}) else: self.logger.warning("Must specify fabric_id or fabric_name") raise ValueError("Must specify fabric_id or fabric_name") if fabric is None: self.logger.warning( "Fabric not found in MaaS for fabric_id %s, fabric_name %s" % (fabric_id, fabric_name)) raise errors.DriverError( "Fabric not found in MaaS for fabric_id %s, fabric_name %s" % (fabric_id, fabric_name)) fabric_vlan = fabric.vlans.singleton({'vid': 0}) if fabric_vlan is None: self.logger.warning( "Cannot locate untagged VLAN on fabric %s" % (fabric_id)) raise errors.DriverError( "Cannot locate untagged VLAN on fabric %s" % (fabric_id)) self.vlan = fabric_vlan.resource_id self.logger.info( "Attaching interface %s on system %s to VLAN %s on fabric %s" % (self.resource_id, self.system_id, fabric_vlan.resource_id, fabric.resource_id)) self.update() def is_linked(self, subnet_id): for link in self.links: if link.get('subnet_id', None) == subnet_id: return True return False def disconnect(self): url = self.interpolate_url() self.logger.debug( "Disconnecting interface %s from networks." % (self.name)) resp = self.api_client.post(url, op='disconnect') if not resp.ok: self.logger.warning( "Could not disconnect interface, MaaS error: %s - %s" % (resp.status_code, resp.text)) raise errors.DriverError( "Could not disconnect interface, MaaS error: %s - %s" % (resp.status_code, resp.text)) def unlink_subnet(self, subnet_id): for link in self.links: if link.get('subnet_id', None) == subnet_id: url = self.interpolate_url() resp = self.api_client.post( url, op='unlink_subnet', files={'id': link.get('resource_id')}) if not resp.ok: raise errors.DriverError("Error unlinking subnet") else: return raise errors.DriverError( "Error unlinking interface, Link to subnet_id %s not found." 
% subnet_id) def link_subnet(self, subnet_id=None, subnet_cidr=None, ip_address=None, primary=False): subnet = None subnets = maas_subnet.Subnets(self.api_client) subnets.refresh() if subnet_id is not None: subnet = subnets.select(subnet_id) elif subnet_cidr is not None: subnet = subnets.singleton({'cidr': subnet_cidr}) else: self.logger.warning("Must specify subnet_id or subnet_cidr") raise ValueError("Must specify subnet_id or subnet_cidr") if subnet is None: self.logger.warning( "Subnet not found in MaaS for subnet_id %s, subnet_cidr %s" % (subnet_id, subnet_cidr)) raise errors.DriverError( "Subnet not found in MaaS for subnet_id %s, subnet_cidr %s" % (subnet_id, subnet_cidr)) url = self.interpolate_url() if self.is_linked(subnet.resource_id): self.logger.info( "Interface %s already linked to subnet %s, unlinking." % (self.resource_id, subnet.resource_id)) self.unlink_subnet(subnet.resource_id) options = { 'subnet': subnet.resource_id, 'default_gateway': primary, } if ip_address == 'dhcp': options['mode'] = 'dhcp' elif ip_address is not None: options['ip_address'] = ip_address options['mode'] = 'static' else: options['mode'] = 'link_up' self.logger.debug( "Linking interface %s to subnet: subnet=%s, mode=%s, address=%s, primary=%s" % (self.resource_id, subnet.resource_id, options['mode'], ip_address, primary)) resp = self.api_client.post(url, op='link_subnet', files=options) if not resp.ok: self.logger.error( "Error linking interface %s to subnet %s - MaaS response %s: %s" % (self.resouce_id, subnet.resource_id, resp.status_code, resp.text)) raise errors.DriverError( "Error linking interface %s to subnet %s - MaaS response %s" % (self.resouce_id, subnet.resource_id, resp.status_code)) self.refresh() return
Apache License 2.0
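A hedged sketch of responds_to_ip; in practice Interface objects are hydrated from the MaaS API, so here the links attribute is assigned by hand and the constructor keywords are assumed to be accepted by ResourceBase as field values.

from drydock_provisioner.drivers.node.maasdriver.models.interface import Interface

iface = Interface(None, system_id="abc123", resource_id=7, name="eno1")
iface.links = [{"ip_address": "10.0.0.5", "subnet_id": 3}]

print(iface.responds_to_ip("10.0.0.5"))   # True
print(iface.responds_to_ip("10.0.0.9"))   # False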
iexcloud/pyex
pyEX/studies/technicals/overlap.py
wma
python
def wma(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["wma-{}".format(per)] = t.WMA(df[col].values.astype(float), per) return pd.DataFrame(build)
This will return a dataframe of the weighted moving average for the given symbol across the given timeframe. Args: client (pyEX.Client); Client symbol (string); Ticker timeframe (string); timeframe to use, for pyEX.chart col (string); column to use to calculate periods (int or list of int); lookback periods for the moving average Returns: DataFrame: result
https://github.com/iexcloud/pyex/blob/48223a046d120703e8cc8f6c57f8a1450ee3f835/pyEX/studies/technicals/overlap.py#L434-L457
import pandas as pd import talib as t from ..utils import tolist def bollinger(client, symbol, timeframe="6m", col="close", period=2): df = client.chartDF(symbol, timeframe) bb = t.BBANDS(df[col].values.astype(float), period) return pd.DataFrame( {col: df[col].values, "upper": bb[0], "middle": bb[1], "lower": bb[2]} ) def dema(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["ema-{}".format(per)] = t.DEMA(df[col].values.astype(float), per) return pd.DataFrame(build) def ema(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["ema-{}".format(per)] = t.EMA(df[col].values.astype(float), per) return pd.DataFrame(build) def ht_trendline(client, symbol, timeframe="6m", col="close"): df = client.chartDF(symbol, timeframe) build = {col: df[col].values} build["ht-{}".format(col)] = t.HT_TRENDLINE(df[col].values.astype(float)) return pd.DataFrame(build) def kama(client, symbol, timeframe="6m", col="close", period=30): df = client.chartDF(symbol, timeframe) build = {col: df[col].values} build["kama-{}".format(col)] = t.KAMA(df[col].values.astype(float), period) return pd.DataFrame(build) def mama(client, symbol, timeframe="6m", col="close", fastlimit=0, slowlimit=0): df = client.chartDF(symbol, timeframe) build = {col: df[col].values} build["mama-{}".format(col)], build["fama-{}".format(col)] = t.MAMA( df[col].values.astype(float), fastlimit=fastlimit, slowlimit=slowlimit ) return pd.DataFrame(build) def mavp( client, symbol, timeframe="6m", col="close", periods=None, minperiod=2, maxperiod=30, matype=0, ): df = client.chartDF(symbol, timeframe) if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["mavp-{}".format(per)] = t.MAVP( df[col].values.astype(float), per, minperiod=minperiod, maxperiod=maxperiod, matype=matype, ) return pd.DataFrame(build) def midpoint(client, symbol, timeframe="6m", col="close", period=14): df = client.chartDF(symbol, timeframe) build = {col: df[col].values} build["kama-{}".format(col)] = t.MIDPOINT(df[col].values.astype(float), period) return pd.DataFrame(build) def midpice(client, symbol, timeframe="6m", col="close", period=14): df = client.chartDF(symbol, timeframe) build = {col: df[col].values} build["kama-{}".format(col)] = t.MIDPRICE(df[col].values.astype(float), period) return pd.DataFrame(build) def sar( client, symbol, timeframe="6m", highcol="high", lowcol="low", acceleration=0, maximum=0, ): df = client.chartDF(symbol, timeframe) sar = t.SAR( df[highcol].values.astype(float), df[lowcol].values.astype(float), acceleration=acceleration, maximum=maximum, ) return pd.DataFrame( {highcol: df[highcol].values, lowcol: df[lowcol].values, "sar": sar} ) def sarext( client, symbol, timeframe="6m", highcol="high", lowcol="low", startvalue=0, offsetonreverse=0, accelerationinitlong=0, accelerationlong=0, accelerationmaxlong=0, accelerationinitshort=0, accelerationshort=0, accelerationmaxshort=0, ): df = client.chartDF(symbol, timeframe) sar = t.SAREXT( df[highcol].values.astype(float), df[lowcol].values.astype(float), startvalue=startvalue, offsetonreverse=offsetonreverse, accelerationinitlong=accelerationinitlong, accelerationlong=accelerationlong, 
accelerationmaxlong=accelerationmaxlong, accelerationinitshort=accelerationinitshort, accelerationshort=accelerationshort, accelerationmaxshort=accelerationmaxshort, ) return pd.DataFrame( {highcol: df[highcol].values, lowcol: df[lowcol].values, "sar": sar} ) def sma(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["sma-{}".format(per)] = t.EMA(df[col].values.astype(float), per) return pd.DataFrame(build) def t3(client, symbol, timeframe="6m", col="close", periods=None, vfactor=0): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["t3-{}".format(per)] = t.T3( df[col].values.astype(float), per, vfactor=vfactor ) return pd.DataFrame(build) def tema(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["sma-{}".format(per)] = t.TEMA(df[col].values.astype(float), per) return pd.DataFrame(build) def trima(client, symbol, timeframe="6m", col="close", periods=None): if periods is None: periods = [30] periods = tolist(periods) df = client.chartDF(symbol, timeframe) build = {col: df[col].values} for per in periods: build["trima-{}".format(per)] = t.TRIMA(df[col].values.astype(float), per) return pd.DataFrame(build)
Apache License 2.0
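Usage sketch for the overlap-study helpers above: a minimal example that substitutes a synthetic price series for client.chartDF(symbol, timeframe)["close"] and assembles the same frame the bollinger helper returns. The series values are made up; only the TA-Lib call mirrors the helper.

import numpy as np
import pandas as pd
import talib as t

# Synthetic close prices standing in for client.chartDF(symbol, timeframe)["close"].
close = pd.Series(np.random.default_rng(0).normal(0, 1, 200).cumsum() + 100)

# Same call the bollinger helper makes; the positional 2 mirrors its default period.
upper, middle, lower = t.BBANDS(close.values.astype(float), 2)
frame = pd.DataFrame({"close": close.values, "upper": upper, "middle": middle, "lower": lower})
print(frame.tail())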
netflix-skunkworks/bucketsnake
bucket_snake/s3/permissions.py
collect_policies
python
def collect_policies(buckets_dict):
    account_policies = {}

    for bucket, details in buckets_dict.items():
        policy = account_policies.get(details["account_number"], {
            "list": set(),
            "get": set(),
            "put": set(),
            "delete": set()
        })

        for prefix_perms in details["permissions"]:
            for perm in prefix_perms["perms"]:
                bucket_arn = "arn:aws:s3:::{bucket}".format(bucket=bucket)

                if perm == "list":
                    policy["list"].add(bucket_arn)
                else:
                    prefix_arn = "{bucket_arn}/{prefix}".format(bucket_arn=bucket_arn,
                                                                prefix=prefix_perms["prefix"])
                    policy[perm].add(prefix_arn)

        account_policies[details["account_number"]] = policy

    return account_policies
Creates the mapping of AWS S3 IAM permissions for a given AWS account (the account where the bucket resides).
:param buckets_dict: mapping of bucket name to its owning account number and requested prefix permissions
:return: dict keyed by account number, mapping each permission type ("list", "get", "put", "delete") to a set of S3 ARNs
https://github.com/netflix-skunkworks/bucketsnake/blob/75438be05d3ed77d9795c135c7a6817bc7c6a8a2/bucket_snake/s3/permissions.py#L73-L104
from bucket_snake.s3.models import BUCKET_TABLE from bucket_snake.config import CONFIG S3_PERMISSIONS = { "list": [ "s3:ListBucket", "s3:ListBucketVersions" ], "get": [ "s3:GetObject", "s3:GetObjectTagging", "s3:GetObjectVersion", "s3:GetObjectVersionTagging", "s3:GetObjectAcl", "s3:GetObjectVersionAcl" ], "put": [ "s3:PutObject", "s3:PutObjectTagging", "s3:PutObjectVersionTagging", "s3:ListMultipartUploadParts*", "s3:AbortMultipartUpload", "s3:RestoreObject" ], "delete": [ "s3:DeleteObject", "s3:DeleteObjectTagging", "s3:DeleteObjectVersion", "s3:DeleteObjectVersionTagging" ] } def check_if_cross_account(source_account_number, bucket): if BUCKET_TABLE.buckets[bucket] == source_account_number: return False return True def build_bucket_account_mapping(request_data): buckets_same_account = {} buckets_cross_account = {} for bucket, permissions in request_data["buckets"].items(): if check_if_cross_account(request_data["account_number"], bucket): buckets_cross_account[bucket] = dict(permissions=permissions, account_number=BUCKET_TABLE.buckets[bucket]) else: buckets_same_account[bucket] = dict(permissions=permissions, account_number=request_data["account_number"]) return buckets_same_account, buckets_cross_account
Apache License 2.0
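To make the shapes concrete, a hedged illustration of the input collect_policies expects and the grouping it produces; the bucket name, prefix, and account number below are made up.

# Hypothetical input: one bucket owned by account 111111111111, with "list"
# on the bucket itself and "get" scoped to a prefix.
buckets_dict = {
    "example-bucket": {
        "account_number": "111111111111",
        "permissions": [
            {"prefix": "reports/*", "perms": ["list", "get"]},
        ],
    }
}

# collect_policies(buckets_dict) groups these by account:
# {
#     "111111111111": {
#         "list":   {"arn:aws:s3:::example-bucket"},
#         "get":    {"arn:aws:s3:::example-bucket/reports/*"},
#         "put":    set(),
#         "delete": set(),
#     }
# }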
docusign/docusign-python-client
docusign_esign/models/notary_recipient.py
NotaryRecipient.email
python
def email(self):
    return self._email
Gets the email of this NotaryRecipient.  # noqa: E501

:return: The email of this NotaryRecipient.  # noqa: E501
:rtype: str
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/notary_recipient.py#L979-L987
import pprint import re import six from docusign_esign.client.configuration import Configuration class NotaryRecipient(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'access_code': 'str', 'access_code_metadata': 'PropertyMetadata', 'add_access_code_to_email': 'str', 'additional_notifications': 'list[RecipientAdditionalNotification]', 'agent_can_edit_email': 'str', 'agent_can_edit_name': 'str', 'allow_system_override_for_locked_recipient': 'str', 'auto_navigation': 'str', 'auto_responded_reason': 'str', 'bulk_recipients_uri': 'str', 'can_sign_offline': 'str', 'client_user_id': 'str', 'completed_count': 'str', 'creation_reason': 'str', 'custom_fields': 'list[str]', 'declined_date_time': 'str', 'declined_reason': 'str', 'default_recipient': 'str', 'delivered_date_time': 'str', 'delivery_method': 'str', 'delivery_method_metadata': 'PropertyMetadata', 'designator_id': 'str', 'designator_id_guid': 'str', 'document_visibility': 'list[DocumentVisibility]', 'email': 'str', 'email_metadata': 'PropertyMetadata', 'email_notification': 'RecipientEmailNotification', 'embedded_recipient_start_url': 'str', 'error_details': 'ErrorDetails', 'excluded_documents': 'list[str]', 'fax_number': 'str', 'fax_number_metadata': 'PropertyMetadata', 'first_name': 'str', 'first_name_metadata': 'PropertyMetadata', 'full_name': 'str', 'full_name_metadata': 'PropertyMetadata', 'id_check_configuration_name': 'str', 'id_check_configuration_name_metadata': 'PropertyMetadata', 'id_check_information_input': 'IdCheckInformationInput', 'identity_verification': 'RecipientIdentityVerification', 'inherit_email_notification_configuration': 'str', 'is_bulk_recipient': 'str', 'is_bulk_recipient_metadata': 'PropertyMetadata', 'last_name': 'str', 'last_name_metadata': 'PropertyMetadata', 'live_oak_start_url': 'str', 'locked_recipient_phone_auth_editable': 'str', 'locked_recipient_sms_editable': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'notary_id': 'str', 'notary_signers': 'list[str]', 'notary_type': 'str', 'note': 'str', 'note_metadata': 'PropertyMetadata', 'offline_attributes': 'OfflineAttributes', 'phone_authentication': 'RecipientPhoneAuthentication', 'phone_number': 'RecipientPhoneNumber', 'proof_file': 'RecipientProofFile', 'recipient_attachments': 'list[RecipientAttachment]', 'recipient_authentication_status': 'AuthenticationStatus', 'recipient_feature_metadata': 'list[FeatureAvailableMetadata]', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_signature_providers': 'list[RecipientSignatureProvider]', 'recipient_supplies_tabs': 'str', 'recipient_type': 'str', 'recipient_type_metadata': 'PropertyMetadata', 'require_id_lookup': 'str', 'require_id_lookup_metadata': 'PropertyMetadata', 'require_signer_certificate': 'str', 'require_sign_on_paper': 'str', 'require_upload_signature': 'str', 'role_name': 'str', 'routing_order': 'str', 'routing_order_metadata': 'PropertyMetadata', 'sent_date_time': 'str', 'signature_info': 'RecipientSignatureInformation', 'signed_date_time': 'str', 'sign_in_each_location': 'str', 'sign_in_each_location_metadata': 'PropertyMetadata', 'signing_group_id': 'str', 'signing_group_id_metadata': 'PropertyMetadata', 'signing_group_name': 'str', 'signing_group_users': 'list[UserInfo]', 'sms_authentication': 'RecipientSMSAuthentication', 'social_authentications': 'list[SocialAuthentication]', 'status': 'str', 
'status_code': 'str', 'suppress_emails': 'str', 'tabs': 'Tabs', 'template_locked': 'str', 'template_required': 'str', 'total_tab_count': 'str', 'user_id': 'str' } attribute_map = { 'access_code': 'accessCode', 'access_code_metadata': 'accessCodeMetadata', 'add_access_code_to_email': 'addAccessCodeToEmail', 'additional_notifications': 'additionalNotifications', 'agent_can_edit_email': 'agentCanEditEmail', 'agent_can_edit_name': 'agentCanEditName', 'allow_system_override_for_locked_recipient': 'allowSystemOverrideForLockedRecipient', 'auto_navigation': 'autoNavigation', 'auto_responded_reason': 'autoRespondedReason', 'bulk_recipients_uri': 'bulkRecipientsUri', 'can_sign_offline': 'canSignOffline', 'client_user_id': 'clientUserId', 'completed_count': 'completedCount', 'creation_reason': 'creationReason', 'custom_fields': 'customFields', 'declined_date_time': 'declinedDateTime', 'declined_reason': 'declinedReason', 'default_recipient': 'defaultRecipient', 'delivered_date_time': 'deliveredDateTime', 'delivery_method': 'deliveryMethod', 'delivery_method_metadata': 'deliveryMethodMetadata', 'designator_id': 'designatorId', 'designator_id_guid': 'designatorIdGuid', 'document_visibility': 'documentVisibility', 'email': 'email', 'email_metadata': 'emailMetadata', 'email_notification': 'emailNotification', 'embedded_recipient_start_url': 'embeddedRecipientStartURL', 'error_details': 'errorDetails', 'excluded_documents': 'excludedDocuments', 'fax_number': 'faxNumber', 'fax_number_metadata': 'faxNumberMetadata', 'first_name': 'firstName', 'first_name_metadata': 'firstNameMetadata', 'full_name': 'fullName', 'full_name_metadata': 'fullNameMetadata', 'id_check_configuration_name': 'idCheckConfigurationName', 'id_check_configuration_name_metadata': 'idCheckConfigurationNameMetadata', 'id_check_information_input': 'idCheckInformationInput', 'identity_verification': 'identityVerification', 'inherit_email_notification_configuration': 'inheritEmailNotificationConfiguration', 'is_bulk_recipient': 'isBulkRecipient', 'is_bulk_recipient_metadata': 'isBulkRecipientMetadata', 'last_name': 'lastName', 'last_name_metadata': 'lastNameMetadata', 'live_oak_start_url': 'liveOakStartURL', 'locked_recipient_phone_auth_editable': 'lockedRecipientPhoneAuthEditable', 'locked_recipient_sms_editable': 'lockedRecipientSmsEditable', 'name': 'name', 'name_metadata': 'nameMetadata', 'notary_id': 'notaryId', 'notary_signers': 'notarySigners', 'notary_type': 'notaryType', 'note': 'note', 'note_metadata': 'noteMetadata', 'offline_attributes': 'offlineAttributes', 'phone_authentication': 'phoneAuthentication', 'phone_number': 'phoneNumber', 'proof_file': 'proofFile', 'recipient_attachments': 'recipientAttachments', 'recipient_authentication_status': 'recipientAuthenticationStatus', 'recipient_feature_metadata': 'recipientFeatureMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_signature_providers': 'recipientSignatureProviders', 'recipient_supplies_tabs': 'recipientSuppliesTabs', 'recipient_type': 'recipientType', 'recipient_type_metadata': 'recipientTypeMetadata', 'require_id_lookup': 'requireIdLookup', 'require_id_lookup_metadata': 'requireIdLookupMetadata', 'require_signer_certificate': 'requireSignerCertificate', 'require_sign_on_paper': 'requireSignOnPaper', 'require_upload_signature': 'requireUploadSignature', 'role_name': 'roleName', 'routing_order': 'routingOrder', 'routing_order_metadata': 'routingOrderMetadata', 'sent_date_time': 'sentDateTime', 'signature_info': 'signatureInfo', 
'signed_date_time': 'signedDateTime', 'sign_in_each_location': 'signInEachLocation', 'sign_in_each_location_metadata': 'signInEachLocationMetadata', 'signing_group_id': 'signingGroupId', 'signing_group_id_metadata': 'signingGroupIdMetadata', 'signing_group_name': 'signingGroupName', 'signing_group_users': 'signingGroupUsers', 'sms_authentication': 'smsAuthentication', 'social_authentications': 'socialAuthentications', 'status': 'status', 'status_code': 'statusCode', 'suppress_emails': 'suppressEmails', 'tabs': 'tabs', 'template_locked': 'templateLocked', 'template_required': 'templateRequired', 'total_tab_count': 'totalTabCount', 'user_id': 'userId' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._access_code = None self._access_code_metadata = None self._add_access_code_to_email = None self._additional_notifications = None self._agent_can_edit_email = None self._agent_can_edit_name = None self._allow_system_override_for_locked_recipient = None self._auto_navigation = None self._auto_responded_reason = None self._bulk_recipients_uri = None self._can_sign_offline = None self._client_user_id = None self._completed_count = None self._creation_reason = None self._custom_fields = None self._declined_date_time = None self._declined_reason = None self._default_recipient = None self._delivered_date_time = None self._delivery_method = None self._delivery_method_metadata = None self._designator_id = None self._designator_id_guid = None self._document_visibility = None self._email = None self._email_metadata = None self._email_notification = None self._embedded_recipient_start_url = None self._error_details = None self._excluded_documents = None self._fax_number = None self._fax_number_metadata = None self._first_name = None self._first_name_metadata = None self._full_name = None self._full_name_metadata = None self._id_check_configuration_name = None self._id_check_configuration_name_metadata = None self._id_check_information_input = None self._identity_verification = None self._inherit_email_notification_configuration = None self._is_bulk_recipient = None self._is_bulk_recipient_metadata = None self._last_name = None self._last_name_metadata = None self._live_oak_start_url = None self._locked_recipient_phone_auth_editable = None self._locked_recipient_sms_editable = None self._name = None self._name_metadata = None self._notary_id = None self._notary_signers = None self._notary_type = None self._note = None self._note_metadata = None self._offline_attributes = None self._phone_authentication = None self._phone_number = None self._proof_file = None self._recipient_attachments = None self._recipient_authentication_status = None self._recipient_feature_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_signature_providers = None self._recipient_supplies_tabs = None self._recipient_type = None self._recipient_type_metadata = None self._require_id_lookup = None self._require_id_lookup_metadata = None self._require_signer_certificate = None self._require_sign_on_paper = None self._require_upload_signature = None self._role_name = None self._routing_order = None self._routing_order_metadata = None self._sent_date_time = None self._signature_info = None self._signed_date_time = None self._sign_in_each_location = None self._sign_in_each_location_metadata = None self._signing_group_id = None self._signing_group_id_metadata = None self._signing_group_name = None 
self._signing_group_users = None self._sms_authentication = None self._social_authentications = None self._status = None self._status_code = None self._suppress_emails = None self._tabs = None self._template_locked = None self._template_required = None self._total_tab_count = None self._user_id = None self.discriminator = None setattr(self, "_{}".format('access_code'), kwargs.get('access_code', None)) setattr(self, "_{}".format('access_code_metadata'), kwargs.get('access_code_metadata', None)) setattr(self, "_{}".format('add_access_code_to_email'), kwargs.get('add_access_code_to_email', None)) setattr(self, "_{}".format('additional_notifications'), kwargs.get('additional_notifications', None)) setattr(self, "_{}".format('agent_can_edit_email'), kwargs.get('agent_can_edit_email', None)) setattr(self, "_{}".format('agent_can_edit_name'), kwargs.get('agent_can_edit_name', None)) setattr(self, "_{}".format('allow_system_override_for_locked_recipient'), kwargs.get('allow_system_override_for_locked_recipient', None)) setattr(self, "_{}".format('auto_navigation'), kwargs.get('auto_navigation', None)) setattr(self, "_{}".format('auto_responded_reason'), kwargs.get('auto_responded_reason', None)) setattr(self, "_{}".format('bulk_recipients_uri'), kwargs.get('bulk_recipients_uri', None)) setattr(self, "_{}".format('can_sign_offline'), kwargs.get('can_sign_offline', None)) setattr(self, "_{}".format('client_user_id'), kwargs.get('client_user_id', None)) setattr(self, "_{}".format('completed_count'), kwargs.get('completed_count', None)) setattr(self, "_{}".format('creation_reason'), kwargs.get('creation_reason', None)) setattr(self, "_{}".format('custom_fields'), kwargs.get('custom_fields', None)) setattr(self, "_{}".format('declined_date_time'), kwargs.get('declined_date_time', None)) setattr(self, "_{}".format('declined_reason'), kwargs.get('declined_reason', None)) setattr(self, "_{}".format('default_recipient'), kwargs.get('default_recipient', None)) setattr(self, "_{}".format('delivered_date_time'), kwargs.get('delivered_date_time', None)) setattr(self, "_{}".format('delivery_method'), kwargs.get('delivery_method', None)) setattr(self, "_{}".format('delivery_method_metadata'), kwargs.get('delivery_method_metadata', None)) setattr(self, "_{}".format('designator_id'), kwargs.get('designator_id', None)) setattr(self, "_{}".format('designator_id_guid'), kwargs.get('designator_id_guid', None)) setattr(self, "_{}".format('document_visibility'), kwargs.get('document_visibility', None)) setattr(self, "_{}".format('email'), kwargs.get('email', None)) setattr(self, "_{}".format('email_metadata'), kwargs.get('email_metadata', None)) setattr(self, "_{}".format('email_notification'), kwargs.get('email_notification', None)) setattr(self, "_{}".format('embedded_recipient_start_url'), kwargs.get('embedded_recipient_start_url', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('excluded_documents'), kwargs.get('excluded_documents', None)) setattr(self, "_{}".format('fax_number'), kwargs.get('fax_number', None)) setattr(self, "_{}".format('fax_number_metadata'), kwargs.get('fax_number_metadata', None)) setattr(self, "_{}".format('first_name'), kwargs.get('first_name', None)) setattr(self, "_{}".format('first_name_metadata'), kwargs.get('first_name_metadata', None)) setattr(self, "_{}".format('full_name'), kwargs.get('full_name', None)) setattr(self, "_{}".format('full_name_metadata'), kwargs.get('full_name_metadata', None)) setattr(self, 
"_{}".format('id_check_configuration_name'), kwargs.get('id_check_configuration_name', None)) setattr(self, "_{}".format('id_check_configuration_name_metadata'), kwargs.get('id_check_configuration_name_metadata', None)) setattr(self, "_{}".format('id_check_information_input'), kwargs.get('id_check_information_input', None)) setattr(self, "_{}".format('identity_verification'), kwargs.get('identity_verification', None)) setattr(self, "_{}".format('inherit_email_notification_configuration'), kwargs.get('inherit_email_notification_configuration', None)) setattr(self, "_{}".format('is_bulk_recipient'), kwargs.get('is_bulk_recipient', None)) setattr(self, "_{}".format('is_bulk_recipient_metadata'), kwargs.get('is_bulk_recipient_metadata', None)) setattr(self, "_{}".format('last_name'), kwargs.get('last_name', None)) setattr(self, "_{}".format('last_name_metadata'), kwargs.get('last_name_metadata', None)) setattr(self, "_{}".format('live_oak_start_url'), kwargs.get('live_oak_start_url', None)) setattr(self, "_{}".format('locked_recipient_phone_auth_editable'), kwargs.get('locked_recipient_phone_auth_editable', None)) setattr(self, "_{}".format('locked_recipient_sms_editable'), kwargs.get('locked_recipient_sms_editable', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('notary_id'), kwargs.get('notary_id', None)) setattr(self, "_{}".format('notary_signers'), kwargs.get('notary_signers', None)) setattr(self, "_{}".format('notary_type'), kwargs.get('notary_type', None)) setattr(self, "_{}".format('note'), kwargs.get('note', None)) setattr(self, "_{}".format('note_metadata'), kwargs.get('note_metadata', None)) setattr(self, "_{}".format('offline_attributes'), kwargs.get('offline_attributes', None)) setattr(self, "_{}".format('phone_authentication'), kwargs.get('phone_authentication', None)) setattr(self, "_{}".format('phone_number'), kwargs.get('phone_number', None)) setattr(self, "_{}".format('proof_file'), kwargs.get('proof_file', None)) setattr(self, "_{}".format('recipient_attachments'), kwargs.get('recipient_attachments', None)) setattr(self, "_{}".format('recipient_authentication_status'), kwargs.get('recipient_authentication_status', None)) setattr(self, "_{}".format('recipient_feature_metadata'), kwargs.get('recipient_feature_metadata', None)) setattr(self, "_{}".format('recipient_id'), kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_signature_providers'), kwargs.get('recipient_signature_providers', None)) setattr(self, "_{}".format('recipient_supplies_tabs'), kwargs.get('recipient_supplies_tabs', None)) setattr(self, "_{}".format('recipient_type'), kwargs.get('recipient_type', None)) setattr(self, "_{}".format('recipient_type_metadata'), kwargs.get('recipient_type_metadata', None)) setattr(self, "_{}".format('require_id_lookup'), kwargs.get('require_id_lookup', None)) setattr(self, "_{}".format('require_id_lookup_metadata'), kwargs.get('require_id_lookup_metadata', None)) setattr(self, "_{}".format('require_signer_certificate'), kwargs.get('require_signer_certificate', None)) setattr(self, "_{}".format('require_sign_on_paper'), kwargs.get('require_sign_on_paper', None)) setattr(self, "_{}".format('require_upload_signature'), kwargs.get('require_upload_signature', None)) setattr(self, "_{}".format('role_name'), kwargs.get('role_name', None)) setattr(self, 
"_{}".format('routing_order'), kwargs.get('routing_order', None)) setattr(self, "_{}".format('routing_order_metadata'), kwargs.get('routing_order_metadata', None)) setattr(self, "_{}".format('sent_date_time'), kwargs.get('sent_date_time', None)) setattr(self, "_{}".format('signature_info'), kwargs.get('signature_info', None)) setattr(self, "_{}".format('signed_date_time'), kwargs.get('signed_date_time', None)) setattr(self, "_{}".format('sign_in_each_location'), kwargs.get('sign_in_each_location', None)) setattr(self, "_{}".format('sign_in_each_location_metadata'), kwargs.get('sign_in_each_location_metadata', None)) setattr(self, "_{}".format('signing_group_id'), kwargs.get('signing_group_id', None)) setattr(self, "_{}".format('signing_group_id_metadata'), kwargs.get('signing_group_id_metadata', None)) setattr(self, "_{}".format('signing_group_name'), kwargs.get('signing_group_name', None)) setattr(self, "_{}".format('signing_group_users'), kwargs.get('signing_group_users', None)) setattr(self, "_{}".format('sms_authentication'), kwargs.get('sms_authentication', None)) setattr(self, "_{}".format('social_authentications'), kwargs.get('social_authentications', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_code'), kwargs.get('status_code', None)) setattr(self, "_{}".format('suppress_emails'), kwargs.get('suppress_emails', None)) setattr(self, "_{}".format('tabs'), kwargs.get('tabs', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('total_tab_count'), kwargs.get('total_tab_count', None)) setattr(self, "_{}".format('user_id'), kwargs.get('user_id', None)) @property def access_code(self): return self._access_code @access_code.setter def access_code(self, access_code): self._access_code = access_code @property def access_code_metadata(self): return self._access_code_metadata @access_code_metadata.setter def access_code_metadata(self, access_code_metadata): self._access_code_metadata = access_code_metadata @property def add_access_code_to_email(self): return self._add_access_code_to_email @add_access_code_to_email.setter def add_access_code_to_email(self, add_access_code_to_email): self._add_access_code_to_email = add_access_code_to_email @property def additional_notifications(self): return self._additional_notifications @additional_notifications.setter def additional_notifications(self, additional_notifications): self._additional_notifications = additional_notifications @property def agent_can_edit_email(self): return self._agent_can_edit_email @agent_can_edit_email.setter def agent_can_edit_email(self, agent_can_edit_email): self._agent_can_edit_email = agent_can_edit_email @property def agent_can_edit_name(self): return self._agent_can_edit_name @agent_can_edit_name.setter def agent_can_edit_name(self, agent_can_edit_name): self._agent_can_edit_name = agent_can_edit_name @property def allow_system_override_for_locked_recipient(self): return self._allow_system_override_for_locked_recipient @allow_system_override_for_locked_recipient.setter def allow_system_override_for_locked_recipient(self, allow_system_override_for_locked_recipient): self._allow_system_override_for_locked_recipient = allow_system_override_for_locked_recipient @property def auto_navigation(self): return self._auto_navigation @auto_navigation.setter def auto_navigation(self, auto_navigation): self._auto_navigation 
= auto_navigation @property def auto_responded_reason(self): return self._auto_responded_reason @auto_responded_reason.setter def auto_responded_reason(self, auto_responded_reason): self._auto_responded_reason = auto_responded_reason @property def bulk_recipients_uri(self): return self._bulk_recipients_uri @bulk_recipients_uri.setter def bulk_recipients_uri(self, bulk_recipients_uri): self._bulk_recipients_uri = bulk_recipients_uri @property def can_sign_offline(self): return self._can_sign_offline @can_sign_offline.setter def can_sign_offline(self, can_sign_offline): self._can_sign_offline = can_sign_offline @property def client_user_id(self): return self._client_user_id @client_user_id.setter def client_user_id(self, client_user_id): self._client_user_id = client_user_id @property def completed_count(self): return self._completed_count @completed_count.setter def completed_count(self, completed_count): self._completed_count = completed_count @property def creation_reason(self): return self._creation_reason @creation_reason.setter def creation_reason(self, creation_reason): self._creation_reason = creation_reason @property def custom_fields(self): return self._custom_fields @custom_fields.setter def custom_fields(self, custom_fields): self._custom_fields = custom_fields @property def declined_date_time(self): return self._declined_date_time @declined_date_time.setter def declined_date_time(self, declined_date_time): self._declined_date_time = declined_date_time @property def declined_reason(self): return self._declined_reason @declined_reason.setter def declined_reason(self, declined_reason): self._declined_reason = declined_reason @property def default_recipient(self): return self._default_recipient @default_recipient.setter def default_recipient(self, default_recipient): self._default_recipient = default_recipient @property def delivered_date_time(self): return self._delivered_date_time @delivered_date_time.setter def delivered_date_time(self, delivered_date_time): self._delivered_date_time = delivered_date_time @property def delivery_method(self): return self._delivery_method @delivery_method.setter def delivery_method(self, delivery_method): self._delivery_method = delivery_method @property def delivery_method_metadata(self): return self._delivery_method_metadata @delivery_method_metadata.setter def delivery_method_metadata(self, delivery_method_metadata): self._delivery_method_metadata = delivery_method_metadata @property def designator_id(self): return self._designator_id @designator_id.setter def designator_id(self, designator_id): self._designator_id = designator_id @property def designator_id_guid(self): return self._designator_id_guid @designator_id_guid.setter def designator_id_guid(self, designator_id_guid): self._designator_id_guid = designator_id_guid @property def document_visibility(self): return self._document_visibility @document_visibility.setter def document_visibility(self, document_visibility): self._document_visibility = document_visibility @property
MIT License
srusskih/sublimejedi
dependencies/parso/utils.py
version_info
python
def version_info():
    from parso import __version__
    tupl = re.findall(r'[a-z]+|\d+', __version__)
    return Version(*[x if i == 3 else int(x) for i, x in enumerate(tupl)])
Returns a namedtuple of parso's version, similar to Python's ``sys.version_info``.
https://github.com/srusskih/sublimejedi/blob/8a5054f0a053c8a8170c06c56216245240551d54/dependencies/parso/utils.py#L112-L119
from collections import namedtuple import re import sys from ast import literal_eval from functools import total_ordering from parso._compatibility import unicode _NON_LINE_BREAKS = ( u'\v', u'\f', u'\x1C', u'\x1D', u'\x1E', u'\x85', u'\u2028', u'\u2029', ) Version = namedtuple('Version', 'major, minor, micro') def split_lines(string, keepends=False): if keepends: lst = string.splitlines(True) merge = [] for i, line in enumerate(lst): try: last_chr = line[-1] except IndexError: pass else: if last_chr in _NON_LINE_BREAKS: merge.append(i) for index in reversed(merge): try: lst[index] = lst[index] + lst[index + 1] del lst[index + 1] except IndexError: pass if string.endswith('\n') or string.endswith('\r') or string == '': lst.append('') return lst else: return re.split(r'\n|\r\n|\r', string) def python_bytes_to_unicode(source, encoding='utf-8', errors='strict'): def detect_encoding(): byte_mark = literal_eval(r"b'\xef\xbb\xbf'") if source.startswith(byte_mark): return 'utf-8' first_two_lines = re.match(br'(?:[^\n]*\n){0,2}', source).group(0) possible_encoding = re.search(br"coding[=:]\s*([-\w.]+)", first_two_lines) if possible_encoding: return possible_encoding.group(1) else: return encoding if isinstance(source, unicode): return source encoding = detect_encoding() if not isinstance(encoding, unicode): encoding = unicode(encoding, 'utf-8', 'replace') return unicode(source, encoding, errors)
MIT License
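To see the tokenisation in action, the same regex and namedtuple applied to a literal version string; the string '0.8.3' is an arbitrary example, not parso's actual version.

import re
from collections import namedtuple

Version = namedtuple('Version', 'major, minor, micro')

tupl = re.findall(r'[a-z]+|\d+', '0.8.3')   # arbitrary example version string
print(Version(*[x if i == 3 else int(x) for i, x in enumerate(tupl)]))
# Version(major=0, minor=8, micro=3)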
python-discord/sir-lancebot
bot/exts/utilities/githubinfo.py
GithubInfo.github_user_info
python
async def github_user_info(self, ctx: commands.Context, username: str) -> None:
    async with ctx.typing():
        user_data = await self.fetch_data(f"{GITHUB_API_URL}/users/{quote_plus(username)}")

        if "message" in user_data:
            embed = discord.Embed(
                title=random.choice(NEGATIVE_REPLIES),
                description=f"The profile for `{username}` was not found.",
                colour=Colours.soft_red
            )
            await ctx.send(embed=embed)
            return

        org_data = await self.fetch_data(user_data["organizations_url"])
        orgs = [f"[{org['login']}](https://github.com/{org['login']})" for org in org_data]
        orgs_to_add = " | ".join(orgs)

        gists = user_data["public_gists"]

        if user_data["blog"].startswith("http"):
            blog = user_data["blog"]
        elif user_data["blog"]:
            blog = f"https://{user_data['blog']}"
        else:
            blog = "No website link available"

        embed = discord.Embed(
            title=f"`{user_data['login']}`'s GitHub profile info",
            description=f"```\n{user_data['bio']}\n```\n" if user_data["bio"] else "",
            colour=discord.Colour.blurple(),
            url=user_data["html_url"],
            timestamp=datetime.strptime(user_data["created_at"], "%Y-%m-%dT%H:%M:%SZ")
        )
        embed.set_thumbnail(url=user_data["avatar_url"])
        embed.set_footer(text="Account created at")

        if user_data["type"] == "User":
            embed.add_field(
                name="Followers",
                value=f"[{user_data['followers']}]({user_data['html_url']}?tab=followers)"
            )
            embed.add_field(
                name="Following",
                value=f"[{user_data['following']}]({user_data['html_url']}?tab=following)"
            )

        embed.add_field(
            name="Public repos",
            value=f"[{user_data['public_repos']}]({user_data['html_url']}?tab=repositories)"
        )

        if user_data["type"] == "User":
            embed.add_field(
                name="Gists",
                value=f"[{gists}](https://gist.github.com/{quote_plus(username, safe='')})"
            )

        embed.add_field(
            name=f"Organization{'s' if len(orgs)!=1 else ''}",
            value=orgs_to_add if orgs else "No organizations."
        )
        embed.add_field(name="Website", value=blog)

    await ctx.send(embed=embed)
Fetches a user's GitHub information.
https://github.com/python-discord/sir-lancebot/blob/559e76ffbef7af85132d86f2e3ab8acf7e7f5eef/bot/exts/utilities/githubinfo.py#L37-L105
import logging import random from datetime import datetime from urllib.parse import quote, quote_plus import discord from discord.ext import commands from bot.bot import Bot from bot.constants import Colours, NEGATIVE_REPLIES from bot.exts.core.extensions import invoke_help_command log = logging.getLogger(__name__) GITHUB_API_URL = "https://api.github.com" class GithubInfo(commands.Cog): def __init__(self, bot: Bot): self.bot = bot async def fetch_data(self, url: str) -> dict: async with self.bot.http_session.get(url) as r: return await r.json() @commands.group(name="github", aliases=("gh", "git")) @commands.cooldown(1, 10, commands.BucketType.user) async def github_group(self, ctx: commands.Context) -> None: if ctx.invoked_subcommand is None: await invoke_help_command(ctx) @github_group.command(name="user", aliases=("userinfo",))
MIT License
niemeyer-research-group/pymars
pymars/soln2cti.py
build_falloff
python
def build_falloff(parameters, falloff_function):
    if falloff_function == 'Troe':
        falloff_string = ('Troe(' +
                          f'A = {parameters[0]}' +
                          f', T3 = {parameters[1]}' +
                          f', T1 = {parameters[2]}' +
                          f', T2 = {parameters[3]})'
                          )
    elif falloff_function == 'SRI':
        falloff_string = ('SRI(' +
                          f'A = {parameters[0]}' +
                          f', B = {parameters[1]}' +
                          f', C = {parameters[2]}' +
                          f', D = {parameters[3]}' +
                          f', E = {parameters[4]})'
                          )
    else:
        raise NotImplementedError(f'Falloff function not supported: {falloff_function}')

    return falloff_string
Creates falloff reaction Troe parameter string

Parameters
----------
parameters : numpy.ndarray
    Array of falloff parameters; length varies based on ``falloff_function``
falloff_function : {'Troe', 'SRI'}
    Type of falloff function

Returns
-------
falloff_string : str
    String of falloff parameters
https://github.com/niemeyer-research-group/pymars/blob/6f032b91cbfe33b935bd6ed6f6fb1302fdeca802/pymars/soln2cti.py#L138-L172
import os import math from textwrap import fill import cantera as ct CALORIES_CONSTANT = 4184.0 DEBEYE_CONVERSION = 3.33564e-30 indent = ['', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ' ] def section_break(section_title): return('#' + '-' * 75 + '\n' + f'# {section_title}\n' + '#' + '-' * 75 + '\n\n' ) def build_arrhenius(rate, reaction_order, reaction_type): if reaction_type in [ct.ElementaryReaction, ct.PlogReaction]: pre_exponential_factor = rate.pre_exponential_factor * 1e3**(reaction_order - 1) elif reaction_type == ct.ThreeBodyReaction: pre_exponential_factor = rate.pre_exponential_factor * 1e3**reaction_order elif reaction_type in [ct.FalloffReaction, ct.ChemicallyActivatedReaction]: raise ValueError('Function does not support falloff or chemically activated reactions') else: raise NotImplementedError('Reaction type not supported: ', reaction_type) arrhenius = [f'{pre_exponential_factor:.6e}', str(rate.temperature_exponent), str(rate.activation_energy / CALORIES_CONSTANT) ] return ', '.join(arrhenius) def build_falloff_arrhenius(rate, reaction_order, reaction_type, pressure_limit): assert pressure_limit in ['low', 'high'], 'Pressure range needs to be high or low' if reaction_type == ct.FalloffReaction: if pressure_limit == 'low': pre_exponential_factor = rate.pre_exponential_factor * 1e3**(reaction_order) elif pressure_limit == 'high': pre_exponential_factor = rate.pre_exponential_factor * 1e3**(reaction_order - 1) elif reaction_type == ct.ChemicallyActivatedReaction: if pressure_limit == 'low': pre_exponential_factor = rate.pre_exponential_factor * 1e3**(reaction_order - 1) elif pressure_limit == 'high': pre_exponential_factor = rate.pre_exponential_factor * 1e3**(reaction_order - 2) else: raise ValueError('Reaction type not supported: ', reaction_type) arrhenius = [f'{pre_exponential_factor:.6E}', str(rate.temperature_exponent), str(rate.activation_energy / CALORIES_CONSTANT) ] return '[' + ', '.join(arrhenius) + ']'
MIT License
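A small usage sketch for build_falloff; the import path is assumed from the file location above and the Troe parameter values are illustrative only.

from pymars.soln2cti import build_falloff  # path assumed from the repository layout

troe_params = [0.7346, 94.0, 1756.0, 5182.0]  # made-up A, T3, T1, T2 values
print(build_falloff(troe_params, 'Troe'))
# Troe(A = 0.7346, T3 = 94.0, T1 = 1756.0, T2 = 5182.0)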
cocos-bcx/python-middleware
PythonMiddleware/instance.py
shared_graphene_instance
python
def shared_graphene_instance():
    global _shared_graphene_instance
    if not _shared_graphene_instance:
        _shared_graphene_instance = gph.Graphene()
    return _shared_graphene_instance
This method will initialize ``_shared_graphene_instance`` and return it. The purpose of this method is to offer a single default graphene instance that can be reused by multiple classes.
https://github.com/cocos-bcx/python-middleware/blob/9e8db14cdbf12131964d48d1189e0686b69369a8/PythonMiddleware/instance.py#L5-L14
import PythonMiddleware as gph _shared_graphene_instance = None
MIT License
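A minimal sketch of the intended sharing behaviour, assuming the module is importable as shown in the file path and that constructing gph.Graphene() succeeds in your environment (it may attempt to reach a node).

from PythonMiddleware.instance import shared_graphene_instance

a = shared_graphene_instance()
b = shared_graphene_instance()
assert a is b  # repeated calls reuse the one lazily created Graphene() instance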
threatresponse/margaritashotgun
margaritashotgun/ssh_tunnel.py
Forward.__init__
python
def __init__(self, local_port, remote_address, remote_port, transport):
    super(Forward, self).__init__()
    self.local_port = local_port
    self.remote_address = remote_address
    self.remote_port = remote_port
    self.transport = transport
type: local_port: int
param: local_port: local tunnel endpoint port binding
type: remote_address: str
param: remote_address: Remote tunnel endpoint ip binding
type: remote_port: int
param: remote_port: Remote tunnel endpoint port binding
type: transport: :py:class:`paramiko.Transport`
param: transport: Paramiko ssh transport
https://github.com/threatresponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/ssh_tunnel.py#L92-L107
import errno import logging import paramiko from paramiko import AuthenticationException, SSHException import select import socket import threading from margaritashotgun.exceptions import * try: import socketserver except ImportError: import SocketServer as socketserver logger = logging.getLogger(__name__) class SSHTunnel(): def __init__(self): self.transport = None self.forward = None self.username = None self.address = None self.local_port = None self.remote_address = None self.remote_port = None def configure(self, transport, auth, address, port): self.transport = transport self.username = auth.username self.address = address self.port = port def start(self, local_port, remote_address, remote_port): self.local_port = local_port self.remote_address = remote_address self.remote_port = remote_port logger.debug(("Starting ssh tunnel {0}:{1}:{2} for " "{3}@{4}".format(local_port, remote_address, remote_port, self.username, self.address))) self.forward = Forward(local_port, remote_address, remote_port, self.transport) self.forward.start() def cleanup(self): if self.local_port is not None: logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for " "{3}@{4}".format(self.local_port, self.remote_address, self.remote_port, self.username, self.address))) if self.forward is not None: self.forward.stop() self.forward.join() if self.transport is not None: self.transport.close() class Forward(threading.Thread):
MIT License
iexbase/tron-api-python
tronapi/transactionbuilder.py
TransactionBuilder.create_proposal
python
def create_proposal(self, parameters: Any, issuer_address=None):
    if issuer_address is None:
        issuer_address = self.tron.default_address.hex

    if not self.tron.isAddress(issuer_address):
        raise InvalidAddress('Invalid issuerAddress provided')

    return self.tron.manager.request('/wallet/proposalcreate', {
        'owner_address': self.tron.address.to_hex(issuer_address),
        'parameters': parameters
    })
Creates a proposal to modify the network.
Can only be created by a current Super Representative.

Args:
    parameters (Any): proposal parameters
    issuer_address: owner address

Examples:
    >>> from tronapi import Tron
    >>> data = [
    >>>     {'key': 1, 'value': 2},
    >>>     {'key': 1, 'value': 2}
    >>> ]
    >>> tron = Tron()
    >>> tron.transaction.create_proposal(data)
https://github.com/iexbase/tron-api-python/blob/2da3b705d0bb139ef556d93cf14f3f1a5f938e87/tronapi/transactionbuilder.py#L317-L345
from datetime import datetime from typing import ( Any, Tuple, List ) from eth_abi import encode_abi from trx_utils import ( is_string, is_integer, is_boolean, is_hex, encode_hex ) from tronapi.exceptions import ( InvalidTronError, TronError, InvalidAddress ) from tronapi.common.validation import is_valid_url DEFAULT_TIME = datetime.now() START_DATE = int(DEFAULT_TIME.timestamp() * 1000) class TransactionBuilder(object): def __init__(self, tron): self.tron = tron def send_transaction(self, to, amount, account=None): if account is None: account = self.tron.default_address.hex if not self.tron.isAddress(to): raise InvalidTronError('Invalid recipient address provided') if not isinstance(amount, float) or amount <= 0: raise InvalidTronError('Invalid amount provided') _to = self.tron.address.to_hex(to) _from = self.tron.address.to_hex(account) if _to == _from: raise TronError('Cannot transfer TRX to the same account') response = self.tron.manager.request('/wallet/createtransaction', { 'to_address': _to, 'owner_address': _from, 'amount': self.tron.toSun(amount) }) return response def send_token(self, to, amount, token_id, account=None): if account is None: account = self.tron.default_address.hex if not self.tron.isAddress(to): raise InvalidTronError('Invalid recipient address provided') if not isinstance(amount, int) or amount <= 0: raise InvalidTronError('Invalid amount provided') if not token_id: raise InvalidTronError('Invalid token ID provided') if not self.tron.isAddress(account): raise InvalidTronError('Invalid origin address provided') _to = self.tron.address.to_hex(to) _from = self.tron.address.to_hex(account) _token_id = self.tron.toHex(text=str(token_id)) if _to == _from: raise TronError('Cannot transfer TRX to the same account') if is_string(token_id) and token_id.upper() == 'TRX': return self.send_transaction(_to, amount, _from) return self.tron.manager.request('/wallet/transferasset', { 'to_address': _to, 'owner_address': _from, 'asset_name': _token_id, 'amount': amount }) def freeze_balance(self, amount, duration, resource, account=None): if account is None: account = self.tron.default_address.hex if resource not in ('BANDWIDTH', 'ENERGY',): raise InvalidTronError('Invalid resource provided: Expected "BANDWIDTH" or "ENERGY"') if not is_integer(amount) or amount <= 0: raise InvalidTronError('Invalid amount provided') if not is_integer(duration) or duration < 3: raise InvalidTronError('Invalid duration provided, minimum of 3 days') if not self.tron.isAddress(account): raise InvalidTronError('Invalid address provided') response = self.tron.manager.request('/wallet/freezebalance', { 'owner_address': self.tron.address.to_hex(account), 'frozen_balance': self.tron.toSun(amount), 'frozen_duration': int(duration), 'resource': resource }) if 'Error' in response: raise TronError(response['Error']) return response def unfreeze_balance(self, resource='BANDWIDTH', account=None): if account is None: account = self.tron.default_address.hex if resource not in ('BANDWIDTH', 'ENERGY',): raise InvalidTronError('Invalid resource provided: Expected "BANDWIDTH" or "ENERGY"') if not self.tron.isAddress(account): raise InvalidTronError('Invalid address provided') response = self.tron.manager.request('/wallet/unfreezebalance', { 'owner_address': self.tron.address.to_hex(account), 'resource': resource }) if 'Error' in response: raise ValueError(response['Error']) return response def purchase_token(self, to: str, token_id: str, amount: int, buyer=None): if buyer is None: buyer = self.tron.default_address.hex 
if not self.tron.isAddress(to): raise InvalidAddress('Invalid to address provided') if not len(token_id): raise ValueError('Invalid token ID provided') if amount <= 0: raise ValueError('Invalid amount provided') _to = self.tron.address.to_hex(to) _from = self.tron.address.to_hex(buyer) return self.tron.manager.request('/wallet/participateassetissue', { 'to_address': _to, 'owner_address': _from, 'asset_name': self.tron.toHex(text=token_id), 'amount': int(amount) }) def withdraw_block_rewards(self, address: str = None): if not address: address = self.tron.default_address.hex if not self.tron.isAddress(address): raise InvalidAddress('Invalid address provided') return self.tron.manager.request('/wallet/withdrawbalance', { 'owner_address': self.tron.address.to_hex(address) }) def apply_for_sr(self, url, address=None): if address is None: address = self.tron.default_address.hex if not self.tron.isAddress(address): raise TronError('Invalid address provided') if not is_valid_url(url): raise TronError('Invalid url provided') return self.tron.manager.request('/wallet/createwitness', { 'owner_address': self.tron.address.to_hex(address), 'url': self.tron.toHex(text=url) }) def vote(self, votes: List[Tuple[str, int]], voter_address: str = None): if voter_address is None: voter_address = self.tron.default_address.hex _view_vote = [] for sr_address, vote_count in votes: if not self.tron.isAddress(sr_address): raise InvalidAddress( 'Invalid SR address provided: ' + sr_address ) if not is_integer(vote_count) or vote_count <= 0: raise ValueError( 'Invalid vote count provided for SR: ' + sr_address ) _view_vote.append({ 'vote_address': self.tron.address.to_hex(sr_address), 'vote_count': int(vote_count) }) return self.tron.manager.request('/wallet/votewitnessaccount', { 'owner_address': self.tron.address.to_hex(voter_address), 'votes': _view_vote })
MIT License
paddlepaddle/paddle
python/paddle/fluid/tests/unittests/test_dynrnn_gradient_check.py
SeedFixedTestCase.tearDownClass
python
def tearDownClass(cls):
    numpy.random.set_state(cls._np_rand_state)
    random.setstate(cls._py_rand_state)
Restore random seeds
https://github.com/paddlepaddle/paddle/blob/056b87414880e0520bb4560fc40d5b62db9c5175/python/paddle/fluid/tests/unittests/test_dynrnn_gradient_check.py#L224-L227
from __future__ import print_function import numpy import random import collections import paddle.fluid as fluid import unittest from decorator_helper import * class Memory(object): def __init__(self, shape, dtype='float32'): self.ex = numpy.zeros(shape=shape, dtype=dtype) self.cur = None def update(self, val): assert val.shape == self.ex.shape assert val.dtype == self.ex.dtype self.cur = val def next(self): self.ex = self.cur self.cur = None def __next__(self): self.next() def reset(self): self.ex = numpy.zeros(shape=self.ex.shape, dtype=self.ex.dtype) self.cur = None class Output(object): def __init__(self): self.outs = [] def next_sequence(self): self.outs.append([]) def out(self, val): self.outs[-1].append(val) def last(self): return self.outs[-1][-1] class BaseRNN(object): def __init__(self, ins, mems, params, outs, num_seq=5, max_seq_len=15): self.num_seq = num_seq self.inputs = collections.defaultdict(list) for _ in range(num_seq): seq_len = random.randint(1, max_seq_len - 1) for iname in ins: ishape = ins[iname].get('shape', None) idtype = ins[iname].get('dtype', 'float32') lst = [] for _ in range(seq_len): lst.append(numpy.random.random(size=ishape).astype(idtype)) self.inputs[iname].append(lst) self.mems = dict() for mname in mems: mshape = mems[mname].get('shape', None) mdtype = mems[mname].get('dtype', 'float32') self.mems[mname] = Memory(shape=mshape, dtype=mdtype) self.params = dict() for pname in params: pshape = params[pname].get('shape', None) pdtype = params[pname].get('dtype', 'float32') self.params[pname] = numpy.random.random(size=pshape).astype(pdtype) self.outputs = dict() for oname in outs: self.outputs[oname] = Output() def step(self, **kwargs): raise NotImplementedError() def exe(self): retv = dict() for out in self.outputs: retv[out] = [] for seq_id in range(self.num_seq): for mname in self.mems: self.mems[mname].reset() for out in self.outputs: self.outputs[out].next_sequence() iname0 = list(self.inputs.keys())[0] seq_len = len(self.inputs[iname0][seq_id]) for step_id in range(seq_len): xargs = dict() for iname in self.inputs: xargs[iname] = self.inputs[iname][seq_id][step_id] for mname in self.mems: xargs[mname] = self.mems[mname] for pname in self.params: xargs[pname] = self.params[pname] for out in self.outputs: xargs[out] = self.outputs[out] self.step(**xargs) for mname in self.mems: next(self.mems[mname]) for out in self.outputs: retv[out].append(self.outputs[out].last()) for out in retv: retv[out] = numpy.array(retv[out]) return retv def to_feed(self, place): feed_dict = dict() for iname in self.inputs: lod = [] np_flatten = [] for seq_id in range(len(self.inputs[iname])): seq_len = len(self.inputs[iname][seq_id]) lod.append(seq_len) np_flatten.extend(self.inputs[iname][seq_id]) t = fluid.Tensor() t.set(numpy.array(np_flatten), place) t.set_recursive_sequence_lengths([lod]) feed_dict[iname] = t for pname in self.params: feed_dict[pname] = self.params[pname] return feed_dict def get_numeric_gradient_of_param(self, param_name, delta=0.001): p = self.params[param_name] if len(p.shape) != 2: raise ValueError("Not support get numeric gradient of an parameter," " which is not matrix") g = numpy.zeros(shape=p.shape, dtype=p.dtype) for i in range(p.shape[0]): for j in range(p.shape[1]): o = p[i][j] p[i][j] += delta pos = self._exe_mean_out_() p[i][j] -= 2 * delta neg = self._exe_mean_out_() p[i][j] = o g[i][j] = (pos - neg) / (delta * 2) return g def get_numeric_gradient_of_input(self, input_name, delta=0.001, return_one_tensor=True): ipt = self.inputs[input_name] 
grad = [] for seq in ipt: seq_grad = [] for item in seq: item_grad = numpy.zeros(shape=item.shape, dtype=item.dtype) if len(item.shape) != 1: raise ValueError("Not support") for i in range(len(item)): o = item[i] item[i] += delta pos = self._exe_mean_out_() item[i] -= 2 * delta neg = self._exe_mean_out_() item[i] = o item_grad[i] = (pos - neg) / (delta * 2) seq_grad.append(item_grad) grad.append(seq_grad) if not return_one_tensor: return grad for i in range(len(grad)): grad[i] = numpy.concatenate(grad[i]) grad = numpy.concatenate(grad) return grad def _exe_mean_out_(self): outs = self.exe() return numpy.array([o.mean() for o in outs.values()]).mean() class SeedFixedTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls._np_rand_state = numpy.random.get_state() cls._py_rand_state = random.getstate() numpy.random.seed(123) random.seed(124) @classmethod
Apache License 2.0
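A hedged sketch of how the fixture is meant to be used: subclass it and every test in the class runs with numpy/random seeded to 123/124, while tearDownClass restores the previous RNG state. The import path below is hypothetical, taken from the test file name above.

import numpy
import unittest

from test_dynrnn_gradient_check import SeedFixedTestCase  # hypothetical import path


class MySeededTest(SeedFixedTestCase):
    def test_numpy_draw_is_reproducible(self):
        # setUpClass seeded the global generator with 123, so the first draw
        # matches a fresh RandomState(123).
        expected = numpy.random.RandomState(123).rand()
        self.assertAlmostEqual(numpy.random.rand(), expected)


if __name__ == '__main__':
    unittest.main()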
shenweichen/deepctr
deepctr/contrib/rnn.py
_transpose_batch_time
python
def _transpose_batch_time(x):
    x_static_shape = x.get_shape()
    if x_static_shape.ndims is not None and x_static_shape.ndims < 2:
        raise ValueError(
            "Expected input tensor %s to have rank at least 2, but saw shape: %s" %
            (x, x_static_shape))
    x_rank = array_ops.rank(x)
    x_t = array_ops.transpose(
        x, array_ops.concat(
            ([1, 0], math_ops.range(2, x_rank)), axis=0))
    x_t.set_shape(
        tensor_shape.TensorShape([
            x_static_shape[1].value, x_static_shape[0].value
        ]).concatenate(x_static_shape[2:]))
    return x_t
Transpose the batch and time dimensions of a Tensor.

Retains as much of the static shape information as possible.

Args:
    x: A tensor of rank 2 or higher.

Returns:
    x transposed along the first two dimensions.

Raises:
    ValueError: if `x` is rank 1 or lower.
https://github.com/shenweichen/deepctr/blob/cd9643c2dc2a25c8b27269e2033b1a0b6aafcac7/deepctr/contrib/rnn.py#L78-L114
from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import rnn_cell_impl from tensorflow.python.ops import tensor_array_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.util import nest import tensorflow as tf def _like_rnncell_(cell): conditions = [hasattr(cell, "output_size"), hasattr(cell, "state_size"), hasattr(cell, "zero_state"), callable(cell)] return all(conditions) _concat = rnn_cell_impl._concat try: _like_rnncell = rnn_cell_impl._like_rnncell except Exception as e: _like_rnncell = _like_rnncell_
Apache License 2.0
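The same batch/time swap expressed with public TensorFlow 2 ops rather than the internal array_ops/math_ops modules used above; the tensor shape is arbitrary.

import tensorflow as tf

x = tf.zeros([32, 10, 8])  # [batch, time, features]

# Build the permutation [1, 0, 2, ...] exactly as _transpose_batch_time does.
perm = tf.concat([[1, 0], tf.range(2, tf.rank(x))], axis=0)
x_t = tf.transpose(x, perm)
print(x_t.shape)  # (10, 32, 8)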
google-coral/tflite
python/examples/detection/detect.py
get_output
python
def get_output(interpreter, score_threshold, image_scale=(1.0, 1.0)):
    boxes = output_tensor(interpreter, 0)
    class_ids = output_tensor(interpreter, 1)
    scores = output_tensor(interpreter, 2)
    count = int(output_tensor(interpreter, 3))

    width, height = input_size(interpreter)
    image_scale_x, image_scale_y = image_scale
    sx, sy = width / image_scale_x, height / image_scale_y

    def make(i):
        ymin, xmin, ymax, xmax = boxes[i]
        return Object(
            id=int(class_ids[i]),
            score=float(scores[i]),
            bbox=BBox(xmin=xmin,
                      ymin=ymin,
                      xmax=xmax,
                      ymax=ymax).scale(sx, sy).map(int))

    return [make(i) for i in range(count) if scores[i] >= score_threshold]
Returns list of detected objects.
https://github.com/google-coral/tflite/blob/eced31ac01e9c2636150decef7d3c335d0feb304/python/examples/detection/detect.py#L142-L163
import collections import numpy as np Object = collections.namedtuple('Object', ['id', 'score', 'bbox']) class BBox(collections.namedtuple('BBox', ['xmin', 'ymin', 'xmax', 'ymax'])): __slots__ = () @property def width(self): return self.xmax - self.xmin @property def height(self): return self.ymax - self.ymin @property def area(self): return self.width * self.height @property def valid(self): return self.width >= 0 and self.height >= 0 def scale(self, sx, sy): return BBox(xmin=sx * self.xmin, ymin=sy * self.ymin, xmax=sx * self.xmax, ymax=sy * self.ymax) def translate(self, dx, dy): return BBox(xmin=dx + self.xmin, ymin=dy + self.ymin, xmax=dx + self.xmax, ymax=dy + self.ymax) def map(self, f): return BBox(xmin=f(self.xmin), ymin=f(self.ymin), xmax=f(self.xmax), ymax=f(self.ymax)) @staticmethod def intersect(a, b): return BBox(xmin=max(a.xmin, b.xmin), ymin=max(a.ymin, b.ymin), xmax=min(a.xmax, b.xmax), ymax=min(a.ymax, b.ymax)) @staticmethod def union(a, b): return BBox(xmin=min(a.xmin, b.xmin), ymin=min(a.ymin, b.ymin), xmax=max(a.xmax, b.xmax), ymax=max(a.ymax, b.ymax)) @staticmethod def iou(a, b): intersection = BBox.intersect(a, b) if not intersection.valid: return 0.0 area = intersection.area return area / (a.area + b.area - area) def input_size(interpreter): _, height, width, _ = interpreter.get_input_details()[0]['shape'] return width, height def input_tensor(interpreter): tensor_index = interpreter.get_input_details()[0]['index'] return interpreter.tensor(tensor_index)()[0] def set_input(interpreter, size, resize): width, height = input_size(interpreter) w, h = size scale = min(width / w, height / h) w, h = int(w * scale), int(h * scale) tensor = input_tensor(interpreter) tensor.fill(0) _, _, channel = tensor.shape tensor[:h, :w] = np.reshape(resize((w, h)), (h, w, channel)) return scale, scale def output_tensor(interpreter, i): tensor = interpreter.tensor(interpreter.get_output_details()[i]['index'])() return np.squeeze(tensor)
Apache License 2.0
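A quick sanity check of the BBox helpers that get_output builds on (scale, map, iou); it assumes detect.py is importable, e.g. when run from the examples/detection directory.

from detect import BBox  # assumes detect.py is on the import path

a = BBox(xmin=0, ymin=0, xmax=10, ymax=10)
b = BBox(xmin=5, ymin=5, xmax=15, ymax=15)
print(a.area)            # 100
print(BBox.iou(a, b))    # 25 / (100 + 100 - 25) ≈ 0.1428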
google/citest
citest/gcp_testing/gcloud_contract.py
GCloudObjectObserver.export_to_json_snapshot
python
def export_to_json_snapshot(self, snapshot, entity):
    snapshot.edge_builder.make_control(entity, 'Args', self.__args)
    super(GCloudObjectObserver, self).export_to_json_snapshot(snapshot, entity)
Implements JsonSnapshotableEntity interface.
https://github.com/google/citest/blob/eda9171eed35b82ce6f048229bebd898edc25369/citest/gcp_testing/gcloud_contract.py#L42-L45
import json import logging import traceback import citest.json_contract as jc from citest.json_predicate import JsonError import citest.service_testing.cli_agent as cli_agent class GCloudObjectObserver(jc.ObjectObserver): def __init__(self, gcloud, args, filter=None): super(GCloudObjectObserver, self).__init__(filter) self.__gcloud = gcloud self.__args = args
Apache License 2.0
hernantz/classyconf
classyconf/configuration.py
Value.__init__
python
def __init__(
    self,
    key: str = None,
    *,
    help: str = "",
    default: NOT_SET = NOT_SET,
    cast: Callable = None,
):
    self.key = key
    self.help = help
    self.default = default
    self.cast = cast
:param key: Name of the value used in file or environment variable.
    Set automatically by the metaclass.
:param default: Default value if none is provided. If left unset,
    loading a config that fails to provide this value will raise a
    UnknownConfiguration exception.
:param cast: Callable to cast variable with. Defaults to type of
    default (if provided), identity if default is not provided or
    raises TypeError if provided cast is not callable.
:param help: Plain-text description of the value.
https://github.com/hernantz/classyconf/blob/2c29fda3db99809a03f550d6275d2566b4dc40f3/classyconf/configuration.py#L53-L76
from collections import OrderedDict
from typing import Callable

from .casts import Boolean, Identity, List, Option, Tuple, evaluate
from .exceptions import UnknownConfiguration
from .loaders import NOT_SET, Environment

as_boolean = Boolean()
as_list = List()
as_tuple = Tuple()
as_option = Option
as_is = Identity()


def getconf(item, default=NOT_SET, cast=None, loaders=None):
    if callable(cast):
        cast = cast
    elif cast is None and (default is NOT_SET or default is None):
        cast = as_is
    elif isinstance(default, bool):
        cast = as_boolean
    elif cast is None:
        cast = type(default)
    else:
        raise TypeError("Cast must be callable")

    for loader in loaders:
        try:
            return cast(loader[item])
        except KeyError:
            continue

    if default is NOT_SET:
        raise UnknownConfiguration("Configuration '{}' not found".format(item))

    return cast(default)


class Value:
MIT License
multiparty/conclave
conclave/comp.py
MPCPushUp.__init__
python
def __init__(self, conclave_config: cc_conf.CodeGenConfig):
    super(MPCPushUp, self).__init__(conclave_config)
    self.reverse = True
Initialize MPCPushUp object.
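The only behavioral difference from the base DagRewriter is reverse = True, which (per the rewrite() method in the context below) makes this pass walk the DAG's topological order from the leaves back toward the roots. A stripped-down, self-contained sketch of that traversal choice, not conclave code itself, with made-up node names:

# Illustration of what reverse=True changes: the rewriter visits a topological
# ordering of the DAG either front-to-back (push-down) or back-to-front (push-up).
def rewrite_order(topo_sorted_nodes, reverse):
    return topo_sorted_nodes[::-1] if reverse else list(topo_sorted_nodes)

topo = ["create_a", "create_b", "concat", "aggregate", "open"]
print(rewrite_order(topo, reverse=False))  # push-down visits roots first
print(rewrite_order(topo, reverse=True))   # push-up visits leaves first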
https://github.com/multiparty/conclave/blob/6edd21c65be8937bb528198896fb6e912011594a/conclave/comp.py#L455-L459
import copy import warnings import conclave.config as cc_conf import conclave.dag as ccdag import conclave.lang as cc import conclave.utils as utils from conclave.utils import defCol def push_op_node_down(top_node: ccdag.OpNode, bottom_node: ccdag.OpNode): assert (len(bottom_node.children) <= 1) child = next(iter(bottom_node.children), None) ccdag.remove_between(top_node, child, bottom_node) grand_parents = copy.copy(top_node.get_sorted_parents()) for idx, grand_parent in enumerate(grand_parents): to_insert = copy.deepcopy(bottom_node) to_insert.out_rel.rename(to_insert.out_rel.name + "_" + str(idx)) to_insert.parents = set() to_insert.children = set() ccdag.insert_between(grand_parent, top_node, to_insert) to_insert.update_stored_with() def split_agg(node: ccdag.Aggregate): assert (len(node.children) <= 1) clone = copy.deepcopy(node) assert clone.aggregator in {"sum", "count"} clone.aggregator = "sum" clone.out_rel.rename(node.out_rel.name + "_obl") assert (len(clone.group_cols) == 1) updated_group_col = copy.deepcopy(node.out_rel.columns[0]) updated_group_col.idx = 0 updated_over_col = copy.deepcopy(node.out_rel.columns[1]) updated_over_col.idx = 1 clone.group_cols = [updated_group_col] clone.agg_col = updated_over_col clone.parents = set() clone.children = set() clone.is_mpc = True child = next(iter(node.children), None) ccdag.insert_between(node, child, clone) def fork_node(node: ccdag.Concat): child_it = enumerate(copy.copy(node.get_sorted_children())) next(child_it) for idx, child in child_it: clone = copy.deepcopy(node) clone.out_rel.rename(node.out_rel.name + "_" + str(idx)) clone.parents = copy.copy(node.parents) warnings.warn("hacky fork_node") clone.ordered = copy.copy(node.ordered) clone.children = {child} for parent in clone.parents: parent.children.add(clone) node.children.remove(child) child.replace_parent(node, clone) child.update_op_specific_cols() class DagRewriter: def __init__(self, conclave_config: cc_conf.CodeGenConfig): self.conclave_config = conclave_config self.reverse = False def rewrite(self, dag: ccdag.OpDag): ordered = dag.top_sort() if self.reverse: ordered = ordered[::-1] for node in ordered: print(type(self).__name__, "rewriting", node.out_rel.name) if isinstance(node, ccdag.HybridAggregate): self._rewrite_hybrid_aggregate(node) elif isinstance(node, ccdag.Aggregate): self._rewrite_aggregate(node) elif isinstance(node, ccdag.Divide): self._rewrite_divide(node) elif isinstance(node, ccdag.Project): self._rewrite_project(node) elif isinstance(node, ccdag.Filter): self._rewrite_filter(node) elif isinstance(node, ccdag.Multiply): self._rewrite_multiply(node) elif isinstance(node, ccdag.JoinFlags): self._rewrite_join_flags(node) elif isinstance(node, ccdag.PublicJoin): self._rewrite_public_join(node) elif isinstance(node, ccdag.HybridJoin): self._rewrite_hybrid_join(node) elif isinstance(node, ccdag.Join): self._rewrite_join(node) elif isinstance(node, ccdag.Concat): self._rewrite_concat(node) elif isinstance(node, ccdag.Close): self._rewrite_close(node) elif isinstance(node, ccdag.Open): self._rewrite_open(node) elif isinstance(node, ccdag.Create): self._rewrite_create(node) elif isinstance(node, ccdag.Distinct): self._rewrite_distinct(node) elif isinstance(node, ccdag.DistinctCount): self._rewrite_distinct_count(node) elif isinstance(node, ccdag.PubJoin): self._rewrite_pub_join(node) elif isinstance(node, ccdag.ConcatCols): self._rewrite_concat_cols(node) elif isinstance(node, ccdag.SortBy): self._rewrite_sort_by(node) elif isinstance(node, ccdag.FilterBy): 
self._rewrite_filter_by(node) elif isinstance(node, ccdag.Union): self._rewrite_union(node) elif isinstance(node, ccdag.PubIntersect): self._rewrite_pub_intersect(node) elif isinstance(node, ccdag.Persist): self._rewrite_persist(node) elif isinstance(node, ccdag.IndexesToFlags): self._rewrite_indexes_to_flags(node) elif isinstance(node, ccdag.NumRows): self._rewrite_num_rows(node) elif isinstance(node, ccdag.Blackbox): self._rewrite_blackbox(node) elif isinstance(node, ccdag.Shuffle): self._rewrite_shuffle(node) elif isinstance(node, ccdag.Index): self._rewrite_index(node) elif isinstance(node, ccdag.CompNeighs): self._rewrite_comp_neighs(node) else: msg = "Unknown class " + type(node).__name__ raise Exception(msg) def _rewrite_aggregate(self, node: ccdag.Aggregate): pass def _rewrite_hybrid_aggregate(self, node: ccdag.HybridAggregate): pass def _rewrite_divide(self, node: ccdag.Divide): pass def _rewrite_project(self, node: ccdag.Project): pass def _rewrite_filter(self, node: ccdag.Filter): pass def _rewrite_multiply(self, node: ccdag.Multiply): pass def _rewrite_join_flags(self, node: ccdag.JoinFlags): pass def _rewrite_public_join(self, node: ccdag.PublicJoin): pass def _rewrite_hybrid_join(self, node: ccdag.HybridJoin): pass def _rewrite_join(self, node: ccdag.Join): pass def _rewrite_concat(self, node: ccdag.Concat): pass def _rewrite_close(self, node: ccdag.Close): pass def _rewrite_open(self, node: ccdag.Open): pass def _rewrite_create(self, node: ccdag.Create): pass def _rewrite_distinct(self, node: ccdag.Distinct): pass def _rewrite_distinct_count(self, node: ccdag.DistinctCount): pass def _rewrite_pub_join(self, node: ccdag.PubJoin): pass def _rewrite_concat_cols(self, node: ccdag.ConcatCols): pass def _rewrite_sort_by(self, node: ccdag.SortBy): pass def _rewrite_filter_by(self, node: ccdag.FilterBy): pass def _rewrite_union(self, node: ccdag.Union): pass def _rewrite_pub_intersect(self, node: ccdag.PubIntersect): pass def _rewrite_persist(self, node: ccdag.Persist): pass def _rewrite_indexes_to_flags(self, node: ccdag.IndexesToFlags): pass def _rewrite_num_rows(self, node: ccdag.NumRows): pass def _rewrite_blackbox(self, node: ccdag.Blackbox): pass def _rewrite_shuffle(self, node: ccdag.Shuffle): pass def _rewrite_index(self, node: ccdag.Index): pass def _rewrite_comp_neighs(self, node: ccdag.CompNeighs): pass class MPCPushDown(DagRewriter): def __init__(self, conclave_config: cc_conf.CodeGenConfig): super(MPCPushDown, self).__init__(conclave_config) @staticmethod def _do_commute(top_op: ccdag.OpNode, bottom_op: ccdag.OpNode): if isinstance(top_op, ccdag.Aggregate): if isinstance(bottom_op, ccdag.Divide): return True elif top_op.aggregator == 'mean': return True elif top_op.aggregator == 'std_dev': return True else: return False else: return False @staticmethod def _rewrite_default(node: ccdag.OpNode): node.is_mpc = node.requires_mpc() def _rewrite_unary_default(self, node: ccdag.UnaryOpNode): parent = next(iter(node.parents)) if parent.is_mpc: if node.is_leaf(): node.is_mpc = True return if isinstance(parent, ccdag.Concat) and parent.is_boundary(): push_op_node_down(parent, node) parent.update_out_rel_cols() elif isinstance(parent, ccdag.Aggregate) and self._do_commute(parent, node): agg_op = parent agg_parent = agg_op.parent if isinstance(agg_parent, ccdag.Concat) and agg_parent.is_boundary(): concat_op = agg_parent assert len(concat_op.children) == 1 push_op_node_down(agg_op, node) updated_node = agg_op.parent push_op_node_down(concat_op, updated_node) 
concat_op.update_out_rel_cols() else: node.is_mpc = True else: node.is_mpc = True else: pass def _rewrite_aggregate(self, node: ccdag.Aggregate): parent = next(iter(node.parents)) if parent.is_mpc: if isinstance(parent, ccdag.Concat) and parent.is_boundary(): if node.aggregator != "mean" and node.aggregator != "std_dev": split_agg(node) push_op_node_down(parent, node) parent.update_out_rel_cols() else: node.is_mpc = True else: node.is_mpc = True else: pass def _rewrite_project(self, node: ccdag.Project): self._rewrite_unary_default(node) def _rewrite_filter(self, node: ccdag.Filter): self._rewrite_default(node) def _rewrite_multiply(self, node: ccdag.Multiply): self._rewrite_unary_default(node) def _rewrite_divide(self, node: ccdag.Divide): self._rewrite_unary_default(node) def _rewrite_public_join(self, node: ccdag.PublicJoin): raise Exception("PublicJoin encountered during MPCPushDown") def _rewrite_hybrid_join(self, node: ccdag.HybridJoin): raise Exception("HybridJoin encountered during MPCPushDown") def _rewrite_join(self, node: ccdag.Join): self._rewrite_default(node) def _rewrite_concat_cols(self, node: ccdag.ConcatCols): self._rewrite_default(node) def _rewrite_concat(self, node: ccdag.Concat): if node.requires_mpc(): node.is_mpc = True if len(node.children) > 1 and node.is_boundary(): fork_node(node) def _rewrite_distinct_count(self, node: ccdag.DistinctCount): self._rewrite_default(node) def _rewrite_pub_join(self, node: ccdag.PubJoin): self._rewrite_default(node) def _rewrite_sort_by(self, node: ccdag.SortBy): self._rewrite_default(node) def _rewrite_filter_by(self, node: ccdag.FilterBy): self._rewrite_default(node) def _rewrite_union(self, node: ccdag.Union): self._rewrite_default(node) def _rewrite_pub_intersect(self, node: ccdag.PubIntersect): self._rewrite_default(node) def _rewrite_persist(self, node: ccdag.Persist): self._rewrite_default(node) def _rewrite_indexes_to_flags(self, node: ccdag.IndexesToFlags): self._rewrite_default(node) class MPCPushUp(DagRewriter):
MIT License
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/binary_sensor/netatmo.py
NetatmoBinarySensor.name
python
def name(self):
    return self._name
Return the name of the Netatmo device and this sensor.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/binary_sensor/netatmo.py#L137-L139
import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( BinarySensorDevice, PLATFORM_SCHEMA) from homeassistant.components.netatmo import CameraData from homeassistant.const import CONF_TIMEOUT from homeassistant.helpers import config_validation as cv _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ['netatmo'] WELCOME_SENSOR_TYPES = { "Someone known": "motion", "Someone unknown": "motion", "Motion": "motion", } PRESENCE_SENSOR_TYPES = { "Outdoor motion": "motion", "Outdoor human": "motion", "Outdoor animal": "motion", "Outdoor vehicle": "motion" } TAG_SENSOR_TYPES = { "Tag Vibration": "vibration", "Tag Open": "opening" } CONF_HOME = 'home' CONF_CAMERAS = 'cameras' CONF_WELCOME_SENSORS = 'welcome_sensors' CONF_PRESENCE_SENSORS = 'presence_sensors' CONF_TAG_SENSORS = 'tag_sensors' DEFAULT_TIMEOUT = 90 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_CAMERAS, default=[]): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_HOME): cv.string, vol.Optional(CONF_PRESENCE_SENSORS, default=list(PRESENCE_SENSOR_TYPES)): vol.All(cv.ensure_list, [vol.In(PRESENCE_SENSOR_TYPES)]), vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, vol.Optional(CONF_WELCOME_SENSORS, default=list(WELCOME_SENSOR_TYPES)): vol.All(cv.ensure_list, [vol.In(WELCOME_SENSOR_TYPES)]), }) def setup_platform(hass, config, add_devices, discovery_info=None): netatmo = hass.components.netatmo home = config.get(CONF_HOME) timeout = config.get(CONF_TIMEOUT) if timeout is None: timeout = DEFAULT_TIMEOUT module_name = None import lnetatmo try: data = CameraData(netatmo.NETATMO_AUTH, home) if not data.get_camera_names(): return None except lnetatmo.NoDevice: return None welcome_sensors = config.get( CONF_WELCOME_SENSORS, WELCOME_SENSOR_TYPES) presence_sensors = config.get( CONF_PRESENCE_SENSORS, PRESENCE_SENSOR_TYPES) tag_sensors = config.get(CONF_TAG_SENSORS, TAG_SENSOR_TYPES) for camera_name in data.get_camera_names(): camera_type = data.get_camera_type(camera=camera_name, home=home) if camera_type == 'NACamera': if CONF_CAMERAS in config: if config[CONF_CAMERAS] != [] and camera_name not in config[CONF_CAMERAS]: continue for variable in welcome_sensors: add_devices([NetatmoBinarySensor( data, camera_name, module_name, home, timeout, camera_type, variable)], True) if camera_type == 'NOC': if CONF_CAMERAS in config: if config[CONF_CAMERAS] != [] and camera_name not in config[CONF_CAMERAS]: continue for variable in presence_sensors: add_devices([NetatmoBinarySensor( data, camera_name, module_name, home, timeout, camera_type, variable)], True) for module_name in data.get_module_names(camera_name): for variable in tag_sensors: camera_type = None add_devices([NetatmoBinarySensor( data, camera_name, module_name, home, timeout, camera_type, variable)], True) class NetatmoBinarySensor(BinarySensorDevice): def __init__(self, data, camera_name, module_name, home, timeout, camera_type, sensor): self._data = data self._camera_name = camera_name self._module_name = module_name self._home = home self._timeout = timeout if home: self._name = '{} / {}'.format(home, camera_name) else: self._name = camera_name if module_name: self._name += ' / ' + module_name self._sensor_name = sensor self._name += ' ' + sensor self._cameratype = camera_type self._state = None @property
MIT License
heyman/locust
locust/stats.py
StatsEntry.get_current_response_time_percentile
python
def get_current_response_time_percentile(self, percent):
    if not self.use_response_times_cache:
        raise ValueError("StatsEntry.use_response_times_cache must be set to True if we should be able to calculate the _current_ response time percentile")
    t = int(time.time())
    acceptable_timestamps = []
    for i in xrange(9):
        acceptable_timestamps.append(t-CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW-i)
        acceptable_timestamps.append(t-CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW+i)

    cached = None
    for ts in acceptable_timestamps:
        if ts in self.response_times_cache:
            cached = self.response_times_cache[ts]
            break

    if cached:
        return calculate_response_time_percentile(
            diff_response_time_dicts(self.response_times, cached.response_times),
            self.num_requests - cached.num_requests,
            percent,
        )
Calculate the *current* response time for a certain percentile. We use a sliding window of (approximately) the last 10 seconds (specified by CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW) when calculating this.
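A self-contained sketch of the percentile lookup the method ultimately delegates to, mirroring calculate_response_time_percentile from the context below. The histogram values are made up; the real method first subtracts a cached histogram from roughly 10 seconds ago so that only samples inside the sliding window are counted:

def percentile_from_histogram(response_times, num_requests, percent):
    # Walk the bucketed response times from slowest to fastest until the
    # remaining (faster) request count drops to the requested percentile.
    threshold = int(num_requests * percent)
    processed = 0
    for rt in sorted(response_times, reverse=True):
        processed += response_times[rt]
        if num_requests - processed <= threshold:
            return rt
    return 0

# Hypothetical histogram: {rounded response time in ms: request count}.
recent = {50: 40, 120: 30, 400: 20, 1200: 10}   # 100 requests in the window
print(percentile_from_histogram(recent, 100, 0.95))  # -> 1200 (95th percentile)
print(percentile_from_histogram(recent, 100, 0.50))  # -> 120  (median)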
https://github.com/heyman/locust/blob/4baeb1d5ab9c291a9a48798b7d16c86184b3e831/locust/stats.py#L442-L478
import hashlib import time from collections import namedtuple, OrderedDict from copy import copy from itertools import chain import gevent import six from six.moves import xrange from . import events from .exception import StopLocust from .log import console_logger STATS_NAME_WIDTH = 60 CSV_STATS_INTERVAL_SEC = 2 CONSOLE_STATS_INTERVAL_SEC = 2 CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW = 10 CachedResponseTimes = namedtuple("CachedResponseTimes", ["response_times", "num_requests"]) class RequestStatsAdditionError(Exception): pass def calculate_response_time_percentile(response_times, num_requests, percent): num_of_request = int((num_requests * percent)) processed_count = 0 for response_time in sorted(six.iterkeys(response_times), reverse=True): processed_count += response_times[response_time] if(num_requests - processed_count <= num_of_request): return response_time return 0 def diff_response_time_dicts(latest, old): new = {} for time in latest: diff = latest[time] - old.get(time, 0) if diff: new[time] = diff return new class RequestStats(object): def __init__(self): self.entries = {} self.errors = {} self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=True) self.start_time = None @property def num_requests(self): return self.total.num_requests @property def num_none_requests(self): return self.total.num_none_requests @property def num_failures(self): return self.total.num_failures @property def last_request_timestamp(self): return self.total.last_request_timestamp def log_request(self, method, name, response_time, content_length): self.total.log(response_time, content_length) self.get(name, method).log(response_time, content_length) def log_error(self, method, name, error): self.total.log_error(error) self.get(name, method).log_error(error) key = StatsError.create_key(method, name, error) entry = self.errors.get(key) if not entry: entry = StatsError(method, name, error) self.errors[key] = entry entry.occurred() def get(self, name, method): entry = self.entries.get((name, method)) if not entry: entry = StatsEntry(self, name, method) self.entries[(name, method)] = entry return entry def reset_all(self): self.start_time = time.time() self.total.reset() self.errors = {} for r in six.itervalues(self.entries): r.reset() def clear_all(self): self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=True) self.entries = {} self.errors = {} self.start_time = None def serialize_stats(self): return [self.entries[key].get_stripped_report() for key in six.iterkeys(self.entries) if not (self.entries[key].num_requests == 0 and self.entries[key].num_failures == 0)] def serialize_errors(self): return dict([(k, e.to_dict()) for k, e in six.iteritems(self.errors)]) class StatsEntry(object): name = None method = None num_requests = None num_none_requests = None num_failures = None total_response_time = None min_response_time = None max_response_time = None num_reqs_per_sec = None response_times = None use_response_times_cache = False response_times_cache = None total_content_length = None start_time = None last_request_timestamp = None def __init__(self, stats, name, method, use_response_times_cache=False): self.stats = stats self.name = name self.method = method self.use_response_times_cache = use_response_times_cache self.reset() def reset(self): self.start_time = time.time() self.num_requests = 0 self.num_none_requests = 0 self.num_failures = 0 self.total_response_time = 0 self.response_times = {} self.min_response_time = None self.max_response_time = 0 
self.last_request_timestamp = None self.num_reqs_per_sec = {} self.total_content_length = 0 if self.use_response_times_cache: self.response_times_cache = OrderedDict() self._cache_response_times(int(time.time())) def log(self, response_time, content_length): t = int(time.time()) if self.use_response_times_cache and self.last_request_timestamp and t > self.last_request_timestamp: self._cache_response_times(t-1) self.num_requests += 1 self._log_time_of_request(t) self._log_response_time(response_time) self.total_content_length += content_length def _log_time_of_request(self, t): self.num_reqs_per_sec[t] = self.num_reqs_per_sec.setdefault(t, 0) + 1 self.last_request_timestamp = t def _log_response_time(self, response_time): if response_time is None: self.num_none_requests += 1 return self.total_response_time += response_time if self.min_response_time is None: self.min_response_time = response_time self.min_response_time = min(self.min_response_time, response_time) self.max_response_time = max(self.max_response_time, response_time) if response_time < 100: rounded_response_time = response_time elif response_time < 1000: rounded_response_time = int(round(response_time, -1)) elif response_time < 10000: rounded_response_time = int(round(response_time, -2)) else: rounded_response_time = int(round(response_time, -3)) self.response_times.setdefault(rounded_response_time, 0) self.response_times[rounded_response_time] += 1 def log_error(self, error): self.num_failures += 1 @property def fail_ratio(self): try: return float(self.num_failures) / self.num_requests except ZeroDivisionError: if self.num_failures > 0: return 1.0 else: return 0.0 @property def avg_response_time(self): try: return float(self.total_response_time) / (self.num_requests - self.num_none_requests) except ZeroDivisionError: return 0 @property def median_response_time(self): if not self.response_times: return 0 median = median_from_dict(self.num_requests - self.num_none_requests, self.response_times) or 0 if median > self.max_response_time: median = self.max_response_time elif median < self.min_response_time: median = self.min_response_time return median @property def current_rps(self): if self.stats.last_request_timestamp is None: return 0 slice_start_time = max(self.stats.last_request_timestamp - 12, int(self.stats.start_time or 0)) reqs = [self.num_reqs_per_sec.get(t, 0) for t in range(slice_start_time, self.stats.last_request_timestamp-2)] return avg(reqs) @property def total_rps(self): if not self.stats.last_request_timestamp or not self.stats.start_time: return 0.0 return self.num_requests / max(self.stats.last_request_timestamp - self.stats.start_time, 1) @property def avg_content_length(self): try: return self.total_content_length / self.num_requests except ZeroDivisionError: return 0 def extend(self, other): if self.last_request_timestamp is not None and other.last_request_timestamp is not None: self.last_request_timestamp = max(self.last_request_timestamp, other.last_request_timestamp) elif other.last_request_timestamp is not None: self.last_request_timestamp = other.last_request_timestamp self.start_time = min(self.start_time, other.start_time) self.num_requests = self.num_requests + other.num_requests self.num_none_requests = self.num_none_requests + other.num_none_requests self.num_failures = self.num_failures + other.num_failures self.total_response_time = self.total_response_time + other.total_response_time self.max_response_time = max(self.max_response_time, other.max_response_time) if self.min_response_time is not None 
and other.min_response_time is not None: self.min_response_time = min(self.min_response_time, other.min_response_time) elif other.min_response_time is not None: self.min_response_time = other.min_response_time self.total_content_length = self.total_content_length + other.total_content_length for key in other.response_times: self.response_times[key] = self.response_times.get(key, 0) + other.response_times[key] for key in other.num_reqs_per_sec: self.num_reqs_per_sec[key] = self.num_reqs_per_sec.get(key, 0) + other.num_reqs_per_sec[key] def serialize(self): return { "name": self.name, "method": self.method, "last_request_timestamp": self.last_request_timestamp, "start_time": self.start_time, "num_requests": self.num_requests, "num_none_requests": self.num_none_requests, "num_failures": self.num_failures, "total_response_time": self.total_response_time, "max_response_time": self.max_response_time, "min_response_time": self.min_response_time, "total_content_length": self.total_content_length, "response_times": self.response_times, "num_reqs_per_sec": self.num_reqs_per_sec, } @classmethod def unserialize(cls, data): obj = cls(None, data["name"], data["method"]) for key in [ "last_request_timestamp", "start_time", "num_requests", "num_none_requests", "num_failures", "total_response_time", "max_response_time", "min_response_time", "total_content_length", "response_times", "num_reqs_per_sec", ]: setattr(obj, key, data[key]) return obj def get_stripped_report(self): report = self.serialize() self.reset() return report def __str__(self): fail_percent = self.fail_ratio * 100 return (" %-" + str(STATS_NAME_WIDTH) + "s %7d %12s %7d %7d %7d | %7d %7.2f") % ( (self.method and self.method + " " or "") + self.name, self.num_requests, "%d(%.2f%%)" % (self.num_failures, fail_percent), self.avg_response_time, self.min_response_time or 0, self.max_response_time, self.median_response_time or 0, self.current_rps or 0 ) def get_response_time_percentile(self, percent): return calculate_response_time_percentile(self.response_times, self.num_requests, percent)
MIT License
hugapi/hug
hug/input_format.py
text
python
def text(body, charset="utf-8", **kwargs):
    return body.read().decode(charset)
Takes plain text data
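Because the formatter only needs a file-like body with read(), it can be exercised directly. A minimal sketch assuming the text() function above; io.BytesIO stands in for the request body stream that Falcon/hug would normally supply:

import io

# Stand-in for a request body stream carrying UTF-8 encoded plain text.
body = io.BytesIO("héllo, hug".encode("utf-8"))
print(text(body, charset="utf-8"))  # -> 'héllo, hug'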
https://github.com/hugapi/hug/blob/8b5ac00632543addfdcecc326d0475a685a0cba7/hug/input_format.py#L35-L37
from __future__ import absolute_import

import re
from cgi import parse_multipart
from urllib.parse import parse_qs as urlencoded_converter

from falcon.util.uri import parse_query_string

from hug.format import content_type, underscore
from hug.json_module import json as json_converter


@content_type("text/plain")
MIT License
ibm/power-up
scripts/python/lib/switch_common.py
SwitchCommon.show_mac_address_table
python
def show_mac_address_table(self, format=False):
    if self.mode == 'passive':
        mac_info = {}
        try:
            with open(self.host, 'r') as f:
                mac_info = f.read()
        except IOError as error:
            self.log.error(
                'Passive switch MAC address table file not found (%s)' % error)
            raise
        mac_info = self.get_port_to_mac(mac_info)
        return mac_info

    mac_info = self.send_cmd(self.SHOW_MAC_ADDRESS_TABLE)
    if not format or format == 'raw':
        return mac_info
    return self.get_port_to_mac(mac_info, format, self.PORT_PREFIX)
Get the switch MAC address table. The returned text string can be raw or
optionally formatted.

Args:
    format (bool or str): set to 'dict' or 'std' to return a dictionary
Returns:
    raw string if format=False
    dictionary of ports and MAC address values in native switch form
    if format='dict'.
    ordered dictionary of ports and MAC address values in a standard
    format if format='std'.
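get_port_to_mac() itself is not included in the context below, so the following is only a rough, hypothetical illustration of the kind of port-to-MAC mapping the formatted modes aim at, not power-up's actual parser; the raw table text is made up:

import re
from collections import OrderedDict

# Made-up raw output resembling a 'show mac address-table' dump.
raw = """\
VLAN  MAC Address        Type     Port
 10   0c:c4:7a:11:22:33  dynamic  Eth1/5
 20   0c:c4:7a:44:55:66  dynamic  Eth1/7
"""

port_to_mac = OrderedDict()
for line in raw.splitlines():
    # Pull out a MAC address and the port it was learned on, if present.
    match = re.search(r'((?:[0-9a-f]{2}:){5}[0-9a-f]{2}).*?(Eth\S+)', line, re.I)
    if match:
        port_to_mac.setdefault(match.group(2), []).append(match.group(1))

print(port_to_mac)
# OrderedDict([('Eth1/5', ['0c:c4:7a:11:22:33']), ('Eth1/7', ['0c:c4:7a:44:55:66'])])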
https://github.com/ibm/power-up/blob/53a1db7e86726cf6d1783afaf083c9b7dcabdef8/scripts/python/lib/switch_common.py#L364-L394
import os import stat import subprocess import re import netaddr from orderedattrdict import AttrDict from enum import Enum from filelock import Timeout, FileLock from socket import gethostbyname from time import sleep from random import random import lib.logger as logger from lib.ssh import SSH from lib.switch_exception import SwitchException from lib.genesis import get_switch_lock_path FILE_PATH = os.path.dirname(os.path.abspath(__file__)) SWITCH_LOCK_PATH = get_switch_lock_path() class SwitchCommon(object): ENABLE_REMOTE_CONFIG = 'configure terminal ; {} ' IFC_ETH_CFG = 'interface ethernet {} ' IFC_PORT_CH_CFG = 'interface port-channel {} ' NO_IFC_PORT_CH_CFG = 'no interface port-channel {} ' PORT_PREFIX = 'Eth' SEP = ';' SHOW_VLANS = 'show vlan' CREATE_VLAN = 'vlan {}' DELETE_VLAN = 'no vlan {}' SHOW_PORT = 'show interface brief' CLEAR_MAC_ADDRESS_TABLE = 'clear mac address-table dynamic' SHOW_MAC_ADDRESS_TABLE = 'show mac address-table ;' ENABLE_LACP = 'feature lacp' NO_CHANNEL_GROUP = 'no channel-group' CHANNEL_GROUP_MODE = 'channel-group {} mode {} ' SHOW_PORT_CHANNEL = 'show port-channel summary' SWITCHPORT_MODE = 'switchport mode {} ' SWITCHPORT_ACCESS_VLAN = 'switchport access vlan {} ' SWITCHPORT_TRUNK_NATIVE_VLAN = 'switchport trunk native vlan {} ' SWITCHPORT_TRUNK_ALLOWED_VLAN = 'switchport trunk allowed vlan {} {}' SET_MTU = 'mtu {}' NO_MTU = 'no mtu' SHUTDOWN = 'shutdown' NO_SHUTDOWN = 'no shutdown' FORCE = 'force' MGMT_INTERFACE_CONFIG = 'interface ip {}' SET_INTERFACE_IPADDR = ' ;ip address {}' SET_INTERFACE_MASK = ' ;ip netmask {}' SET_VLAN = ' ;vlan {}' SHOW_IP_INTERFACE_BRIEF = 'show ip interface brief' SHOW_INTERFACE = 'show interface vlan{}' SET_INTERFACE = ('feature interface-vlan ;' 'interface vlan {} ;' 'ip address {} {} ;' 'management ;' 'no shutdown') def __init__(self, host=None, userid=None, password=None, mode=None, outfile=None): self.log = logger.getlogger() pass class AllowOp(Enum): ADD = 'add' ALL = 'all' EXCEPT = 'except' NONE = 'none' REMOVE = 'remove' class PortMode(Enum): ACCESS = 'access' FEX_FABRIC = 'fex-fabric' TRUNK = 'trunk' HYBRID = '' TRUNK_NATIVE = '' def send_cmd(self, cmd): if self.mode == 'passive': f = open(self.outfile, 'a+') f.write(cmd + '\n') f.close() return host_ip = gethostbyname(self.host) lockfile = os.path.join(SWITCH_LOCK_PATH, host_ip + '.lock') if not os.path.isfile(lockfile): os.mknod(lockfile) os.chmod(lockfile, stat.S_IRWXO | stat.S_IRWXG | stat.S_IRWXU) lock = FileLock(lockfile) cnt = 0 while cnt < 5 and not lock.is_locked: if cnt > 0: self.log.info('Waiting to acquire lock for switch {}'. format(self.host)) cnt += 1 try: lock.acquire(timeout=5, poll_intervall=0.05) sleep(0.01) except Timeout: pass if lock.is_locked: if self.ENABLE_REMOTE_CONFIG: cmd = self.ENABLE_REMOTE_CONFIG.format(cmd) self.log.debug(cmd) ssh = SSH() __, data, _ = ssh.exec_cmd( self.host, self.userid, self.password, cmd, ssh_log=True, look_for_keys=False) lock.release() sleep(0.06 + random() / 100) if lock.is_locked: self.log.error('Lock is locked. Should be unlocked') return data.decode("utf-8") else: self.log.error('Unable to acquire lock for switch {}'.format(self.host)) raise SwitchException('Unable to acquire lock for switch {}'. 
format(self.host)) def get_enums(self): return self.PortMode, self.AllowOp def show_ports(self, format='raw'): if self.mode == 'passive': return None ports = {} port_info = self.send_cmd(self.SHOW_PORT) if format == 'raw': return port_info elif format == 'std': port_info = port_info.splitlines() for line in port_info: match = re.search( r'Eth([0-9/]+)\s+(\d+)\s+\w+\s+(access|trunk)', line) if match: ports[match.group(1)] = { 'mode': match.group(3), 'nvlan': match.group(2), 'avlans': ''} port_info = self.send_cmd('show interface trunk').split('Port') for item in port_info: if 'Vlans Allowed on Trunk' in item: item = item.splitlines() for line in item: match = re.search( r'Eth((?:\d+/)+\d+)\s+((?:\d+[,-])*\d+)', line) if match: ports[match.group(1)]['avlans'] = match.group(2) return ports def show_vlans(self): if self.mode == 'passive': return None self.log.debug(self.SHOW_VLANS) vlan_info = self.send_cmd(self.SHOW_VLANS) return vlan_info def show_native_vlan(self, port): if self.mode == 'passive': return None port = str(port) ports = self.show_ports(format='std') return ports[port]['nvlan'] def set_switchport_mode(self, port, mode, vlan=None): port = str(port) cmd = self.IFC_ETH_CFG.format(port) + self.SEP cmd += self.SWITCHPORT_MODE.format(mode.value) if vlan: if mode.value == 'trunk': cmd += self.SEP + self.SWITCHPORT_TRUNK_NATIVE_VLAN.format(vlan) if mode.value == 'access': cmd += self.SEP + self.SWITCHPORT_ACCESS_VLAN.format(vlan) self.send_cmd(cmd) ports = self.show_ports(format='std') if port not in ports: msg = 'Unable to verify setting of switchport mode' msg += 'for port {}. May already be in a channel group.' msg.format(port) self.log.debug(msg) return if self.mode == 'passive' or ports[port]['mode'] == mode.value: self.log.debug( 'Port {} is in {} mode'.format(port, mode.value)) else: raise SwitchException( 'Failed setting port {} to {} mode'.format(port, mode.value)) if vlan: if self.mode == 'passive' or str(vlan) == ports[port]['nvlan']: msg = 'PVID/Native vlan {} set on port {}'.format(vlan, port) self.log.debug(msg) else: msg = 'Failed setting PVID/Native vlan {} on port {}'.format( vlan, port) self.log.error(msg) raise SwitchException(msg) def is_port_in_trunk_mode(self, port): if self.mode == 'passive': return None port = str(port) ports = self.show_ports(format='std') return self.PortMode.TRUNK.value in ports[port]['mode'] def is_port_in_access_mode(self, port): if self.mode == 'passive': return None port = str(port) ports = self.show_ports('std') return self.PortMode.ACCESS.value in ports[port]['mode'] def allowed_vlans_port(self, port, operation, vlans=''): if isinstance(vlans, (tuple, list)): vlans = vlans[:] vlans = [str(vlans[i]) for i in range(len(vlans))] vlans = ','.join(vlans) else: vlans = str(vlans) cmd = self.IFC_ETH_CFG.format(port) + self.SEP + self.SWITCHPORT_TRUNK_ALLOWED_VLAN.format(operation.value, vlans) self.send_cmd(cmd) res = self.is_vlan_allowed_for_port(vlans, port) if operation.value == 'add': if res is None: return elif not res: msg = 'Not all vlans in {} were added to port {}'. format(vlans, port) self.log.error(msg) else: self.log.debug('vlans {} were added to port {}'. format(vlans, port)) if operation.value == 'remove': if res is None: return elif res: msg = 'Not all vlans in {} were removed from port {}'. format(vlans, port) self.log.error(msg) else: self.log.debug('vlans {} were removed from port {}'. 
format(vlans, port)) def is_vlan_allowed_for_port(self, vlans, port): if self.mode == 'passive': return None vlans = str(vlans) vlans = vlans.split(',') result = True port = str(port) ports = self.show_ports('std') if port not in ports: msg = 'Unable to verify setting of vlans ' msg += 'for port {}. May already be in a channel group.' msg = msg.format(port) self.log.debug(msg) return avlans = ports[port]['avlans'] avlans = avlans.split(',') for vlan in vlans: res = False for i, _vlans in enumerate(avlans): _vlans = _vlans.strip(' ') if not vlan: res = True break if not _vlans: break elif '-' in vlan and vlan == _vlans: res = True break elif int(vlan) >= int(_vlans.split('-')[0]) and int(vlan) <= int(_vlans.split('-')[-1]): res = True break else: pass result = result and res return result def create_vlan(self, vlan): self.send_cmd(self.CREATE_VLAN.format(vlan)) if self.mode == 'passive' or self.is_vlan_created(vlan): self.log.debug('Created VLAN {}'.format(vlan)) else: raise SwitchException('Failed creating VLAN {}'.format(vlan)) def delete_vlan(self, vlan): self.send_cmd(self.DELETE_VLAN.format(vlan)) if self.mode == 'active' and self.is_vlan_created(vlan): self.log.warning( 'Failed deleting VLAN {}'.format(vlan)) raise SwitchException( 'Failed deleting VLAN {}'.format(vlan)) self.log.info('vlan {} deleted.'.format(vlan)) return def is_vlan_created(self, vlan): if self.mode == 'passive': return None if re.search( r'^' + str(vlan), self.send_cmd(self.SHOW_VLANS), re.MULTILINE): return True return False def set_mtu_for_port(self, port, mtu): self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.SHUTDOWN) if mtu == 0: self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.NO_MTU) else: self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.SET_MTU.format(mtu)) self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.NO_SHUTDOWN)
Apache License 2.0
apache/incubator-retired-cotton
mysos/scheduler/scheduler.py
parse_size
python
def parse_size(size):
    if not size:
        resources = dict(cpus=DEFAULT_TASK_CPUS, mem=DEFAULT_TASK_MEM, disk=DEFAULT_TASK_DISK)
    else:
        try:
            resources_ = json.loads(size)
            resources = dict(
                cpus=float(resources_['cpus']),
                mem=parse_data(resources_['mem']),
                disk=parse_data(resources_['disk']))
        except (TypeError, KeyError, ValueError, InvalidData):
            raise ValueError("'size' should be a JSON dictionary with keys 'cpus', 'mem' and 'disk'")
    return resources
Return the resources specified in 'size' as a dictionary.
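A sketch of the expected input shape, assuming twitter.common's parse_data accepts human-readable strings such as '1gb' for the 'mem' and 'disk' fields (an assumption; only the defaults above hint at the accepted units). The size spec itself is hypothetical:

# Hypothetical size spec passed by a caller creating a cluster.
spec = '{"cpus": 2.0, "mem": "1gb", "disk": "10gb"}'
resources = parse_size(spec)
print(resources['cpus'])                      # 2.0
print(resources['mem'], resources['disk'])    # Amount(Data) quantities

# An empty/None size falls back to DEFAULT_TASK_CPUS / _MEM / _DISK.
print(parse_size(None))

# A malformed spec surfaces as a single ValueError.
try:
    parse_size('{"cpus": "a lot"}')
except ValueError as e:
    print(e)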
https://github.com/apache/incubator-retired-cotton/blob/4aa9bb0acdd8c609686b5d370ef4b61a520364ef/mysos/scheduler/scheduler.py#L547-L563
from collections import OrderedDict from datetime import datetime import json import posixpath import random import threading import traceback import sys from mysos.common.cluster import get_cluster_path from mysos.common.decorators import logged from .launcher import ( EXECUTOR_CPUS_EPSILON, EXECUTOR_DISK_EPSILON, EXECUTOR_MEM_EPSILON, MySQLClusterLauncher ) from .password import gen_password, PasswordBox from .state import MySQLCluster, Scheduler, StateProvider import mesos.interface import mesos.interface.mesos_pb2 as mesos_pb2 from twitter.common import log from twitter.common.collections.orderedset import OrderedSet from twitter.common.metrics import AtomicGauge, LambdaGauge, MutatorGauge, Observable from twitter.common.quantity import Amount, Data, Time from twitter.common.quantity.parse_simple import InvalidData, parse_data DEFAULT_TASK_CPUS = 1.0 DEFAULT_TASK_DISK = Amount(2, Data.GB) DEFAULT_TASK_MEM = Amount(512, Data.MB) INCOMPATIBLE_ROLE_OFFER_REFUSE_DURATION = Amount(sys.maxint / 2, Time.NANOSECONDS) class MysosScheduler(mesos.interface.Scheduler, Observable): class Error(Exception): pass class ClusterExists(Error): pass class ClusterNotFound(Error): pass class InvalidUser(Error): pass class ServiceUnavailable(Error): pass class Metrics(object): pass def __init__( self, state, state_provider, framework_user, executor_uri, executor_cmd, kazoo, zk_url, election_timeout, admin_keypath, scheduler_key, installer_args=None, backup_store_args=None, executor_environ=None, executor_source_prefix=None, framework_role='*'): self._lock = threading.Lock() if not isinstance(state, Scheduler): raise TypeError("'state' should be an instance of Scheduler") self._state = state if not isinstance(state_provider, StateProvider): raise TypeError("'state_provider' should be an instance of StateProvider") self._state_provider = state_provider self._framework_user = framework_user self._executor_uri = executor_uri self._executor_cmd = executor_cmd self._framework_role = framework_role self._election_timeout = election_timeout self._admin_keypath = admin_keypath self._installer_args = installer_args self._backup_store_args = backup_store_args self._executor_environ = executor_environ self._executor_source_prefix = executor_source_prefix self._driver = None self._discover_zk_url = posixpath.join(zk_url, "discover") self._kazoo = kazoo self._scheduler_key = scheduler_key self._password_box = PasswordBox(scheduler_key) self._tasks = {} self._launchers = OrderedDict() self.stopped = threading.Event() self.connected = threading.Event() self._setup_metrics() def _setup_metrics(self): self._metrics = self.Metrics() self._metrics.cluster_count = self.metrics.register(AtomicGauge('cluster_count', 0)) self._metrics.total_requested_cpus = self.metrics.register( MutatorGauge('total_requested_cpus', 0.)) self._metrics.total_requested_mem_mb = self.metrics.register( MutatorGauge('total_requested_mem_mb', 0.)) self._metrics.total_requested_disk_mb = self.metrics.register( MutatorGauge('total_requested_disk_mb', 0.)) self._metrics.framework_registered = self.metrics.register( MutatorGauge('framework_registered', 0)) self._startup_time = datetime.utcnow() self._metrics.uptime = self.metrics.register( LambdaGauge('uptime', lambda: (datetime.utcnow() - self._startup_time).total_seconds())) self._metrics.tasks_lost = self.metrics.register(AtomicGauge('tasks_lost', 0)) self._metrics.tasks_finished = self.metrics.register(AtomicGauge('tasks_finished', 0)) self._metrics.tasks_failed = 
self.metrics.register(AtomicGauge('tasks_failed', 0)) self._metrics.tasks_killed = self.metrics.register(AtomicGauge('tasks_killed', 0)) self._metrics.resource_offers = self.metrics.register(AtomicGauge('resource_offers', 0)) self._metrics.offers_incompatible_role = self.metrics.register( AtomicGauge('offers_incompatible_role', 0)) self._metrics.tasks_launched = self.metrics.register(AtomicGauge('tasks_launched', 0)) self._metrics.offers_unused = self.metrics.register(AtomicGauge('offers_unused', 0)) def create_cluster( self, cluster_name, cluster_user, num_nodes, size=None, backup_id=None, cluster_password=None): with self._lock: if not self._driver: raise self.ServiceUnavailable("Service unavailable. Try again later") if cluster_name in self._state.clusters: raise self.ClusterExists("Cluster '%s' already exists" % cluster_name) if not cluster_user: raise self.InvalidUser('Invalid user name: %s' % cluster_user) num_nodes = int(num_nodes) if num_nodes <= 0: raise ValueError("Invalid number of cluster nodes: %s" % num_nodes) resources = parse_size(size) if (resources['cpus'] <= EXECUTOR_CPUS_EPSILON or resources['mem'] <= EXECUTOR_MEM_EPSILON or resources['disk'] <= EXECUTOR_DISK_EPSILON): raise ValueError( "Instance 'size' too small. It should be larger than what Mysos executor consumes: " "(cpus, mem, disk) = (%s, %s, %s)" % ( EXECUTOR_CPUS_EPSILON, EXECUTOR_MEM_EPSILON, EXECUTOR_DISK_EPSILON)) log.info("Requested resources per instance for cluster %s: %s" % (resources, cluster_name)) self._metrics.total_requested_cpus.write( self._metrics.total_requested_cpus.read() + resources['cpus'] * num_nodes) self._metrics.total_requested_mem_mb.write( self._metrics.total_requested_mem_mb.read() + resources['mem'].as_(Data.MB) * num_nodes) self._metrics.total_requested_disk_mb.write( self._metrics.total_requested_disk_mb.read() + resources['disk'].as_(Data.MB) * num_nodes) self._state.clusters.add(cluster_name) self._state_provider.dump_scheduler_state(self._state) if not cluster_password: log.info("Generating password for cluster %s" % cluster_name) cluster_password = gen_password() cluster = MySQLCluster( cluster_name, cluster_user, self._password_box.encrypt(cluster_password), num_nodes, cpus=resources['cpus'], mem=resources['mem'], disk=resources['disk'], backup_id=backup_id) self._state_provider.dump_cluster_state(cluster) log.info("Creating launcher for cluster %s" % cluster_name) self._launchers[cluster_name] = MySQLClusterLauncher( self._driver, cluster, self._state_provider, self._discover_zk_url, self._kazoo, self._framework_user, self._executor_uri, self._executor_cmd, self._election_timeout, self._admin_keypath, self._scheduler_key, installer_args=self._installer_args, backup_store_args=self._backup_store_args, executor_environ=self._executor_environ, executor_source_prefix=self._executor_source_prefix, framework_role=self._framework_role) self._metrics.cluster_count.increment() return get_cluster_path(self._discover_zk_url, cluster_name), cluster_password def delete_cluster(self, cluster_name, password): with self._lock: if not self._driver: raise self.ServiceUnavailable("Service unavailable. 
Try again later") if cluster_name not in self._state.clusters: raise self.ClusterNotFound("Cluster '%s' not found" % cluster_name) launcher = self._launchers[cluster_name] launcher.kill(password) log.info("Attempted to kill cluster %s" % cluster_name) self._metrics.cluster_count.decrement() cluster_info = launcher.cluster_info self._metrics.total_requested_cpus.write( self._metrics.total_requested_cpus.read() - cluster_info.total_cpus) self._metrics.total_requested_mem_mb.write( self._metrics.total_requested_mem_mb.read() - cluster_info.total_mem_mb) self._metrics.total_requested_disk_mb.write( self._metrics.total_requested_disk_mb.read() - cluster_info.total_disk_mb) if launcher.terminated: log.info("Deleting the launcher for cluster %s directly because the cluster has already " "terminated" % launcher.cluster_name) self._delete_launcher(launcher) return get_cluster_path(self._discover_zk_url, cluster_name) @property def clusters(self): with self._lock: for launcher in self._launchers.values(): yield launcher.cluster_info def _stop(self): if self._driver: self._driver.stop(True) self.stopped.set() @logged def registered(self, driver, frameworkId, masterInfo): with self._lock: self._driver = driver self._state.framework_info.id.value = frameworkId.value self._state_provider.dump_scheduler_state(self._state) try: self._recover() except Exception as e: log.error("Stopping scheduler because: %s" % e) log.error(traceback.format_exc()) self._stop() return self._metrics.framework_registered.write(1) self.connected.set() def _recover(self): for cluster_name in OrderedSet(self._state.clusters): log.info("Recovering launcher for cluster %s" % cluster_name) cluster = self._state_provider.load_cluster_state(cluster_name) if not cluster: log.info("Skipping cluster %s because its state cannot be found" % cluster_name) self._state.clusters.remove(cluster_name) self._state_provider.dump_scheduler_state(self._state) continue for task_id in cluster.tasks: self._tasks[task_id] = cluster.name self._launchers[cluster.name] = MySQLClusterLauncher( self._driver, cluster, self._state_provider, self._discover_zk_url, self._kazoo, self._framework_user, self._executor_uri, self._executor_cmd, self._election_timeout, self._admin_keypath, self._scheduler_key, installer_args=self._installer_args, backup_store_args=self._backup_store_args, executor_environ=self._executor_environ, executor_source_prefix=self._executor_source_prefix, framework_role=self._framework_role) self._metrics.cluster_count.increment() cluster_info = self._launchers[cluster.name].cluster_info self._metrics.total_requested_cpus.write( self._metrics.total_requested_cpus.read() + cluster_info.total_cpus) self._metrics.total_requested_mem_mb.write( self._metrics.total_requested_mem_mb.read() + cluster_info.total_mem_mb) self._metrics.total_requested_disk_mb.write( self._metrics.total_requested_disk_mb.read() + cluster_info.total_disk_mb) log.info("Recovered %s clusters" % len(self._launchers)) @logged def reregistered(self, driver, masterInfo): self._driver = driver self._metrics.framework_registered.write(1) self.connected.set() @logged def disconnected(self, driver): self._metrics.framework_registered.write(0) @logged def resourceOffers(self, driver, offers): with self._lock: log.debug('Got %d resource offers' % len(offers)) self._metrics.resource_offers.add(len(offers)) for offer in shuffled(offers): task_id = None filters = None for name in self._launchers: launcher = self._launchers[name] try: task_id, _ = launcher.launch(offer) except 
MySQLClusterLauncher.IncompatibleRoleError as e: log.info("Declining offer %s for %s because '%s'" % ( offer.id.value, INCOMPATIBLE_ROLE_OFFER_REFUSE_DURATION, e)) filters = mesos_pb2.Filters() filters.refuse_seconds = INCOMPATIBLE_ROLE_OFFER_REFUSE_DURATION.as_(Time.SECONDS) self._metrics.offers_incompatible_role.increment() break if task_id: self._metrics.tasks_launched.increment() self._tasks[task_id] = launcher.cluster_name break if task_id: break if not filters: log.debug("Declining unused offer %s because no launcher accepted this offer: %s" % ( offer.id.value, offer)) filters = mesos_pb2.Filters() self._metrics.offers_unused.increment() self._driver.declineOffer(offer.id, filters) @logged def statusUpdate(self, driver, status): with self._lock: task_id = status.task_id.value launcher = self._get_launcher_by_task_id(task_id) if not launcher: log.info("Cluster for task %s doesn't exist. It could have been removed" % task_id) return try: launcher.status_update(status) except MySQLClusterLauncher.Error as e: log.error("Status update failed due to launcher error: %s" % e.message) self._stop() if status.state == mesos_pb2.TASK_FINISHED: self._metrics.tasks_finished.increment() elif status.state == mesos_pb2.TASK_FAILED: self._metrics.tasks_failed.increment() elif status.state == mesos_pb2.TASK_KILLED: self._metrics.tasks_killed.increment() elif status.state == mesos_pb2.TASK_LOST: self._metrics.tasks_lost.increment() if launcher.terminated: log.info("Deleting the launcher for cluster %s because the cluster has terminated" % launcher.cluster_name) self._delete_launcher(launcher) def _delete_launcher(self, launcher): assert launcher.terminated self._state.clusters.discard(launcher.cluster_name) self._state_provider.dump_scheduler_state(self._state) self._launchers[launcher.cluster_name].stop() del self._launchers[launcher.cluster_name] @logged def frameworkMessage(self, driver, executorId, slaveId, message): with self._lock: log.info('Received framework message %s' % message) task_id = executorId.value launcher = self._get_launcher_by_task_id(task_id) launcher.framework_message(task_id, slaveId.value, message) @logged def slaveLost(self, driver, slaveId): pass @logged def error(self, driver, message): log.error('Received error from mesos: %s' % message) self._stop() def _get_launcher_by_task_id(self, task_id): assert task_id in self._tasks cluster_name = self._tasks[task_id] return self._launchers.get(cluster_name) def shuffled(li): copy = li[:] random.shuffle(copy) return copy
Apache License 2.0
net-ng/kansha
kansha/authentication/database/captcha.py
Captcha._generate_image
python
def _generate_image(self):
    font = ImageFont.truetype(self.font_path, self.font_size)
    image = Image.open(self.background_path)
    image = image.convert("RGBA")
    for i, l in enumerate(self.text):
        txt = Image.new('RGBA', (80, 50), (0, 0, 0, 0))
        d = ImageDraw.Draw(txt)
        d.text((0, 0), l, font=font, fill=(9, 92, 110, 255))
        txt = txt.rotate(random.randint(-20, 20))
        image.paste(txt, (30 + 30 * i, 5), txt)
    return image
Generate the captcha image
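A self-contained sketch of the same rotate-and-paste technique, using Pillow's built-in default font instead of the bundled ashcanbb_bold.ttf and a plain background instead of captcha-bg.png (both substitutions are assumptions, since those packaged assets are not available outside kansha):

import random
from PIL import Image, ImageDraw, ImageFont

# Plain background and default bitmap font stand in for the packaged assets.
image = Image.new("RGBA", (220, 60), (255, 255, 255, 255))
font = ImageFont.load_default()

for i, letter in enumerate("A3X9K"):
    # Draw each glyph on its own transparent layer so it can be rotated alone.
    txt = Image.new("RGBA", (80, 50), (0, 0, 0, 0))
    ImageDraw.Draw(txt).text((0, 0), letter, font=font, fill=(9, 92, 110, 255))
    txt = txt.rotate(random.randint(-20, 20))
    # Paste with the layer itself as the alpha mask, offset per letter.
    image.paste(txt, (30 + 30 * i, 5), txt)

image.save("captcha_demo.png")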
https://github.com/net-ng/kansha/blob/85b5816da126b1c7098707c98f217d8b2e524ff2/kansha/authentication/database/captcha.py#L48-L59
import pkg_resources
import random
import os
import string
from cStringIO import StringIO

from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw

from nagare import presentation

APP_NAME = 'kansha'
_PACKAGE = pkg_resources.Requirement.parse(APP_NAME)


def get_resource_filename(path):
    return pkg_resources.resource_filename(_PACKAGE, path)


def random_text(length=5):
    chars = string.ascii_uppercase + string.digits
    return ''.join(random.choice(chars) for dummy in range(length))


class Captcha(object):
    def __init__(self, text='', background_name='captcha-bg.png',
                 font_name='ashcanbb_bold.ttf', font_size=30):
        self.text = random_text()
        self.background_path = get_resource_filename(
            os.path.join('data', 'captcha', background_name))
        self.font_path = get_resource_filename(
            os.path.join('data', 'captcha', font_name))
        self.font_size = font_size
BSD 3-Clause New or Revised License
nlp-uoregon/trankit
trankit/adapter_transformers/trainer_tf.py
TFTrainer.predict
python
def predict(self, test_dataset: tf.data.Dataset) -> PredictionOutput:
    test_dataset = test_dataset.batch(self.args.eval_batch_size)
    test_dataset = self.args.strategy.experimental_distribute_dataset(test_dataset)

    return self._prediction_loop(test_dataset, description="Prediction")
Run prediction and return predictions and potential metrics.

Depending on the dataset and your use case, your test dataset may contain labels.
In that case, this method will also return metrics, like in evaluate().

Args:
    test_dataset: something similar to a PT Dataset. This is just temporary
        until we have a framework-agnostic approach for datasets.
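Before handing off to _prediction_loop, the method only batches the dataset and distributes it through the configured tf.distribute strategy. A minimal, standalone sketch of that same pattern; the toy dataset and batch size are made up, and the default (no-op) strategy stands in for the trainer's configured one:

import numpy as np
import tensorflow as tf

# Toy "test" dataset of 10 feature rows; in TFTrainer this comes from the caller.
features = np.arange(20, dtype=np.float32).reshape(10, 2)
test_dataset = tf.data.Dataset.from_tensor_slices(features)

strategy = tf.distribute.get_strategy()       # default single-replica strategy
batched = test_dataset.batch(4)               # stands in for eval_batch_size
distributed = strategy.experimental_distribute_dataset(batched)

for batch in distributed:
    # Each element is a (possibly per-replica) batch ready to feed to the model.
    print(batch.shape)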
https://github.com/nlp-uoregon/trankit/blob/5b56554efb9123758615a74cfa4d0f1a7d746d67/trankit/adapter_transformers/trainer_tf.py#L412-L424
import logging import math import os from typing import Callable, Dict, Optional import numpy as np import tensorflow as tf from .modeling_tf_utils import TFPreTrainedModel, shape_list from .optimization_tf import GradientAccumulator, create_optimizer from .trainer_utils import PREFIX_CHECKPOINT_DIR, EvalPrediction, PredictionOutput from .training_args_tf import TFTrainingArguments logger = logging.getLogger(__name__) class TFTrainer: model: TFPreTrainedModel args: TFTrainingArguments train_dataset: Optional[tf.data.Dataset] eval_dataset: Optional[tf.data.Dataset] compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None prediction_loss_only: bool def __init__( self, model: TFPreTrainedModel, args: TFTrainingArguments, train_dataset: Optional[tf.data.Dataset] = None, eval_dataset: Optional[tf.data.Dataset] = None, compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None, prediction_loss_only=False, ): self.model = model self.args = args self.train_dataset = train_dataset self.eval_dataset = eval_dataset self.compute_metrics = compute_metrics self.prediction_loss_only = prediction_loss_only self.gradient_accumulator = GradientAccumulator() self._setup_training() def _setup_training(self) -> None: self._prepare_dataset() with self.args.strategy.scope(): self._create_optimizer() _ = self.optimizer.iterations self._set_loss_and_metric() self._create_checkpoint_manager() self._create_summary_writer() def _set_loss_and_metric(self) -> None: try: self.loss = tf.keras.losses.get( { "class_name": self.args.loss_name, "config": {"from_logits": True, "reduction": tf.keras.losses.Reduction.NONE}, } ) except TypeError: self.loss = tf.keras.losses.get( {"class_name": self.args.loss_name, "config": {"reduction": tf.keras.losses.Reduction.NONE}} ) def _create_summary_writer(self) -> None: self.writer = tf.summary.create_file_writer(self.args.logging_dir) def _prepare_dataset(self) -> None: if self.train_dataset is not None: self.num_train_examples = self.train_dataset.reduce(tf.constant(0), lambda x, _: x + 1).numpy() if self.args.max_steps > 0: self.train_steps = self.args.max_steps else: self.train_steps: int = math.ceil(self.num_train_examples / self.args.train_batch_size) self.train_dataset = ( self.train_dataset.cache() .shuffle(self.num_train_examples) .batch(self.args.train_batch_size) .prefetch(tf.data.experimental.AUTOTUNE) ) if self.args.max_steps > 0: self.train_dataset = self.train_dataset.repeat(-1) self.train_dataset = self.args.strategy.experimental_distribute_dataset(self.train_dataset) else: self.train_steps = 0 if self.eval_dataset is not None: self.eval_dataset = ( self.eval_dataset.batch(self.args.eval_batch_size).cache().prefetch(tf.data.experimental.AUTOTUNE) ) self.eval_dataset = self.args.strategy.experimental_distribute_dataset(self.eval_dataset) def _create_optimizer(self) -> None: if self.args.optimizer_name == "adamw": self.optimizer = create_optimizer( self.args.learning_rate, self.train_steps, self.args.warmup_steps, self.args.end_lr ) else: try: self.optimizer = tf.keras.optimizers.get( { "class_name": self.args.optimizer_name, "config": {"learning_rate": self.args.learning_rate, "epsilon": self.args.adam_epsilon}, } ) except TypeError: self.optimizer = tf.keras.optimizers.get( {"class_name": self.args.optimizer_name, "config": {"learning_rate": self.args.learning_rate}} ) logger.info("Created an/a {} optimizer".format(self.args.optimizer_name)) def _create_checkpoint_manager(self, max_to_keep: int = 5, load_model: bool = True) -> None: ckpt = 
tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) self.model.ckpt_manager = tf.train.CheckpointManager(ckpt, PREFIX_CHECKPOINT_DIR, max_to_keep=max_to_keep) if load_model: ckpt.restore(self.model.ckpt_manager.latest_checkpoint).expect_partial() @tf.function def _evaluate_steps(self, per_replica_features, per_replica_labels): per_replica_loss, per_replica_logits = self.args.strategy.experimental_run_v2( self._run_model, args=(per_replica_features, per_replica_labels, False) ) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return reduced_loss, per_replica_logits def _prediction_loop( self, dataset: tf.data.Dataset, description: str, prediction_loss_only: Optional[bool] = None ) -> PredictionOutput: logger.info("***** Running %s *****", description) logger.info(" Batch size = %d", self.args.eval_batch_size) label_ids: np.ndarray = None preds: np.ndarray = None step: int = 1 for features, labels in dataset: step = tf.convert_to_tensor(step, dtype=tf.int64) loss, logits = self._evaluate_steps(features, labels) loss = tf.reduce_mean(loss) if not prediction_loss_only: if self.args.n_gpu > 1: for val in logits.values: if preds is None: preds = val.numpy() else: preds = np.append(preds, val.numpy(), axis=0) for val in labels.values: if label_ids is None: label_ids = val.numpy() else: label_ids = np.append(label_ids, val.numpy(), axis=0) else: if preds is None: preds = logits.numpy() else: preds = np.append(preds, logits.numpy(), axis=0) if label_ids is None: label_ids = labels.numpy() else: label_ids = np.append(label_ids, labels.numpy(), axis=0) step += 1 if self.compute_metrics is not None and preds is not None and label_ids is not None: metrics = self.compute_metrics(EvalPrediction(predictions=preds, label_ids=label_ids)) else: metrics = {} metrics["eval_loss"] = loss.numpy() for key in list(metrics.keys()): if not key.startswith("eval_"): metrics[f"eval_{key}"] = metrics.pop(key) return PredictionOutput(predictions=preds, label_ids=label_ids, metrics=metrics) def evaluate( self, eval_dataset: Optional[tf.data.Dataset] = None, prediction_loss_only: Optional[bool] = None ) -> Dict[str, float]: if eval_dataset is None: eval_dataset = self.eval_dataset output = self._prediction_loop(eval_dataset, description="Evaluation") return output.metrics def train(self) -> None: if self.args.debug: tf.summary.trace_on(graph=True, profiler=True) self.gradient_accumulator.reset() iterations = self.optimizer.iterations if iterations.numpy() > 0: logger.info("Start the training from the last checkpoint") start_epoch = (iterations.numpy() // self.train_steps) + 1 else: start_epoch = 1 tf.summary.experimental.set_step(iterations) epochs = 1 if self.args.max_steps > 0 else self.args.num_train_epochs logger.info("***** Running training *****") logger.info(" Num examples = %d", self.num_train_examples) logger.info(" Num Epochs = %d", epochs) logger.info(" Total optimization steps = %d", self.train_steps) for epoch in range(start_epoch, int(epochs + 1)): for training_loss in self._training_steps(): step = iterations.numpy() if self.args.debug: with self.writer.as_default(): tf.summary.scalar("loss", training_loss, step=step) if step == 1 and self.args.debug: with self.writer.as_default(): tf.summary.trace_export(name="training", step=step, profiler_outdir=self.args.logging_dir) if self.args.evaluate_during_training and step % 
self.args.eval_steps == 0: logs = {} results = self.evaluate() for key, value in results.items(): eval_key = "eval_{}".format(key) logs[eval_key] = value if callable(self.optimizer.learning_rate): logs["learning_rate"] = self.optimizer.learning_rate(step).numpy() else: logs["learning_rate"] = self.optimizer.learning_rate.numpy() logger.info("Epoch {} Step {} Validation Metrics {}".format(epoch, step, logs)) with self.writer.as_default(): for k, v in logs.items(): tf.summary.scalar(k, v, step=step) if step % self.args.logging_steps == 0: logger.info("Epoch {} Step {} Train Loss {:.4f}".format(epoch, step, training_loss.numpy())) if step % self.args.save_steps == 0: ckpt_save_path = self.model.ckpt_manager.save() logger.info("Saving checkpoint for step {} at {}".format(step, ckpt_save_path)) if step % self.train_steps == 0: break def _training_steps(self): for i, loss in enumerate(self._accumulate_next_gradients()): if i % self.args.gradient_accumulation_steps == 0: self._apply_gradients() yield loss @tf.function def _apply_gradients(self): self.args.strategy.experimental_run_v2(self._step) def _step(self): gradient_scale = self.gradient_accumulator.step * self.args.strategy.num_replicas_in_sync gradients = [ gradient / tf.cast(gradient_scale, gradient.dtype) for gradient in self.gradient_accumulator.gradients ] gradients = [(tf.clip_by_value(grad, -self.args.max_grad_norm, self.args.max_grad_norm)) for grad in gradients] self.optimizer.apply_gradients(list(zip(gradients, self.model.trainable_variables))) self.gradient_accumulator.reset() def _accumulate_next_gradients(self): iterator = iter(self.train_dataset) @tf.function def _accumulate_next(): per_replica_features, per_replica_labels = next(iterator) return self._accumulate_gradients(per_replica_features, per_replica_labels) while True: try: yield _accumulate_next() except tf.errors.OutOfRangeError: break def _accumulate_gradients(self, per_replica_features, per_replica_labels): per_replica_loss = self.args.strategy.experimental_run_v2( self._forward, args=(per_replica_features, per_replica_labels) ) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return reduced_loss def _forward(self, features, labels): per_example_loss, _ = self._run_model(features, labels, True) gradients = tf.gradients(per_example_loss, self.model.trainable_variables) gradients = [ g if g is not None else tf.zeros_like(v) for g, v in zip(gradients, self.model.trainable_variables) ] self.gradient_accumulator(gradients) return per_example_loss def _run_model(self, features, labels, training): if self.args.mode == "text-classification" or self.args.mode == "token-classification": logits = self.model(features, training=training)[0] else: logits = self.model(features, training=training) if self.args.mode == "token-classification": active_loss = tf.reshape(labels, (-1,)) != -1 reduced_logits = tf.boolean_mask(tf.reshape(logits, (-1, shape_list(logits)[2])), active_loss) labels = tf.boolean_mask(tf.reshape(labels, (-1,)), active_loss) loss = self.loss(labels, reduced_logits) elif self.args.mode == "question-answering": start_loss = self.loss(labels["start_position"], logits[0]) end_loss = self.loss(labels["end_position"], logits[1]) loss = (start_loss + end_loss) / 2.0 else: loss = self.loss(labels, logits) loss += sum(self.model.losses) * (1.0 / self.args.n_gpu) return loss, logits
Apache License 2.0
titanentertainmentgroup/django-filemaker
filemaker/manager.py
Manager.get
python
def get(self, **kwargs):
    try:
        return self.filter(**kwargs)[0]
    except IndexError:
        raise self.cls.DoesNotExist('Could not find item in FileMaker')
Returns the first item found by filtering the queryset by ``**kwargs``.

Will raise the ``DoesNotExist`` exception on the manager's model class if no
items are found; however, unlike the Django ORM, it will silently return the
first result if multiple results are found.

:param \**kwargs: Field and value queries to be passed to :py:meth:`filter`
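A minimal, self-contained sketch of the get-on-top-of-filter pattern described above. It does not import django-filemaker; the ToyManager class, the DoesNotExist exception and the record fields are stand-ins invented for illustration only.

class DoesNotExist(Exception):
    pass


class ToyManager(object):
    def __init__(self, records):
        self._records = records

    def filter(self, **kwargs):
        # Keep every record whose fields match all of the given criteria.
        return [r for r in self._records
                if all(r.get(k) == v for k, v in kwargs.items())]

    def get(self, **kwargs):
        # Same shape as Manager.get above: first match wins, no match raises.
        try:
            return self.filter(**kwargs)[0]
        except IndexError:
            raise DoesNotExist('Could not find item')


manager = ToyManager([{'slug': 'a', 'title': 'First'},
                      {'slug': 'b', 'title': 'Second'}])
print(manager.get(slug='b')['title'])   # -> Second

Because get() simply takes the first element of filter(), the ordering of the underlying result set decides which record is returned when several match.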
https://github.com/titanentertainmentgroup/django-filemaker/blob/4c6b0cdf1606d2a4de10637e9155120b1a4c2bdb/filemaker/manager.py#L433-L447
from __future__ import unicode_literals import copy import re import requests from django.http import QueryDict from django.utils import six from urlobject import URLObject from filemaker.exceptions import FileMakerConnectionError from filemaker.parser import FMXMLObject OPERATORS = { 'exact': 'eq', 'contains': 'cn', 'startswith': 'bw', 'endswith': 'ew', 'gt': 'gt', 'gte': 'gte', 'lt': 'lt', 'lte': 'lte', 'neq': 'neq', } class RawManager(object): def __init__(self, url, db, layout, response_layout=None, **kwargs): self.url = URLObject(url).without_auth() self.url = self.url.with_path( self.url.path or '/fmi/xml/fmresultset.xml') self.auth = URLObject(url).auth self.params = QueryDict('', mutable=True) self.dbparams = QueryDict('', mutable=True) self.dbparams.update({ '-db': db, '-lay': layout, }) if response_layout: self.dbparams['-lay.response'] = response_layout self.params['-max'] = '50' def __repr__(self): return '<RawManager: {0} {1} {2}>'.format( self.url, self.dbparams, self.params) def _clone(self): return copy.copy(self) def set_script(self, name, option=None): mgr = self._clone() key = '-script' if option in ('prefind', 'presort'): key = '{0}.{1}'.format(key, option) mgr.params[key] = name return mgr def set_record_id(self, recid): mgr = self._clone() mgr.params['-recid'] = recid return mgr def set_modifier_id(self, modid): mgr = self._clone() mgr.params['-modid'] = modid return mgr def set_logical_operator(self, op): mgr = self._clone() if op in ('and', 'or'): mgr.params['-lop'] = op return mgr def set_group_size(self, max): self.params['-max'] = max return self def set_skip_records(self, skip): self.params['-skip'] = skip return self def add_db_param(self, field, value, op=None): mgr = self._clone() mgr.params.appendlist(field, value) if op: mgr.params.appendlist('{0}.op'.format(field), op) return mgr def add_sort_param(self, field, order='ascend', priority=0): mgr = self._clone() mgr.params['-sortfield.{0}'.format(priority)] = field mgr.params['-sortorder.{0}'.format(priority)] = order return mgr def find(self, **kwargs): self.params.update(kwargs) return self._commit('find') def find_all(self, **kwargs): self.params.update(kwargs) return self._commit('findall') def edit(self, **kwargs): self.params.update(kwargs) return self._commit('edit') def new(self, **kwargs): self.params.update(kwargs) return self._commit('new') def delete(self, **kwargs): self.params.update(kwargs) return self._commit('delete') def _commit(self, action): if 'RECORDID' in self.params and not '-recid' in self.params: self.params['-recid'] = self.params['RECORDID'] del self.params['RECORDID'] if 'MODID' in self.params and not '-modid' in self.params: self.params['-modid'] = self.params['MODID'] del self.params['MODID'] data = '&'.join([ self.dbparams.urlencode(), self.params.urlencode(), '-{0}'.format(action), ]) try: resp = requests.post(self.url, auth=self.auth, data=data) resp.raise_for_status() except requests.exceptions.RequestException as e: raise FileMakerConnectionError(e) return FMXMLObject(resp.content) class Manager(RawManager): def __init__(self, cls): self.cls = cls super(Manager, self).__init__(**self.cls._meta.get('connection')) self._result_cache = None self._fm_data = None def __iter__(self): return self.iterator() def iterator(self): if not self._result_cache: self._result_cache = self.preprocess_resultset(self._get_fm_data().resultset) for result in self._result_cache: yield self.cls(result) def __len__(self): return len(self._get_fm_data().resultset) def __getitem__(self, k): mgr = 
self if not isinstance(k, (slice,) + six.integer_types): raise TypeError assert ((not isinstance(k, slice) and (k >= 0)) or (isinstance(k, slice) and (k.start is None or k.start >= 0) and (k.stop is None or k.stop >= 0))), 'Negative indexing is not supported.' if isinstance(k, slice): if k.start: mgr = mgr.set_skip_records(k.start) if k.stop: mgr = mgr.set_group_size(k.stop - (k.start or 0)) return list(mgr)[k] def __repr__(self): return '<{0} query with {1} records...>'.format( self.cls.__name__, len(self)) def _get_fm_data(self): if self._fm_data is None: self._fm_data = self.find() return self._fm_data def _clone(self): mgr = super(Manager, self)._clone() mgr._result_cache = None mgr._fm_data = None return mgr def _resolve_fm_field(self, field): from filemaker.fields import ModelField parts = field.split('__') fm_attr_path = [] klass = self.cls resolved_field = None for part in parts: try: klass = resolved_field.model if resolved_field else self.cls except AttributeError: raise ValueError('Cound not resolve field: {0}'.format(field)) resolved_field = klass._fields.get(part) if resolved_field is None: raise ValueError('Cound not resolve field: {0}'.format(field)) path = resolved_field.fm_attr.replace('.', '::') if not path == '+self' and not isinstance( resolved_field, ModelField): fm_attr_path.append(path) return '::'.join(fm_attr_path) def preprocess_resultset(self, resultset): return resultset def all(self): return self._clone() def filter(self, **kwargs): mgr = self for k, v in kwargs.items(): operator = 'eq' for op, code in OPERATORS.items(): if k.endswith('__{0}'.format(op)): k = re.sub(r'__{0}$'.format(op), '', k) operator = code break try: mgr = mgr.add_db_param( self._resolve_fm_field(k), v, op=operator) except (KeyError, ValueError): raise ValueError('Invalid filter argument: {0}'.format(k)) return mgr
BSD 2-Clause Simplified License
alleninstitute/bmtk
bmtk/simulator/core/graph.py
SimGraph.add_edges
python
def add_edges(self, sonata_file, populations=None, source_pop=None, target_pop=None):
    edges = sonata_file.edges
    selected_populations = edges.population_names if populations is None else populations

    for pop_name in selected_populations:
        if pop_name not in edges:
            continue

        edge_pop = edges[pop_name]
        self._preprocess_edge_types(edge_pop)

        # Classify the source/target node populations as internal (simulated)
        # or virtual (external inputs).
        src_pop = source_pop if source_pop is not None else edge_pop.source_population
        is_internal_src = src_pop in self._internal_populations_map.keys()
        is_external_src = src_pop in self._virtual_populations_map.keys()

        trg_pop = target_pop if target_pop is not None else edge_pop.target_population
        is_internal_trg = trg_pop in self._internal_populations_map.keys()

        if not is_internal_trg:
            self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
                                   '{} edges cannot create connections.').format(trg_pop, pop_name))

        if not (is_internal_src or is_external_src):
            self.io.log_exception('Source node population {} not found. Please update {} edges'.format(
                src_pop, pop_name))

        if is_internal_src:
            if trg_pop not in self._recurrent_edges:
                self._recurrent_edges[trg_pop] = []
            self._recurrent_edges[trg_pop].append(edge_pop)

        if is_external_src:
            # External edges are keyed by the (source, target) population pair,
            # matching the lookup done in external_edge_populations().
            if (src_pop, trg_pop) not in self._external_edges:
                self._external_edges[(src_pop, trg_pop)] = []
            self._external_edges[(src_pop, trg_pop)].append(edge_pop)
:param sonata_file:
:param populations:
:param source_pop:
:param target_pop:
:return:
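A standalone sketch of the bookkeeping add_edges performs, assuming nothing from bmtk or SONATA: edge populations whose source nodes are internal end up in a per-target "recurrent" bucket, while edges driven by virtual (external) nodes are keyed by the (source, target) pair. All population names below are invented for illustration.

internal_pops = {'cortex'}
virtual_pops = {'thalamus_inputs'}

recurrent_edges = {}
external_edges = {}

edge_populations = [
    {'name': 'cortex_to_cortex', 'source': 'cortex', 'target': 'cortex'},
    {'name': 'inputs_to_cortex', 'source': 'thalamus_inputs', 'target': 'cortex'},
]

for edge_pop in edge_populations:
    src, trg = edge_pop['source'], edge_pop['target']
    if src in internal_pops:
        # Recurrent connections are grouped per target population.
        recurrent_edges.setdefault(trg, []).append(edge_pop)
    elif src in virtual_pops:
        # External drive is grouped per (source, target) pair.
        external_edges.setdefault((src, trg), []).append(edge_pop)

print(sorted(recurrent_edges))   # -> ['cortex']
print(sorted(external_edges))    # -> [('thalamus_inputs', 'cortex')]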
https://github.com/alleninstitute/bmtk/blob/ddebb3504808ceee5fdc41f5e3225716683acdfc/bmtk/simulator/core/graph.py#L257-L299
import os import json import ast import numpy as np from bmtk.simulator.core.simulation_config import SimulationConfig as ConfigDict from bmtk.utils import sonata from bmtk.simulator.core.io_tools import io from bmtk.simulator.core.node_sets import NodeSet, NodeSetAll class SimGraph(object): model_type_col = 'model_type' def __init__(self): self._components = {} self._io = io self._node_populations = {} self._internal_populations_map = {} self._virtual_populations_map = {} self._virtual_cells_nid = {} self._recurrent_edges = {} self._external_edges = {} self._node_sets = {} self._using_gids = False @property def io(self): return self._io @property def node_populations(self): return list(self._node_populations.keys()) def get_node_set(self, node_set): if node_set in self._node_sets.keys(): return self._node_sets[node_set] elif isinstance(node_set, (dict, list)): return NodeSet(node_set, self) else: self.io.log_exception('Unable to load or find node_set "{}"'.format(node_set)) def get_node_populations(self): return self._node_populations.values() def get_node_population(self, population_name): return self._node_populations[population_name] def get_component(self, key): return self._components[key] def add_component(self, key, value): self._components[key] = value def _validate_components(self): return True def __avail_model_types(self, population): model_types = set() for grp in population.groups: if self.model_type_col not in grp.all_columns: self.io.log_exception('model_type is missing from nodes.') model_types.update(set(np.unique(grp.get_values(self.model_type_col)))) return model_types def _preprocess_node_types(self, node_population): node_type_ids = node_population.type_ids node_types_table = node_population.types_table morph_dir = self.get_component('morphologies_dir') if morph_dir is not None and 'morphology' in node_types_table.columns: for nt_id in node_type_ids: node_type = node_types_table[nt_id] if node_type['morphology'] is None: continue node_type['morphology'] = os.path.join(morph_dir, node_type['morphology']) if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns: for nt_id in node_type_ids: node_type = node_types_table[nt_id] dynamics_params = node_type['dynamics_params'] if isinstance(dynamics_params, dict): continue model_type = node_type['model_type'] if model_type == 'biophysical': params_dir = self.get_component('biophysical_neuron_models_dir') elif model_type == 'point_process': params_dir = self.get_component('point_neuron_models_dir') elif model_type == 'point_soma': params_dir = self.get_component('point_neuron_models_dir') else: params_dir = self.get_component('custom_neuron_models') params_path = os.path.join(params_dir, dynamics_params) try: params_val = json.load(open(params_path, 'r')) node_type['dynamics_params'] = params_val except Exception: self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path)) def _preprocess_edge_types(self, edge_pop): edge_types_table = edge_pop.types_table edge_type_ids = np.unique(edge_pop.type_ids) for et_id in edge_type_ids: edge_type = edge_types_table[et_id] if 'dynamics_params' in edge_types_table.columns: dynamics_params = edge_type['dynamics_params'] params_dir = self.get_component('synaptic_models_dir') params_path = os.path.join(params_dir, dynamics_params) try: params_val = json.load(open(params_path, 'r')) edge_type['dynamics_params'] = params_val except Exception: self.io.log_exception('Could not find edge dynamics_params file 
{}.'.format(params_path)) if 'target_sections' in edge_type: trg_sec = edge_type['target_sections'] if trg_sec is not None: try: edge_type['target_sections'] = ast.literal_eval(trg_sec) except Exception as exc: self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec)) edge_type['target_sections'] = None if 'distance_range' in edge_type: dist_range = edge_type['distance_range'] if dist_range is not None: try: edge_type['distance_range'] = json.loads(dist_range) except Exception as e: try: edge_type['distance_range'] = [0.0, float(dist_range)] except Exception as e: self.io.log_warning('Unable to parse distance_range {}'.format(dist_range)) edge_type['distance_range'] = None def external_edge_populations(self, src_pop, trg_pop): return self._external_edges.get((src_pop, trg_pop), []) def add_nodes(self, sonata_file, populations=None): nodes = sonata_file.nodes selected_populations = nodes.population_names if populations is None else populations for pop_name in selected_populations: if pop_name not in nodes: print('HERE') continue if pop_name in self.node_populations: self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name)) node_pop = nodes[pop_name] self._preprocess_node_types(node_pop) self._node_populations[pop_name] = node_pop model_types = self.__avail_model_types(node_pop) if 'virtual' in model_types: self._virtual_populations_map[pop_name] = node_pop self._virtual_cells_nid[pop_name] = {} model_types -= set(['virtual']) if model_types: self.io.log_warning('Node population {} contains both virtual and non-virtual nodes which can '.format(pop_name) + 'cause memory and build-time inefficency. Consider separating virtual nodes ' + 'into their own population') if model_types: self._internal_populations_map[pop_name] = node_pop self._node_sets[pop_name] = NodeSet({'population': pop_name}, self) def build_nodes(self): raise NotImplementedError def build_recurrent_edges(self): raise NotImplementedError
BSD 3-Clause New or Revised License
mobleylab/alchemical-analysis
alchemical_analysis/utils/corruptxvg.py
findDataStart
python
def findDataStart(lines, delin = ' '):
    for i, l in enumerate(lines):
        test = l.split(delin)[0]
        try:
            float(test)
            return i, lines[0:i], lines[i:]
        except:
            continue
    return -1, lines, []
Finds the line where the data starts

input:
    lines = list of strings (probably from a data file)
    delin = optional string, tells how the data is separated; default is a space (' ')

output:
    i = index of the first line that parses as data (returns -1 if no data was found)
    header = lines that make up the strings at the top of the file
    datalines = lines starting from where the data begins
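The function is pure Python, so its behaviour is easy to check in isolation; the body below is copied from the source above and the header lines are made up to resemble a typical xvg file.

def findDataStart(lines, delin=' '):
    for i, l in enumerate(lines):
        test = l.split(delin)[0]
        try:
            float(test)
            return i, lines[0:i], lines[i:]
        except:
            continue
    return -1, lines, []


lines = ['# produced by some tool', '@ title "dH/dl"', '0.0 1.25', '0.2 1.30']
i, header, data = findDataStart(lines)
print(i)        # -> 2
print(header)   # -> ['# produced by some tool', '@ title "dH/dl"']
print(data)     # -> ['0.0 1.25', '0.2 1.30']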
https://github.com/mobleylab/alchemical-analysis/blob/507e157b1a9658aa21c7bee100695ce66f2ad71d/alchemical_analysis/utils/corruptxvg.py#L10-L32
import os

import numpy as np
import commands as c
from shutil import copy2
MIT License
nils-braun/dask-sql
dask_sql/context.py
Context._get_tables_from_stack
python
def _get_tables_from_stack(self):
    stack = inspect.stack()

    tables = {}
    for frame_info in stack:
        for var_name, variable in frame_info.frame.f_locals.items():
            if var_name.startswith("_"):
                continue
            if not isinstance(variable, (pd.DataFrame, dd.DataFrame)):
                continue

            tables[var_name] = tables.get(var_name, variable)

    return tables
Helper function to return all dask/pandas dataframes from the calling stack
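A standalone sketch of the stack-walking trick this helper relies on, using pandas only so it runs without dask: every caller-frame local that is a DataFrame and whose name does not start with an underscore is collected. The function and variable names here are illustrative, not part of dask-sql.

import inspect

import pandas as pd


def tables_from_stack():
    tables = {}
    for frame_info in inspect.stack():
        for var_name, variable in frame_info.frame.f_locals.items():
            if var_name.startswith("_"):
                continue
            if not isinstance(variable, pd.DataFrame):
                continue
            # Keep the first (innermost) frame's binding for a given name.
            tables[var_name] = tables.get(var_name, variable)
    return tables


def caller():
    df_sales = pd.DataFrame({"amount": [1, 2, 3]})
    _hidden = pd.DataFrame({"x": [0]})   # ignored: name starts with an underscore
    return sorted(tables_from_stack())


print(caller())   # -> ['df_sales'] (plus any DataFrames bound in outer frames)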
https://github.com/nils-braun/dask-sql/blob/0c3a6a164f29fd3781daa55018d30b8c11fd7f8a/dask_sql/context.py#L783-L800
import asyncio import inspect import logging import warnings from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple, Union import dask.dataframe as dd import pandas as pd from dask.base import optimize from dask.distributed import Client from dask_sql import input_utils from dask_sql.datacontainer import ( UDF, DataContainer, FunctionDescription, SchemaContainer, ) from dask_sql.input_utils import InputType, InputUtil from dask_sql.integrations.ipython import ipython_integration from dask_sql.java import ( DaskAggregateFunction, DaskScalarFunction, DaskSchema, DaskTable, RelationalAlgebraGenerator, RelationalAlgebraGeneratorBuilder, SqlParseException, ValidationException, get_java_class, ) from dask_sql.mappings import python_to_sql_type from dask_sql.physical.rel import RelConverter, custom, logical from dask_sql.physical.rex import RexConverter, core from dask_sql.utils import ParsingException if TYPE_CHECKING: from dask_sql.java import org logger = logging.getLogger(__name__) class Context: DEFAULT_SCHEMA_NAME = "root" def __init__(self): self.schema_name = self.DEFAULT_SCHEMA_NAME self.schema = {self.schema_name: SchemaContainer(self.schema_name)} self.sql_server = None RelConverter.add_plugin_class(logical.LogicalAggregatePlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalFilterPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalJoinPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalProjectPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalSortPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalTableScanPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalUnionPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalValuesPlugin, replace=False) RelConverter.add_plugin_class(logical.LogicalWindowPlugin, replace=False) RelConverter.add_plugin_class(logical.SamplePlugin, replace=False) RelConverter.add_plugin_class(custom.AnalyzeTablePlugin, replace=False) RelConverter.add_plugin_class(custom.CreateExperimentPlugin, replace=False) RelConverter.add_plugin_class(custom.CreateModelPlugin, replace=False) RelConverter.add_plugin_class(custom.CreateSchemaPlugin, replace=False) RelConverter.add_plugin_class(custom.CreateTableAsPlugin, replace=False) RelConverter.add_plugin_class(custom.CreateTablePlugin, replace=False) RelConverter.add_plugin_class(custom.DropModelPlugin, replace=False) RelConverter.add_plugin_class(custom.DropSchemaPlugin, replace=False) RelConverter.add_plugin_class(custom.DropTablePlugin, replace=False) RelConverter.add_plugin_class(custom.ExportModelPlugin, replace=False) RelConverter.add_plugin_class(custom.PredictModelPlugin, replace=False) RelConverter.add_plugin_class(custom.ShowColumnsPlugin, replace=False) RelConverter.add_plugin_class(custom.ShowModelParamsPlugin, replace=False) RelConverter.add_plugin_class(custom.ShowModelsPlugin, replace=False) RelConverter.add_plugin_class(custom.ShowSchemasPlugin, replace=False) RelConverter.add_plugin_class(custom.ShowTablesPlugin, replace=False) RelConverter.add_plugin_class(custom.SwitchSchemaPlugin, replace=False) RexConverter.add_plugin_class(core.RexCallPlugin, replace=False) RexConverter.add_plugin_class(core.RexInputRefPlugin, replace=False) RexConverter.add_plugin_class(core.RexLiteralPlugin, replace=False) InputUtil.add_plugin_class(input_utils.DaskInputPlugin, replace=False) InputUtil.add_plugin_class(input_utils.PandasLikeInputPlugin, replace=False) 
InputUtil.add_plugin_class(input_utils.HiveInputPlugin, replace=False) InputUtil.add_plugin_class(input_utils.IntakeCatalogInputPlugin, replace=False) InputUtil.add_plugin_class(input_utils.SqlalchemyHiveInputPlugin, replace=False) InputUtil.add_plugin_class(input_utils.LocationInputPlugin, replace=False) def create_table( self, table_name: str, input_table: InputType, format: str = None, persist: bool = False, schema_name: str = None, gpu: bool = False, **kwargs, ): if "file_format" in kwargs: warnings.warn("file_format is renamed to format", DeprecationWarning) format = kwargs.pop("file_format") schema_name = schema_name or self.schema_name dc = InputUtil.to_dc( input_table, table_name=table_name, format=format, persist=persist, gpu=gpu, **kwargs, ) self.schema[schema_name].tables[table_name.lower()] = dc def register_dask_table(self, df: dd.DataFrame, name: str, *args, **kwargs): warnings.warn( "register_dask_table is deprecated, use the more general create_table instead.", DeprecationWarning, ) return self.create_table(name, df, *args, **kwargs) def drop_table(self, table_name: str, schema_name: str = None): schema_name = schema_name or self.schema_name del self.schema[schema_name].tables[table_name] def drop_schema(self, schema_name: str): if schema_name == self.DEFAULT_SCHEMA_NAME: raise RuntimeError(f"Default Schema `{schema_name}` cannot be deleted") del self.schema[schema_name] if self.schema_name == schema_name: self.schema_name = self.DEFAULT_SCHEMA_NAME def register_function( self, f: Callable, name: str, parameters: List[Tuple[str, type]], return_type: type, replace: bool = False, schema_name: str = None, row_udf: bool = False, ): self._register_callable( f, name, aggregation=False, parameters=parameters, return_type=return_type, replace=replace, schema_name=schema_name, row_udf=row_udf, ) def register_aggregation( self, f: dd.Aggregation, name: str, parameters: List[Tuple[str, type]], return_type: type, replace: bool = False, schema_name: str = None, ): self._register_callable( f, name, aggregation=True, parameters=parameters, return_type=return_type, replace=replace, schema_name=schema_name, ) def sql( self, sql: str, return_futures: bool = True, dataframes: Dict[str, Union[dd.DataFrame, pd.DataFrame]] = None, ) -> Union[dd.DataFrame, pd.DataFrame]: if dataframes is not None: for df_name, df in dataframes.items(): self.create_table(df_name, df) rel, select_names, _ = self._get_ral(sql) dc = RelConverter.convert(rel, context=self) if dc is None: return if select_names: cc = dc.column_container cc = cc.rename( { df_col: select_name for df_col, select_name in zip(cc.columns, select_names) } ) dc = DataContainer(dc.df, cc) df = dc.assign() if not return_futures: df = df.compute() return df def explain( self, sql: str, dataframes: Dict[str, Union[dd.DataFrame, pd.DataFrame]] = None ) -> str: if dataframes is not None: for df_name, df in dataframes.items(): self.create_table(df_name, df) _, _, rel_string = self._get_ral(sql) return rel_string def visualize(self, sql: str, filename="mydask.png") -> None: result = self.sql(sql, return_futures=True) (result,) = optimize(result) result.visualize(filename) def create_schema(self, schema_name: str): self.schema[schema_name] = SchemaContainer(schema_name) def register_experiment( self, experiment_name: str, experiment_results: pd.DataFrame, schema_name: str = None, ): schema_name = schema_name or self.schema_name self.schema[schema_name].experiments[ experiment_name.lower() ] = experiment_results def register_model( self, model_name: 
str, model: Any, training_columns: List[str], schema_name: str = None, ): schema_name = schema_name or self.schema_name self.schema[schema_name].models[model_name.lower()] = (model, training_columns) def ipython_magic(self, auto_include=False): ipython_integration(self, auto_include=auto_include) def run_server( self, client: Client = None, host: str = "0.0.0.0", port: int = 8080, log_level=None, blocking: bool = True, ): from dask_sql.server.app import run_server self.stop_server() self.server = run_server( context=self, client=client, host=host, port=port, log_level=log_level, blocking=blocking, ) def stop_server(self): if self.sql_server is not None: loop = asyncio.get_event_loop() assert loop loop.create_task(self.sql_server.shutdown()) self.sql_server = None def fqn( self, identifier: "org.apache.calcite.sql.SqlIdentifier" ) -> Tuple[str, str]: components = [str(n) for n in identifier.names] if len(components) == 2: schema = components[0] name = components[1] elif len(components) == 1: schema = self.schema_name name = components[0] else: raise AttributeError( f"Do not understand the identifier {identifier} (too many components)" ) return schema, name def _prepare_schemas(self): schema_list = [] for schema_name, schema in self.schema.items(): java_schema = DaskSchema(schema_name) if not schema.tables: logger.warning("No tables are registered.") for name, dc in schema.tables.items(): table = DaskTable(name) df = dc.df logger.debug( f"Adding table '{name}' to schema with columns: {list(df.columns)}" ) for column in df.columns: data_type = df[column].dtype sql_data_type = python_to_sql_type(data_type) table.addColumn(column, sql_data_type) java_schema.addTable(table) if not schema.functions: logger.debug("No custom functions defined.") for function_description in schema.function_lists: name = function_description.name sql_return_type = python_to_sql_type(function_description.return_type) if function_description.aggregation: logger.debug(f"Adding function '{name}' to schema as aggregation.") dask_function = DaskAggregateFunction(name, sql_return_type) else: logger.debug( f"Adding function '{name}' to schema as scalar function." 
) dask_function = DaskScalarFunction(name, sql_return_type) dask_function = self._add_parameters_from_description( function_description, dask_function ) java_schema.addFunction(dask_function) schema_list.append(java_schema) return schema_list @staticmethod def _add_parameters_from_description(function_description, dask_function): for parameter in function_description.parameters: param_name, param_type = parameter sql_param_type = python_to_sql_type(param_type) dask_function.addParameter(param_name, sql_param_type, False) return dask_function def _get_ral(self, sql): schemas = self._prepare_schemas() generator_builder = RelationalAlgebraGeneratorBuilder(self.schema_name) for schema in schemas: generator_builder.addSchema(schema) generator = generator_builder.build() default_dialect = generator.getDialect() logger.debug(f"Using dialect: {get_java_class(default_dialect)}") try: sqlNode = generator.getSqlNode(sql) sqlNodeClass = get_java_class(sqlNode) select_names = None rel = sqlNode rel_string = "" if not sqlNodeClass.startswith("com.dask.sql.parser."): validatedSqlNode = generator.getValidatedNode(sqlNode) nonOptimizedRelNode = generator.getRelationalAlgebra(validatedSqlNode) select_names = [ str(name) for name in nonOptimizedRelNode.getRowType().getFieldNames() ] rel = generator.getOptimizedRelationalAlgebra(nonOptimizedRelNode) rel_string = str(generator.getRelationalAlgebraString(rel)) except (ValidationException, SqlParseException) as e: logger.debug(f"Original exception raised by Java:\n {e}") raise ParsingException(sql, str(e.message())) from None if sqlNodeClass == "org.apache.calcite.sql.SqlOrderBy": sqlNode = sqlNode.query sqlNodeClass = get_java_class(sqlNode) if sqlNodeClass == "org.apache.calcite.sql.SqlSelect": select_names = [ self._to_sql_string(s, default_dialect=default_dialect) if current_name.startswith("EXPR$") else current_name for s, current_name in zip(sqlNode.getSelectList(), select_names) ] else: logger.debug( "Not extracting output column names as the SQL is not a SELECT call" ) logger.debug(f"Extracted relational algebra:\n {rel_string}") return rel, select_names, rel_string def _to_sql_string(self, s: "org.apache.calcite.sql.SqlNode", default_dialect=None): if default_dialect is None: default_dialect = RelationalAlgebraGenerator.getDialect() try: return str(s.toSqlString(default_dialect)) except Exception: return str(s)
MIT License
juju/charm-helpers
charmhelpers/contrib/network/ip.py
is_bridge_member
python
def is_bridge_member(nic):
    for bridge in get_bridges():
        if nic in get_bridge_nics(bridge):
            return True

    return False
Check if a given nic is a member of a bridge.
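A standalone sketch of the same check without charm-helpers, assuming a Linux host: a bridge's member ports are listed under /sys/devices/virtual/net/<bridge>/brif/, so a nic is a bridge member exactly when its name appears in one of those directories. The helper names below are illustrative.

import glob
import os

VNIC_DIR = '/sys/devices/virtual/net'


def bridges():
    # Each bridge device exposes a 'bridge' subdirectory in sysfs.
    return [os.path.basename(os.path.dirname(p))
            for p in glob.glob('%s/*/bridge' % VNIC_DIR)]


def bridge_nics(bridge):
    # Member ports of a bridge show up as entries under brif/.
    return [os.path.basename(p)
            for p in glob.glob('%s/%s/brif/*' % (VNIC_DIR, bridge))]


def is_bridge_member(nic):
    return any(nic in bridge_nics(b) for b in bridges())


# Prints True on a host where some bridge enslaves eth0, False otherwise
# (including on machines with no bridges at all).
print(is_bridge_member('eth0'))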
https://github.com/juju/charm-helpers/blob/25b740578385d15b38f11bed8e4b6e732bdfb7c6/charmhelpers/contrib/network/ip.py#L440-L446
import glob import re import subprocess import six import socket from functools import partial from charmhelpers.fetch import apt_install, apt_update from charmhelpers.core.hookenv import ( config, log, network_get_primary_address, unit_get, WARNING, NoNetworkBinding, ) from charmhelpers.core.host import ( lsb_release, CompareHostReleases, ) try: import netifaces except ImportError: apt_update(fatal=True) if six.PY2: apt_install('python-netifaces', fatal=True) else: apt_install('python3-netifaces', fatal=True) import netifaces try: import netaddr except ImportError: apt_update(fatal=True) if six.PY2: apt_install('python-netaddr', fatal=True) else: apt_install('python3-netaddr', fatal=True) import netaddr def _validate_cidr(network): try: netaddr.IPNetwork(network) except (netaddr.core.AddrFormatError, ValueError): raise ValueError("Network (%s) is not in CIDR presentation format" % network) def no_ip_found_error_out(network): errmsg = ("No IP address found in network(s): %s" % network) raise ValueError(errmsg) def _get_ipv6_network_from_address(address): if address['addr'].startswith('fe80') or address['addr'] == "::1": return None prefix = address['netmask'].split("/") if len(prefix) > 1: netmask = prefix[1] else: netmask = address['netmask'] return netaddr.IPNetwork("%s/%s" % (address['addr'], netmask)) def get_address_in_network(network, fallback=None, fatal=False): if network is None: if fallback is not None: return fallback if fatal: no_ip_found_error_out(network) else: return None networks = network.split() or [network] for network in networks: _validate_cidr(network) network = netaddr.IPNetwork(network) for iface in netifaces.interfaces(): try: addresses = netifaces.ifaddresses(iface) except ValueError: continue if network.version == 4 and netifaces.AF_INET in addresses: for addr in addresses[netifaces.AF_INET]: cidr = netaddr.IPNetwork("%s/%s" % (addr['addr'], addr['netmask'])) if cidr in network: return str(cidr.ip) if network.version == 6 and netifaces.AF_INET6 in addresses: for addr in addresses[netifaces.AF_INET6]: cidr = _get_ipv6_network_from_address(addr) if cidr and cidr in network: return str(cidr.ip) if fallback is not None: return fallback if fatal: no_ip_found_error_out(network) return None def is_ipv6(address): try: address = netaddr.IPAddress(address) except netaddr.AddrFormatError: return False return address.version == 6 def is_address_in_network(network, address): try: network = netaddr.IPNetwork(network) except (netaddr.core.AddrFormatError, ValueError): raise ValueError("Network (%s) is not in CIDR presentation format" % network) try: address = netaddr.IPAddress(address) except (netaddr.core.AddrFormatError, ValueError): raise ValueError("Address (%s) is not in correct presentation format" % address) if address in network: return True else: return False def _get_for_address(address, key): address = netaddr.IPAddress(address) for iface in netifaces.interfaces(): addresses = netifaces.ifaddresses(iface) if address.version == 4 and netifaces.AF_INET in addresses: addr = addresses[netifaces.AF_INET][0]['addr'] netmask = addresses[netifaces.AF_INET][0]['netmask'] network = netaddr.IPNetwork("%s/%s" % (addr, netmask)) cidr = network.cidr if address in cidr: if key == 'iface': return iface else: return addresses[netifaces.AF_INET][0][key] if address.version == 6 and netifaces.AF_INET6 in addresses: for addr in addresses[netifaces.AF_INET6]: network = _get_ipv6_network_from_address(addr) if not network: continue cidr = network.cidr if address in cidr: if key == 'iface': 
return iface elif key == 'netmask' and cidr: return str(cidr).split('/')[1] else: return addr[key] return None get_iface_for_address = partial(_get_for_address, key='iface') get_netmask_for_address = partial(_get_for_address, key='netmask') def resolve_network_cidr(ip_address): netmask = get_netmask_for_address(ip_address) return str(netaddr.IPNetwork("%s/%s" % (ip_address, netmask)).cidr) def format_ipv6_addr(address): if is_ipv6(address): return "[%s]" % address return None def is_ipv6_disabled(): try: result = subprocess.check_output( ['sysctl', 'net.ipv6.conf.all.disable_ipv6'], stderr=subprocess.STDOUT, universal_newlines=True) except subprocess.CalledProcessError: return True return "net.ipv6.conf.all.disable_ipv6 = 1" in result def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False, fatal=True, exc_list=None): if '/' in iface: iface = iface.split('/')[-1] if not exc_list: exc_list = [] try: inet_num = getattr(netifaces, inet_type) except AttributeError: raise Exception("Unknown inet type '%s'" % str(inet_type)) interfaces = netifaces.interfaces() if inc_aliases: ifaces = [] for _iface in interfaces: if iface == _iface or _iface.split(':')[0] == iface: ifaces.append(_iface) if fatal and not ifaces: raise Exception("Invalid interface '%s'" % iface) ifaces.sort() else: if iface not in interfaces: if fatal: raise Exception("Interface '%s' not found " % (iface)) else: return [] else: ifaces = [iface] addresses = [] for netiface in ifaces: net_info = netifaces.ifaddresses(netiface) if inet_num in net_info: for entry in net_info[inet_num]: if 'addr' in entry and entry['addr'] not in exc_list: addresses.append(entry['addr']) if fatal and not addresses: raise Exception("Interface '%s' doesn't have any %s addresses." % (iface, inet_type)) return sorted(addresses) get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET') def get_iface_from_addr(addr): for iface in netifaces.interfaces(): addresses = netifaces.ifaddresses(iface) for inet_type in addresses: for _addr in addresses[inet_type]: _addr = _addr['addr'] ll_key = re.compile("(.+)%.*") raw = re.match(ll_key, _addr) if raw: _addr = raw.group(1) if _addr == addr: log("Address '%s' is configured on iface '%s'" % (addr, iface)) return iface msg = "Unable to infer net iface on which '%s' is configured" % (addr) raise Exception(msg) def sniff_iface(f): def iface_sniffer(*args, **kwargs): if not kwargs.get('iface', None): kwargs['iface'] = get_iface_from_addr(unit_get('private-address')) return f(*args, **kwargs) return iface_sniffer @sniff_iface def get_ipv6_addr(iface=None, inc_aliases=False, fatal=True, exc_list=None, dynamic_only=True): addresses = get_iface_addr(iface=iface, inet_type='AF_INET6', inc_aliases=inc_aliases, fatal=fatal, exc_list=exc_list) if addresses: global_addrs = [] for addr in addresses: key_scope_link_local = re.compile("^fe80::..(.+)%(.+)") m = re.match(key_scope_link_local, addr) if m: eui_64_mac = m.group(1) iface = m.group(2) else: global_addrs.append(addr) if global_addrs: cmd = ['ip', 'addr', 'show', iface] out = subprocess.check_output( cmd).decode('UTF-8', errors='replace') if dynamic_only: key = re.compile("inet6 (.+)/[0-9]+ scope global.* dynamic.*") else: key = re.compile("inet6 (.+)/[0-9]+ scope global.*") addrs = [] for line in out.split('\n'): line = line.strip() m = re.match(key, line) if m and 'temporary' not in line: for addr in global_addrs: if m.group(1) == addr: if not dynamic_only or m.group(1).endswith(eui_64_mac): addrs.append(addr) if addrs: return addrs if fatal: raise 
Exception("Interface '%s' does not have a scope global " "non-temporary ipv6 address." % iface) return [] def get_bridges(vnic_dir='/sys/devices/virtual/net'): b_regex = "%s/*/bridge" % vnic_dir return [x.replace(vnic_dir, '').split('/')[1] for x in glob.glob(b_regex)] def get_bridge_nics(bridge, vnic_dir='/sys/devices/virtual/net'): brif_regex = "%s/%s/brif/*" % (vnic_dir, bridge) return [x.split('/')[-1] for x in glob.glob(brif_regex)]
Apache License 2.0
luci/luci-py
appengine/components/components/auth/api.py
AuthDB.is_in_ip_whitelist
python
def is_in_ip_whitelist(self, whitelist_name, ip, warn_if_missing=True):
    subnets = self._ip_whitelists.get(whitelist_name)
    if not subnets:
        if warn_if_missing:
            logging.error('Unknown IP whitelist: %s', whitelist_name)
        return False
    return any(
        ipaddr.is_in_subnet(ip, ipaddr.subnet_from_string(net))
        for net in subnets)
Returns True if the given IP belongs to the given IP whitelist.

Missing IP whitelists are considered empty.

Args:
  whitelist_name: name of the IP whitelist (e.g. 'bots').
  ip: instance of ipaddr.IP.
  warn_if_missing: if True and IP whitelist is missing, logs a warning.
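A stdlib re-expression of the whitelist check, with the component's own ipaddr helpers swapped for Python 3's ipaddress module and a made-up whitelist table; it mirrors the documented behaviour that missing whitelists are treated as empty.

import ipaddress
import logging

# Illustrative whitelist data; real deployments load this from the AuthDB.
ip_whitelists = {
    'bots': ['192.168.0.0/24', '10.0.0.0/8'],
}


def is_in_ip_whitelist(whitelist_name, ip, warn_if_missing=True):
    subnets = ip_whitelists.get(whitelist_name)
    if not subnets:
        if warn_if_missing:
            logging.error('Unknown IP whitelist: %s', whitelist_name)
        return False
    addr = ipaddress.ip_address(ip)
    return any(addr in ipaddress.ip_network(net) for net in subnets)


print(is_in_ip_whitelist('bots', '10.1.2.3'))     # -> True
print(is_in_ip_whitelist('bots', '8.8.8.8'))      # -> False
print(is_in_ip_whitelist('unknown', '8.8.8.8'))   # -> False (and logs an error)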
https://github.com/luci/luci-py/blob/0417a3f6d73d0bcb92626dafe277ef79214c9087/appengine/components/components/auth/api.py#L777-L796
import collections import functools import json import logging import os import re import threading import time from six.moves import urllib from google.appengine.api import app_identity from google.appengine.api import oauth from google.appengine.api import urlfetch from google.appengine.ext import ndb from google.appengine.runtime import apiproxy_errors from components.datastore_utils import config as ds_config from components import utils from . import config from . import ipaddr from . import model from . import realms from . import replication from .proto import delegation_pb2 from .proto import realms_pb2 from .proto import security_config_pb2 __all__ = [ 'AuthDetails', 'AuthenticationError', 'AuthorizationError', 'Error', 'GroupListing', 'Permission', 'SecretKey', 'autologin', 'disable_process_cache', 'get_auth_details', 'get_current_identity', 'get_delegation_token', 'get_peer_identity', 'get_peer_ip', 'get_process_cache_expiration_sec', 'get_realm_data', 'get_request_auth_db', 'get_secret', 'get_web_client_id', 'has_permission', 'has_permission_dryrun', 'is_admin', 'is_group_member', 'is_in_ip_whitelist', 'is_internal_domain', 'is_superuser', 'legacy_realm', 'list_group', 'new_auth_details', 'public', 'require', 'root_realm', 'should_enforce_realm_acl', 'validate_realm_name', 'verify_ip_whitelisted', 'warmup', ] _additional_client_ids_cb = None _process_cache_expiration_sec = 30 _lazy_bootstrap_ran = False _auth_db_lock = threading.Lock() _auth_db = None _auth_db_expiration = None _auth_db_fetching_thread = None _auth_db_fetch_lock = threading.Lock() _thread_local = threading.local() TOKEN_INFO_ENDPOINT = 'https://www.googleapis.com/oauth2/v1/tokeninfo' API_EXPLORER_CLIENT_ID = '292824132082.apps.googleusercontent.com' class Error(Exception): def __init__(self, message=None): super(Error, self).__init__(message or self.__doc__) class AuthenticationError(Error): class AuthorizationError(Error): class RealmsError(Error): SecretKey = collections.namedtuple('SecretKey', ['name']) CachedGroup = collections.namedtuple('CachedGroup', [ 'members', 'globs', 'nested', 'description', 'owners', 'created_ts', 'created_by', 'modified_ts', 'modified_by', ]) GroupListing = collections.namedtuple('GroupListing', [ 'members', 'globs', 'nested', ]) OAuthConfig = collections.namedtuple('OAuthConfig', [ 'oauth_client_id', 'oauth_client_secret', 'oauth_additional_client_ids', ]) CachedRealm = collections.namedtuple('CachedRealm', [ 'per_permission_sets', 'data', ]) PrincipalsSet = collections.namedtuple('PrincipalsSet', [ 'groups', 'idents', ]) class AuthDB(object): @staticmethod def empty(): return AuthDB( from_what='empty', replication_state=model.AuthReplicationState(), oauth_config=OAuthConfig('', '', []), token_server_url='', groups={}, ip_whitelist_assignments={}, ip_whitelists={}, realms_pb=realms_pb2.Realms(api_version=realms.API_VERSION), security_config_blob=None, additional_client_ids=[]) @staticmethod def from_entities( replication_state, global_config, groups, ip_whitelist_assignments, ip_whitelists, additional_client_ids ): cached_groups = {} for entity in (groups or []): cached_groups[entity.key.string_id()] = CachedGroup( members=frozenset(m.to_bytes() for m in entity.members), globs=tuple(entity.globs or ()), nested=tuple(entity.nested or ()), description=entity.description, owners=entity.owners, created_ts=entity.created_ts, created_by=entity.created_by, modified_ts=entity.modified_ts, modified_by=entity.modified_by) return AuthDB( from_what='from_entities', 
replication_state=replication_state, oauth_config=OAuthConfig( global_config.oauth_client_id, global_config.oauth_client_secret, global_config.oauth_additional_client_ids), token_server_url=global_config.token_server_url, groups=cached_groups, ip_whitelist_assignments={ e.identity: e.ip_whitelist for e in ip_whitelist_assignments.assignments }, ip_whitelists={e.key.id(): list(e.subnets) for e in ip_whitelists}, realms_pb=None, security_config_blob=global_config.security_config, additional_client_ids=additional_client_ids) @staticmethod def from_proto(replication_state, auth_db, additional_client_ids): cached_groups = {} for gr in auth_db.groups: cached_groups[gr.name] = CachedGroup( members=frozenset(gr.members), globs=tuple(model.IdentityGlob.from_bytes(x) for x in gr.globs), nested=tuple(gr.nested), description=gr.description, owners=gr.owners or model.ADMIN_GROUP, created_ts=utils.timestamp_to_datetime(gr.created_ts), created_by=model.Identity.from_bytes(gr.created_by), modified_ts=utils.timestamp_to_datetime(gr.modified_ts), modified_by=model.Identity.from_bytes(gr.modified_by)) return AuthDB( from_what='from_proto', replication_state=replication_state, oauth_config=OAuthConfig( oauth_client_id=auth_db.oauth_client_id, oauth_client_secret=auth_db.oauth_client_secret, oauth_additional_client_ids=list( auth_db.oauth_additional_client_ids)), token_server_url=auth_db.token_server_url, groups=cached_groups, ip_whitelist_assignments={ model.Identity.from_bytes(e.identity): e.ip_whitelist for e in auth_db.ip_whitelist_assignments }, ip_whitelists={ e.name: list(e.subnets) for e in auth_db.ip_whitelists }, realms_pb=auth_db.realms if auth_db.HasField('realms') else None, security_config_blob=auth_db.security_config, additional_client_ids=additional_client_ids) def __init__( self, from_what, replication_state, oauth_config, token_server_url, groups, ip_whitelist_assignments, ip_whitelists, realms_pb, security_config_blob, additional_client_ids ): self._from_what = from_what self._replication_state = replication_state self._oauth_config = oauth_config self._token_server_url = token_server_url self._groups = groups self._ip_whitelists = ip_whitelists self._ip_whitelist_assignments = ip_whitelist_assignments self._secrets_lock = threading.Lock() self._secrets = {} client_ids = [] if self._oauth_config.oauth_client_id: client_ids.append(self._oauth_config.oauth_client_id) if self._oauth_config.oauth_additional_client_ids: client_ids.extend(self._oauth_config.oauth_additional_client_ids) client_ids.append(API_EXPLORER_CLIENT_ID) if additional_client_ids: client_ids.extend(additional_client_ids) self._allowed_client_ids = set(c for c in client_ids if c) self._use_realms = realms_pb is not None self._permissions = {} self._realms = {} if realms_pb: with _all_perms_lock: registered_perms = list(_all_perms) self._init_realms(realms_pb, registered_perms) self._internal_domains_re = None if security_config_blob and security_config_blob != 'empty': self._init_security_config(security_config_blob) self._lock = threading.Lock() self._members_idx = None self._globs_idx = None self._nested_idx = None self._owned_idx = None def _init_realms(self, realms_pb, registered_perms): assert isinstance(realms_pb, realms_pb2.Realms), realms_pb assert not self._permissions assert not self._realms logging.info('Loading realms...') if realms_pb.api_version != realms.API_VERSION: raise RealmsError( 'Realms proto has api_version %d not compatible with this service ' '(it expects %d)' % (realms_pb.api_version, 
realms.API_VERSION)) for idx, perm in enumerate(realms_pb.permissions): self._permissions[perm.name] = idx for p in registered_perms: if p not in self._permissions: logging.warning( 'Permission %r is not in the AuthDB rev %d', p, self.auth_db_rev) for realm in realms_pb.realms: per_permission_sets = {} for b in realm.bindings: groups, idents = [], [] for p in b.principals: if p.startswith('group:'): groups.append(p[6:]) else: idents.append(p) principals_set = PrincipalsSet(tuple(groups), frozenset(idents)) for perm_idx in b.permissions: per_permission_sets.setdefault(perm_idx, []).append(principals_set) self._realms[realm.name] = CachedRealm(per_permission_sets, realm.data) logging.info('Loaded %d realms', len(self._realms)) def _init_security_config(self, blob): msg = security_config_pb2.SecurityConfig.FromString(blob) if msg.internal_service_regexp: merged = '|'.join('(%s)' % r for r in msg.internal_service_regexp) self._internal_domains_re = re.compile('^(%s)$' % merged) def _indexes(self): with self._lock: if self._members_idx is not None: assert self._globs_idx is not None assert self._nested_idx is not None assert self._owned_idx is not None return ( self._members_idx, self._globs_idx, self._nested_idx, self._owned_idx) logging.info('Building in-memory indexes...') members_idx = collections.defaultdict(list) globs_idx = collections.defaultdict(list) nested_idx = collections.defaultdict(list) owned_idx = collections.defaultdict(list) for name, group in sorted(self._groups.items()): for member in group.members: members_idx[member].append(name) for glob in group.globs: globs_idx[glob].append(name) for nested in group.nested: nested_idx[nested].append(name) owned_idx[group.owners].append(name) logging.info('Finished building in-memory indexes') self._members_idx = members_idx self._globs_idx = collections.OrderedDict(sorted(globs_idx.items())) self._nested_idx = nested_idx self._owned_idx = owned_idx return members_idx, globs_idx, nested_idx, owned_idx @property def auth_db_rev(self): return self._replication_state.auth_db_rev @property def primary_id(self): return self._replication_state.primary_id @property def primary_url(self): return self._replication_state.primary_url @property def token_server_url(self): return self._token_server_url def is_group_member(self, group_name, identity): ident_as_bytes = identity.to_bytes() current = [] visited = set() def is_member(group_name): if group_name == model.GROUP_ALL: return True group_obj = self._groups.get(group_name) if not group_obj: logging.warning( 'Querying unknown group: %s via %s', group_name, current) return False if group_name in current: logging.warning( 'Cycle in a group graph: %s via %s', group_name, current) return False if group_name in visited: return False current.append(group_name) try: if ident_as_bytes in group_obj.members: return True if any(glob.match(identity) for glob in group_obj.globs): return True return any(is_member(nested) for nested in group_obj.nested) finally: current.pop() visited.add(group_name) return is_member(group_name) def get_group(self, group_name): g = self._groups.get(group_name) if not g: return None return model.AuthGroup( key=model.group_key(group_name), members=[model.Identity.from_bytes(m) for m in sorted(g.members)], globs=list(g.globs), nested=list(g.nested), description=g.description, owners=g.owners, created_ts=g.created_ts, created_by=g.created_by, modified_ts=g.modified_ts, modified_by=g.modified_by) def list_group(self, group_name, recursive=True): members = set() globs = set() nested = 
set() def accumulate(group_obj): members.update(group_obj.members) globs.update(group_obj.globs) nested.update(group_obj.nested) def finalize_listing(): return GroupListing( members=[model.Identity.from_bytes(m) for m in members], globs=list(globs), nested=list(nested)) if not recursive: group_obj = self._groups.get(group_name) if group_obj: accumulate(group_obj) return finalize_listing() visited = set() def visit_group(name): group_obj = self._groups.get(name) if not group_obj or name in visited: return visited.add(name) accumulate(group_obj) for nested in group_obj.nested: visit_group(nested) visit_group(group_name) return finalize_listing() def fetch_groups_with_member(self, ident): return {g for g in self._groups if self.is_group_member(g, ident)} def get_group_names(self): return sorted(self._groups) def get_group_names_with_prefix(self, prefix): return sorted(g for g in self._groups if g.startswith(prefix)) def get_relevant_subgraph(self, principal): members_idx, globs_idx, nested_idx, owned_idx = self._indexes() graph = Graph() add_node = graph.add_node add_edge = graph.add_edge if isinstance(principal, basestring) and principal not in self._groups: return graph def traverse(group): group_id, added = add_node(group) if added: for supergroup in nested_idx.get(group, ()): add_edge(group_id, Graph.IN, traverse(supergroup)) for owned in owned_idx.get(group, ()): add_edge(group_id, Graph.OWNS, traverse(owned)) return group_id if isinstance(principal, model.Identity): graph.root_id, _ = add_node(principal) for glob, groups_that_have_glob in globs_idx.items(): if glob.match(principal): glob_id, _ = add_node(glob) add_edge(graph.root_id, Graph.IN, glob_id) for group in groups_that_have_glob: add_edge(glob_id, Graph.IN, traverse(group)) for group in members_idx.get(principal.to_bytes(), ()): add_edge(graph.root_id, Graph.IN, traverse(group)) elif isinstance(principal, model.IdentityGlob): graph.root_id, _ = add_node(principal) for group in globs_idx.get(principal, ()): add_edge(graph.root_id, Graph.IN, traverse(group)) elif isinstance(principal, basestring): graph.root_id = traverse(principal) else: raise TypeError('Wrong "principal" type %s' % type(principal)) return graph def get_secret(self, key): with self._secrets_lock: if key.name not in self._secrets: self._secrets[key.name] = model.AuthSecret.bootstrap(key.name) entity = self._secrets[key.name] return list(entity.values)
Apache License 2.0
netease/airtest
airtest/devsuit.py
DeviceSuit.mustFind
python
def mustFind(self, imgfile):
    pt = self.find(imgfile)
    if not pt:
        raise RuntimeError("Image[%s] not found" %(imgfile))
    return pt
Raise Error if image not found
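A toy stand-in showing the contract of mustFind: find() may return None, and mustFind() turns that into a hard failure. FakeDevice and the image names are invented for illustration and are not part of airtest.

class FakeDevice(object):
    def __init__(self, known_images):
        self._known = known_images

    def find(self, imgfile):
        # Pretend the image was located at a fixed point if we "know" it.
        return (100, 200) if imgfile in self._known else None

    def mustFind(self, imgfile):
        pt = self.find(imgfile)
        if not pt:
            raise RuntimeError("Image[%s] not found" % (imgfile,))
        return pt


dev = FakeDevice({'start_button.png'})
print(dev.mustFind('start_button.png'))    # -> (100, 200)
try:
    dev.mustFind('missing.png')
except RuntimeError as exc:
    print(exc)                             # -> Image[missing.png] not found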
https://github.com/netease/airtest/blob/6f5b952c930629b92487bfd8cf1394c96e38bd0a/airtest/devsuit.py#L311-L318
import collections import os import platform import time import threading import json import cv2 import aircv as ac from . import base from . import proto from . import patch from .image import sift as imtsift from .image import template as imttemplate log = base.getLogger('devsuit') class DeviceSuit(object): def __init__(self, devtype, dev, logfile='log/airtest.log'): self.dev = dev self._devtype = devtype self._inside_depth = 0 self._image_exts = ['.jpg', '.png'] self._image_dirs = ['.', 'image'] self._rotation = None self._tmpdir = 'tmp' self._click_timeout = 20.0 self._delay_after_click = 0.5 self._screen_resolution = None self._snapshot_file = None self._keep_capture = False self._logfile = logfile self._loglock = threading.Lock() self._operation_mark = False self._image_match_method = 'auto' self._threshold = 0.3 if self._logfile: logdir = os.path.dirname(logfile) or '.' if not os.path.exists(logdir): os.makedirs(logdir) if os.path.exists(logfile): backfile = logfile+'.'+time.strftime('%Y%m%d%H%M%S') os.rename(logfile, backfile) def _snapshot_method(method): if method and self._devtype == 'android': self.dev._snapshot_method = method self._snapshot_method = _snapshot_method def __getattribute__(self, name): v = object.__getattribute__(self, name) if isinstance(v, collections.Callable): objdict = object.__getattribute__(self, '__dict__') def _wrapper(*args, **kwargs): objdict['_inside_depth'] += 1 ret = v(*args, **kwargs) if objdict['_inside_depth'] == 1 and not v.__name__.startswith('_') and not v.__name__ == 'log': self.log(proto.TAG_FUNCTION, dict(name=v.__name__, args=args, kwargs=kwargs)) objdict['_inside_depth'] -= 1 return ret return _wrapper return v def _imfind(self, bgimg, search): method = self._image_match_method print 'match-method:', method imsrc, imsch = ac.imread(bgimg), ac.imread(search) if method == 'auto': point = ac.find(imsrc, imsch) elif method == 'template': res = ac.find_template(imsrc, imsch, self._threshold) if res: point, score = res print 'match result:', point, score return point return None elif method == 'sift': point = imtsift.find(search, bgimg) else: raise RuntimeError("Unknown image match method: %s" %(method)) return point def _imfindall(self, bgimg, search, maxcnt, sort): if not maxcnt: maxcnt = 0 method = self._image_match_method imsrc, imsch = ac.imread(bgimg), ac.imread(search) if method == 'auto': points = ac.find_all(imsrc, imsch, maxcnt=5) elif method == 'template': points = imttemplate.findall(search, bgimg, self._threshold, maxcnt=maxcnt) elif method == 'sift': points = imtsift.findall(search, bgimg, maxcnt=maxcnt) else: raise RuntimeError("Unknown image match method: %s" %(method)) if sort: def cmpy((x0, y0), (x1, y1)): return y1<y0 def cmpx((x0, y0), (x1, y1)): return x1<x1 m = {'x': cmpx, 'y': cmpy} points.sort(cmp=m[sort]) return points def rotation(self): if self._rotation: return self._rotation if hasattr(self.dev, 'rotation'): return self.dev.rotation() if self._devtype == 'windows': return proto.ROTATION_0 return proto.ROTATION_0 def _fixPoint(self, (x, y)): w, h = self.shape() if self.rotation() % 2 == 1: w, h = h, w if isinstance(x, float) and x <= 1.0: x = int(w*x) if isinstance(y, float) and y <= 1.0: y = int(h*y) return (x, y) def _searchImage(self, filename): if isinstance(filename, unicode) and platform.system() == 'Windows': filename = filename.encode('gbk') basename, ext = os.path.splitext(filename) exts = [ext] if ext else self._image_exts for folder in self._image_dirs: for ext in exts: fullpath = os.path.join(folder, 
basename+ext) if os.path.exists(fullpath): return fullpath raise RuntimeError('Image file(%s) not found in %s' %(filename, self._image_dirs)) def _PS2Point(self, PS): if isinstance(PS, basestring): PS = self.find(PS) if not PS: return None (x, y) = self._fixPoint(PS) return (x, y) def _saveScreen(self, filename, random_name=True, tempdir=True): if self._snapshot_file and self._keep_capture: return self._snapshot_file if random_name: filename = base.random_name(filename) if tempdir: filename = os.path.join(self._tmpdir, filename) parent_dir = os.path.dirname(filename) or '.' if not os.path.exists(parent_dir): base.makedirs(parent_dir) self.dev.snapshot(filename) if tempdir: self.log(proto.TAG_SNAPSHOT, dict(filename=filename)) self._snapshot_file = filename return filename def log(self, tag, message): if not self._logfile: return self._loglock.acquire() timestamp = time.time() try: dirname = os.path.dirname(self._logfile) or '.' if not os.path.exists(dirname): os.path.makedirs(dirname) except: pass with open(self._logfile, 'a') as file: data = dict(timestamp=int(timestamp), tag=tag, data=message) file.write(json.dumps(data) + '\n') self._loglock.release() def keepCapture(self): self._keep_capture = True def releaseCapture(self): self._keep_capture = False def takeSnapshot(self, filename): savefile = self._saveScreen(filename, random_name=False, tempdir=False) return savefile def globalSet(self, *args, **kwargs): if len(args) > 0: m = args[0] assert isinstance(m, dict) else: m = kwargs for k,v in m.items(): key = '_'+k if hasattr(self, key): item = getattr(self, key) if callable(item): item(v) else: setattr(self, key, v) else: print 'not have such setting: %s' %(k) def globalGet(self, key): if hasattr(self, '_'+key): return getattr(self, '_'+key) return None def startApp(self, appname, activity): self.dev.start_app(appname, activity) def stopApp(self, appname): self.dev.stop_app(appname) def find(self, imgfile): filepath = self._searchImage(imgfile) log.debug('Locate image path: %s', filepath) screen = self._saveScreen('screen-{t}-XXXX.png'.format(t=time.strftime("%y%m%d%H%M%S"))) if self._screen_resolution: ow, oh = self._screen_resolution cw, ch = self.shape() (ratew, rateh) = cw/float(ow), ch/float(oh) im = cv2.imread(filepath, cv2.IMREAD_UNCHANGED) nim = cv2.resize(im, (0, 0), fx=ratew, fy=rateh) new_name = base.random_name('resize-{t}-XXXX.png'.format(t=time.strftime("%y%m%d%H%M%S"))) filepath = new_name = os.path.join(self._tmpdir, new_name) cv2.imwrite(new_name, nim) pt = self._imfind(screen, filepath) return pt
BSD 3-Clause New or Revised License
obaraemmanuel/formation
studio/main.py
StudioApplication.new_action
python
def new_action(self, action: Action): self._redo_stack.clear() if len(self._undo_stack) >= pref.get("studio::undo_depth") and pref.get("studio::use_undo_depth"): self._undo_stack.pop(0) self._undo_stack.append(action)
Register an undo/redo point :param action: An action object implementing undo and redo methods :return:
https://github.com/obaraemmanuel/formation/blob/31244cbceb1bb405007f5f051ae2102ab021e779/studio/main.py#L257-L266
import functools import os import sys import time import webbrowser from tkinter import filedialog, Toplevel from studio.feature.design import Designer from studio.feature import FEATURES, StylePane from studio.feature._base import BaseFeature, FeaturePane from studio.tools import ToolManager from studio.ui.widgets import SideBar from studio.ui.about import about_window from studio.preferences import Preferences, open_preferences from studio.resource_loader import ResourceLoader from studio.updates import Updater import studio from hoverset.ui.widgets import Application, Frame, PanedWindow, Button, ActionNotifier from hoverset.ui.icons import get_icon_image from hoverset.data.images import load_tk_image from hoverset.util.execution import Action from hoverset.data.utils import get_resource_path from hoverset.ui.dialogs import MessageDialog from hoverset.ui.menu import MenuUtils, EnableIf, dynamic_menu, LoadLater from hoverset.data import actions from hoverset.data.keymap import ShortcutManager, CharKey, KeyMap, BlankKey from hoverset.platform import platform_is, MAC from formation import AppBuilder from formation.formats import get_file_types pref = Preferences.acquire() class StudioApplication(Application): ICON_PATH = get_resource_path(studio, "resources/images/formation_icon.png") THEME_PATH = pref.get("resource::theme") def __init__(self, master=None, **cnf): super().__init__(master, **cnf) icon_image = load_tk_image(self.ICON_PATH) self.load_styles(self.THEME_PATH) self.iconphoto(True, icon_image) self.pref = pref self._restore_position() self.title('Formation Studio') self.protocol('WM_DELETE_WINDOW', self._on_close) self.shortcuts = ShortcutManager(self, pref) self.shortcuts.bind_all() self._register_actions() self._toolbar = Frame(self, **self.style.surface, height=30) self._toolbar.pack(side="top", fill="x") self._toolbar.pack_propagate(0) self._statusbar = Frame(self, **self.style.surface, height=20) self._statusbar.pack(side="bottom", fill="x") self._statusbar.pack_propagate(0) body = Frame(self, **self.style.surface) body.pack(fill="both", expand=True, side="top") self._right_bar = SideBar(body) self._right_bar.pack(side="right", fill="y") self._left_bar = SideBar(body) self._left_bar.pack(side="left", fill="y") self._pane = PanedWindow(body, **self.style.pane_horizontal) self._pane.pack(side="left", fill="both", expand=True) self._left = FeaturePane(self._pane, **self.style.pane_vertical) self._center = PanedWindow(self._pane, **self.style.pane_vertical) self._right = FeaturePane(self._pane, **self.style.pane_vertical) self._bin = [] self._clipboard = None self._undo_stack = [] self._redo_stack = [] self.current_preview = None self._pane.add(self._left, minsize=320, sticky='nswe', width=320) self._pane.add(self._center, minsize=400, width=16000, sticky='nswe') self._pane.add(self._right, minsize=320, sticky='nswe', width=320) self._panes = { "left": (self._left, self._left_bar), "right": (self._right, self._right_bar), "center": (self._center, None) } icon = get_icon_image self.actions = ( ("Delete", icon("delete", 20, 20), lambda e: self.delete(), "Delete selected widget"), ("Undo", icon("undo", 20, 20), lambda e: self.undo(), "Undo action"), ("Redo", icon("redo", 20, 20), lambda e: self.redo(), "Redo action"), ("Cut", icon("cut", 20, 20), lambda e: self.cut(), "Cut selected widget"), ("separator",), ("Fullscreen", icon("image_editor", 20, 20), lambda e: self.close_all(), "Design mode"), ("Separate", icon("separate", 20, 20), lambda e: self.features_as_windows(), "Open 
features in window mode"), ("Dock", icon("flip_horizontal", 15, 15), lambda e: self.features_as_docked(), "Dock all features"), ("separator",), ("New", icon("add", 20, 20), lambda e: self.open_new(), "New design"), ("Save", icon("save", 20, 20), lambda e: self.save(), "Save design"), ("Preview", icon("play", 20, 20), lambda e: self.preview(), "Preview design"), ) self.init_toolbar() self.selected = None self.blank_img = blank_img = icon("blank", 14, 14) self.tool_manager = ToolManager(self) self.menu_template = (EnableIf( lambda: self.selected, ("separator",), ("command", "copy", icon("copy", 14, 14), actions.get('STUDIO_COPY'), {}), ("command", "duplicate", icon("copy", 14, 14), actions.get('STUDIO_DUPLICATE'), {}), EnableIf( lambda: self._clipboard is not None, ("command", "paste", icon("clipboard", 14, 14), actions.get('STUDIO_PASTE'), {}) ), ("command", "cut", icon("cut", 14, 14), actions.get('STUDIO_CUT'), {}), ("separator",), ("command", "delete", icon("delete", 14, 14), actions.get('STUDIO_DELETE'), {}), ),) self.menu_bar = MenuUtils.make_dynamic( (( ("cascade", "formation", None, None, {"menu": ( ("command", "Restart", None, actions.get('STUDIO_RESTART'), {}), ("separator", ), ("command", "About Formation", icon("formation", 14, 14), lambda: about_window(self), {}), ), "name": "apple"}), ) if platform_is(MAC) else ()) + ( ("cascade", "File", None, None, {"menu": ( ("command", "New", icon("add", 14, 14), actions.get('STUDIO_NEW'), {}), ("command", "Open", icon("folder", 14, 14), actions.get('STUDIO_OPEN'), {}), ("cascade", "Recent", icon("clock", 14, 14), None, {"menu": self._create_recent_menu()}), ("separator",), ("command", "Save", icon("save", 14, 14), actions.get('STUDIO_SAVE'), {}), ("command", "Save As", icon("save", 14, 14), actions.get('STUDIO_SAVE_AS'), {}), ("separator",), ("command", "Settings", icon("settings", 14, 14), actions.get('STUDIO_SETTINGS'), {}), ("command", "Restart", icon("blank", 14, 14), actions.get('STUDIO_RESTART'), {}), ("command", "Exit", icon("close", 14, 14), actions.get('STUDIO_EXIT'), {}), )}), ("cascade", "Edit", None, None, {"menu": ( EnableIf(lambda: len(self._undo_stack), ("command", "undo", icon("undo", 14, 14), actions.get('STUDIO_UNDO'), {})), EnableIf(lambda: len(self._redo_stack), ("command", "redo", icon("redo", 14, 14), actions.get('STUDIO_REDO'), {})), *self.menu_template, )}), ("cascade", "Code", None, None, {"menu": ( EnableIf( lambda: self.designer and self.designer.root_obj, ("command", "Preview design", icon("play", 14, 14), actions.get('STUDIO_PREVIEW'), {}), ("command", "close preview", icon("close", 14, 14), actions.get('STUDIO_PREVIEW_CLOSE'), {}) ) )}), ("cascade", "View", None, None, {"menu": ( ("command", "show all", blank_img, actions.get('FEATURE_SHOW_ALL'), {}), ("command", "close all", icon("close", 14, 14), actions.get('FEATURE_CLOSE_ALL'), {}), ("command", "close all on the right", blank_img, actions.get('FEATURE_CLOSE_RIGHT'), {}), ("command", "close all on the left", blank_img, actions.get('FEATURE_CLOSE_LEFT'), {}), ("separator",), ("command", "Undock all windows", blank_img, actions.get('FEATURE_UNDOCK_ALL'), {}), ("command", "Dock all windows", blank_img, actions.get('FEATURE_DOCK_ALL'), {}), ("separator",), LoadLater(self.get_features_as_menu), ("separator",), ("command", "Save window positions", blank_img, actions.get('FEATURE_SAVE_POS'), {}) )}), ("cascade", "Tools", None, None, {"menu": (LoadLater(self.tool_manager.get_tools_as_menu), )}), ("cascade", "Help", None, None, {"menu": ( ("command", "Help", 
icon('dialog_info', 14, 14), actions.get('STUDIO_HELP'), {}), ("command", "Check for updates", icon("cloud", 14, 14), self._check_updates, {}), ("separator",), ("command", "About Formation", icon("formation", 14, 14), lambda: about_window(self), {}), )}) ), self, self.style, False) self.config(menu=self.menu_bar) if platform_is(MAC): self.createcommand("tk::mac::ShowPreferences", lambda: actions.get('STUDIO_SETTINGS').invoke()) self.createcommand("tk::mac::ShowHelp", lambda: actions.get('STUDIO_HELP').invoke()) self.createcommand("tk::mac::Quit", lambda: actions.get('STUDIO_EXIT').invoke()) self.features = [] self.designer = Designer(self._center, self) self._center.add(self.designer, sticky='nswe') for feature in FEATURES: self.install(feature) self.style_pane = self.get_feature(StylePane) self.tool_manager.initialize() self._startup() self._restore_position() self._exit_failures = 0 def _startup(self): on_startup = pref.get("studio::on_startup") if on_startup == "new": self.open_new() elif on_startup == "recent": latest = pref.get_latest() if latest: self.open_file(latest) def _get_window_state(self): try: if self.wm_attributes("-zoomed"): return 'zoomed' return 'normal' except: return self.state() def _set_window_state(self, state): try: self.state(state) except: self.wm_attributes('-zoomed', state == 'zoomed') def _save_position(self): pref.set("studio::pos", dict( width=self.width, height=self.height, x=self.winfo_x(), y=self.winfo_y(), state=self._get_window_state(), )) def _restore_position(self): pos = pref.get("studio::pos") self._set_window_state(pos.get("state")) if pos.get("state") == 'normal': self.geometry('{width}x{height}+{x}+{y}'.format(**pos))
MIT License
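A minimal, framework-free sketch of the depth-limited undo/redo bookkeeping that new_action above performs; the UNDO_DEPTH value and the SimpleAction class are illustrative stand-ins, not formation or hoverset APIs.

# Illustrative only: mirrors the behaviour of StudioApplication.new_action with plain lists.
UNDO_DEPTH = 60          # assumed stand-in for the "studio::undo_depth" preference
USE_UNDO_DEPTH = True    # assumed stand-in for the "studio::use_undo_depth" preference

class SimpleAction:
    def __init__(self, undo, redo):
        self.undo, self.redo = undo, redo

undo_stack, redo_stack = [], []

def new_action(action):
    redo_stack.clear()                                   # a fresh action invalidates the redo history
    if USE_UNDO_DEPTH and len(undo_stack) >= UNDO_DEPTH:
        undo_stack.pop(0)                                # drop the oldest entry to respect the depth limit
    undo_stack.append(action)

new_action(SimpleAction(undo=lambda: print("undo"), redo=lambda: print("redo")))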
opentoallctf/ota-challenge-bot
tests/slackwrapper_mock.py
SlackWrapperMock.archive_private_channel
python
def archive_private_channel(self, channel_id): pass
Archive a private channel
https://github.com/opentoallctf/ota-challenge-bot/blob/6deea8c059d28ddb86dce277158a39a5ad9517e4/tests/slackwrapper_mock.py#L119-L122
import json from tests.slack_test_response import SlackResponse from util.util import load_json class SlackWrapperMock: def __init__(self, api_key): self.server = None self.username = None self.user_id = None self.connected = True self.message_list = [] self.create_channel_private_response = self.read_test_file( "tests/testfiles/create_channel_private_response_default.json") self.create_channel_public_response = self.read_test_file( "tests/testfiles/create_channel_public_response_default.json") self.get_members_response = self.read_test_file("tests/testfiles/get_members_response_default.json") self.get_member_response = self.read_test_file("tests/testfiles/get_member_response_default.json") self.get_private_channels_response = self.read_test_file( "tests/testfiles/get_private_channels_response_default.json") self.get_public_channels_response = self.read_test_file( "tests/testfiles/get_public_channels_response_default.json") self.set_purpose_response = self.read_test_file("tests/testfiles/set_purpose_response_default.json") def read_test_file(self, file): with open(file, "r") as f: return f.read() def push_message(self, channel, msg): self.message_list.append(SlackResponse(msg, channel)) def read(self): return "mocked response" def invite_user(self, user, channel, is_private=False): return None def set_purpose(self, channel, purpose, is_private=False): return json.loads(self.set_purpose_response.replace("PURPOSE_PH", json.dumps(purpose))) def get_members(self): return json.loads(self.get_members_response) def get_member(self, user_id): return json.loads(self.get_member_response) def create_channel(self, name, is_private=False): if is_private: return json.loads(self.create_channel_private_response.replace("NAME_PH", name)) else: return json.loads(self.create_channel_public_response.replace("NAME_PH", name)) def rename_channel(self, channel_id, new_name, is_private=False): return None def get_channel_info(self, channel_id, is_private=False): return "" def update_channel_purpose_name(self, channel_id, new_name, is_private=False): channel_info = self.get_channel_info(channel_id, is_private) key = "group" if is_private else "channel" if channel_info: purpose = load_json(channel_info[key]['purpose']['value']) purpose['name'] = new_name self.set_purpose(channel_id, json.dumps(purpose), is_private) def post_message(self, channel_id, text, timestamp="", parse="full"): self.push_message(channel_id, str(text)) def post_message_with_react(self, channel_id, text, reaction, parse="full"): self.push_message(channel_id, text) def get_message(self, channel_id, timestamp): return None def update_message(self, channel_id, msg_timestamp, text, parse="full"): pass def get_public_channels(self): return json.loads(self.get_public_channels_response) def get_private_channels(self): return json.loads(self.get_private_channels_response)
MIT License
janpipek/physt
tests/plotting/conftest.py
default_kwargs
python
def default_kwargs() -> Dict[str, Any]: return {}
Arguments to add to each plotting method.
https://github.com/janpipek/physt/blob/bf6b05952b7d09bbbdae2b077f0989c392eac13e/tests/plotting/conftest.py#L7-L9
from typing import Any, Dict import pytest @pytest.fixture()
MIT License
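A hedged sketch of how a test module might consume or override the default_kwargs fixture above; the overriding value and the assertion are illustrative only, not physt test code.

import pytest

@pytest.fixture()
def default_kwargs():
    # Module-level override: every plotting test in this file now receives extra keyword arguments.
    return {"density": True}

def test_fixture_supplies_kwargs(default_kwargs):
    # The fixture is simply a dict of keyword arguments handed to each plotting call.
    assert default_kwargs == {"density": True}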
kivy/python-for-android
pythonforandroid/bootstrap.py
Bootstrap.all_bootstraps
python
def all_bootstraps(cls): forbidden_dirs = ('__pycache__', 'common') bootstraps_dir = join(dirname(__file__), 'bootstraps') result = set() for name in listdir(bootstraps_dir): if name in forbidden_dirs: continue filen = join(bootstraps_dir, name) if isdir(filen): result.add(name) return result
Find all the available bootstraps and return them.
https://github.com/kivy/python-for-android/blob/3a9bcabd91aa498982ab42ef7e59846f90df25d7/pythonforandroid/bootstrap.py#L194-L205
import functools import glob import importlib import os from os.path import (join, dirname, isdir, normpath, splitext, basename) from os import listdir, walk, sep import sh import shlex import shutil from pythonforandroid.logger import (shprint, info, logger, debug) from pythonforandroid.util import ( current_directory, ensure_dir, temp_directory, BuildInterruptingException) from pythonforandroid.recipe import Recipe def copy_files(src_root, dest_root, override=True, symlink=False): for root, dirnames, filenames in walk(src_root): for filename in filenames: subdir = normpath(root.replace(src_root, "")) if subdir.startswith(sep): subdir = subdir[1:] dest_dir = join(dest_root, subdir) if not os.path.exists(dest_dir): os.makedirs(dest_dir) src_file = join(root, filename) dest_file = join(dest_dir, filename) if os.path.isfile(src_file): if override and os.path.exists(dest_file): os.unlink(dest_file) if not os.path.exists(dest_file): if symlink: os.symlink(src_file, dest_file) else: shutil.copy(src_file, dest_file) else: os.makedirs(dest_file) default_recipe_priorities = [ "webview", "sdl2", "service_only" ] def _cmp_bootstraps_by_priority(a, b): def rank_bootstrap(bootstrap): if bootstrap.name in default_recipe_priorities: return default_recipe_priorities.index(bootstrap.name) + 1 return 0 rank_a = rank_bootstrap(a) rank_b = rank_bootstrap(b) if rank_a != rank_b: return (rank_b - rank_a) else: if a.name < b.name: return -1 else: return 1 class Bootstrap: name = '' jni_subdir = '/jni' ctx = None bootstrap_dir = None build_dir = None dist_name = None distribution = None recipe_depends = ['python3', 'android'] can_be_chosen_automatically = True @property def dist_dir(self): if self.distribution is None: raise BuildInterruptingException( 'Internal error: tried to access {}.dist_dir, but {}.distribution ' 'is None'.format(self, self)) return self.distribution.dist_dir @property def jni_dir(self): return self.name + self.jni_subdir def check_recipe_choices(self): recipes = [] built_recipes = self.ctx.recipe_build_order or [] for recipe in self.recipe_depends: if isinstance(recipe, (tuple, list)): for alternative in recipe: if alternative in built_recipes: recipes.append(alternative) break return sorted(recipes) def get_build_dir_name(self): choices = self.check_recipe_choices() dir_name = '-'.join([self.name] + choices) return dir_name def get_build_dir(self): return join(self.ctx.build_dir, 'bootstrap_builds', self.get_build_dir_name()) def get_dist_dir(self, name): return join(self.ctx.dist_dir, name) @property def name(self): modname = self.__class__.__module__ return modname.split(".", 2)[-1] def get_bootstrap_dirs(self): classes = self.__class__.__mro__[:-2] bootstrap_names = [cls.name for cls in classes] + ['common'] bootstrap_dirs = [ join(self.ctx.root_dir, 'bootstraps', bootstrap_name) for bootstrap_name in reversed(bootstrap_names) ] return bootstrap_dirs def _copy_in_final_files(self): if self.name == "sdl2": sdl2_recipe = Recipe.get_recipe("sdl2", self.ctx) sdl2_build_dir = sdl2_recipe.get_jni_dir() src_dir = join(sdl2_build_dir, "SDL", "android-project", "app", "src", "main", "java", "org", "libsdl", "app") target_dir = join(self.dist_dir, 'src', 'main', 'java', 'org', 'libsdl', 'app') info('Copying in SDL2 .java files from: ' + str(src_dir)) if not os.path.exists(target_dir): os.makedirs(target_dir) copy_files(src_dir, target_dir, override=True) def prepare_build_dir(self): bootstrap_dirs = self.get_bootstrap_dirs() self.build_dir = self.get_build_dir() for bootstrap_dir in 
bootstrap_dirs: copy_files(join(bootstrap_dir, 'build'), self.build_dir, symlink=self.ctx.symlink_bootstrap_files) with current_directory(self.build_dir): with open('project.properties', 'w') as fileh: fileh.write('target=android-{}'.format(self.ctx.android_api)) def prepare_dist_dir(self): ensure_dir(self.dist_dir) def assemble_distribution(self): self._copy_in_final_files() self.distribution.save_info(self.dist_dir) @classmethod
MIT License
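A stand-alone sketch of the directory scan behind Bootstrap.all_bootstraps; the bootstraps path is an assumed example rather than a value taken from a real checkout.

from os import listdir
from os.path import join, isdir

def available_bootstraps(bootstraps_dir="pythonforandroid/bootstraps",
                         forbidden=("__pycache__", "common")):
    # Every subdirectory that is not in the forbidden set counts as a bootstrap.
    result = set()
    for name in listdir(bootstraps_dir):
        if name in forbidden:
            continue
        if isdir(join(bootstraps_dir, name)):
            result.add(name)
    return result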
julienr/meshcut
meshcut.py
cross_section_mesh
python
def cross_section_mesh(mesh, plane, dist_tol=1e-8): T = set(range(len(mesh.tris))) P = [] while len(T) > 0: tid = T.pop() intersections = compute_triangle_plane_intersections( mesh, tid, plane, dist_tol) if len(intersections) == 2: for intersection in intersections: p, T = _walk_polyline(tid, intersection, T, mesh, plane, dist_tol) if len(p) > 1: P.append(np.array(p)) return P
Args: mesh: A geom.TriangleMesh instance plane: The cut plane : geom.Plane instance dist_tol: If two points are closer than dist_tol, they are considered the same
https://github.com/julienr/meshcut/blob/226c79d8da52b657d904f783940c258093c929a5/meshcut.py#L240-L265
import numpy as np import numpy.linalg as la try: import scipy.spatial.distance as spdist USE_SCIPY = True except ImportError: USE_SCIPY = False import collections def make_edge(v1, v2): return tuple(sorted((v1, v2))) class TriangleMesh(object): def __init__(self, verts, tris): self.verts = np.array(verts) self.edges_to_tris = collections.defaultdict(lambda: []) self.tris_to_edges = {} self.verts_to_tris = collections.defaultdict(lambda: []) self.tris = tris for tid, f in enumerate(tris): tri_edges = [] for i in range(3): v1 = f[i] v2 = f[(i + 1) % 3] e = make_edge(v1, v2) self.edges_to_tris[e].append(tid) tri_edges.append(e) self.verts_to_tris[f[i]].append(tid) self.tris_to_edges[tid] = tri_edges for e, tris in self.edges_to_tris.items(): assert len(tris) <= 2 def edges_for_triangle(self, tidx): return self.tris_to_edges[tidx] def triangles_for_edge(self, edge): return self.edges_to_tris[edge] def triangles_for_vert(self, vidx): return self.verts_to_tris[vidx] class Plane(object): def __init__(self, orig, normal): self.orig = orig self.n = normal / la.norm(normal) def __str__(self): return 'plane(o=%s, n=%s)' % (self.orig, self.n) def point_to_plane_dist(p, plane): return np.dot((p - plane.orig), plane.n) def triangle_intersects_plane(mesh, tid, plane): dists = [point_to_plane_dist(mesh.verts[vid], plane) for vid in mesh.tris[tid]] side = np.sign(dists) return not (side[0] == side[1] == side[2]) INTERSECT_EDGE = 0 INTERSECT_VERTEX = 1 def compute_triangle_plane_intersections(mesh, tid, plane, dist_tol=1e-8): dists = {vid: point_to_plane_dist(mesh.verts[vid], plane) for vid in mesh.tris[tid]} vert_intersect = {vid: False for vid in dists.keys()} intersections = [] for e in mesh.edges_for_triangle(tid): v1 = mesh.verts[e[0]] d1 = dists[e[0]] v2 = mesh.verts[e[1]] d2 = dists[e[1]] if np.fabs(d1) < dist_tol: if not vert_intersect[e[0]]: intersections.append((INTERSECT_VERTEX, v1, e[0])) vert_intersect[e[0]] = True if np.fabs(d2) < dist_tol: if not vert_intersect[e[1]]: intersections.append((INTERSECT_VERTEX, v2, e[1])) vert_intersect[e[1]] = True if d1 * d2 < 0: if not vert_intersect[e[0]] and not vert_intersect[e[1]]: s = d1 / (d1 - d2) vdir = v2 - v1 ipos = v1 + vdir * s intersections.append((INTERSECT_EDGE, ipos, e)) return intersections def get_next_triangle(mesh, T, plane, intersection, dist_tol): if intersection[0] == INTERSECT_EDGE: tris = mesh.triangles_for_edge(intersection[2]) elif intersection[0] == INTERSECT_VERTEX: tris = mesh.triangles_for_vert(intersection[2]) else: assert False, 'Invalid intersection[0] value : %d' % intersection[0] T = set(T) for tid in tris: if tid in T: intersections = compute_triangle_plane_intersections( mesh, tid, plane, dist_tol) if len(intersections) == 2: T = T.difference(tris) return tid, intersections, T return None, [], T def _walk_polyline(tid, intersect, T, mesh, plane, dist_tol): T = set(T) p = [] while True: p.append(intersect[1]) tid, intersections, T = get_next_triangle(mesh, T, plane, intersect, dist_tol) if tid is None: break assert len(intersections) == 2 if la.norm(intersections[0][1] - p[-1]) < dist_tol: intersect = intersections[1] else: assert la.norm(intersections[1][1] - p[-1]) < dist_tol, '%s not close to %s' % (str(p[-1]), str(intersections)) intersect = intersections[0] return p, T
MIT License
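A hedged usage sketch for cross_section_mesh, assuming only the TriangleMesh, Plane and cross_section_mesh definitions shown above are in scope: cut a unit square made of two triangles with the plane x = 0.5.

import numpy as np

verts = [(0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0)]
tris = [(0, 1, 2), (0, 2, 3)]
mesh = TriangleMesh(verts, tris)
plane = Plane(orig=np.array([0.5, 0.0, 0.0]), normal=np.array([1.0, 0.0, 0.0]))

polylines = cross_section_mesh(mesh, plane)   # list of (N, 3) arrays of cut points
for p in polylines:
    print(p)                                  # every point lies on x == 0.5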
hewlettpackard/python-hponeview
hpOneView/resources/networking/sas_interconnects.py
SasInterconnects.get_all
python
def get_all(self, start=0, count=-1, fields='', filter='', query='', sort='', view=''): return self._client.get_all(start=start, count=count, filter=filter, query=query, sort=sort, view=view, fields=fields)
Get list of SAS interconnects each with port details. Args: start: The first item to return, using 0-based indexing. If not specified, the default is 0 - start with the first available item. count: The number of resources to return. A count of -1 requests all items. The actual number of items in the response may differ from the requested count if the sum of start and count exceeds the total number of items. fields: Specifies which fields should be returned in the result set. filter (list or str): A general filter/query string to narrow the list of items returned. The default is no filter; all resources are returned. query: A general query string to narrow the list of resources returned. The default is no query (all resources are returned). sort: The sort order of the returned data set. By default, the sort order is based on create time, with the oldest entry first. view: Returns a specific subset of the attributes of the resource or collection, by specifying the name of a predefined view. The default view is expand (show all attributes of the resource and all elements of collections of resources). Returns: list: A list of SAS interconnects.
https://github.com/hewlettpackard/python-hponeview/blob/678d53b338f6bc7af7adb63153d7d8d99dc94ac0/hpOneView/resources/networking/sas_interconnects.py#L49-L81
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() from hpOneView.resources.resource import ResourceClient class SasInterconnects(object): URI = '/rest/sas-interconnects' def __init__(self, con): self._client = ResourceClient(con, self.URI)
MIT License
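A hedged usage sketch for SasInterconnects.get_all; the OneViewClient configuration values are placeholders, and the client is assumed to expose a sas_interconnects resource wrapping the class above, as recent SDK versions do.

from hpOneView.oneview_client import OneViewClient

config = {"ip": "oneview.example.com",
          "credentials": {"userName": "administrator", "password": "secret"}}
client = OneViewClient(config)

# First ten SAS interconnects, sorted by name; filter/sort syntax follows the OneView REST API.
interconnects = client.sas_interconnects.get_all(start=0, count=10, sort="name:ascending")
for ic in interconnects:
    print(ic.get("name"), ic.get("state"))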
google/gumbel_sinkhorn
optimizer.py
set_optimizer
python
def set_optimizer(optimizer, lr, opt_eps=1.0, opt_momentum=0.9, rms_decay=0.9, adam_beta1=0.9, adam_beta2=0.999): if optimizer == "sgd": opt = tf.train.GradientDescentOptimizer(lr) elif optimizer == "momentum": opt = tf.train.MomentumOptimizer(lr, opt_momentum) elif optimizer == "adagrad": opt = tf.train.AdagradOptimizer(lr) elif optimizer == "adam": opt = tf.train.AdamOptimizer(lr, beta1=adam_beta1, beta2=adam_beta2, epsilon=opt_eps) elif optimizer == "rmsprop": opt = tf.train.RMSPropOptimizer(lr, rms_decay, opt_momentum, opt_eps) return opt
Sets optimizer optimizer op. Args: optimizer: A string (sgd, momentum, adagrad, adam, rmsprop). lr: learning rate, a float. opt_eps: Optimizer epsilon (for ADAM and RMSprop). opt_momentum: Optimizer momentum. Common for Momentum and RMSProp. rms_decay: RMSProp decay parameter. adam_beta1: beta_1 parameter for ADAM. adam_beta2: beta_2 parameter for ADAM. Returns: opt, the optimizer op.
https://github.com/google/gumbel_sinkhorn/blob/312c73f61a8731960fc1addecef157cd4274a993/optimizer.py#L25-L53
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf
Apache License 2.0
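A hedged usage sketch for set_optimizer, assuming TensorFlow 1.x (matching the tf.train optimizers it wraps) and assuming set_optimizer is in scope; the toy quadratic loss is illustrative.

import tensorflow as tf

w = tf.Variable(1.0)
loss = tf.square(w - 3.0)

opt = set_optimizer("adam", lr=1e-2, adam_beta1=0.9, adam_beta2=0.999, opt_eps=1e-8)
train_op = opt.minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
        sess.run(train_op)
    print(sess.run(w))   # moves toward 3.0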
chartbeat/elk-scripts
elasticsearch/rolling_restart.py
cluster_status
python
def cluster_status(port): req = requests.get('http://{0}:{1}/_cluster/health'.format(env.host, port)) status = req.json()['status'] print 'Cluster status: ', if status == 'green': print green(status, bold=True) elif status == 'yellow': print yellow(status, bold=True) elif status == 'red': print red(status, bold=True) return status
Returns the status of the ES cluster
https://github.com/chartbeat/elk-scripts/blob/8e9ab0432186aeca9ba6f78b65e00525366ebf6a/elasticsearch/rolling_restart.py#L40-L53
import argparse import requests from requests.exceptions import ConnectionError from time import sleep, time from fabric.api import sudo, env from fabric.decorators import task from fabric.colors import red, yellow, green from fabric.tasks import execute from fabric.utils import abort from fabric.contrib.console import confirm def setup_fabric(hosts): assert(type(hosts) == list) env.disable_known_hosts = True env.hosts = hosts env.parallel = False print 'Performing rolling restart on {0}'.format(hosts) def data_node_count(port): try: req = requests.get( 'http://{0}:{1}/_cluster/health'.format(env.host, port)) except ConnectionError: print red("Couldn't reach {0} to get data node count".format(env.host)) return -1 return req.json()['number_of_data_nodes']
Apache License 2.0
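A hedged sketch of driving cluster_status with Fabric 1.x, as the imports above suggest; the host names, port and helper function are illustrative, and cluster_status itself is assumed to be in scope.

from fabric.api import env
from fabric.tasks import execute

env.disable_known_hosts = True

def check_cluster(hosts, port=9200):
    # execute() sets env.host for each host in turn before calling the task,
    # and returns a dict mapping host -> return value of cluster_status.
    results = execute(cluster_status, port, hosts=hosts)
    return all(status == 'green' for status in results.values())

# check_cluster(['es-node-1.example.com', 'es-node-2.example.com'])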
mediacloud/date_guesser
date_guesser/guess_date.py
guess_date
python
def guess_date(url, html): return DateGuesser().guess_date(url, html)
Guess the date of publication of a webpage. Attributes ---------- url : str url used to retrieve the webpage html : str raw html of the webpage Returns ------- date_guesser.constants.Guess object.
https://github.com/mediacloud/date_guesser/blob/db78c5654fdf8e2a7f6f0cc4faf558b8b96bea90/date_guesser/guess_date.py#L10-L24
import arrow from bs4 import BeautifulSoup from .constants import Accuracy, LOCALE, NO_METHOD, Guess from .dates import MultiDateParser from .html import get_tag_checkers, get_image_url_checker from .urls import parse_url_for_date, filter_url_for_undateable
MIT License
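A hedged usage sketch for guess_date; the URL and HTML are made-up, the import path is inferred from the package name, and the Guess fields (date, accuracy, method) are assumed from the constants imported above.

from date_guesser import guess_date

url = 'https://www.example.com/news/2017/10/13/some-article.html'
html = ('<html><head><meta property="article:published_time" '
        'content="2017-10-13T04:56:54-04:00"></head></html>')

guess = guess_date(url, html)
print(guess.date)       # the guessed publication date, or None if nothing was found
print(guess.accuracy)   # how precise the guess is
print(guess.method)     # which signal (URL, metadata, ...) produced the guess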
melchisalins/pyindependentreserve
independentreserve/public.py
PublicMethods.get_valid_secondary_currency_codes
python
def get_valid_secondary_currency_codes(): response = requests.get( PublicMethods.api_url + "/Public/GetValidSecondaryCurrencyCodes" ) return response
Returns a list of valid secondary currency codes. These are the fiat currencies which are supported by Independent Reserve for trading purposes. :return: list ["Usd","Aud", "Nzd"]
https://github.com/melchisalins/pyindependentreserve/blob/2a33746924a784ed767622af0924f3402774d0c6/independentreserve/public.py#L43-L55
import requests from .exceptions import http_exception_handler class PublicMethods(object): """ Independent Reserve API Url. Can override this for testing purposes. """ api_url = "https://api.independentreserve.com" def __init__(self, api_url="https://api.independentreserve.com"): PublicMethods.api_url = api_url pass @staticmethod @http_exception_handler def get_valid_primary_currency_codes(): response = requests.get( PublicMethods.api_url + "/Public/GetValidPrimaryCurrencyCodes" ) return response @staticmethod @http_exception_handler
MIT License
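A hedged usage sketch for get_valid_secondary_currency_codes, assuming the PublicMethods class above is in scope and that the method still hands back a requests.Response (as its body suggests), so .json() is called on the result.

response = PublicMethods.get_valid_secondary_currency_codes()
codes = response.json()    # e.g. ["Usd", "Aud", "Nzd"]
print(codes)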
ithinksw/philo
philo/contrib/winer/models.py
FeedView.page_view
python
def page_view(self, get_items_attr, page_attr): get_items = get_items_attr if callable(get_items_attr) else getattr(self, get_items_attr) def inner(request, extra_context=None, *args, **kwargs): obj = self.get_object(request, *args, **kwargs) items, extra_context = get_items(obj, request, extra_context=extra_context, *args, **kwargs) items, item_context = self.process_page_items(request, items) context = self.get_context() context.update(extra_context or {}) context.update(item_context or {}) page = page_attr if isinstance(page_attr, Page) else getattr(self, page_attr) return page.render_to_response(request, extra_context=context) return inner
:param get_items_attr: A callable or the name of a callable on the :class:`FeedView` that will return a (items, extra_context) tuple when called with view arguments. :param page_attr: A :class:`.Page` instance or the name of an attribute on the :class:`FeedView` which contains a :class:`.Page` instance. This will be rendered with the items from ``get_items_attr``. :returns: A view function that renders a list of items as an :class:`HttpResponse`.
https://github.com/ithinksw/philo/blob/8a772dd4761e3a4b926358d6ebf87c9fc7033ba5/philo/contrib/winer/models.py#L129-L150
from django.conf import settings from django.conf.urls.defaults import url, patterns, include from django.contrib.sites.models import Site, RequestSite from django.contrib.syndication.views import add_domain from django.db import models from django.http import HttpResponse from django.template import RequestContext, Template as DjangoTemplate from django.utils import feedgenerator, tzinfo from django.utils.encoding import smart_unicode, force_unicode from django.utils.html import escape from philo.contrib.winer.exceptions import HttpNotAcceptable from philo.contrib.winer.feeds import registry, DEFAULT_FEED from philo.contrib.winer.middleware import http_not_acceptable from philo.models import Page, Template, MultiView try: import mimeparse except: mimeparse = None class FeedView(MultiView): feed_type = models.CharField(max_length=50, choices=registry.choices, default=registry.get_slug(DEFAULT_FEED)) feed_suffix = models.CharField(max_length=255, blank=False, default="feed") feeds_enabled = models.BooleanField(default=True) feed_length = models.PositiveIntegerField(blank=True, null=True, default=15, help_text="The maximum number of items to return for this feed. All items will be returned if this field is blank.") item_title_template = models.ForeignKey(Template, blank=True, null=True, related_name="%(app_label)s_%(class)s_title_related") item_description_template = models.ForeignKey(Template, blank=True, null=True, related_name="%(app_label)s_%(class)s_description_related") item_context_var = 'items' object_attr = 'object' description = "" def feed_patterns(self, base, get_items_attr, page_attr, reverse_name): feed_patterns = () if self.feeds_enabled: suffixes = [(self.feed_suffix, None)] + [(slug, slug) for slug in registry] for suffix, feed_type in suffixes: feed_view = http_not_acceptable(self.feed_view(get_items_attr, reverse_name, feed_type)) feed_pattern = r'%s%s%s$' % (base, "/" if base and base[-1] != "^" else "", suffix) feed_patterns += (url(feed_pattern, feed_view, name="%s_%s" % (reverse_name, suffix)),) feed_patterns += (url(r"%s$" % base, self.page_view(get_items_attr, page_attr), name=reverse_name),) return patterns('', *feed_patterns) def get_object(self, request, **kwargs): return getattr(self, self.object_attr) def feed_view(self, get_items_attr, reverse_name, feed_type=None): get_items = get_items_attr if callable(get_items_attr) else getattr(self, get_items_attr) def inner(request, extra_context=None, *args, **kwargs): obj = self.get_object(request, *args, **kwargs) feed = self.get_feed(obj, request, reverse_name, feed_type, *args, **kwargs) items, xxx = get_items(obj, request, extra_context=extra_context, *args, **kwargs) self.populate_feed(feed, items, request) response = HttpResponse(mimetype=feed.mime_type) feed.write(response, 'utf-8') return response return inner
ISC License
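page_view above is a view factory: it resolves get_items_attr and page_attr either as objects/callables or as attribute names on the FeedView, then returns a closure that renders a Page. Below is a framework-free sketch of that same resolution pattern; none of the names are philo APIs.

# Illustrative only: a plain-Python analogue of the attribute-or-object resolution in page_view.
class ViewFactory:
    def __init__(self, items_source, renderer):
        self.items_source = items_source      # a callable, or the name of a method on self
        self.renderer = renderer              # an object with .render(), or the name of an attribute

    def page_view(self):
        get_items = self.items_source if callable(self.items_source) else getattr(self, self.items_source)

        def inner(request):
            items = get_items(request)
            renderer = self.renderer if hasattr(self.renderer, "render") else getattr(self, self.renderer)
            return renderer.render(items)

        return inner   # the returned closure is what gets wired into the URL patterns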
cyq373/ssd-gan
ssd_gan.py
SSD_Generator.generate_images
python
def generate_images(self, num_images, device=None): if device is None: device = self.device noise = torch.randn((num_images, self.nz), device=device) fake_images = self.forward(noise) return fake_images
r""" Generates num_images randomly. Args: num_images (int): Number of images to generate device (torch.device): Device to send images to. Returns: Tensor: A batch of generated images.
https://github.com/cyq373/ssd-gan/blob/9dc956fd79cc2b21492fcc9bf1e4cdc5b276bdaf/ssd_gan.py#L29-L47
import torch from torch_mimicry.nets.basemodel import basemodel from torch_mimicry.modules import losses import numpy as np class SSD_Generator(basemodel.BaseModel): def __init__(self, nz, ngf, bottom_width, loss_type, **kwargs): super().__init__(**kwargs) self.nz = nz self.ngf = ngf self.bottom_width = bottom_width self.loss_type = loss_type
MIT License
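A hedged sketch of what generate_images does: sample a batch of latent vectors and run the generator's forward pass. SSD_Generator itself leaves forward() to concrete subclasses, so the generator call is shown only as a commented placeholder.

import torch

num_images, nz = 16, 128
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

noise = torch.randn((num_images, nz), device=device)   # same sampling used by generate_images
# fake_images = my_generator(noise)                    # forward pass of a concrete generator subclass
print(noise.shape)                                     # torch.Size([16, 128])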
pansila/auto-test-system
webserver/task_runner/patch/patch.py
PatchSet.apply
python
def apply(self, strip=0, root=None): if root: prevdir = os.getcwd() os.chdir(root) total = len(self.items) errors = 0 if strip: try: strip = int(strip) except ValueError: errors += 1 warning("error: strip parameter '%s' must be an integer" % strip) strip = 0 for i,p in enumerate(self.items): if strip: debug("stripping %s leading component(s) from:" % strip) debug(" %s" % p.source) debug(" %s" % p.target) old = pathstrip(p.source, strip) new = pathstrip(p.target, strip) else: old, new = p.source, p.target old, new = self.pathmodify(old, new) filename = self.findfile(old, new) if not filename: if len(p.hunks)==1 and p.hunks[0].startsrc==0 and p.hunks[0].linessrc==0: filename=new debug("patch creates new file %s " % filename) filenameDir=os.path.join(os.path.dirname(filename), b".") if not exists(filenameDir): os.makedirs(filenameDir) open(filename, "a").close() else: warning("source/target file does not exist:\n --- %s\n +++ %s" % (old, new)) errors += 1 continue if not isfile(filename): warning("not a file - %s" % filename) errors += 1 continue debug("processing %d/%d:\t %s" % (i+1, total, filename)) f2fp = open(filename, 'rb') hunkno = 0 hunk = p.hunks[hunkno] hunkfind = [] hunkreplace = [] validhunks = 0 canpatch = False for lineno, line in enumerate(f2fp): if lineno+1 < hunk.startsrc: continue elif lineno+1 == hunk.startsrc: hunkfind = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" -"] hunkreplace = [x[1:].rstrip(b"\r\n") for x in hunk.text if x[0] in b" +"] hunklineno = 0 if lineno+1 < hunk.startsrc+len(hunkfind)-1: if line.rstrip(b"\r\n") == hunkfind[hunklineno]: hunklineno+=1 else: info("file %d/%d:\t %s" % (i+1, total, filename)) info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno+1)) info(" expected: %s" % hunkfind[hunklineno]) info(" actual : %s" % line.rstrip(b"\r\n")) hunkno += 1 if hunkno < len(p.hunks): hunk = p.hunks[hunkno] continue else: break if lineno+1 == hunk.startsrc+len(hunkfind)-1: debug(" hunk no.%d for file %s -- is ready to be patched" % (hunkno+1, filename)) hunkno+=1 validhunks+=1 if hunkno < len(p.hunks): hunk = p.hunks[hunkno] else: if validhunks == len(p.hunks): canpatch = True break else: if hunk.startsrc==0 and hunk.linessrc==0 and len(p.hunks)==1: debug(" patch represents a new file") validhunks=1 canpatch=True elif hunkno < len(p.hunks): warning("premature end of source file %s at hunk %d" % (filename, hunkno+1)) errors += 1 f2fp.close() if validhunks < len(p.hunks): if self._match_file_hunks(filename, p.hunks): warning("already patched %s" % filename) else: warning("source file is different - %s" % filename) errors += 1 if canpatch: backupname = filename+b".orig" if exists(backupname): warning("can't backup original file to %s - aborting" % backupname) else: import shutil shutil.move(filename, backupname) if self.write_hunks(backupname, filename, p.hunks): info("successfully patched %d/%d:\t %s" % (i+1, total, filename)) os.unlink(backupname) else: errors += 1 warning("error patching file %s" % filename) shutil.copy(filename, filename+".invalid") warning("invalid version is saved to %s" % filename+".invalid") shutil.move(backupname, filename) if root: os.chdir(prevdir) return (errors == 0)
Apply the parsed patch, optionally stripping `strip` leading components from file paths. The `root` parameter specifies the working directory. Returns True on success.
https://github.com/pansila/auto-test-system/blob/bfe51a277466939a32daa08f27a89cf3c1900def/webserver/task_runner/patch/patch.py#L828-L980
from __future__ import print_function __author__ = "anatoly techtonik <techtonik@gmail.com>" __version__ = "1.16" __license__ = "MIT" __url__ = "https://github.com/techtonik/python-patch" import copy import logging import re try: from StringIO import StringIO except ImportError: from io import BytesIO as StringIO try: import urllib2 as urllib_request except ImportError: import urllib.request as urllib_request from os.path import exists, isfile, abspath import os import posixpath import shutil import sys PY3K = sys.version_info >= (3, 0) if not PY3K: compat_next = lambda gen: gen.next() else: compat_next = lambda gen: gen.__next__() def tostr(b): if not PY3K: return b return b.decode('utf-8') logger = logging.getLogger(__name__) debug = logger.debug info = logger.info warning = logger.warning class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None streamhandler = logging.StreamHandler() logger.addHandler(NullHandler()) debugmode = False def setdebug(): global debugmode, streamhandler debugmode = True loglevel = logging.DEBUG logformat = "%(levelname)8s %(message)s" logger.setLevel(loglevel) if streamhandler not in logger.handlers: logger.addHandler(streamhandler) streamhandler.setFormatter(logging.Formatter(logformat)) DIFF = PLAIN = "plain" GIT = "git" HG = MERCURIAL = "mercurial" SVN = SUBVERSION = "svn" MIXED = MIXED = "mixed" def xisabs(filename): if filename.startswith(b'/'): return True elif filename.startswith(b'\\'): return True elif re.match(b'\\w:[\\\\/]', filename): return True return False def xnormpath(path): normalized = posixpath.normpath(path).replace(b'\\', b'/') return posixpath.normpath(normalized) def xstrip(filename): while xisabs(filename): if re.match(b'\\w:[\\\\/]', filename): filename = re.sub(b'^\\w+:[\\\\/]+', b'', filename) elif re.match(b'[\\\\/]', filename): filename = re.sub(b'^[\\\\/]+', b'', filename) return filename def fromfile(filename): patchset = PatchSet() debug("reading %s" % filename) fp = open(filename, "rb") res = patchset.parse(fp) fp.close() if res == True: return patchset return False def fromstring(s): ps = PatchSet( StringIO(s) ) if ps.errors == 0: return ps return False def fromurl(url): ps = PatchSet( urllib_request.urlopen(url) ) if ps.errors == 0: return ps return False def pathstrip(path, n): pathlist = [path] while os.path.dirname(pathlist[0]) != b'': pathlist[0:1] = os.path.split(pathlist[0]) return b'/'.join(pathlist[n:]) class Hunk(object): def __init__(self): self.startsrc=None self.linessrc=None self.starttgt=None self.linestgt=None self.invalid=False self.desc='' self.text=[] class Patch(object): def __init__(self): self.source = None self.target = None self.hunks = [] self.hunkends = [] self.header = [] self.type = None def __iter__(self): for h in self.hunks: yield h class PatchSet(object): def __init__(self, stream=None): self.name = None self.type = None self.items = [] self.errors = 0 self.warnings = 0 if stream: self.parse(stream) def __len__(self): return len(self.items) def __iter__(self): for i in self.items: yield i def parse(self, stream): lineends = dict(lf=0, crlf=0, cr=0) nexthunkno = 0 p = None hunk = None hunkactual = dict(linessrc=None, linestgt=None) class wrapumerate(enumerate): def __init__(self, *args, **kwargs): self._exhausted = False self._lineno = False self._line = False def next(self): if self._exhausted: return False try: self._lineno, self._line = compat_next(super(wrapumerate, self)) except StopIteration: self._exhausted = 
True self._line = False return False return True @property def is_empty(self): return self._exhausted @property def line(self): return self._line @property def lineno(self): return self._lineno headscan = True filenames = False hunkhead = False hunkbody = False hunkskip = False hunkparsed = False re_hunk_start = re.compile(b"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@") self.errors = 0 header = [] srcname = None tgtname = None fe = wrapumerate(stream) while fe.next(): if hunkparsed: hunkparsed = False if re_hunk_start.match(fe.line): hunkhead = True elif fe.line.startswith(b"--- "): filenames = True else: headscan = True if headscan: while not fe.is_empty and not fe.line.startswith(b"--- "): header.append(fe.line) fe.next() if fe.is_empty: if p == None: debug("no patch data found") self.errors += 1 else: info("%d unparsed bytes left at the end of stream" % len(b''.join(header))) self.warnings += 1 continue headscan = False filenames = True line = fe.line lineno = fe.lineno if hunkbody: if line.strip(b"\r\n") == b"": debug("expanding empty line in a middle of hunk body") self.warnings += 1 line = b' ' + line if re.match(b"^[- \\+\\\\]", line): if line.endswith(b"\r\n"): p.hunkends["crlf"] += 1 elif line.endswith(b"\n"): p.hunkends["lf"] += 1 elif line.endswith(b"\r"): p.hunkends["cr"] += 1 if line.startswith(b"-"): hunkactual["linessrc"] += 1 elif line.startswith(b"+"): hunkactual["linestgt"] += 1 elif not line.startswith(b"\\"): hunkactual["linessrc"] += 1 hunkactual["linestgt"] += 1 hunk.text.append(line) else: warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target)) hunk.invalid = True p.hunks.append(hunk) self.errors += 1 hunkbody = False hunkskip = True if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt: warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target)) hunk.invalid = True p.hunks.append(hunk) self.errors += 1 hunkbody = False hunkskip = True elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]: p.hunks.append(hunk) hunkbody = False hunkparsed = True ends = p.hunkends if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1: warning("inconsistent line ends in patch hunks for %s" % p.source) self.warnings += 1 if debugmode: debuglines = dict(ends) debuglines.update(file=p.target, hunk=nexthunkno) debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines) continue if hunkskip: if re_hunk_start.match(line): hunkskip = False hunkhead = True elif line.startswith(b"--- "): hunkskip = False filenames = True if debugmode and len(self.items) > 0: debug("- %2d hunks for %s" % (len(p.hunks), p.source)) if filenames: if line.startswith(b"--- "): if srcname != None: warning("skipping false patch for %s" % srcname) srcname = None re_filename = b"^--- ([^\t]+)" match = re.match(re_filename, line) if match: srcname = match.group(1).strip() else: warning("skipping invalid filename at line %d" % (lineno+1)) self.errors += 1 filenames = False headscan = True elif not line.startswith(b"+++ "): if srcname != None: warning("skipping invalid patch with no target for %s" % srcname) self.errors += 1 srcname = None else: warning("skipping invalid target patch") filenames = False headscan = True else: if tgtname != None: warning("skipping invalid patch - double target at line %d" % (lineno+1)) self.errors += 1 srcname = None tgtname = None filenames = False headscan = True else: re_filename = b"^\+\+\+ ([^\t]+)" match = 
re.match(re_filename, line) if not match: warning("skipping invalid patch - no target filename at line %d" % (lineno+1)) self.errors += 1 srcname = None filenames = False headscan = True else: if p: self.items.append(p) p = Patch() p.source = srcname srcname = None p.target = match.group(1).strip() p.header = header header = [] filenames = False hunkhead = True nexthunkno = 0 p.hunkends = lineends.copy() continue if hunkhead: match = re.match(b"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@(.*)", line) if not match: if not p.hunks: warning("skipping invalid patch with no hunks for file %s" % p.source) self.errors += 1 hunkhead = False headscan = True continue else: hunkhead = False headscan = True else: hunk = Hunk() hunk.startsrc = int(match.group(1)) hunk.linessrc = 1 if match.group(3): hunk.linessrc = int(match.group(3)) hunk.starttgt = int(match.group(4)) hunk.linestgt = 1 if match.group(6): hunk.linestgt = int(match.group(6)) hunk.invalid = False hunk.desc = match.group(7)[1:].rstrip() hunk.text = [] hunkactual["linessrc"] = hunkactual["linestgt"] = 0 hunkhead = False hunkbody = True nexthunkno += 1 continue if p: self.items.append(p) if not hunkparsed: if hunkskip: warning("warning: finished with errors, some hunks may be invalid") elif headscan: if len(self.items) == 0: warning("error: no patch data found!") return False else: pass else: warning("error: patch stream is incomplete!") self.errors += 1 if len(self.items) == 0: return False if debugmode and len(self.items) > 0: debug("- %2d hunks for %s" % (len(p.hunks), p.source)) debug("total files: %d total hunks: %d" % (len(self.items), sum(len(p.hunks) for p in self.items))) for idx, p in enumerate(self.items): self.items[idx].type = self._detect_type(p) types = set([p.type for p in self.items]) if len(types) > 1: self.type = MIXED else: self.type = types.pop() self._normalize_filenames() return (self.errors == 0) def _detect_type(self, p): if (len(p.header) > 1 and p.header[-2].startswith(b"Index: ") and p.header[-1].startswith(b"="*67)): return SVN DVCS = ((p.source.startswith(b'a/') or p.source == b'/dev/null') and (p.target.startswith(b'b/') or p.target == b'/dev/null')) if len(p.header) > 1: for idx in reversed(range(len(p.header))): if p.header[idx].startswith(b"diff --git"): break if p.header[idx].startswith(b'diff --git a/'): if (idx+1 < len(p.header) and re.match(b'index \\w{7}..\\w{7} \\d{6}', p.header[idx+1])): if DVCS: return GIT if len(p.header) > 0: if DVCS and re.match(b'diff -r \\w{12} .*', p.header[-1]): return HG if DVCS and p.header[-1].startswith(b'diff --git a/'): if len(p.header) == 1: return HG elif p.header[0].startswith(b'# HG changeset patch'): return HG return PLAIN def _normalize_filenames(self): if debugmode: debug("normalize filenames") for i,p in enumerate(self.items): if debugmode: debug(" patch type = " + p.type) debug(" source = " + p.source) debug(" target = " + p.target) if p.type in (HG, GIT): debug("stripping a/ and b/ prefixes") if p.source != '/dev/null': if not p.source.startswith(b"a/"): warning("invalid source filename") else: p.source = p.source[2:] if p.target != '/dev/null': if not p.target.startswith(b"b/"): warning("invalid target filename") else: p.target = p.target[2:] p.source = xnormpath(p.source) p.target = xnormpath(p.target) sep = b'/' if p.source.startswith(b".." + sep): warning("error: stripping parent path for source file patch no.%d" % (i+1)) self.warnings += 1 while p.source.startswith(b".." + sep): p.source = p.source.partition(sep)[2] if p.target.startswith(b".." 
+ sep): warning("error: stripping parent path for target file patch no.%d" % (i+1)) self.warnings += 1 while p.target.startswith(b".." + sep): p.target = p.target.partition(sep)[2] if xisabs(p.source) or xisabs(p.target): warning("error: absolute paths are not allowed - file no.%d" % (i+1)) self.warnings += 1 if xisabs(p.source): warning("stripping absolute path from source name '%s'" % p.source) p.source = xstrip(p.source) if xisabs(p.target): warning("stripping absolute path from target name '%s'" % p.target) p.target = xstrip(p.target) self.items[i].source = p.source self.items[i].target = p.target def diffstat(self): names = [] insert = [] delete = [] delta = 0 namelen = 0 maxdiff = 0 for patch in self.items: i,d = 0,0 for hunk in patch.hunks: for line in hunk.text: if line.startswith(b'+'): i += 1 delta += len(line)-1 elif line.startswith(b'-'): d += 1 delta -= len(line)-1 names.append(patch.target) insert.append(i) delete.append(d) namelen = max(namelen, len(patch.target)) maxdiff = max(maxdiff, i+d) output = '' statlen = len(str(maxdiff)) for i,n in enumerate(names): format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n" hist = '' width = len(format % ('', '', '')) histwidth = max(2, 80 - width) if maxdiff < histwidth: hist = "+"*insert[i] + "-"*delete[i] else: iratio = (float(insert[i]) / maxdiff) * histwidth dratio = (float(delete[i]) / maxdiff) * histwidth iwidth = 1 if 0 < iratio < 1 else int(iratio) dwidth = 1 if 0 < dratio < 1 else int(dratio) hist = "+"*int(iwidth) + "-"*int(dwidth) output += (format % (tostr(names[i]), str(insert[i] + delete[i]), hist)) output += (" %d files changed, %d insertions(+), %d deletions(-), %+d bytes" % (len(names), sum(insert), sum(delete), delta)) return output def pathmodify(self, old ,new): if old[0] == '"' and old[-1] == '"' and new[0] == '"' and new[-1] == '"': old = old[1:-1] new = new[1:-1] debug("broken patch from Google Code, stripping prefixes..") if old.startswith(b'a/') and new.startswith(b'b/'): old, new = old[2:], new[2:] debug(" %s" % old) debug(" %s" % new) return old, new def findfile(self, old, new): if exists(old): return old elif exists(new): return new else: return None
MIT License
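A hedged usage sketch for PatchSet.apply via the module-level fromfile() shown above: parse a unified diff and apply it with one leading path component stripped (the equivalent of patch -p1). The file and directory names are placeholders.

import logging
logging.basicConfig(level=logging.INFO)   # the module reports progress through the logging module

patchset = fromfile("fix-build.patch")    # returns a PatchSet, or False on parse errors
if patchset and patchset.apply(strip=1, root="path/to/project"):
    print("patched cleanly")
else:
    print("patch failed; see warnings above")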
jsignell/dask-geopandas
dask_geopandas/hilbert_distance.py
_transpose_to_hilbert_integer
python
def _transpose_to_hilbert_integer(p, coord): n = len(coord) bins = [_int_2_binary(v, p) for v in coord] concat = np.zeros(n * p, dtype=np.uint8) for i in range(p): for j in range(n): concat[n * i + j] = bins[j][i] h = _binary_2_int(concat) return h
Calculate the Hilbert distance for a single coordinate. Parameters ---------- p : The number of iterations used in constructing the Hilbert curve coord : Array of coordinates Returns ------- int The Hilbert distance (an integer) for the given coordinate
https://github.com/jsignell/dask-geopandas/blob/7b78ed518c61969d0841087bd1eee5318b690b20/dask_geopandas/hilbert_distance.py#L182-L206
import numpy as np import pandas as pd from numba import jit ngjit = jit(nopython=True, nogil=True) def _hilbert_distance(gdf, total_bounds, p): bounds = gdf.bounds.to_numpy() coords = _continuous_to_discrete_coords(total_bounds, bounds, p) distances = _distances_from_coordinates(p, coords) return pd.Series(distances, index=gdf.index, name="hilbert_distance") @ngjit def _continuous_to_discrete_coords(total_bounds, bounds, p): side_length = 2 ** p xmin, ymin, xmax, ymax = total_bounds x_mids = (bounds[:, 0] + bounds[:, 2]) / 2.0 y_mids = (bounds[:, 1] + bounds[:, 3]) / 2.0 x_int = _continuous_to_discrete(x_mids, (xmin, xmax), side_length) y_int = _continuous_to_discrete(y_mids, (ymin, ymax), side_length) coords = np.stack((x_int, y_int), axis=1) return coords @ngjit def _continuous_to_discrete(vals, val_range, n): width = val_range[1] - val_range[0] res = ((vals - val_range[0]) * (n / width)).astype(np.int64) res[res < 0] = 0 res[res > n - 1] = n - 1 return res @ngjit def _distances_from_coordinates(p, coords): result = np.zeros(coords.shape[0], dtype=np.int64) for i in range(coords.shape[0]): coord = coords[i, :] result[i] = _distance_from_coordinate(p, coord) return result @ngjit def _distance_from_coordinate(p, coord): n = len(coord) M = 1 << (p - 1) Q = M while Q > 1: P = Q - 1 for i in range(n): if coord[i] & Q: coord[0] ^= P else: t = (coord[0] ^ coord[i]) & P coord[0] ^= t coord[i] ^= t Q >>= 1 for i in range(1, n): coord[i] ^= coord[i - 1] t = 0 Q = M while Q > 1: if coord[n - 1] & Q: t ^= Q - 1 Q >>= 1 for i in range(n): coord[i] ^= t h = _transpose_to_hilbert_integer(p, coord) return h @ngjit
BSD 3-Clause New or Revised License
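A plain-Python illustration (no numba) of the bit interleaving performed by _transpose_to_hilbert_integer: take bit i of every coordinate, most significant bit first, and concatenate the bits into one integer. The helper below is an assumption standing in for the module's _int_2_binary / _binary_2_int pair, which are assumed to work MSB-first.

def interleave_bits(p, coord):
    # p-bit representation of each coordinate, interleaved one bit position at a time.
    n = len(coord)
    bits = []
    for i in range(p):                      # bit position, most significant first
        for j in range(n):                  # each coordinate contributes one bit per position
            bits.append((coord[j] >> (p - 1 - i)) & 1)
    h = 0
    for b in bits:
        h = (h << 1) | b
    return h

print(interleave_bits(3, [3, 5]))   # 011 and 101 interleave to 0b011011 == 27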
jramapuram/byol
main.py
build_loader_model_grapher
python
def build_loader_model_grapher(args): train_transform, test_transform = build_train_and_test_transforms() loader_dict = {'train_transform': train_transform, 'test_transform': test_transform, **vars(args)} loader = get_loader(**loader_dict) args.input_shape = loader.input_shape args.num_train_samples = loader.num_train_samples // args.num_replicas args.num_test_samples = loader.num_test_samples args.num_valid_samples = loader.num_valid_samples // args.num_replicas args.steps_per_train_epoch = args.num_train_samples // args.batch_size args.total_train_steps = args.epochs * args.steps_per_train_epoch network = BYOL(base_network_output_size=args.representation_size, projection_output_size=args.projection_size, classifier_output_size=loader.output_size, total_training_steps=args.total_train_steps, base_decay=args.base_decay) network = nn.SyncBatchNorm.convert_sync_batchnorm(network) if args.convert_to_sync_bn else network network = network.cuda() if args.cuda else network lazy_generate_modules(network, loader.train_loader) network = layers.init_weights(network, init=args.weight_initialization) if args.num_replicas > 1: print("wrapping model with DDP...") network = layers.DistributedDataParallelPassthrough(network, device_ids=[0], output_device=0, find_unused_parameters=True) print(network) print("model has {} million parameters.".format( utils.number_of_parameters(network) / 1e6 )) grapher = None if args.visdom_url is not None and args.distributed_rank == 0: grapher = Grapher('visdom', env=utils.get_name(args), server=args.visdom_url, port=args.visdom_port, log_folder=args.log_dir) elif args.distributed_rank == 0: grapher = Grapher( 'tensorboard', logdir=os.path.join(args.log_dir, utils.get_name(args))) return loader, network, grapher
Builds a model, a dataloader and a grapher. :param args: argparse namespace holding the run configuration :returns: a dataloader, a model and a grapher :rtype: tuple
https://github.com/jramapuram/byol/blob/5ea487e7304df0e0983e8fc6c01bf74a4ce3ce36/main.py#L403-L462
import os import time import tree import argparse import functools import pprint import torch import torchvision import torch.nn as nn import torch.optim as optim import torch.nn.functional as F import torch.multiprocessing as mp import numpy as np from torchvision import transforms import torchvision.models as models from objective import loss_function import helpers.metrics as metrics import helpers.layers as layers import helpers.utils as utils import optimizers.scheduler as scheduler from datasets.loader import get_loader from helpers.grapher import Grapher from optimizers.lars import LARS model_names = sorted(name for name in models.__dict__ if name.islower() and not name.startswith("__") and callable(models.__dict__[name])) parser = argparse.ArgumentParser(description='BYOL Pytorch') parser.add_argument('--task', type=str, default="multi_augment_image_folder", help="""task to work on (default: multi_augment_image_folder).""") parser.add_argument('--batch-size', type=int, default=4096, metavar='N', help='input batch size for training (default: 4096)') parser.add_argument('--epochs', type=int, default=3000, metavar='N', help='minimum number of epochs to train (default: 3000)') parser.add_argument('--download', type=int, default=1, help='download simple datasets like MNIST/CIFAR10 (default: 1)') parser.add_argument('--image-size-override', type=int, default=224, help='Override and force resizing of images to this specific size (default: None)') parser.add_argument('--data-dir', type=str, default='./.datasets', metavar='DD', help='directory which contains input data') parser.add_argument('--log-dir', type=str, default='./runs', help='directory to store logs to (default: ./runs)') parser.add_argument('--uid', type=str, default="", help='uid for current session (default: empty-str)') parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet50', choices=model_names, help='model architecture: ' + ' | '.join(model_names) + ' (default: resnet18)') parser.add_argument('--representation-size', type=int, default=2048, help='size of the representation (eg: final AvgPool for resnet) (default: 2048)') parser.add_argument('--projection-size', type=int, default=256, help='output size for projection head (default: 256)') parser.add_argument('--head-latent-size', type=int, default=4096, help='size for hidden layer for the MLP projection head (default: 4096)') parser.add_argument('--base-decay', type=float, default=0.996, help='decay for target network (default: 0.996)') parser.add_argument('--weight-initialization', type=str, default=None, help='weight initialization type; None uses default pytorch init. 
(default: None)') parser.add_argument('--model-dir', type=str, default='.models', help='directory which contains saved models (default: .models)') parser.add_argument('--color-jitter-strength', type=float, default=1.0, help='scalar weighting for the color jitter (default: 1.0)') parser.add_argument('--weight-decay', type=float, default=1e-6, help='weight decay (default: 1.5e-6)') parser.add_argument('--polyak-ema', type=float, default=0, help='Polyak weight averaging co-ef (default: 0)') parser.add_argument('--convert-to-sync-bn', action='store_true', default=False, help='converts all BNs to SyncBNs (default: True)') parser.add_argument('--clip', type=float, default=0, help='gradient clipping value (default: 0)') parser.add_argument('--lr', type=float, default=0.2, metavar='LR', help='learning rate (default: 0.2)') parser.add_argument('--lr-update-schedule', type=str, default='cosine', help='learning rate schedule fixed/step/cosine (default: cosine)') parser.add_argument('--warmup', type=int, default=10, help='warmup epochs (default: 10)') parser.add_argument('--optimizer', type=str, default="lars_momentum", help="specify optimizer (default: lars_momentum)") parser.add_argument('--early-stop', action='store_true', default=False, help='enable early stopping (default: False)') parser.add_argument('--visdom-url', type=str, default=None, help='visdom URL for graphs, needs http://url (default: None)') parser.add_argument('--visdom-port', type=int, default=None, help='visdom port for graphs (default: None)') parser.add_argument('--num-replicas', type=int, default=8, help='number of compute devices available; 1 means just local (default: 8)') parser.add_argument('--workers-per-replica', type=int, default=2, help='threads per replica for the data loader (default: 2)') parser.add_argument('--distributed-master', type=str, default=None, help='hostname or IP to use for distributed master (default: None)') parser.add_argument('--distributed-rank', type=int, default=0, help='rank of the current replica in the world (default: None)') parser.add_argument('--distributed-port', type=int, default=29300, help='port to use for distributed framework (default: 29300)') parser.add_argument('--debug-step', action='store_true', default=False, help='only does one step of the execute_graph function per call instead of all minibatches') parser.add_argument('--seed', type=int, default=None, help='seed for numpy and pytorch (default: None)') parser.add_argument('--no-cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--half', action='store_true', default=False, help='enables half precision training') args = parser.parse_args() if args.half: from apex.fp16_utils import * from apex import amp, optimizers aws_instance_id = utils.get_aws_instance_id() if aws_instance_id is not None: args.instance_id = aws_instance_id class CosEMA(nn.Module): def __init__(self, total_steps, base_decay=0.996): super(CosEMA, self).__init__() self.step = 0 self.total_steps = total_steps self.base_decay = base_decay self.register_buffer('mean', None) def forward(self, x): if self.mean is None: self.mean = torch.zeros_like(x) if self.training: decay = 1 - (1 - self.base_decay) * (np.cos(np.pi * self.step / self.total_steps) + 1) / 2.0 self.mean = (1 - decay) * x.detach() + decay * self.mean self.step += 1 return x class BYOL(nn.Module): def __init__(self, base_network_output_size, projection_output_size, classifier_output_size, total_training_steps, base_decay=0.996): super(BYOL, self).__init__() 
self.base_network_output_size = base_network_output_size model_fn = models.__dict__[args.arch] self.base_network = nn.Sequential( *list(model_fn(pretrained=False).children())[:-1] ) self.head = nn.Sequential( nn.Linear(base_network_output_size, args.head_latent_size), nn.BatchNorm1d(args.head_latent_size), nn.ReLU(), nn.Linear(args.head_latent_size, projection_output_size), ) self.predictor = nn.Sequential( nn.Linear(projection_output_size, args.head_latent_size), nn.BatchNorm1d(args.head_latent_size), nn.ReLU(), nn.Linear(args.head_latent_size, projection_output_size), ) self.linear_classifier = nn.Linear(base_network_output_size, classifier_output_size) self.target_network = CosEMA(total_training_steps, base_decay) self.target_network(nn.utils.parameters_to_vector(self.parameters())) def target_prediction(self, augmentation2): mean = self.target_network.mean original_params = nn.utils.parameters_to_vector(self.parameters()) nn.utils.vector_to_parameters(mean, self.parameters()) preds = self.prediction(augmentation2) nn.utils.vector_to_parameters(original_params, self.parameters()) return preds def prediction(self, augmentation): representation = self.base_network(augmentation).view(-1, self.base_network_output_size) projection = self.head(representation) prediction = self.predictor(projection) return representation, projection, prediction def forward(self, augmentation1, augmentation2): online_representation1, online_projection1, online_prediction1 = self.prediction(augmentation1) online_representation2, online_projection2, online_prediction2 = self.prediction(augmentation2) target_representation1, target_projection1, target_prediction1 = self.target_prediction(augmentation1) target_representation2, target_projection2, target_prediction2 = self.target_prediction(augmentation2) repr_to_classifier = torch.cat([online_representation1, online_representation2], 0) if self.training else online_representation1 linear_preds = self.linear_classifier(repr_to_classifier.clone().detach()) self.target_network(nn.utils.parameters_to_vector(self.parameters())) return { 'linear_preds': linear_preds, 'online_representation1': online_representation1, 'online_projection1': online_projection1, 'online_prediction1': online_prediction1, 'online_representation2': online_representation2, 'online_projection2': online_projection2, 'online_prediction2': online_prediction2, 'target_representation1': target_representation1, 'target_projection1': target_projection1, 'target_prediction1': target_prediction1, 'target_representation2': target_representation2, 'target_projection2': target_projection2, 'target_prediction2': target_prediction2, } def build_lr_schedule(optimizer, last_epoch=-1): if args.lr_update_schedule == 'fixed': sched = optim.lr_scheduler.LambdaLR(optimizer, lambda epoch: 1.0, last_epoch=last_epoch) elif args.lr_update_schedule == 'cosine': total_epochs = args.epochs - args.warmup sched = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=total_epochs, last_epoch=last_epoch) else: raise NotImplementedError("lr scheduler {} not implemented".format(args.lr_update_schedule)) if args.warmup > 0: warmup = scheduler.LinearWarmup(optimizer, warmup_steps=args.warmup, last_epoch=last_epoch) sched = scheduler.Scheduler(sched, warmup) return sched def build_optimizer(model, last_epoch=-1): optim_map = { "rmsprop": optim.RMSprop, "adam": optim.Adam, "adadelta": optim.Adadelta, "sgd": optim.SGD, "momentum": functools.partial(optim.SGD, momentum=0.9), "lbfgs": optim.LBFGS, } params_to_optimize = 
layers.add_weight_decay(model, args.weight_decay) full_opt_name = args.optimizer.lower().strip() is_lars = 'lars' in full_opt_name if full_opt_name == 'lamb': assert args.half, "Need fp16 precision to use Apex FusedLAMB." optim_map['lamb'] = optimizers.fused_lamb.FusedLAMB opt_name = full_opt_name.split('_')[-1] if is_lars else full_opt_name print("using {} optimizer {} lars.".format(opt_name, 'with'if is_lars else 'without')) lr = args.lr if opt_name in ["momentum", "sgd"]: lr = args.lr * (args.batch_size * args.num_replicas / 256) opt = optim_map[opt_name](params_to_optimize, lr=lr) if is_lars: opt = LARS(opt, eps=0.0) sched = build_lr_schedule(opt, last_epoch=last_epoch) return opt, sched def build_train_and_test_transforms(): resize_shape = (args.image_size_override, args.image_size_override) if 'dali' in args.task: import nvidia.dali.ops as ops import nvidia.dali.types as types from datasets.dali_imagefolder import ColorJitter, RandomHorizontalFlip, RandomGrayScale train_transform = [ ops.RandomResizedCrop(device="gpu" if args.cuda else "cpu", size=resize_shape, random_area=(0.08, 1.0), random_aspect_ratio=(3./4, 4./3)), RandomHorizontalFlip(prob=0.2, cuda=args.cuda), ColorJitter(brightness=0.8 * args.color_jitter_strength, contrast=0.8 * args.color_jitter_strength, saturation=0.2 * args.color_jitter_strength, hue=0.2 * args.color_jitter_strength, prob=0.8, cuda=args.cuda), RandomGrayScale(prob=0.2, cuda=args.cuda) ] test_transform = [ ops.Resize(resize_x=resize_shape[0], resize_y=resize_shape[1], device="gpu" if args.cuda else "cpu", image_type=types.RGB, interp_type=types.INTERP_LINEAR) ] else: from datasets.utils import GaussianBlur train_transform = [ transforms.RandomResizedCrop((args.image_size_override, args.image_size_override)), transforms.RandomHorizontalFlip(p=0.5), transforms.RandomApply([transforms.ColorJitter( brightness=0.8 * args.color_jitter_strength, contrast=0.8 * args.color_jitter_strength, saturation=0.8 * args.color_jitter_strength, hue=0.2 * args.color_jitter_strength)], p=0.8), transforms.RandomGrayscale(p=0.2), GaussianBlur(kernel_size=int(0.1 * args.image_size_override), p=0.5) ] test_transform = [transforms.Resize(resize_shape)] return train_transform, test_transform
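For reference, the CosEMA module defined in this script anneals the target-network EMA coefficient from base_decay toward 1.0 with a cosine schedule. A minimal sketch of that schedule (the step counts below are illustrative, not taken from a real run):

import numpy as np

base_decay, total_steps = 0.996, 100   # illustrative values
for step in (0, 50, 100):
    # same expression as CosEMA.forward: decay grows from base_decay to 1.0
    decay = 1 - (1 - base_decay) * (np.cos(np.pi * step / total_steps) + 1) / 2.0
    print(step, round(decay, 4))       # 0 -> 0.996, 50 -> 0.998, 100 -> 1.0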
MIT License
bomquote/transistor
transistor/utility/browsercookie.py
Chrome._darwin_key
python
def _darwin_key(self, salt, length):
    my_pass = keyring.get_password('Chrome Safe Storage', 'Chrome')
    my_pass = my_pass.encode('utf8')
    iterations = 1003
    key = PBKDF2(my_pass, salt, length, iterations)
    return key
Derive and return the Chrome 'Safe Storage' cookie-decryption key; used when sys.platform == 'darwin'.

:return: key bytes derived with PBKDF2
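As a hedged sketch, the key returned above is typically used to decrypt a Chrome cookie value with AES-CBC; the b'saltysalt' salt, 16-space IV and 'v10' prefix are Chrome's well-known defaults and are assumed here rather than read from this snippet, and `chrome`/`encrypted_value` are placeholders:

from Crypto.Cipher import AES

key = chrome._darwin_key(salt=b'saltysalt', length=16)   # chrome: an instantiated Chrome loader
cipher = AES.new(key, AES.MODE_CBC, b' ' * 16)           # IV of 16 spaces
decrypted = cipher.decrypt(encrypted_value[3:])          # drop the 'v10' prefix first
value = decrypted[:-decrypted[-1]].decode('utf8')        # strip the trailing padding bytes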
https://github.com/bomquote/transistor/blob/4bc5eaa1beac334cd05f2149a1dd584e0d803921/transistor/utility/browsercookie.py#L127-L137
__doc__ = 'Load browser cookies into a cookiejar' import os import sys import time import glob try: import cookielib except ImportError: import http.cookiejar as cookielib from contextlib import contextmanager import tempfile try: import json except ImportError: import simplejson as json try: import ConfigParser as configparser except ImportError: import configparser try: from pysqlite2 import dbapi2 as sqlite3 except ImportError: import sqlite3 import lz4.block import keyring from Crypto.Protocol.KDF import PBKDF2 from Crypto.Cipher import AES class BrowserCookieError(Exception): pass @contextmanager def create_local_copy(cookie_file): if os.path.exists(cookie_file): tmp_cookie_file = tempfile.NamedTemporaryFile(suffix='.sqlite').name open(tmp_cookie_file, 'wb').write(open(cookie_file, 'rb').read()) yield tmp_cookie_file else: raise BrowserCookieError('Can not find cookie file at: ' + cookie_file) os.remove(tmp_cookie_file) class BrowserCookieLoader(object): def __init__(self, cookie_files=None): cookie_files = cookie_files or self.find_cookie_files() self.cookie_files = list(cookie_files) def find_cookie_files(self): raise NotImplementedError def get_cookies(self): raise NotImplementedError def load(self): cookie_jar = cookielib.CookieJar() for cookie in self.get_cookies(): cookie_jar.set_cookie(cookie) return cookie_jar class Chrome(BrowserCookieLoader): def __str__(self): return 'chrome' def find_cookie_files(self): for pattern in [ os.path.expanduser( '~/Library/Application Support/Google/Chrome/Default/Cookies'), os.path.expanduser('~/Library/Application Support/Vivaldi/Default/Cookies'), os.path.expanduser('~/.config/chromium/Default/Cookies'), os.path.expanduser('~/.config/chromium/Profile */Cookies'), os.path.expanduser('~/.config/google-chrome/Default/Cookies'), os.path.expanduser('~/.config/google-chrome/Profile */Cookies'), os.path.expanduser('~/.config/vivaldi/Default/Cookies'), os.path.join(os.getenv('APPDATA', ''), r'..\Local\Google\Chrome\User Data\Default\Cookies'), os.path.join(os.getenv('APPDATA', ''), r'..\Local\Vivaldi\User Data\Default\Cookies'), ]: for result in glob.glob(pattern): yield result
MIT License
poliastro/poliastro
src/poliastro/twobody/orbit.py
Orbit.from_equinoctial
python
def from_equinoctial(
    cls, attractor, p, f, g, h, k, L, epoch=J2000, plane=Planes.EARTH_EQUATOR
):
    ss = ModifiedEquinoctialState(attractor, p, f, g, h, k, L, plane)
    return cls(ss, epoch)
Return `Orbit` from modified equinoctial elements.

Parameters
----------
attractor : Body
    Main attractor.
p : ~astropy.units.Quantity
    Semilatus rectum.
f : ~astropy.units.Quantity
    Second modified equinoctial element.
g : ~astropy.units.Quantity
    Third modified equinoctial element.
h : ~astropy.units.Quantity
    Fourth modified equinoctial element.
k : ~astropy.units.Quantity
    Fifth modified equinoctial element.
L : ~astropy.units.Quantity
    True longitude.
epoch : ~astropy.time.Time, optional
    Epoch, default to J2000.
plane : ~poliastro.frames.Planes
    Fundamental plane of the frame.
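A short usage sketch; the element values are arbitrary placeholders and the units follow the @u.quantity_input decorator visible in the context below:

from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import Orbit

orb = Orbit.from_equinoctial(
    Earth,
    11067.790 * u.km,             # p, semilatus rectum
    0.1 * u.one,                  # f
    0.1 * u.rad, 0.1 * u.rad,     # g, h
    0.1 * u.rad, 0.3 * u.rad,     # k, L (true longitude)
)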
https://github.com/poliastro/poliastro/blob/d0a13af27e5971e3435c9a762942041201ee13a6/src/poliastro/twobody/orbit.py#L389-L417
from typing import List, Union from warnings import warn import numpy as np from astropy import time, units as u from astropy.coordinates import ( GCRS, ICRS, Angle, CartesianDifferential, CartesianRepresentation, get_body_barycentric, get_body_barycentric_posvel, ) from astroquery.jplsbdb import SBDB from poliastro.constants import J2000 from poliastro.core.elements import coe2rv_many from poliastro.core.propagation.farnocchia import ( delta_t_from_nu as delta_t_from_nu_fast, ) from poliastro.frames import Planes from poliastro.frames.util import get_frame from poliastro.threebody.soi import laplace_radius from poliastro.twobody.angles import ( D_to_nu, E_to_nu, F_to_nu, M_to_D, M_to_E, M_to_F, raan_from_ltan, ) from poliastro.twobody.elements import ( get_eccentricity_critical_argp, get_eccentricity_critical_inc, get_inclination_critical_argp, hyp_nu_limit, ) from poliastro.twobody.mean_elements import get_mean_elements from poliastro.twobody.propagation import farnocchia, propagate from poliastro.twobody.sampling import sample_closed from poliastro.twobody.states import ( BaseState, ClassicalState, ModifiedEquinoctialState, RVState, ) from poliastro.util import find_closest_value, norm from poliastro.warnings import ( OrbitSamplingWarning, PatchedConicsWarning, TimeScaleWarning, ) try: from functools import cached_property except ImportError: from cached_property import cached_property ORBIT_FORMAT = "{r_p:.0f} x {r_a:.0f} x {inc:.1f} ({frame}) orbit around {body} at epoch {epoch} ({scale})" ORBIT_NO_FRAME_FORMAT = ( "{r_p:.0f} x {r_a:.0f} x {inc:.1f} orbit around {body} at epoch {epoch} ({scale})" ) class Orbit: def __init__(self, state, epoch): self._state = state self._epoch = epoch self._frame = None @property def attractor(self): return self._state.attractor @property def epoch(self): return self._epoch @property def plane(self): return self._state.plane @cached_property def r(self): return self._state.to_vectors().r @cached_property def v(self): return self._state.to_vectors().v @cached_property def a(self): return self._state.to_classical().a @cached_property def p(self): return self._state.to_classical().p @cached_property def r_p(self): return self.a * (1 - self.ecc) @cached_property def r_a(self): return self.a * (1 + self.ecc) @cached_property def ecc(self): return self._state.to_classical().ecc @cached_property def inc(self): return self._state.to_classical().inc @cached_property def raan(self): return self._state.to_classical().raan @cached_property def argp(self): return self._state.to_classical().argp @property def nu(self): return self._state.to_classical().nu @cached_property def f(self): return self._state.to_equinoctial().f @cached_property def g(self): return self._state.to_equinoctial().g @cached_property def h(self): return self._state.to_equinoctial().h @cached_property def k(self): return self._state.to_equinoctial().k @cached_property def L(self): return self.raan + self.argp + self.nu @cached_property def period(self): return self._state.period @cached_property def n(self): return self._state.n @cached_property def energy(self): return self.v.dot(self.v) / 2 - self.attractor.k / np.sqrt(self.r.dot(self.r)) @cached_property def e_vec(self): r, v = self.rv() k = self.attractor.k e_vec = ((v.dot(v) - k / (norm(r))) * r - r.dot(v) * v) / k return e_vec.decompose() @cached_property def h_vec(self): h_vec = np.cross(self.r.to(u.km).value, self.v.to(u.km / u.s)) * u.km ** 2 / u.s return h_vec @cached_property def h_mag(self): h_mag = norm(self.h_vec) return h_mag 
@cached_property def arglat(self): arglat = (self.argp + self.nu) % (360 * u.deg) return arglat @cached_property def t_p(self): t_p = ( delta_t_from_nu_fast( self.nu.to_value(u.rad), self.ecc.value, self.attractor.k.to_value(u.km ** 3 / u.s ** 2), self.r_p.to_value(u.km), ) * u.s ) return t_p @classmethod @u.quantity_input(r=u.m, v=u.m / u.s) def from_vectors(cls, attractor, r, v, epoch=J2000, plane=Planes.EARTH_EQUATOR): assert np.any(r.value), "Position vector must be non zero" if r.ndim != 1 or v.ndim != 1: raise ValueError( f"Vectors must have dimension 1, got {r.ndim} and {v.ndim}" ) ss = RVState(attractor, r, v, plane) return cls(ss, epoch) @classmethod def from_coords(cls, attractor, coord, plane=Planes.EARTH_EQUATOR): if "s" not in coord.cartesian.differentials: raise ValueError( "Coordinate instance doesn't have a differential with respect to time" ) if coord.size != 1: raise ValueError( "Coordinate instance must represents exactly 1 position, found: %d" % coord.size ) coord = coord.reshape(()) inertial_frame_at_body_centre = get_frame(attractor, plane, coord.obstime) if not coord.is_equivalent_frame(inertial_frame_at_body_centre): coord_in_irf = coord.transform_to(inertial_frame_at_body_centre) else: coord_in_irf = coord pos = coord_in_irf.cartesian.xyz vel = coord_in_irf.cartesian.differentials["s"].d_xyz return cls.from_vectors(attractor, pos, vel, epoch=coord.obstime, plane=plane) @classmethod @u.quantity_input(a=u.m, ecc=u.one, inc=u.rad, raan=u.rad, argp=u.rad, nu=u.rad) def from_classical( cls, attractor, a, ecc, inc, raan, argp, nu, epoch=J2000, plane=Planes.EARTH_EQUATOR, ): for element in a, ecc, inc, raan, argp, nu, epoch: if not element.isscalar: raise ValueError(f"Elements must be scalar, got {element}") if ecc == 1.0 * u.one: raise ValueError("For parabolic orbits use Orbit.parabolic instead") if not 0 * u.deg <= inc <= 180 * u.deg: raise ValueError("Inclination must be between 0 and 180 degrees") if ecc > 1 and a > 0: raise ValueError("Hyperbolic orbits have negative semimajor axis") if not -np.pi * u.rad <= nu < np.pi * u.rad: warn("Wrapping true anomaly to -π <= nu < π", stacklevel=2) nu = ((nu + np.pi * u.rad) % (2 * np.pi * u.rad) - np.pi * u.rad).to( nu.unit ) ss = ClassicalState( attractor, a * (1 - ecc ** 2), ecc, inc, raan, argp, nu, plane ) return cls(ss, epoch) @classmethod @u.quantity_input(p=u.m, f=u.one, g=u.rad, h=u.rad, k=u.rad, L=u.rad)
MIT License
pytorchlightning/lightning-bolts
pl_bolts/optimizers/lr_scheduler.py
LinearWarmupCosineAnnealingLR.__init__
python
def __init__(
    self,
    optimizer: Optimizer,
    warmup_epochs: int,
    max_epochs: int,
    warmup_start_lr: float = 0.0,
    eta_min: float = 0.0,
    last_epoch: int = -1,
) -> None:
    self.warmup_epochs = warmup_epochs
    self.max_epochs = max_epochs
    self.warmup_start_lr = warmup_start_lr
    self.eta_min = eta_min
    super().__init__(optimizer, last_epoch)
Args:
    optimizer (Optimizer): Wrapped optimizer.
    warmup_epochs (int): Maximum number of iterations for linear warmup.
    max_epochs (int): Maximum number of iterations.
    warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
    eta_min (float): Minimum learning rate. Default: 0.
    last_epoch (int): The index of last epoch. Default: -1.
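A minimal usage sketch (the model and the training step are placeholders):

import torch
from pl_bolts.optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR

layer = torch.nn.Linear(10, 1)
optimizer = torch.optim.SGD(layer.parameters(), lr=0.1)
scheduler = LinearWarmupCosineAnnealingLR(optimizer, warmup_epochs=10, max_epochs=40)

for epoch in range(40):
    # ... run one training epoch and call optimizer.step() here ...
    scheduler.step()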
https://github.com/pytorchlightning/lightning-bolts/blob/f4f6d53a039c521f3441750fa5297c7694320119/pl_bolts/optimizers/lr_scheduler.py#L44-L67
import math import warnings from typing import List from torch import nn from torch.optim import Adam, Optimizer from torch.optim.lr_scheduler import _LRScheduler class LinearWarmupCosineAnnealingLR(_LRScheduler):
Apache License 2.0
kxsystems/pyq
src/pyq/__init__.py
K.keys
python
def keys(self):
    return self._k(0, 'key', self)
returns q('key', self)

Among other uses, enables interoperability between q and python dicts.

>>> from collections import OrderedDict
>>> OrderedDict(q('`a`b!1 2'))
OrderedDict([('a', 1), ('b', 2)])
>>> d = {}; d.update(q('`a`b!1 2'))
>>> list(sorted(d.items()))
[('a', 1), ('b', 2)]
https://github.com/kxsystems/pyq/blob/8c31f19e6ee8970d5b4146c5593cb1ed174e1385/src/pyq/__init__.py#L342-L355
import itertools from datetime import datetime, date, time from collections import Mapping as _Mapping import builtins import sys import os try: import numpy as _np except ImportError: _np = None try: from ._k import K as _K, error as kerr, Q_VERSION, Q_DATE, Q_OS except ImportError: import ctypes import platform if not hasattr(ctypes.CDLL(None), 'b9'): message = ("Importing the pyq package from " "standalone python is not supported. ") if platform.system() == 'Windows': message += "Run path\\to\\q.exe python.q." else: message += "Use pyq executable." raise ImportError(message) raise try: from .version import version as __version__ except ImportError: __version__ = 'unknown' __metaclass__ = type _KX3 = Q_VERSION >= 3 _Q_RES = ['abs', 'acos', 'asin', 'atan', 'avg', 'bin', 'cor', 'cos', 'cov', 'dev', 'div', 'ema', 'enlist', 'exp', 'getenv', 'in', 'insert', 'last', 'like', 'log', 'max', 'min', 'prd', 'reval', 'setenv', 'sin', 'sqrt', 'ss', 'sum', 'tan', 'var', 'wavg', 'within', 'wsum', 'xexp'] if _KX3 and Q_DATE >= date(2012, 7, 26): _Q_RES.append('binr') if Q_VERSION >= 3.6: _Q_RES.append('hopen') class K(_K): __slots__ = () def _set_mask(self, mask): return q("{?[y;((),x)0N;x]}", self, mask) @classmethod def _from_record_array(cls, x): fields = [f for f, t in x.dtype.descr] k = q('!', list(fields), [K(x[f]) for f in fields]) if x.ndim: k = k.flip return k @classmethod def _from_sequence(cls, x, elm=None): r = cls._ktn(0, 0) g = iter(x) try: i = next(g) except StopIteration: return r en = _K_k(0, 'enlist') r._ja(en) if elm is None: elm = cls for i in itertools.chain([i], g): i = elm(i) if i._t in (-11, 11, 0): i = i.enlist r._ja(i) return r.eval @classmethod def _convert(cls, x): for t in type(x).mro(): c = converters.get(t) if c is not None: return c(x) return cls._from_sequence(x) def __reduce_ex__(self, proto): x = self._b9(1, self) b = memoryview(x).tobytes() return (d9, (b,)) def __getitem__(self, x): try: return _K.__getitem__(self, x) except (TypeError, NotImplementedError): pass try: start, stop, step = x.indices(len(self)) except AttributeError: i = K(x) if self._t == 99 and i._t < 0: return self.value[self._k(0, "?", self.key, i)] else: return self._k(0, "@", self, i) if step == 1: return self._k(0, "sublist", self._J([start, stop - start]), self) i = start + step * q.til(max(0, (stop - start) // step)) return self._k(0, "{$[99=type x;(key[x]y)!value[x]y;x y]}", self, i) def __getattr__(self, a): t = self._t if t == 98 and not a.startswith('_'): return self._k(0, '{x`%s}' % a, self) if t == 99: if self._k(0, "{11h~type key x}", self): if a == 'items': raise AttributeError return self._k(0, '{x`%s}' % a, self) return self._k(0, '{(0!x)`%s}' % a, self) if 12 <= abs(t) < 20: try: return self._k(0, "`%s$" % a, self) except kerr: pass raise AttributeError(a) _fields = " g@ ghijefgsjiifjiii" def __int__(self): t = self._t if t >= 0: raise TypeError("cannot convert non-scalar to int") return int(self.inspect(self._fields[-t])) def __float__(self): t = self._t if t >= 0: raise TypeError("cannot convert non-scalar to float") return float(self.inspect(self._fields[-t])) def __index__(self): t = self._t if -5 >= t >= -7: return int(self) raise TypeError("Only scalar short/int/long K objects " "can be converted to an index") def __bytes__(self): t = self._t if -5 >= t >= -7: return bytes(int(self)) if 0 < abs(t) < 11: if abs(t) == 2: from uuid import UUID x = q('(),', self) return b''.join(UUID(int=i).bytes for i in x) return bytes(self.data) raise BufferError("k object of type %d" % t) def 
__eq__(self, other): try: other = K(other) except TypeError: return False return bool(k('~')(self, other)) def __ne__(self, other): return bool(k('~~')(self, other)) def __contains__(self, item): if self._t: x = q('in', item, self) else: x = q('{sum x~/:y}', item, self) return bool(x)
Apache License 2.0
rucio/rucio
lib/rucio/common/schema/__init__.py
get_scope_name_regexps
python
def get_scope_name_regexps():
    if len(scope_name_regexps) == 0:
        from rucio.core.vo import list_vos
        vos = list_vos()
        for vo in vos:
            if not vo['vo'] in schema_modules:
                load_schema_for_vo(vo['vo'])
            scope_name_regexp = schema_modules[vo['vo']].SCOPE_NAME_REGEXP
            if scope_name_regexp not in scope_name_regexps:
                scope_name_regexps.append(scope_name_regexp)
    return scope_name_regexps
returns a list of all unique SCOPE_NAME_REGEXPs from all schemas
https://github.com/rucio/rucio/blob/6a6092798bb8220dec07328d0e3f7f42d1b931cd/lib/rucio/common/schema/__init__.py#L106-L119
try: from ConfigParser import NoOptionError, NoSectionError except ImportError: from configparser import NoOptionError, NoSectionError from rucio.common import config, exception import importlib schema_modules = {} scope_name_regexps = [] try: multivo = config.config_get_bool('common', 'multi_vo', check_config_table=False) except (NoOptionError, NoSectionError): multivo = False if not multivo: GENERIC_FALLBACK = 'generic' if config.config_has_section('policy'): try: POLICY = config.config_get('policy', 'package', check_config_table=False) + ".schema" except (NoOptionError, NoSectionError): try: POLICY = config.config_get('policy', 'schema', check_config_table=False) except (NoOptionError, NoSectionError): POLICY = GENERIC_FALLBACK POLICY = 'rucio.common.schema.' + POLICY.lower() else: POLICY = 'rucio.common.schema.' + GENERIC_FALLBACK.lower() try: module = importlib.import_module(POLICY) except ImportError: raise exception.PolicyPackageNotFound('Module ' + POLICY + ' not found') schema_modules["def"] = module scope_name_regexps.append(module.SCOPE_NAME_REGEXP) def load_schema_for_vo(vo): GENERIC_FALLBACK = 'generic_multi_vo' if config.config_has_section('policy'): try: POLICY = config.config_get('policy', 'package-' + vo, check_config_table=False) + ".schema" except (NoOptionError, NoSectionError): try: POLICY = config.config_get('policy', 'schema', check_config_table=False) except (NoOptionError, NoSectionError): POLICY = GENERIC_FALLBACK POLICY = 'rucio.common.schema.' + POLICY.lower() else: POLICY = 'rucio.common.schema.' + GENERIC_FALLBACK.lower() try: module = importlib.import_module(POLICY) except ImportError: raise exception.PolicyPackageNotFound('Module ' + POLICY + ' not found') schema_modules[vo] = module def validate_schema(name, obj, vo='def'): if vo not in schema_modules: load_schema_for_vo(vo) schema_modules[vo].validate_schema(name, obj) def get_schema_value(key, vo='def'): if vo not in schema_modules: load_schema_for_vo(vo) return getattr(schema_modules[vo], key)
Apache License 2.0
bitlabstudio/django-libs
django_libs/tests/mixins.py
ViewRequestFactoryTestMixin.setUpRequest
python
def setUpRequest(self, request):
    return request
The request is passed through this method on each run to allow adding additional attributes to it or changing certain values.
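A hedged sketch of overriding this hook in a concrete test case; MyView and the attached attribute are hypothetical:

from django.test import TestCase

class MyViewTestCase(ViewRequestFactoryTestMixin, TestCase):
    view_class = MyView   # hypothetical class-based view under test

    def setUpRequest(self, request):
        # attach something a middleware would normally provide
        request.current_app = 'my_app'
        return request    # the request must be returned again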
https://github.com/bitlabstudio/django-libs/blob/12131ea20b22339b60437b05ecda9ee1df9ce2be/django_libs/tests/mixins.py#L801-L807
import sys from django.conf import settings from django.contrib.auth.models import AnonymousUser from django.contrib.messages.storage.fallback import FallbackStorage from django.contrib.sessions.middleware import SessionMiddleware from django.http import Http404 from django.test import RequestFactory try: from django.core.urlresolvers import resolve, reverse except ImportError: from django.urls import resolve, reverse class ViewTestMixin(object): longMessage = True def _check_callable(self, method='get', data=None, message=None, kwargs=None, user=None, anonymous=False, and_redirects_to=None, status_code=None, called_by='is_callable', ajax=False, no_redirect=False, extra=None): if extra is None: extra = {} if called_by == 'is_not_callable': message_addin = ' not' elif called_by == 'is_callable': message_addin = '' if user: self.login(user) if anonymous: self.client.logout() if not status_code and and_redirects_to: status_code = 302 if not status_code and called_by == 'is_callable': status_code = 200 if not status_code and called_by == 'is_not_callable': status_code = 404 client_args = ( self.get_url(view_kwargs=kwargs or self.get_view_kwargs()), data or self.get_data_payload(), ) if ajax: extra.update({'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}) if method.lower() == 'get': resp = self.client.get(*client_args, **extra) elif method.lower() == 'post': resp = self.client.post(*client_args, **extra) else: raise Exception('Not a valid request method: "{0}"'.format(method)) if resp.status_code == 302 and not and_redirects_to and not ( status_code in [200, 404]): sys.stderr.write( '\n\033[1;31mDeprecationWarning:\033[1;m' ' Your response status code' ' was 302, although ``and_redirects_to`` was not set.\n' 'Please use ``and_redirects_to`` for a test on redirects since' ' the callable methods will default to 200 or 404 in the' ' future.\n') if and_redirects_to: self.assertRedirects( resp, and_redirects_to, status_code=status_code, msg_prefix=('The view did not redirect as expected.')) else: self.assertIn( resp.status_code, [status_code, 302], msg=(message or 'The view should{0} be callable'.format(message_addin))) return resp def is_callable(self, method='get', data=None, message=None, kwargs=None, user=None, anonymous=False, and_redirects_to=None, status_code=None, ajax=False, no_redirect=False, extra=None): return self._check_callable( method=method, data=data, message=message, kwargs=kwargs, user=user, anonymous=anonymous, and_redirects_to=and_redirects_to, status_code=status_code, ajax=ajax, no_redirect=no_redirect, called_by='is_callable', extra=extra) def is_not_callable(self, method='get', message=None, data=None, kwargs=None, user=None, anonymous=False, and_redirects_to=None, status_code=None, ajax=False, no_redirect=False, extra=None): return self._check_callable( method=method, data=data, message=message, kwargs=kwargs, user=user, anonymous=anonymous, and_redirects_to=and_redirects_to, status_code=status_code, ajax=ajax, no_redirect=no_redirect, called_by='is_not_callable', extra=extra) def get_data_payload(self): if hasattr(self, 'data_payload'): return self.data_payload return {} def get_view_name(self): return NotImplementedError def get_view_args(self): return None def get_view_kwargs(self): return None def get_url(self, view_name=None, view_args=None, view_kwargs=None): if view_name is None: view_name = self.get_view_name() if view_args is None: view_args = self.get_view_args() if view_kwargs is None: view_kwargs = self.get_view_kwargs() return reverse(view_name, args=view_args, 
kwargs=view_kwargs) def login(self, user, password='test123'): self.client.login(username=user.username, password=password) def get_login_url(self): login_url = getattr(settings, 'LOGIN_URL') if login_url is None: return reverse('auth_login') return login_url def should_redirect_to_login_when_anonymous(self, url=None): if not url: url = self.get_url() resp = self.client.get(url) self.assertRedirects(resp, '{0}?next={1}'.format( self.get_login_url(), url)) return resp def should_be_callable_when_anonymous(self, url=None): if not url: url = self.get_url() resp = self.client.get(url, data=self.get_data_payload()) self.assertEqual(resp.status_code, 200) return resp def should_be_callable_when_authenticated(self, user, url=None): if not url: url = self.get_url() self.login(user) resp = self.client.get(url, data=self.get_data_payload()) self.assertEqual(resp.status_code, 200) return resp def should_be_callable_when_has_correct_permissions(self, user, url=None): if not url: url = self.get_url() user_no_permissions = AnonymousUser() self.login(user_no_permissions) resp = self.client.get(url, data=self.get_data_payload()) self.assertRedirects(resp, '{0}?next={1}'.format( reverse('auth_login'), url)) self.login(user) resp = self.client.get(url, data=self.get_data_payload()) self.assertEqual(resp.status_code, 200) class ViewRequestFactoryTestMixin(object): longMessage = True _logged_in_user = None view_class = None def assertRedirects(self, resp, redirect_url, msg=None): self.assertIn( resp.status_code, [301, 302], msg=msg or ('Should redirect')) self.assertEqual( resp._headers['location'][1], redirect_url, msg=msg or ('Should redirect to correct url.')) def get_request(self, method=RequestFactory().get, ajax=False, no_redirect=False, data=None, user=AnonymousUser(), add_session=False, session_dict={}, view_kwargs=None, **kwargs): if data is not None: kwargs.update({'data': data}) req = method(self.get_url(view_kwargs=view_kwargs), **kwargs) req.user = user req._dont_enforce_csrf_checks = True if add_session: middleware = SessionMiddleware() middleware.process_request(req) req.session.save() else: setattr(req, 'session', {}) if session_dict: for var in session_dict: req.session[var] = session_dict[var] messages = FallbackStorage(req) setattr(req, '_messages', messages) if ajax: req.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest' req = self.setUpRequest(req) if req is None: raise RuntimeError( 'The request has become None. 
You probably forgot to return' ' the request again, when implementing `setUpRequest`.') return req def get_get_request(self, ajax=False, no_redirect=False, data=None, user=None, add_session=False, session_dict={}, view_kwargs=None, **kwargs): if user is None: user = self.get_user() return self.get_request( ajax=ajax, no_redirect=no_redirect, data=data, user=user, add_session=add_session, session_dict=session_dict, view_kwargs=view_kwargs, **kwargs) def get_post_request(self, ajax=False, no_redirect=False, data=None, user=None, add_session=False, session_dict={}, **kwargs): method = RequestFactory().post if user is None: user = self.get_user() return self.get_request( method=method, ajax=ajax, no_redirect=no_redirect, data=data, user=user, add_session=add_session, session_dict=session_dict, **kwargs) def get_user(self): if self._logged_in_user is None: return AnonymousUser() return self._logged_in_user def get_login_url(self): login_url = getattr(settings, 'LOGIN_URL', None) if login_url is None: return reverse('auth_login') return login_url def get_view_name(self): raise NotImplementedError def get_view_args(self): return () def get_view_kwargs(self): return {} def get_url(self, view_kwargs=None): try: view_name = self.get_view_name() except NotImplementedError: return '/' view_args = self.get_view_args() view_kwargs = view_kwargs or self.get_view_kwargs() return reverse(view_name, args=view_args, kwargs=view_kwargs) def get_view_class(self): return self.view_class def get_view(self): view_class = self.get_view_class() if view_class is None: if hasattr(self, 'view') and self.view: return self.view raise NotImplementedError('You need to define a view class.') return view_class.as_view() def get(self, user=None, data=None, ajax=False, no_redirect=False, add_session=False, session_dict={}, kwargs=None): req = self.get_get_request( user=user, data=data, ajax=ajax, no_redirect=no_redirect, add_session=add_session, session_dict=session_dict, view_kwargs=kwargs) view = self.get_view() if kwargs is None: kwargs = {} kwargs.update(self.get_view_kwargs()) args = self.get_view_args() resp = view(req, *args, **kwargs) return resp def post(self, user=None, data=None, ajax=False, no_redirect=False, add_session=False, session_dict={}, kwargs=None): req = self.get_post_request( user=user, data=data, ajax=ajax, no_redirect=no_redirect, session_dict=session_dict, add_session=add_session) view = self.get_view() if kwargs is None: kwargs = {} kwargs.update(self.get_view_kwargs()) args = self.get_view_args() resp = view(req, *args, **kwargs) return resp def login(self, user): self._logged_in_user = user def logout(self): self._logged_in_user = None def assert200(self, resp, user=None, msg=None): user_msg = user or self.get_user() if self.get_view_class() is not None: view_msg = self.get_view_class() else: view_msg = self.get_view() if msg is None: msg = ('The `{0}` view should have been callable for' ' user `{1}`.').format(view_msg, user_msg) if resp.status_code in [301, 302]: msg = msg + ' The view redirected to "{0}".'.format(resp.url) self.assertEqual(resp.status_code, 200, msg=msg) return resp def is_callable(self, user=None, data=None, ajax=False, no_redirect=False, add_session=False, session_dict={}, kwargs=None, msg=None): resp = self.get( user=user, data=data, ajax=ajax, no_redirect=no_redirect, add_session=add_session, session_dict=session_dict, kwargs=kwargs) self.assert200(resp, user, msg=msg) return resp def is_forbidden(self, user=None, data=None, ajax=False, no_redirect=False, add_session=False, 
post=False, kwargs=None, msg=None): resp = self.get( user=user, data=data, ajax=ajax, no_redirect=no_redirect, add_session=add_session, kwargs=kwargs) user_msg = user or self.get_user() if self.get_view_class() is not None: view_msg = self.get_view_class() else: view_msg = self.get_view() if not msg: msg = ('The `{0}` view should have been forbidden for' ' user `{1}`.').format(view_msg, user_msg) self.assertEqual(resp.status_code, 403, msg=msg) return resp def is_not_callable(self, user=None, data=None, ajax=False, no_redirect=False, add_session=False, session_dict={}, post=False, kwargs=None, msg=None): if post: call_obj = self.post else: call_obj = self.get self.assertRaises( Http404, call_obj, user=user, data=data, ajax=ajax, no_redirect=no_redirect, add_session=add_session, session_dict=session_dict, kwargs=kwargs) def is_postable(self, user=None, data=None, ajax=False, no_redirect=False, to=None, to_url_name=None, next_url='', add_session=False, session_dict={}, kwargs=None, msg=None): resp = self.post( user=user, data=data, add_session=add_session, session_dict=session_dict, kwargs=kwargs, ajax=ajax, no_redirect=no_redirect) if not (ajax or no_redirect) or to or to_url_name: if next_url: next_url = '?next={0}'.format(next_url) if to_url_name: try: self.assertEqual( resolve(resp.url).url_name, to_url_name, msg=msg) except AttributeError: raise AssertionError( 'The response returned with a status code {}'.format( resp.status_code)) else: redirect_url = '{0}{1}'.format(to, next_url) self.assertRedirects(resp, redirect_url, msg=msg) else: self.assert200(resp, user, msg=msg) return resp def redirects(self, to=None, to_url_name=None, next_url='', user=None, add_session=False, session_dict={}, kwargs=None, msg=None, data=None): resp = self.get( user=user, add_session=add_session, kwargs=kwargs, session_dict=session_dict, data=data) if to or to_url_name: if next_url: next_url = '?next={0}'.format(next_url) if to_url_name: if msg is None: msg = ( 'Should redirect to correct to view with correct name.' ) self.assertEqual( resolve(resp.url).url_name, to_url_name, msg=msg) else: redirect_url = '{0}{1}'.format(to, next_url) self.assertRedirects(resp, redirect_url, msg=msg) return resp
MIT License
centerforopenscience/osf.io
admin_tests/utilities.py
setup_view
python
def setup_view(view, request, *args, **kwargs):
    view.request = request
    view.args = args
    view.kwargs = kwargs
    return view
Mimic as_view() returned callable, but returns view instance.

http://tech.novapost.fr/django-unit-test-your-views-en.html
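Typical use, sketched with a hypothetical class-based view:

from django.test import RequestFactory

request = RequestFactory().get('/fake-path/')
view = setup_view(MyDetailView(), request, pk='1234')   # MyDetailView is hypothetical
view.kwargs['pk']   # -> '1234'; individual methods can now be unit-tested directly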
https://github.com/centerforopenscience/osf.io/blob/6552a01fe250997cd3eb67cf72fc7157d9bc5af6/admin_tests/utilities.py#L8-L16
from osf_tests.factories import UserFactory
Apache License 2.0
zhiningliu1998/imbalanced-ensemble
imbalanced_ensemble/utils/estimator_checks.py
parametrize_with_checks
python
def parametrize_with_checks(estimators):
    def checks_generator():
        for estimator in estimators:
            name = type(estimator).__name__
            for check in _yield_all_checks(estimator):
                check = partial(check, name)
                yield _maybe_mark_xfail(estimator, check, pytest)

    return pytest.mark.parametrize(
        "estimator, check", checks_generator(), ids=_get_check_estimator_ids
    )
Pytest specific decorator for parametrizing estimator checks.

The `id` of each check is set to be a pprint version of the estimator
and the name of the check with its keyword arguments. This allows using
`pytest -k` to specify which tests to run::

    pytest test_check_estimators.py -k check_estimators_fit_returns_self

Parameters
----------
estimators : list of estimators instances
    Estimators to generate checks for.

Returns
-------
decorator : `pytest.mark.parametrize`

Examples
--------
>>> from sklearn.utils.estimator_checks import parametrize_with_checks
>>> from sklearn.linear_model import LogisticRegression
>>> from sklearn.tree import DecisionTreeRegressor
>>> @parametrize_with_checks([LogisticRegression(),
...                           DecisionTreeRegressor()])
... def test_sklearn_compatible_estimator(estimator, check):
...     check(estimator)
https://github.com/zhiningliu1998/imbalanced-ensemble/blob/1cdf6a7c9dd6cb0a179ef35e66d78066dff77906/imbalanced_ensemble/utils/estimator_checks.py#L104-L143
import sys import traceback import warnings from collections import Counter from functools import partial import pytest import numpy as np from scipy import sparse from sklearn.base import clone from sklearn.datasets import ( fetch_openml, make_classification, make_multilabel_classification, ) from sklearn.cluster import KMeans from sklearn.exceptions import SkipTestWarning from sklearn.preprocessing import label_binarize from sklearn.utils.estimator_checks import _maybe_mark_xfail from sklearn.utils.estimator_checks import _get_check_estimator_ids from sklearn.utils._testing import assert_allclose from sklearn.utils._testing import assert_array_equal from sklearn.utils._testing import assert_raises_regex from sklearn.utils.multiclass import type_of_target from imbalanced_ensemble.datasets import make_imbalance from imbalanced_ensemble.sampler.over_sampling.base import BaseOverSampler from imbalanced_ensemble.sampler.under_sampling.base import BaseCleaningSampler, BaseUnderSampler def _set_checking_parameters(estimator): params = estimator.get_params() name = estimator.__class__.__name__ if "n_estimators" in params: estimator.set_params(n_estimators=min(5, estimator.n_estimators)) if name == "ClusterCentroids": estimator.set_params( voting="soft", estimator=KMeans(random_state=0, algorithm="full", n_init=1), ) if name == "KMeansSMOTE": estimator.set_params(kmeans_estimator=12) def _yield_sampler_checks(sampler): tags = sampler._get_tags() yield check_target_type yield check_samplers_one_label yield check_samplers_fit yield check_samplers_fit_resample yield check_samplers_sampling_strategy_fit_resample if "sparse" in tags["X_types"]: yield check_samplers_sparse if "dataframe" in tags["X_types"]: yield check_samplers_pandas if "string" in tags["X_types"]: yield check_samplers_string if tags["allow_nan"]: yield check_samplers_nan yield check_samplers_list yield check_samplers_multiclass_ova yield check_samplers_preserve_dtype yield check_samplers_sample_indices yield check_samplers_2d_target def _yield_classifier_checks(classifier): yield check_classifier_on_multilabel_or_multioutput_targets yield check_classifiers_with_encoded_labels def _yield_all_checks(estimator): name = estimator.__class__.__name__ tags = estimator._get_tags() if tags["_skip_test"]: warnings.warn( f"Explicit SKIP via _skip_test tag for estimator {name}.", SkipTestWarning, ) return if hasattr(estimator, "fit_resample"): for check in _yield_sampler_checks(estimator): yield check if hasattr(estimator, "predict"): for check in _yield_classifier_checks(estimator): yield check
MIT License
pathoschild/stewbot
stewbot/components/modules/mechanize/_beautifulsoup.py
PageElement.findNextSibling
python
def findNextSibling(self, name=None, attrs={}, text=None):
    return self._first(self.fetchNextSiblings, name, attrs, text)
Returns the closest sibling to this Tag that matches the given criteria and appears after this Tag in the document.
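A rough usage sketch against this bundled, 2.1.1-era BeautifulSoup; the soup.first() lookup used to grab the starting tag is an assumption about the old API:

# assuming `soup` was built from '<div><p>one</p><p>two</p></div>'
first_p = soup.first('p')                  # old-style lookup; assumed available here
second_p = first_p.findNextSibling('p')    # -> the Tag wrapping '<p>two</p>'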
https://github.com/pathoschild/stewbot/blob/8cfcb8378684b0083eb11748f21d1bc74636cdb6/stewbot/components/modules/mechanize/_beautifulsoup.py#L107-L110
from __future__ import generators __author__ = "Leonard Richardson (leonardr@segfault.org)" __version__ = "2.1.1" __date__ = "$Date: 2004/10/18 00:14:20 $" __copyright__ = "Copyright (c) 2004-2005 Leonard Richardson" __license__ = "PSF" from _sgmllib_copy import SGMLParser, SGMLParseError import types import re import _sgmllib_copy as sgmllib class NullType(object): def __new__(cls): return Null def __call__(self, *args, **kwargs): return Null def __getattr__(self, attr): return Null def __getitem__(self, item): return Null def __setattr__(self, attr, value): pass def __setitem__(self, item, value): pass def __len__(self): return 0 def __iter__(self): return iter([]) def __contains__(self, item): return False def __repr__(self): return "Null" Null = object.__new__(NullType) class PageElement: def setup(self, parent=Null, previous=Null): self.parent = parent self.previous = previous self.next = Null self.previousSibling = Null self.nextSibling = Null if self.parent and self.parent.contents: self.previousSibling = self.parent.contents[-1] self.previousSibling.nextSibling = self def findNext(self, name=None, attrs={}, text=None): return self._first(self.fetchNext, name, attrs, text) firstNext = findNext def fetchNext(self, name=None, attrs={}, text=None, limit=None): return self._fetch(name, attrs, text, limit, self.nextGenerator)
ISC License
opennetworkingfoundation/tapi
RI/flask_server/tapi_server/models/tapi_common_getserviceinterfacepointlist_output_sip.py
TapiCommonGetserviceinterfacepointlistOutputSip.from_dict
python
def from_dict(cls, dikt) -> 'TapiCommonGetserviceinterfacepointlistOutputSip':
    return util.deserialize_model(dikt, cls)
Returns the dict as a model

:param dikt: A dict.
:type: dict
:return: The tapi.common.getserviceinterfacepointlist.output.Sip of this TapiCommonGetserviceinterfacepointlistOutputSip.  # noqa: E501
:rtype: TapiCommonGetserviceinterfacepointlistOutputSip
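A minimal sketch; the payload keys follow the attribute_map in the context below and the values are placeholders:

payload = {'uuid': 'sip-0001'}
sip = TapiCommonGetserviceinterfacepointlistOutputSip.from_dict(payload)
sip.uuid   # -> 'sip-0001'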
https://github.com/opennetworkingfoundation/tapi/blob/1f3fd9483d5674552c5a31206c97399c8c151897/RI/flask_server/tapi_server/models/tapi_common_getserviceinterfacepointlist_output_sip.py#L96-L104
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from tapi_server.models.base_model_ import Model from tapi_server.models.tapi_common_administrative_state import TapiCommonAdministrativeState from tapi_server.models.tapi_common_capacity import TapiCommonCapacity from tapi_server.models.tapi_common_layer_protocol_name import TapiCommonLayerProtocolName from tapi_server.models.tapi_common_lifecycle_state import TapiCommonLifecycleState from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue from tapi_server.models.tapi_common_operational_state import TapiCommonOperationalState from tapi_server.models.tapi_common_service_interface_point import TapiCommonServiceInterfacePoint from tapi_server.models.tapi_photonic_media_media_channel_service_interface_point_spec import TapiPhotonicMediaMediaChannelServiceInterfacePointSpec from tapi_server.models.tapi_photonic_media_otsi_service_interface_point_spec import TapiPhotonicMediaOtsiServiceInterfacePointSpec from tapi_server.models.tapi_photonic_media_sip_augmentation1 import TapiPhotonicMediaSipAugmentation1 from tapi_server.models.tapi_photonic_media_sip_augmentation2 import TapiPhotonicMediaSipAugmentation2 from tapi_server import util class TapiCommonGetserviceinterfacepointlistOutputSip(Model): def __init__(self, operational_state=None, lifecycle_state=None, administrative_state=None, available_capacity=None, total_potential_capacity=None, name=None, uuid=None, supported_layer_protocol_qualifier=None, layer_protocol_name=None, media_channel_service_interface_point_spec=None, otsi_service_interface_point_spec=None): self.openapi_types = { 'operational_state': TapiCommonOperationalState, 'lifecycle_state': TapiCommonLifecycleState, 'administrative_state': TapiCommonAdministrativeState, 'available_capacity': TapiCommonCapacity, 'total_potential_capacity': TapiCommonCapacity, 'name': List[TapiCommonNameAndValue], 'uuid': str, 'supported_layer_protocol_qualifier': List[str], 'layer_protocol_name': TapiCommonLayerProtocolName, 'media_channel_service_interface_point_spec': TapiPhotonicMediaMediaChannelServiceInterfacePointSpec, 'otsi_service_interface_point_spec': TapiPhotonicMediaOtsiServiceInterfacePointSpec } self.attribute_map = { 'operational_state': 'operational-state', 'lifecycle_state': 'lifecycle-state', 'administrative_state': 'administrative-state', 'available_capacity': 'available-capacity', 'total_potential_capacity': 'total-potential-capacity', 'name': 'name', 'uuid': 'uuid', 'supported_layer_protocol_qualifier': 'supported-layer-protocol-qualifier', 'layer_protocol_name': 'layer-protocol-name', 'media_channel_service_interface_point_spec': 'media-channel-service-interface-point-spec', 'otsi_service_interface_point_spec': 'otsi-service-interface-point-spec' } self._operational_state = operational_state self._lifecycle_state = lifecycle_state self._administrative_state = administrative_state self._available_capacity = available_capacity self._total_potential_capacity = total_potential_capacity self._name = name self._uuid = uuid self._supported_layer_protocol_qualifier = supported_layer_protocol_qualifier self._layer_protocol_name = layer_protocol_name self._media_channel_service_interface_point_spec = media_channel_service_interface_point_spec self._otsi_service_interface_point_spec = otsi_service_interface_point_spec @classmethod
Apache License 2.0
pernat1y/vm-automation
vm_functions.py
vm_copyto
python
def vm_copyto(vm, username, password, local_file, remote_file):
    logging.info(f'Uploading "{local_file}" as "{remote_file}" to VM "{vm}".')
    result = vboxmanage(
        f'guestcontrol {vm} --username {username} --password {password} copyto {local_file} {remote_file}')
    if result[0] == 0:
        logging.debug(f'File uploaded.')
    else:
        logging.error(f'Error while uploading file: {result[2]}')
    return result[0], result[1], result[2]
Upload file to virtual machine

:param vm: Virtual machine name.
:param username: Guest OS username (login).
:param password: Guest OS password.
:param local_file: Path to local file on host OS.
:param remote_file: Path to file on guest OS.
:return: returncode, stdout, stderr.
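Example call; the VM name, credentials and paths are placeholders:

code, out, err = vm_copyto('win10_analysis', 'user', 'password',
                           '/tmp/sample.bin', 'C:\\Users\\user\\Desktop\\sample.bin')
if code != 0:
    logging.error('Upload failed, aborting task.')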
https://github.com/pernat1y/vm-automation/blob/bb452e5a69b9f96b17dcbde33a0ae66821933bb1/vm_functions.py#L403-L420
import datetime import logging import random import re import secrets import subprocess if __name__ == "__main__": print('This script only contains functions and cannot be called directly. See demo scripts for usage examples.') exit(1) if 'vboxmanage_path' not in locals(): vboxmanage_path = 'vboxmanage' if 'timeout' not in locals(): timeout = 60 def vboxmanage(cmd, timeout=timeout): cmd = f'{vboxmanage_path} {cmd}'.split() logging.debug(f'''Running command: {' '.join(cmd)}''') try: result = subprocess.run(cmd, capture_output=True, timeout=timeout, text=True) return result.returncode, result.stdout, result.stderr except FileNotFoundError: logging.critical('vboxmanage path is incorrect. Stopping.') exit(1) def virtualbox_version(strip_newline=1, strip_build=0): result = vboxmanage('--version') version = result[1] if strip_newline: version = version.rstrip() if strip_build: version = re.findall(r'^(\d+(?:\.\d+)*)', version)[0] return result[0], version, result[2] def list_vms(list=1, dictionary=0): if dictionary: options = '--long' else: options = '' result = vboxmanage(f'list vms --sorted {options}') if result[0] == 0: if dictionary: vms = re.findall(r'^Name:\s+(\S+)', result[1], flags=re.MULTILINE) groups = re.findall(r'^Groups:\s+(\S+)', result[1], flags=re.MULTILINE) vms_list_ = dict(zip(vms, groups)) elif list: vms_list_ = re.findall(r'^"(\w+)"', result[1], flags=re.MULTILINE) else: vms_list_ = result[1] return result[0], vms_list_, result[2] else: logging.error(f'Unable to get list of VMs: {result[2]}') return result[0], result[1], result[2] def list_snapshots(vm, list=1): result = vboxmanage(f'snapshot {vm} list --machinereadable') if result[0] == 0: if list == 1: snapshots_list = re.findall(r'^SnapshotName(?:-\d+)?="(\S+)"', result[1], flags=re.MULTILINE) else: snapshots_list = result[1] return result[0], snapshots_list, result[2] else: logging.error(f'Unable to get list of snapshots: {result[2]}') return result[0], result[1], result[2] def vm_start(vm, ui='gui'): if ui == '0': ui = 'headless' elif ui == '1': ui = 'gui' ui = ui.lower() logging.info(f'Starting VM "{vm}".') if ui not in ['gui', 'sdl', 'headless', 'separate']: logging.error('Unknown ui type set. 
Assuming gui.') ui = 'gui' result = vboxmanage(f'startvm {vm} --type {ui}') if result[0] == 0: logging.info(f'VM {vm} started') else: logging.error(f'Error while starting VM "{vm}": {result[2]}') return result[0], result[1], result[2] def vm_stop(vm, ignore_status_error=0): logging.info(f'Stopping VM "{vm}".') result = vboxmanage(f'controlvm {vm} poweroff') if result[0] == 0: logging.debug('VM stopped.') else: if 'is not currently running' in result[2] or 'Invalid machine state: PoweredOff' in result[2] and ignore_status_error: logging.debug(f'VM already stopped: {result[2]}') else: logging.error(f'Error while stopping VM: {result[2]}') return result[0], result[1], result[2] def vm_enumerate(vm, pattern=None): logging.debug(f'Enumerating VM "{vm}" guest properties.') if pattern: result = vboxmanage(f'guestproperty enumerate {vm} --pattern {pattern}') else: result = vboxmanage(f'guestproperty enumerate {vm}') if result[0] == 0: logging.debug('VM properties enumerated.') else: logging.error(f'Error while enumerating guest properties: {result[2]}') return result[0], result[1], result[2] def list_ips(vm): result = vm_enumerate(vm, pattern='/VirtualBox/GuestInfo/Net/*/V4/IP') if result[0] == 0: ips_list = re.findall(r'value:\s(\d+\.\d+\.\d+\.\d+)', result[1], flags=re.MULTILINE) return result[0], ips_list, result[2] else: logging.error(f'Unable to get list of IP addresses: {result[2]}') return result[0], result[1], result[2] def vm_snapshot_take(vm, snapshot, live=0): if live: logging.info(f'Taking live snapshot "{snapshot}" for VM "{vm}".') options = '--live' else: logging.info(f'Taking snapshot "{snapshot}" for VM "{vm}".') options = '' result = vboxmanage(f'snapshot {vm} take {snapshot} {options}') if result[0] == 0: logging.debug('Snapshot created.') else: logging.error(f'Error while creating snapshot: {result[2]}') return result[0], result[1], result[2] def vm_backup(vm): now = datetime.datetime.now() snapshot = f'backup_{now.strftime("%Y_%m_%d_%H_%M_%S")}' result = vm_snapshot_take(vm, snapshot, live=1) return result[0], result[1], result[2] def vm_snapshot_restore(vm, snapshot, ignore_status_error=0): if snapshot == 'restorecurrent': logging.info(f'Restoring VM "{vm}" to current snapshot.') result = vboxmanage(f'snapshot {vm} restorecurrent') if result[0] == 0: logging.debug(f'VM "{vm}" restored to current snapshot.') else: logging.error(f'Error while restoring VM "{vm}" to current snapshot: {result[2]}.') else: logging.info(f'Restoring VM "{vm}" to snapshot "{snapshot}".') result = vboxmanage(f'snapshot {vm} restore {snapshot}') if result[0] == 0: logging.debug(f'VM "{vm}" restored to snapshot "{snapshot}".') else: if 'Could not find a snapshot' in result[2] and ignore_status_error: logging.debug(f'VM "{vm}" does not have snapshot "{snapshot}": {result[2]}.') else: logging.error(f'Error while restoring VM "{vm}" to snapshot "{snapshot}": {result[2]}.') return result[0], result[1], result[2] def vm_snapshot_remove(vm, snapshot): logging.info(f'Removing snapshot "{snapshot}" for VM "{vm}"') result = vboxmanage(f'snapshot {vm} delete {snapshot}') if result[0] == 0: logging.debug('Snapshot removed.') else: logging.error(f'Error while removing snapshot: {result[2]}') return result[0], result[1], result[2] def vm_network(vm, link_state): if link_state in ['on', 'off']: logging.info(f'Setting network parameters to {link_state} for VM {vm}') result = vboxmanage(f'controlvm {vm} setlinkstate1 {link_state}') if result[0] == 0: logging.debug(f'Network state set.') else: logging.error(f'Unable to 
change network state for VM: {result[2]}.') return result[0], result[1], result[2] else: return 0, 0, 0 def vm_set_resolution(vm, screen_resolution): if not screen_resolution: return 0, 0, 0 if screen_resolution == 'random': screen_resolution = random.choice['1024 768 32', '1280 1024 32', '1440 1080 32', '1600 1200 32', '1920 1080 32'] logging.debug(f'Changing screen resolution for VM "{vm}".') result = vboxmanage(f'controlvm {vm} setvideomodehint {screen_resolution}') if result[0] == 0: logging.debug('Screen resolution changed.') else: logging.error(f'Unable to change screen resolution: {result[2]}') return result[0], result[1], result[2] def vm_set_mac(vm, mac): logging.debug(f'Changing MAC address for VM "{vm}".') if mac == 'new': mac = f'080027{secrets.token_hex(3)}' if mac == 'random': mac = secrets.token_hex(6) result = vboxmanage(f'modifyvm {vm} --macaddress1 {mac}') if result[0] == 0: logging.debug('MAC changed.') else: logging.error(f'Unable to change MAC address: {result[2]}') return result[0], result[1], result[2] def vm_pcap(vm, file): result = vboxmanage(f'modifyvm {vm} --nictrace1 on --nictracefile1 {file}') if result[0] == 0: logging.debug(f'Saving network traffic from VM "{vm}" as {file}.') else: logging.error(f'Unable to update VM settings to capture traffic: {result[2]}') return result[0], result[1], result[2] def vm_memdump(vm, file): result = vboxmanage(f'debugvm {vm} dumpvmcore --filename={file}') if result[0] == 0: logging.debug(f'Dumping memory of VM "{vm}" as {file}.') else: logging.error(f'Unable to dump VM memory: {result[2]}') return result[0], result[1], result[2] def vm_disable_time_sync(vm): result = vboxmanage(f'setextradata {vm} "VBoxInternal/Devices/VMMDev/0/Config/GetHostTimeDisabled" "1"') if result[0] == 0: logging.debug(f'Time sync disabled for VM "{vm}".') else: logging.error(f'Unable to disable time sync for VM: {result[2]}') return result[0], result[1], result[2] def vm_exec(vm, username, password, remote_file, open_with='%windir%\\explorer.exe', file_args=None): logging.info(f'{vm}: Executing file "{remote_file}" with parent "{open_with}" on VM "{vm}".') if file_args: cmd = f'guestcontrol {vm} --username {username} --password {password} start {open_with} {remote_file} {file_args}' else: cmd = f'guestcontrol {vm} --username {username} --password {password} start {open_with} {remote_file}' result = vboxmanage(cmd) if result[0] == 0: logging.debug('File executed successfully.') else: logging.error(f'Error while executing file: {result[2]}') return result[0], result[1], result[2] def vm_file_stat(vm, username, password, remote_file): logging.debug(f'Checking if file "{remote_file}" exist on VM "{vm}".') result = vboxmanage(f'guestcontrol {vm} --username {username} --password {password} stat {remote_file}') if result[0] == 0: logging.debug('File exist.') else: logging.error(f'Error while checking for file: {result[2]}') return result[0], result[1], result[2]
MIT License
maistra/openshift-ansible
roles/lib_utils/action_plugins/node_group_checks.py
validate_labels
python
def validate_labels(labels_found):
    mandatory_labels = ('node-role.kubernetes.io/master=true',
                        'node-role.kubernetes.io/infra=true')
    for item in mandatory_labels:
        if item not in labels_found:
            msg = ("At least one group in openshift_node_groups requires the"
                   " {} label").format(item)
            raise errors.AnsibleModuleError(msg)
Ensure that every label in mandatory_labels is present in labels_found.
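Example behaviour:

validate_labels(['node-role.kubernetes.io/master=true',
                 'node-role.kubernetes.io/infra=true'])     # passes silently

validate_labels(['node-role.kubernetes.io/compute=true'])   # raises AnsibleModuleError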
https://github.com/maistra/openshift-ansible/blob/f1a36950515c0f27039a263ffd6f9b954b3880f2/roles/lib_utils/action_plugins/node_group_checks.py#L23-L31
from ansible.plugins.action import ActionBase from ansible import errors def get_or_fail(group, key): res = group.get(key) if res is None: msg = "Each group in openshift_node_groups must have {} key".format(key) raise errors.AnsibleModuleError(msg) return res
Apache License 2.0
deepsphere/deepsphere-weather
modules/utils_config.py
get_default_SWAG_settings
python
def get_default_SWAG_settings(): swag_settings = {"SWAG": False, "target_learning_rate": 0.007, "no_cov_mat": False, "max_num_models": 40, "swag_freq": 10, "swa_start": 0, "sampling_scale": 0.1, "nb_samples": 10 } return swag_settings
Return some default settings for the SWAG model.
https://github.com/deepsphere/deepsphere-weather/blob/a9c75de9c9852a2832883cd998efd16d6542b083/modules/utils_config.py#L95-L106
import os import sys import json import torch import pickle import shutil import inspect import types import numpy as np import deepdiff from modules.utils_torch import set_pytorch_deterministic from modules.utils_torch import set_pytorch_numeric_precision def get_default_model_settings(): model_settings = {"pretrained_model_name": None, "model_name_prefix": None, "model_name": None, "model_name_suffix": None, "kernel_size_conv": 3, "bias": True, "batch_norm": False, "batch_norm_before_activation": False, "activation": True, "activation_fun": 'relu', "pool_method": "Max", "kernel_size_pooling": 4, "conv_type": 'graph', "graph_type": "knn", "knn": 20, "periodic_padding": 'True', } return model_settings def get_default_training_settings(): training_settings = {"epochs": 15, "ar_training_strategy": "RNN", "learning_rate": 0.001, "training_batch_size": 16, "validation_batch_size": 16, "scoring_interval": 20, "save_model_each_epoch": False, "numeric_precision": "float32", "deterministic_training": False, "seed_model_weights": 100, "seed_random_shuffling": 120, "benchmark_cudnn": True, "gpu_training": True, "gpu_devices_ids": [0], "dataparallel_training": False, } return training_settings def get_default_ar_settings(): ar_settings = {"input_k": [-3,-2,-1], "output_k": [0], "forecast_cycle": 1, "ar_iterations": 6, "stack_most_recent_prediction": True, } return ar_settings def get_default_dataloader_settings(): dataloader_settings = {"random_shuffling": True, "drop_last_batch": True, "prefetch_in_gpu": False, "prefetch_factor": 2, "pin_memory": False, "asyncronous_gpu_transfer": True, "num_workers": 8, "autotune_num_workers": False, } return dataloader_settings
MIT License
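A minimal sketch of how these default-settings helpers are typically consumed: fetch the defaults, then override selected keys; the overridden values below are arbitrary examples, not recommendations.

# Fetch the SWAG defaults and override a couple of fields for an experiment.
swag_settings = get_default_SWAG_settings()
swag_settings.update({'SWAG': True, 'max_num_models': 20})
print(swag_settings['target_learning_rate'])  # 0.007, from the defaults above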
getnikola/plugins
v7/latex/latex/tokenizer.py
Tokenizer.next
python
def next(self): if self._token is not None: self._find_next()
Proceed to next token.
https://github.com/getnikola/plugins/blob/afafcec8a1530ee74dadfbe68ffa190b12a5a622/v7/latex/latex/tokenizer.py#L235-L238
from __future__ import unicode_literals import nikola.utils from enum import Enum LOGGER = nikola.utils.get_logger('compile_latex.tokenizer', nikola.utils.STDERR_HANDLER) class Token(Enum): Whitespace = 1 NonbreakableWhitespace = 2 Text = 3 EscapedText = 4 Command = 5 InlineFormulaDelimiter = 6 DisplayFormulaDelimiter = 7 CurlyBraketOpen = 8 CurlyBraketClose = 9 SquareBraketOpen = 10 SquareBraketClose = 11 DoubleNewLine = 12 Comment = 13 ForcedLineBreak = 14 TableColumnDelimiter = 15 def _compute_position(input, index): line = 1 col = 1 eol = None for c in input[:index]: if c == '\n' or c == '\r': if eol is None or eol == c: eol = c line += 1 col = 1 else: eol = None else: col += 1 return (line, col) class Tokenizer: def _is_whitespace(self, char): return ord(char) <= 32 def _is_line_break(self, char): return ord(char) == 10 or ord(char) == 13 def _is_command_char(self, char): return (char >= 'A' and char <= 'Z') or (char >= 'a' and char <= 'z') or (char == '@') def _eat_whitespace(self): number_of_line_breaks = 0 last_line_break = None while self._position < len(self._input): if not self._is_whitespace(self._input[self._position]): break if self._is_line_break(self._input[self._position]): if last_line_break is None or last_line_break == self._input[self._position]: number_of_line_breaks += 1 last_line_break = self._input[self._position] else: last_line_break = None self._position += 1 return number_of_line_breaks def _eat_comment(self): start = self._position last_line_break = None had_line_break = False while self._position < len(self._input): if had_line_break and not self._is_whitespace(self._input[self._position]): break if self._is_line_break(self._input[self._position]): if last_line_break is None or last_line_break == self._input[self._position]: if had_line_break: break last_line_break = self._input[self._position] had_line_break = True else: last_line_break = None self._position += 1 return self._input[start:self._position] def _read_text(self, strict): start = self._position while self._position < len(self._input): char = self._input[self._position] if self._is_whitespace(char): break if char == "~" or char == "{" or char == "}" or char == "$" or char == "[" or char == "]" or char == "$" or char == "\\" or char == "&": break if strict and not self._is_command_char(char): break self._position += 1 return self._input[start:self._position] def _find_next(self): self._token = None self._token_value = None self._token_begin_index = None self._token_end_index = None if (self._position >= len(self._input)): return self._token_begin_index = self._position char = self._input[self._position] if self._is_whitespace(char): number_of_line_breaks = self._eat_whitespace() if number_of_line_breaks > 1: self._token = Token.DoubleNewLine else: self._token = Token.Whitespace elif char == "~": self._token = Token.NonbreakableWhitespace self._position += 1 elif char == '&': self._token = Token.TableColumnDelimiter self._position += 1 elif char == "{": self._token = Token.CurlyBraketOpen self._position += 1 elif char == "}": self._token = Token.CurlyBraketClose self._position += 1 elif char == "[": self._token = Token.SquareBraketOpen self._position += 1 elif char == "]": self._token = Token.SquareBraketClose self._position += 1 elif char == "$": self._token = Token.InlineFormulaDelimiter self._position += 1 if self._position < len(self._input) and self._input[self._position] == "$": self._token = Token.DisplayFormulaDelimiter self._position += 1 elif char == "\\": self._position += 1 if 
self._position == len(self._input): raise ValueError("Reached end of text after '\\'") self._token = Token.Command cmd = self._read_text(True) if len(cmd) == 0: ch = self._input[self._position] if ch == '(' or ch == ')' or ch == '[' or ch == ']': self._token_value = ch elif ch == '\\': self._token = Token.ForcedLineBreak else: self._token = Token.EscapedText self._token_value = ch self._position += 1 else: self._token_value = cmd elif char == '%': self._token = Token.Comment self._position += 1 self._token_value = self._eat_comment() else: self._token = Token.Text self._token_value = self._read_text(False) self._token_end_index = self._position def __init__(self, input): self._input = input self._position = 0 self._find_next() def has_token(self): return self._token is not None def token_type(self): return self._token def token_value(self): return self._token_value def token_begin_index(self): return self._token_begin_index def token_end_index(self): return self._token_end_index
MIT License
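A small sketch showing how Tokenizer.next() drives a scan loop over a LaTeX fragment, assuming the Tokenizer class and Token enum from the context above are importable (the module itself depends on nikola.utils for logging); the input string is arbitrary.

# Walk a small LaTeX fragment token by token; next() advances the scanner
# until has_token() reports that the input is exhausted.
tok = Tokenizer(r"\textbf{Hello} $x^2$ % trailing comment")
while tok.has_token():
    print(tok.token_type(), repr(tok.token_value()))
    tok.next()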
kubeflow-kale/kale
backend/kale/rpc/nb.py
list_volumes
python
def list_volumes(request): volumes = podutils.list_volumes() volumes_out = [{"type": "clone", "name": volume.name, "mount_point": path, "size": size, "size_type": "", "snapshot": False} for path, volume, size in volumes] return volumes_out
Get the list of mounted volumes.
https://github.com/kubeflow-kale/kale/blob/1fc1f8dd8854c75c6869da45386686f9f2b54003/backend/kale/rpc/nb.py#L66-L76
import os import shutil import logging from tabulate import tabulate from kale import marshal from kale.rpc.log import create_adapter from kale import Compiler, NotebookProcessor from kale.rpc.errors import RPCInternalError from kale.common import podutils, kfputils, kfutils, astutils KALE_MARSHAL_DIR_POSTFIX = ".kale.marshal.dir" KALE_PIPELINE_STEP_ENV = "KALE_PIPELINE_STEP" KALE_SNAPSHOT_FINAL_ENV = "KALE_SNAPSHOT_FINAL" logger = create_adapter(logging.getLogger(__name__)) def resume_notebook_path(request, server_root=None): p = os.environ.get("KALE_NOTEBOOK_PATH") if p and not os.path.isfile(p): raise RuntimeError("env path KALE_NOTEBOOK_PATH=%s is not a file" % p) if not p: return None home = os.environ.get("HOME") if not home.endswith('/'): home = home + '/' if server_root: server_root = os.path.expanduser(server_root) if not p.startswith(server_root): raise ValueError("Trying to resume a notebook from path %s, but" " the provided server root %s does not match the" " notebook's path." % (p, server_root)) return p[len(server_root):] elif p.startswith(home): return p[len(home):] else: return p
Apache License 2.0
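A standalone sketch of the shaping that list_volumes performs, since podutils.list_volumes only works inside a running pod; the (path, volume, size) tuples below are fabricated stand-ins for what podutils would return.

# Reproduces the list comprehension from list_volumes on fake data so the
# output shape is visible without a cluster; Volume and the sizes are made up.
from collections import namedtuple

Volume = namedtuple('Volume', ['name'])
fake_volumes = [('/home/jovyan', Volume(name='workspace-pvc'), '5Gi')]

volumes_out = [{"type": "clone",
                "name": volume.name,
                "mount_point": path,
                "size": size,
                "size_type": "",
                "snapshot": False}
               for path, volume, size in fake_volumes]
print(volumes_out)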
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/image_save_options_data.py
ImageSaveOptionsData.update_last_saved_time_property
python
def update_last_saved_time_property(self): return self._update_last_saved_time_property
Gets the update_last_saved_time_property of this ImageSaveOptionsData. # noqa: E501 Gets or sets a value indicating whether the Aspose.Words.Properties.BuiltInDocumentProperties.LastSavedTime property is updated before saving. # noqa: E501 :return: The update_last_saved_time_property of this ImageSaveOptionsData. # noqa: E501 :rtype: bool
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/image_save_options_data.py#L500-L508
import pprint import re import datetime import six import json class ImageSaveOptionsData(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'allow_embedding_post_script_fonts': 'bool', 'custom_time_zone_info_data': 'TimeZoneInfoData', 'dml3_d_effects_rendering_mode': 'str', 'dml_effects_rendering_mode': 'str', 'dml_rendering_mode': 'str', 'file_name': 'str', 'flat_opc_xml_mapping_only': 'bool', 'iml_rendering_mode': 'str', 'save_format': 'str', 'update_created_time_property': 'bool', 'update_fields': 'bool', 'update_last_printed_property': 'bool', 'update_last_saved_time_property': 'bool', 'update_sdt_content': 'bool', 'zip_output': 'bool', 'color_mode': 'str', 'jpeg_quality': 'int', 'metafile_rendering_options': 'MetafileRenderingOptionsData', 'numeral_format': 'str', 'optimize_output': 'bool', 'page_count': 'int', 'page_index': 'int', 'horizontal_resolution': 'float', 'image_brightness': 'float', 'image_color_mode': 'str', 'image_contrast': 'float', 'paper_color': 'str', 'pixel_format': 'str', 'resolution': 'float', 'scale': 'float', 'use_anti_aliasing': 'bool', 'use_gdi_emf_renderer': 'bool', 'use_high_quality_rendering': 'bool', 'vertical_resolution': 'float' } attribute_map = { 'allow_embedding_post_script_fonts': 'AllowEmbeddingPostScriptFonts', 'custom_time_zone_info_data': 'CustomTimeZoneInfoData', 'dml3_d_effects_rendering_mode': 'Dml3DEffectsRenderingMode', 'dml_effects_rendering_mode': 'DmlEffectsRenderingMode', 'dml_rendering_mode': 'DmlRenderingMode', 'file_name': 'FileName', 'flat_opc_xml_mapping_only': 'FlatOpcXmlMappingOnly', 'iml_rendering_mode': 'ImlRenderingMode', 'save_format': 'SaveFormat', 'update_created_time_property': 'UpdateCreatedTimeProperty', 'update_fields': 'UpdateFields', 'update_last_printed_property': 'UpdateLastPrintedProperty', 'update_last_saved_time_property': 'UpdateLastSavedTimeProperty', 'update_sdt_content': 'UpdateSdtContent', 'zip_output': 'ZipOutput', 'color_mode': 'ColorMode', 'jpeg_quality': 'JpegQuality', 'metafile_rendering_options': 'MetafileRenderingOptions', 'numeral_format': 'NumeralFormat', 'optimize_output': 'OptimizeOutput', 'page_count': 'PageCount', 'page_index': 'PageIndex', 'horizontal_resolution': 'HorizontalResolution', 'image_brightness': 'ImageBrightness', 'image_color_mode': 'ImageColorMode', 'image_contrast': 'ImageContrast', 'paper_color': 'PaperColor', 'pixel_format': 'PixelFormat', 'resolution': 'Resolution', 'scale': 'Scale', 'use_anti_aliasing': 'UseAntiAliasing', 'use_gdi_emf_renderer': 'UseGdiEmfRenderer', 'use_high_quality_rendering': 'UseHighQualityRendering', 'vertical_resolution': 'VerticalResolution' } def __init__(self, allow_embedding_post_script_fonts=None, custom_time_zone_info_data=None, dml3_d_effects_rendering_mode=None, dml_effects_rendering_mode=None, dml_rendering_mode=None, file_name=None, flat_opc_xml_mapping_only=None, iml_rendering_mode=None, save_format=None, update_created_time_property=None, update_fields=None, update_last_printed_property=None, update_last_saved_time_property=None, update_sdt_content=None, zip_output=None, color_mode=None, jpeg_quality=None, metafile_rendering_options=None, numeral_format=None, optimize_output=None, page_count=None, page_index=None, horizontal_resolution=None, image_brightness=None, image_color_mode=None, image_contrast=None, paper_color=None, pixel_format=None, resolution=None, scale=None, 
use_anti_aliasing=None, use_gdi_emf_renderer=None, use_high_quality_rendering=None, vertical_resolution=None): self._allow_embedding_post_script_fonts = None self._custom_time_zone_info_data = None self._dml3_d_effects_rendering_mode = None self._dml_effects_rendering_mode = None self._dml_rendering_mode = None self._file_name = None self._flat_opc_xml_mapping_only = None self._iml_rendering_mode = None self._save_format = None self._update_created_time_property = None self._update_fields = None self._update_last_printed_property = None self._update_last_saved_time_property = None self._update_sdt_content = None self._zip_output = None self._color_mode = None self._jpeg_quality = None self._metafile_rendering_options = None self._numeral_format = None self._optimize_output = None self._page_count = None self._page_index = None self._horizontal_resolution = None self._image_brightness = None self._image_color_mode = None self._image_contrast = None self._paper_color = None self._pixel_format = None self._resolution = None self._scale = None self._use_anti_aliasing = None self._use_gdi_emf_renderer = None self._use_high_quality_rendering = None self._vertical_resolution = None self.discriminator = None if allow_embedding_post_script_fonts is not None: self.allow_embedding_post_script_fonts = allow_embedding_post_script_fonts if custom_time_zone_info_data is not None: self.custom_time_zone_info_data = custom_time_zone_info_data if dml3_d_effects_rendering_mode is not None: self.dml3_d_effects_rendering_mode = dml3_d_effects_rendering_mode if dml_effects_rendering_mode is not None: self.dml_effects_rendering_mode = dml_effects_rendering_mode if dml_rendering_mode is not None: self.dml_rendering_mode = dml_rendering_mode if file_name is not None: self.file_name = file_name if flat_opc_xml_mapping_only is not None: self.flat_opc_xml_mapping_only = flat_opc_xml_mapping_only if iml_rendering_mode is not None: self.iml_rendering_mode = iml_rendering_mode if save_format is not None: self.save_format = save_format if update_created_time_property is not None: self.update_created_time_property = update_created_time_property if update_fields is not None: self.update_fields = update_fields if update_last_printed_property is not None: self.update_last_printed_property = update_last_printed_property if update_last_saved_time_property is not None: self.update_last_saved_time_property = update_last_saved_time_property if update_sdt_content is not None: self.update_sdt_content = update_sdt_content if zip_output is not None: self.zip_output = zip_output if color_mode is not None: self.color_mode = color_mode if jpeg_quality is not None: self.jpeg_quality = jpeg_quality if metafile_rendering_options is not None: self.metafile_rendering_options = metafile_rendering_options if numeral_format is not None: self.numeral_format = numeral_format if optimize_output is not None: self.optimize_output = optimize_output if page_count is not None: self.page_count = page_count if page_index is not None: self.page_index = page_index if horizontal_resolution is not None: self.horizontal_resolution = horizontal_resolution if image_brightness is not None: self.image_brightness = image_brightness if image_color_mode is not None: self.image_color_mode = image_color_mode if image_contrast is not None: self.image_contrast = image_contrast if paper_color is not None: self.paper_color = paper_color if pixel_format is not None: self.pixel_format = pixel_format if resolution is not None: self.resolution = resolution if scale is not 
None: self.scale = scale if use_anti_aliasing is not None: self.use_anti_aliasing = use_anti_aliasing if use_gdi_emf_renderer is not None: self.use_gdi_emf_renderer = use_gdi_emf_renderer if use_high_quality_rendering is not None: self.use_high_quality_rendering = use_high_quality_rendering if vertical_resolution is not None: self.vertical_resolution = vertical_resolution @property def allow_embedding_post_script_fonts(self): return self._allow_embedding_post_script_fonts @allow_embedding_post_script_fonts.setter def allow_embedding_post_script_fonts(self, allow_embedding_post_script_fonts): self._allow_embedding_post_script_fonts = allow_embedding_post_script_fonts @property def custom_time_zone_info_data(self): return self._custom_time_zone_info_data @custom_time_zone_info_data.setter def custom_time_zone_info_data(self, custom_time_zone_info_data): self._custom_time_zone_info_data = custom_time_zone_info_data @property def dml3_d_effects_rendering_mode(self): return self._dml3_d_effects_rendering_mode @dml3_d_effects_rendering_mode.setter def dml3_d_effects_rendering_mode(self, dml3_d_effects_rendering_mode): allowed_values = ["Basic", "Advanced"] if not dml3_d_effects_rendering_mode.isdigit(): if dml3_d_effects_rendering_mode not in allowed_values: raise ValueError( "Invalid value for `dml3_d_effects_rendering_mode` ({0}), must be one of {1}" .format(dml3_d_effects_rendering_mode, allowed_values)) self._dml3_d_effects_rendering_mode = dml3_d_effects_rendering_mode else: self._dml3_d_effects_rendering_mode = allowed_values[int(dml3_d_effects_rendering_mode) if six.PY3 else long(dml3_d_effects_rendering_mode)] @property def dml_effects_rendering_mode(self): return self._dml_effects_rendering_mode @dml_effects_rendering_mode.setter def dml_effects_rendering_mode(self, dml_effects_rendering_mode): self._dml_effects_rendering_mode = dml_effects_rendering_mode @property def dml_rendering_mode(self): return self._dml_rendering_mode @dml_rendering_mode.setter def dml_rendering_mode(self, dml_rendering_mode): self._dml_rendering_mode = dml_rendering_mode @property def file_name(self): return self._file_name @file_name.setter def file_name(self, file_name): self._file_name = file_name @property def flat_opc_xml_mapping_only(self): return self._flat_opc_xml_mapping_only @flat_opc_xml_mapping_only.setter def flat_opc_xml_mapping_only(self, flat_opc_xml_mapping_only): self._flat_opc_xml_mapping_only = flat_opc_xml_mapping_only @property def iml_rendering_mode(self): return self._iml_rendering_mode @iml_rendering_mode.setter def iml_rendering_mode(self, iml_rendering_mode): self._iml_rendering_mode = iml_rendering_mode @property def save_format(self): return self._save_format @save_format.setter def save_format(self, save_format): self._save_format = save_format @property def update_created_time_property(self): return self._update_created_time_property @update_created_time_property.setter def update_created_time_property(self, update_created_time_property): self._update_created_time_property = update_created_time_property @property def update_fields(self): return self._update_fields @update_fields.setter def update_fields(self, update_fields): self._update_fields = update_fields @property def update_last_printed_property(self): return self._update_last_printed_property @update_last_printed_property.setter def update_last_printed_property(self, update_last_printed_property): self._update_last_printed_property = update_last_printed_property @property
MIT License
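A brief sketch constructing ImageSaveOptionsData and reading back the flag discussed above, based on the constructor and property shown in the context; real SDK usage may require further configuration, and the chosen option values are arbitrary.

# Build image save options and read back the LastSavedTime flag; assumes the
# asposewordscloud package is installed, as the context's imports indicate.
from asposewordscloud.models.image_save_options_data import ImageSaveOptionsData

opts = ImageSaveOptionsData(save_format='png',
                            update_last_saved_time_property=True,
                            horizontal_resolution=96.0,
                            vertical_resolution=96.0)
print(opts.update_last_saved_time_property)  # True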