Dataset columns:
    repository_name      string, lengths 7 to 107
    function_path        string, lengths 4 to 190
    function_identifier  string, lengths 1 to 236
    language             string, 1 class
    function             string, lengths 9 to 647k
    docstring            string, lengths 5 to 488k
    function_url         string, lengths 71 to 285
    context              string, lengths 0 to 2.51M
    license              string, 5 classes
kitware/pylidar-slam
slam/odometry/odometry_runner.py
SLAMRunner.save_config
python
def save_config(self):
    with open(str(Path(self.log_dir) / "config.yaml"), "w") as config_file:
        config_dict = self.config.__dict__
        git_hash = get_git_hash()
        if git_hash is not None:
            config_dict["git_hash"] = git_hash
        config_dict["_working_dir"] = os.getcwd()
        config_file.write(OmegaConf.to_yaml(config_dict))
Saves the config to disk.
https://github.com/kitware/pylidar-slam/blob/1baa21a67bd32f144f8e17583251ac777f81345e/slam/odometry/odometry_runner.py#L99-L109
import dataclasses import logging from pathlib import Path from typing import Optional import time import os import torch from abc import ABC import numpy as np from omegaconf import OmegaConf from torch.utils.data import DataLoader from tqdm import tqdm import shutil from hydra.core.config_store import ConfigStore from hydra.conf import dataclass, MISSING, field from slam.common.pose import Pose from slam.common.torch_utils import collate_fun from slam.common.utils import check_tensor, assert_debug, get_git_hash from slam.dataset import DatasetLoader, DATASET from slam.eval.eval_odometry import OdometryResults from slam.dataset.configuration import DatasetConfig from slam.slam import SLAMConfig, SLAM from slam.viz import _with_cv2 if _with_cv2: import cv2 @dataclass class SLAMRunnerConfig: slam: SLAMConfig = MISSING dataset: DatasetConfig = MISSING max_num_frames: int = -1 log_dir: str = field(default_factory=os.getcwd) num_workers: int = 2 pin_memory: bool = True device: str = "cuda" if torch.cuda.is_available() else "cpu" pose: str = "euler" fail_dir: str = field(default_factory=os.getcwd) move_if_fail: bool = False viz_num_pointclouds: int = 200 debug: bool = True save_results: bool = True cs = ConfigStore.instance() cs.store(name="slam_config", node=SLAMRunnerConfig) class SLAMRunner(ABC): def __init__(self, config: SLAMRunnerConfig): super().__init__() self.config: SLAMRunnerConfig = config self.num_workers = self.config.num_workers self.batch_size = 1 self.log_dir = self.config.log_dir self.device = torch.device(self.config.device) self.pin_memory = self.config.pin_memory if self.device != torch.device("cpu") else False self.pose = Pose(self.config.pose) self.viz_num_pointclouds = self.config.viz_num_pointclouds dataset_config: DatasetConfig = self.config.dataset self.dataset_loader: DatasetLoader = DATASET.load(dataset_config) self.slam_config: SLAMConfig = self.config.slam
MIT License
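A minimal, self-contained sketch of the same YAML-dump pattern for readers who want to reproduce it outside the runner; the log directory, the stand-in config dict, and wrapping the dict with OmegaConf.create before to_yaml are assumptions, not part of pylidar-slam.

import os
from pathlib import Path

from omegaconf import OmegaConf

log_dir = Path("/tmp/slam_run")                      # assumed log directory
log_dir.mkdir(parents=True, exist_ok=True)

config_dict = {"device": "cpu", "num_workers": 2}    # stand-in for config.__dict__
config_dict["_working_dir"] = os.getcwd()            # mirrors save_config above

with open(log_dir / "config.yaml", "w") as config_file:
    config_file.write(OmegaConf.to_yaml(OmegaConf.create(config_dict)))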
rapid7/vm-console-client-python
rapid7vmconsole/models/global_scan.py
GlobalScan.to_dict
python
def to_dict(self):
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value
    if issubclass(GlobalScan, dict):
        for key, value in self.items():
            result[key] = value
    return result
Returns the model properties as a dict
https://github.com/rapid7/vm-console-client-python/blob/55e1f573967bce27cc9a2d10c12a949b1142c2b3/rapid7vmconsole/models/global_scan.py#L493-L518
import pprint import re import six class GlobalScan(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'assets': 'int', 'duration': 'str', 'end_time': 'str', 'engine_id': 'int', 'engine_name': 'str', 'id': 'int', 'links': 'list[Link]', 'message': 'str', 'scan_name': 'str', 'scan_type': 'str', 'site_id': 'int', 'site_name': 'str', 'start_time': 'str', 'started_by': 'str', 'status': 'str', 'vulnerabilities': 'Vulnerabilities' } attribute_map = { 'assets': 'assets', 'duration': 'duration', 'end_time': 'endTime', 'engine_id': 'engineId', 'engine_name': 'engineName', 'id': 'id', 'links': 'links', 'message': 'message', 'scan_name': 'scanName', 'scan_type': 'scanType', 'site_id': 'siteId', 'site_name': 'siteName', 'start_time': 'startTime', 'started_by': 'startedBy', 'status': 'status', 'vulnerabilities': 'vulnerabilities' } def __init__(self, assets=None, duration=None, end_time=None, engine_id=None, engine_name=None, id=None, links=None, message=None, scan_name=None, scan_type=None, site_id=None, site_name=None, start_time=None, started_by=None, status=None, vulnerabilities=None): self._assets = None self._duration = None self._end_time = None self._engine_id = None self._engine_name = None self._id = None self._links = None self._message = None self._scan_name = None self._scan_type = None self._site_id = None self._site_name = None self._start_time = None self._started_by = None self._status = None self._vulnerabilities = None self.discriminator = None if assets is not None: self.assets = assets if duration is not None: self.duration = duration if end_time is not None: self.end_time = end_time if engine_id is not None: self.engine_id = engine_id if engine_name is not None: self.engine_name = engine_name if id is not None: self.id = id if links is not None: self.links = links if message is not None: self.message = message if scan_name is not None: self.scan_name = scan_name if scan_type is not None: self.scan_type = scan_type if site_id is not None: self.site_id = site_id if site_name is not None: self.site_name = site_name if start_time is not None: self.start_time = start_time if started_by is not None: self.started_by = started_by if status is not None: self.status = status if vulnerabilities is not None: self.vulnerabilities = vulnerabilities @property def assets(self): return self._assets @assets.setter def assets(self, assets): self._assets = assets @property def duration(self): return self._duration @duration.setter def duration(self, duration): self._duration = duration @property def end_time(self): return self._end_time @end_time.setter def end_time(self, end_time): self._end_time = end_time @property def engine_id(self): return self._engine_id @engine_id.setter def engine_id(self, engine_id): self._engine_id = engine_id @property def engine_name(self): return self._engine_name @engine_name.setter def engine_name(self, engine_name): self._engine_name = engine_name @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def links(self): return self._links @links.setter def links(self, links): self._links = links @property def message(self): return self._message @message.setter def message(self, message): self._message = message @property def scan_name(self): return self._scan_name @scan_name.setter def scan_name(self, scan_name): self._scan_name = scan_name @property def 
scan_type(self): return self._scan_type @scan_type.setter def scan_type(self, scan_type): self._scan_type = scan_type @property def site_id(self): return self._site_id @site_id.setter def site_id(self, site_id): self._site_id = site_id @property def site_name(self): return self._site_name @site_name.setter def site_name(self, site_name): self._site_name = site_name @property def start_time(self): return self._start_time @start_time.setter def start_time(self, start_time): self._start_time = start_time @property def started_by(self): return self._started_by @started_by.setter def started_by(self, started_by): self._started_by = started_by @property def status(self): return self._status @status.setter def status(self, status): allowed_values = ["aborted", "unknown", "running", "finished", "stopped", "error", "paused", "dispatched", "integrating"] if status not in allowed_values: raise ValueError( "Invalid value for `status` ({0}), must be one of {1}" .format(status, allowed_values) ) self._status = status @property def vulnerabilities(self): return self._vulnerabilities @vulnerabilities.setter def vulnerabilities(self, vulnerabilities): self._vulnerabilities = vulnerabilities
MIT License
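A hypothetical usage sketch of the generated model, assuming the rapid7vmconsole package is installed; the field values are made up for illustration.

from rapid7vmconsole.models.global_scan import GlobalScan

scan = GlobalScan(id=42, scan_name="weekly", status="finished", assets=10)
print(scan.to_dict())
# e.g. {'assets': 10, 'id': 42, 'scan_name': 'weekly', 'status': 'finished', ...}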
flyteorg/flytekit
flytekit/models/core/workflow.py
WorkflowNode.from_flyte_idl
python
def from_flyte_idl(cls, pb2_object):
    if pb2_object.HasField("launchplan_ref"):
        return cls(launchplan_ref=_identifier.Identifier.from_flyte_idl(pb2_object.launchplan_ref))
    else:
        return cls(sub_workflow_ref=_identifier.Identifier.from_flyte_idl(pb2_object.sub_workflow_ref))
:param flyteidl.core.workflow_pb2.WorkflowNode pb2_object:
:rtype: WorkflowNode
https://github.com/flyteorg/flytekit/blob/6c032035563ae645b0b93558b3fe3362080057ea/flytekit/models/core/workflow.py#L494-L502
import datetime import typing from flyteidl.core import workflow_pb2 as _core_workflow from flytekit.models import common as _common from flytekit.models import interface as _interface from flytekit.models import types as _types from flytekit.models.core import condition as _condition from flytekit.models.core import identifier as _identifier from flytekit.models.literals import Binding as _Binding from flytekit.models.literals import RetryStrategy as _RetryStrategy from flytekit.models.task import Resources class IfBlock(_common.FlyteIdlEntity): def __init__(self, condition, then_node): self._condition = condition self._then_node = then_node @property def condition(self): return self._condition @property def then_node(self): return self._then_node def to_flyte_idl(self): return _core_workflow.IfBlock(condition=self.condition.to_flyte_idl(), then_node=self.then_node.to_flyte_idl()) @classmethod def from_flyte_idl(cls, pb2_object): return cls( condition=_condition.BooleanExpression.from_flyte_idl(pb2_object.condition), then_node=Node.from_flyte_idl(pb2_object.then_node), ) class IfElseBlock(_common.FlyteIdlEntity): def __init__(self, case, other=None, else_node=None, error=None): self._case = case self._other = other self._else_node = else_node self._error = error @property def case(self): return self._case @property def other(self): return self._other @property def else_node(self): return self._else_node @property def error(self): return self._error def to_flyte_idl(self): return _core_workflow.IfElseBlock( case=self.case.to_flyte_idl(), other=[a.to_flyte_idl() for a in self.other] if self.other else None, else_node=self.else_node.to_flyte_idl() if self.else_node else None, error=self.error.to_flyte_idl() if self.error else None, ) @classmethod def from_flyte_idl(cls, pb2_object): return cls( case=IfBlock.from_flyte_idl(pb2_object.case), other=[IfBlock.from_flyte_idl(a) for a in pb2_object.other], else_node=Node.from_flyte_idl(pb2_object.else_node) if pb2_object.HasField("else_node") else None, error=_types.Error.from_flyte_idl(pb2_object.error) if pb2_object.HasField("error") else None, ) class BranchNode(_common.FlyteIdlEntity): def __init__(self, if_else: IfElseBlock): self._if_else = if_else @property def if_else(self) -> IfElseBlock: return self._if_else def to_flyte_idl(self): return _core_workflow.BranchNode(if_else=self.if_else.to_flyte_idl()) @classmethod def from_flyte_idl(cls, pb2_objct): return cls(if_else=IfElseBlock.from_flyte_idl(pb2_objct.if_else)) class NodeMetadata(_common.FlyteIdlEntity): def __init__(self, name, timeout=None, retries=None, interruptible=None): self._name = name self._timeout = timeout if timeout is not None else datetime.timedelta() self._retries = retries if retries is not None else _RetryStrategy(0) self._interruptible = interruptible @property def name(self): return self._name @property def timeout(self): return self._timeout @property def retries(self): return self._retries @property def interruptible(self): return self._interruptible def to_flyte_idl(self): node_metadata = _core_workflow.NodeMetadata( name=self.name, retries=self.retries.to_flyte_idl(), interruptible=self.interruptible, ) if self.timeout: node_metadata.timeout.FromTimedelta(self.timeout) return node_metadata @classmethod def from_flyte_idl(cls, pb2_object): return cls( pb2_object.name, pb2_object.timeout.ToTimedelta(), _RetryStrategy.from_flyte_idl(pb2_object.retries), ) class Node(_common.FlyteIdlEntity): def __init__( self, id, metadata, inputs, upstream_node_ids, output_aliases, 
task_node=None, workflow_node=None, branch_node=None, ): self._id = id self._metadata = metadata self._inputs = inputs self._upstream_node_ids = upstream_node_ids self._output_aliases = output_aliases self._task_node = task_node self._workflow_node = workflow_node self._branch_node = branch_node @property def id(self): return self._id @property def metadata(self): return self._metadata @property def inputs(self): return self._inputs @property def upstream_node_ids(self): return self._upstream_node_ids @property def output_aliases(self): return self._output_aliases @property def task_node(self): return self._task_node @property def workflow_node(self): return self._workflow_node @property def branch_node(self): return self._branch_node @property def target(self): return self.task_node or self.workflow_node or self.branch_node def to_flyte_idl(self): return _core_workflow.Node( id=self.id, metadata=self.metadata.to_flyte_idl() if self.metadata is not None else None, inputs=[i.to_flyte_idl() for i in self.inputs], upstream_node_ids=self.upstream_node_ids, output_aliases=[a.to_flyte_idl() for a in self.output_aliases], task_node=self.task_node.to_flyte_idl() if self.task_node is not None else None, workflow_node=self.workflow_node.to_flyte_idl() if self.workflow_node is not None else None, branch_node=self.branch_node.to_flyte_idl() if self.branch_node is not None else None, ) @classmethod def from_flyte_idl(cls, pb2_object): return cls( id=pb2_object.id, metadata=NodeMetadata.from_flyte_idl(pb2_object.metadata), inputs=[_Binding.from_flyte_idl(b) for b in pb2_object.inputs], upstream_node_ids=pb2_object.upstream_node_ids, output_aliases=[Alias.from_flyte_idl(a) for a in pb2_object.output_aliases], task_node=TaskNode.from_flyte_idl(pb2_object.task_node) if pb2_object.HasField("task_node") else None, workflow_node=WorkflowNode.from_flyte_idl(pb2_object.workflow_node) if pb2_object.HasField("workflow_node") else None, branch_node=BranchNode.from_flyte_idl(pb2_object.branch_node) if pb2_object.HasField("branch_node") else None, ) class TaskNodeOverrides(_common.FlyteIdlEntity): def __init__(self, resources: typing.Optional[Resources] = None): self._resources = resources @property def resources(self) -> Resources: return self._resources def to_flyte_idl(self): return _core_workflow.TaskNodeOverrides( resources=self.resources.to_flyte_idl() if self.resources is not None else None, ) @classmethod def from_flyte_idl(cls, pb2_object): resources = Resources.from_flyte_idl(pb2_object.resources) if bool(resources.requests) or bool(resources.limits): return cls(resources=resources) return cls(resources=None) class TaskNode(_common.FlyteIdlEntity): def __init__(self, reference_id, overrides: typing.Optional[TaskNodeOverrides] = None): self._reference_id = reference_id self._overrides = overrides @property def reference_id(self): return self._reference_id @property def overrides(self) -> TaskNodeOverrides: return self._overrides def to_flyte_idl(self): return _core_workflow.TaskNode( reference_id=self.reference_id.to_flyte_idl(), overrides=self.overrides.to_flyte_idl() if self.overrides is not None else None, ) @classmethod def from_flyte_idl(cls, pb2_object): overrides = TaskNodeOverrides.from_flyte_idl(pb2_object.overrides) if overrides.resources is None: overrides = None return cls( reference_id=_identifier.Identifier.from_flyte_idl(pb2_object.reference_id), overrides=overrides, ) class WorkflowNode(_common.FlyteIdlEntity): def __init__(self, launchplan_ref=None, sub_workflow_ref=None): 
self._launchplan_ref = launchplan_ref self._sub_workflow_ref = sub_workflow_ref @property def launchplan_ref(self): return self._launchplan_ref @property def sub_workflow_ref(self): return self._sub_workflow_ref @property def reference(self): return self.launchplan_ref or self.sub_workflow_ref def to_flyte_idl(self): return _core_workflow.WorkflowNode( launchplan_ref=self.launchplan_ref.to_flyte_idl() if self.launchplan_ref else None, sub_workflow_ref=self.sub_workflow_ref.to_flyte_idl() if self.sub_workflow_ref else None, ) @classmethod
Apache License 2.0
google/deepconsensus
deepconsensus/utils/test_utils.py
seq_to_one_hot
python
def seq_to_one_hot(sequences: Union[Text, List[Text]]) -> np.ndarray:
    result = []
    for seq in sequences:
        result.append(get_one_hot(multiseq_to_array(seq)))
    result = np.squeeze(result)
    return result.astype(dc_constants.NP_DATA_TYPE)
Converts ATCG to one-hot format.
https://github.com/google/deepconsensus/blob/625ad4d4af67736f422785c1751583dd0aec3d63/deepconsensus/utils/test_utils.py#L207-L213
import os from typing import Union, Text, List, Tuple import apache_beam as beam import numpy as np from deepconsensus.protos import deepconsensus_pb2 from deepconsensus.utils import dc_constants from nucleus.protos import bed_pb2 from nucleus.testing import test_utils from nucleus.util import struct_utils DEEPCONSENSUS_DATADIR = '' def deepconsensus_testdata(filename): return test_utils.genomics_testdata( os.path.join('deepconsensus/testdata', filename), DEEPCONSENSUS_DATADIR) def make_read_with_info(expanded_cigar=None, pw=None, ip=None, sn=None, subread_indices=None, subread_strand=None, unsup_insertions_by_pos_keys=None, unsup_insertions_by_pos_values=None, **kwargs): read = test_utils.make_read(**kwargs) if expanded_cigar is not None: struct_utils.set_string_field(read.info, 'expanded_cigar', expanded_cigar) if subread_strand == deepconsensus_pb2.Subread.REVERSE: read.alignment.position.reverse_strand = True elif subread_strand == deepconsensus_pb2.Subread.FORWARD: read.alignment.position.reverse_strand = False if pw is not None: struct_utils.set_int_field(read.info, 'pw', pw) if ip is not None: struct_utils.set_int_field(read.info, 'ip', ip) if sn is not None: struct_utils.set_number_field(read.info, 'sn', sn) if subread_indices is not None: struct_utils.set_int_field(read.info, 'subread_indices', subread_indices) if unsup_insertions_by_pos_keys is not None: struct_utils.set_int_field(read.info, 'unsup_insertions_by_pos_keys', unsup_insertions_by_pos_keys) if unsup_insertions_by_pos_values is not None: struct_utils.set_int_field(read.info, 'unsup_insertions_by_pos_values', unsup_insertions_by_pos_values) return read def make_deepconsensus_input(inference: bool = False, **kwargs): default_kwargs = { 'molecule_name': 'm54238_180901_011437/8389007/100_110', 'molecule_start': 200, 'subread_strand': [deepconsensus_pb2.Subread.REVERSE], 'sn': [0.1, 0.2, 0.3, 0.4], 'subread_bases': ['ATCGA'], 'subread_expanded_cigars': ['MMMMM'], 'pws': [[1] * 5], 'ips': [[2] * 5], } if not inference: default_kwargs.update({ 'chrom_name': 'chr', 'chrom_start': 1, 'chrom_end': 6, 'label_bases': 'ATCGA', 'label_expanded_cigar': 'MMMMM', 'label_base_positions': [], 'strand': bed_pb2.BedRecord.Strand.FORWARD_STRAND, }) default_kwargs.update(**kwargs) subread_strand = default_kwargs.pop('subread_strand') subread_bases = default_kwargs.pop('subread_bases') subread_expanded_cigars = default_kwargs.pop('subread_expanded_cigars') pws = default_kwargs.pop('pws') ips = default_kwargs.pop('ips') if not (len(subread_bases) == len(subread_expanded_cigars) == len(pws) == len(ips) == len(subread_strand)): raise ValueError( 'There must be the same number of entries in `subread_bases`, ' '`subread_expanded_cigars`, `pws`, `ips`, and `subread_strand`: {}, {}, {} {} {}.' 
.format(subread_bases, subread_expanded_cigars, pws, ips, subread_strand)) for (sb, sec, pw, ip) in zip(subread_bases, subread_expanded_cigars, pws, ips): if not len(sb) == len(sec) == len(pw) == len(ip): raise ValueError( 'There must be the same length in each element of `subread_bases`, ' '`subread_expanded_cigars`, `pws`, and `ips`: {}, {}, {}, {}.'.format( sb, sec, pw, ip)) if not inference: label_bases = default_kwargs.pop('label_bases') label_expanded_cigar = default_kwargs.pop('label_expanded_cigar') label_base_positions = default_kwargs.pop('label_base_positions') label = deepconsensus_pb2.Subread( molecule_name=default_kwargs['molecule_name'], bases=label_bases, expanded_cigar=label_expanded_cigar, base_positions=label_base_positions, subread_strand=deepconsensus_pb2.Subread.FORWARD) default_kwargs['label'] = label subreads = [] for bases, expanded_cigar, pw, ip in zip(subread_bases, subread_expanded_cigars, pws, ips): subread = deepconsensus_pb2.Subread( molecule_name=default_kwargs['molecule_name'], bases=bases, expanded_cigar=expanded_cigar, subread_strand=subread_strand.pop(0), pw=pw, ip=ip) subreads.append(subread) return deepconsensus_pb2.DeepConsensusInput( subreads=subreads, **default_kwargs) def get_beam_counter_value(pipeline_metrics: beam.metrics.metric.MetricResults, namespace: str, counter_name: str) -> int: metric_filter = beam.metrics.metric.MetricsFilter().with_namespace( namespace).with_name(counter_name) return pipeline_metrics.query(filter=metric_filter)['counters'][0].committed def get_one_hot(value: Union[int, np.ndarray]) -> np.ndarray: return np.eye(len(dc_constants.VOCAB), dtype=dc_constants.NP_DATA_TYPE)[value] def seq_to_array(seq: str) -> List[int]: return [dc_constants.VOCAB.index(i) for i in seq] def multiseq_to_array(sequences: Union[Text, List[Text]]) -> np.ndarray: return np.array(list(map(seq_to_array, sequences)))
BSD 3-Clause New or Revised License
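A self-contained sketch of the same one-hot idea with the project-specific helpers inlined; the vocabulary ordering and the float32 dtype are assumptions (the real dc_constants.VOCAB and NP_DATA_TYPE may differ).

import numpy as np

VOCAB = ['A', 'T', 'C', 'G', ' ']   # assumed ordering, for illustration only

def seq_to_one_hot(sequences):
    """Map equal-length sequences to a one-hot array of shape (..., len(VOCAB))."""
    indices = np.array([[VOCAB.index(base) for base in seq] for seq in sequences])
    one_hot = np.eye(len(VOCAB), dtype=np.float32)[indices]
    return np.squeeze(one_hot)

print(seq_to_one_hot(['ATCG']).shape)   # (4, 5)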
eleurent/highway-env
highway_env/envs/u_turn_env.py
UTurnEnv._reward
python
def _reward(self, action: int) -> float:
    neighbours = self.road.network.all_side_lanes(self.vehicle.lane_index)
    lane = self.vehicle.lane_index[2]
    scaled_speed = utils.lmap(self.vehicle.speed, self.config["reward_speed_range"], [0, 1])
    reward = \
        + self.config["collision_reward"] * self.vehicle.crashed \
        + self.config["left_lane_reward"] * lane / max(len(neighbours) - 1, 1) \
        + self.config["high_speed_reward"] * np.clip(scaled_speed, 0, 1)
    reward = utils.lmap(reward,
                        [self.config["collision_reward"],
                         self.config["high_speed_reward"] + self.config["left_lane_reward"]],
                        [0, 1])
    reward = 0 if not self.vehicle.on_road else reward
    return reward
The vehicle is rewarded for driving with high speed and collision avoidance.

:param action: the action performed
:return: the reward of the state-action transition
https://github.com/eleurent/highway-env/blob/b823e77855316e204542ee94e4cb793aed000eea/highway_env/envs/u_turn_env.py#L41-L58
import numpy as np from gym.envs.registration import register from highway_env import utils from highway_env.envs.common.abstract import AbstractEnv from highway_env.road.lane import LineType, StraightLane, CircularLane from highway_env.road.road import Road, RoadNetwork from highway_env.vehicle.controller import MDPVehicle class UTurnEnv(AbstractEnv): @classmethod def default_config(cls) -> dict: config = super().default_config() config.update({ "observation": { "type": "TimeToCollision", "horizon": 16 }, "action": { "type": "DiscreteMetaAction", }, "screen_width": 789, "screen_height": 289, "duration": 10, "collision_reward": -1.0, "left_lane_reward": 0.1, "high_speed_reward": 0.4, "reward_speed_range": [8, 24], "offroad_terminal": False }) return config
MIT License
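A self-contained sketch of the reward shaping above, with highway_env.utils.lmap re-implemented as a plain linear rescaling; the numeric values are made-up stand-ins for the vehicle state.

import numpy as np

def lmap(v, x, y):
    """Linearly map v from range x = [x0, x1] to range y = [y0, y1]."""
    return y[0] + (v - x[0]) * (y[1] - y[0]) / (x[1] - x[0])

collision_reward, left_lane_reward, high_speed_reward = -1.0, 0.1, 0.4
speed, crashed, lane, n_lanes, on_road = 20.0, False, 1, 2, True   # assumed state

scaled_speed = lmap(speed, [8, 24], [0, 1])
reward = (collision_reward * crashed
          + left_lane_reward * lane / max(n_lanes - 1, 1)
          + high_speed_reward * np.clip(scaled_speed, 0, 1))
reward = lmap(reward, [collision_reward, high_speed_reward + left_lane_reward], [0, 1])
print(reward if on_road else 0.0)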
google/vulncode-db
lib/vcs_handler/github_handler.py
GithubHandler.fetch_commit_data
python
def fetch_commit_data(self, commit_hash=None):
    if not commit_hash:
        commit_hash = self.commit_hash
    cache_file = CACHE_DIR + commit_hash + ".json"
    if self.use_cache and os.path.exists(cache_file):
        cache_content = lib.utils.get_file_contents(cache_file)
        return cache_content
    github_repo = self.github.get_repo(f"{self.repo_owner}/{self.repo_name}")
    commit = github_repo.get_commit(commit_hash)
    commit_parents = commit.commit.parents
    parent_commit_hash = commit_hash
    if commit_parents:
        parent_commit_hash = commit_parents[0].sha
    git_tree = github_repo.get_git_tree(parent_commit_hash)
    commit_message = commit.commit.message
    commit_files = commit.files
    patched_files = self._parse_patch_per_file(commit_files)
    commit_stats = self._get_patch_stats(commit.stats)
    files_metadata = self._get_files_metadata(commit_files)
    commit_date = int(
        (commit.commit.committer.date - datetime.datetime(1970, 1, 1)).total_seconds()
    )
    commit_metadata = CommitMetadata(
        parent_commit_hash,
        commit_date,
        commit_message,
        commit_stats,
        files_metadata,
    )
    data = self._create_data(git_tree.tree, patched_files, commit_metadata)
    json_content = jsonify(data)
    if self.use_cache:
        lib.utils.write_contents(cache_file, json_content)
    return json_content
Args:
    commit_hash:

Returns:
https://github.com/google/vulncode-db/blob/08f32bcb92fca16ed7dae5a4b8e860c69552ae5d/lib/vcs_handler/github_handler.py#L182-L235
import datetime import io import logging import os import re from urllib.parse import urlparse from flask import jsonify from github import Github from unidiff import PatchSet import lib.utils from app.exceptions import InvalidIdentifierException from lib.vcs_handler.vcs_handler import ( VcsHandler, HASH_PLACEHOLDER, PATH_PLACEHOLDER, CommitStats, CommitFilesMetadata, CommitMetadata, ) CACHE_DIR = "cache/" class GithubHandler(VcsHandler): def __init__(self, app, resource_url=None): super().__init__(app, resource_url) self.use_cache = False use_token = None if app and "GITHUB_API_ACCESS_TOKEN" in app.config: use_token = app.config["GITHUB_API_ACCESS_TOKEN"] self.github = Github(login_or_token=use_token) if resource_url is not None: self.parse_resource_url(resource_url) def parse_resource_url(self, resource_url): if not resource_url: raise InvalidIdentifierException("Please provide a Github commit link.") url_data = urlparse(resource_url) git_path = url_data.path matches = re.match(r"/([^/]+)/([^/]+)/commit/([^/]+)/?$", git_path) if ( not url_data.hostname or "github.com" not in url_data.hostname or not matches ): raise InvalidIdentifierException( "Please provide a valid " "(https://github.com/{owner}/{repo}/commit/{hash})" " commit link." ) self.repo_owner, self.repo_name, self.commit_hash = matches.groups() self.repo_url = f"https://github.com/{self.repo_owner}/{self.repo_name}" self.commit_link = resource_url def parse_url_and_hash(self, repo_url, commit_hash): if not repo_url or not commit_hash: raise InvalidIdentifierException("Please provide a Github url and hash.") url_data = urlparse(repo_url) git_path = url_data.path matches = re.match(r"/([^/]+)/([^/]+)/?$", git_path) if ( not url_data.hostname or "github.com" not in url_data.hostname or not matches ): raise InvalidIdentifierException( "Please provide a valid " "(https://github.com/{owner}/{repo})" " repository url." 
) if not re.match(r"[a-fA-F0-9]{5,}$", commit_hash): raise InvalidIdentifierException( "Please provide a valid " "git commit hash (min 5 characters)" ) self.repo_owner, self.repo_name = matches.groups() self.repo_url = repo_url self.commit_hash = commit_hash self.commit_link = f"{repo_url}/commit/{commit_hash}" @staticmethod def _parse_patch_per_file(files): patched_files = {} for patched_file in files: patched_files[patched_file.filename] = { "status": patched_file.status, "sha": patched_file.sha, "deltas": [], } patch_str = io.StringIO() patch_str.write("--- a\n+++ b\n") if patched_file.patch is not None: patch_str.write(patched_file.patch) patch_str.seek(0) logging.debug("Parsing diff\n%s", patch_str.getvalue()) patch = PatchSet(patch_str, encoding=None) for hunk in patch[0]: for line in hunk: if line.is_context: continue patched_files[patched_file.filename]["deltas"].append(vars(line)) return patched_files def get_file_provider_url(self): owner = self.repo_owner repo = self.repo_name return ( f"https://api.github.com/repos/{owner}/{repo}/git/" + f"blobs/{HASH_PLACEHOLDER}" ) def get_ref_file_provider_url(self): owner = self.repo_owner repo = self.repo_name return ( f"https://api.github.com/repos/{owner}/{repo}/contents/" + f"{PATH_PLACEHOLDER}?ref={HASH_PLACEHOLDER}" ) def get_file_url(self): owner = self.repo_owner repo = self.repo_name commit_hash = self.commit_hash return f"https://github.com/{owner}/{repo}/blob/{commit_hash}/" def get_tree_url(self): owner = self.repo_owner repo = self.repo_name commit_hash = self.commit_hash return f"https://github.com/{owner}/{repo}/tree/{commit_hash}/" @staticmethod def _get_files_metadata(github_files_metadata): files_metadata = [] for file in github_files_metadata: file_metadata = CommitFilesMetadata( file.filename, file.status, file.additions, file.deletions ) files_metadata.append(file_metadata) return files_metadata @staticmethod def _get_patch_stats(commit_stats): return CommitStats( commit_stats.additions, commit_stats.deletions, commit_stats.total )
Apache License 2.0
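A hypothetical sketch of the underlying PyGithub calls the handler relies on (get_repo, get_commit), assuming a valid API token; it fetches the same commit referenced in the URL above but skips the caching and patch-parsing steps.

from github import Github

gh = Github("<api-token>")                              # assumed token
repo = gh.get_repo("google/vulncode-db")
commit = repo.get_commit("08f32bcb92fca16ed7dae5a4b8e860c69552ae5d")
print(commit.commit.message)
print(commit.stats.additions, commit.stats.deletions)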
microsoftgraph/python3-connect-rest-sample
connectsample.py
login
python
def login():
    guid = uuid.uuid4()
    session['state'] = guid
    return msgraphapi.authorize(callback=url_for('authorized', _external=True), state=guid)
Handler for login route.
https://github.com/microsoftgraph/python3-connect-rest-sample/blob/81118eabd69e2eb356a74fdee9950a7b9b114565/connectsample.py#L50-L54
import json import sys import uuid import requests from flask import Flask, redirect, url_for, session, request, render_template from flask_oauthlib.client import OAuth client_id, client_secret, *_ = open('_PRIVATE.txt').read().split('\n') if (client_id.startswith('*') and client_id.endswith('*')) or (client_secret.startswith('*') and client_secret.endswith('*')): print('MISSING CONFIGURATION: the _PRIVATE.txt file needs to be edited ' + 'to add client ID and secret.') sys.exit(1) app = Flask(__name__) app.debug = True app.secret_key = 'development' oauth = OAuth(app) requests.packages.urllib3.disable_warnings() msgraphapi = oauth.remote_app( 'microsoft', consumer_key=client_id, consumer_secret=client_secret, request_token_params={'scope': 'User.Read Mail.Send'}, base_url='https://graph.microsoft.com/v1.0/', request_token_url=None, access_token_method='POST', access_token_url='https://login.microsoftonline.com/common/oauth2/v2.0/token', authorize_url='https://login.microsoftonline.com/common/oauth2/v2.0/authorize' ) @app.route('/') def index(): return render_template('connect.html') @app.route('/login')
MIT License
orenault/testlink-api-python-client
src/testlink/testlinkapigeneric.py
TestlinkAPIGeneric.createBuild
python
def createBuild(self):
Creates a new build for a specific test plan

active : 1 (default) = active
         0 = inactive
open : 1 (default) = open
       0 = closed
releasedate : YYYY-MM-DD
copytestersfrombuild : valid buildid, tester assignments will be copied.
https://github.com/orenault/testlink-api-python-client/blob/f2348e9a994e3f1a1bf7d7a9b95b1a00bd117780/src/testlink/testlinkapigeneric.py#L174-L181
import sys, os.path IS_PY3 = sys.version_info[0] < 3 if IS_PY3: import xmlrpclib from base64 import encodestring as encodebytes else: import xmlrpc.client as xmlrpclib from base64 import encodebytes from platform import python_version from mimetypes import guess_type from . import testlinkerrors from .testlinkhelper import TestLinkHelper, VERSION from .testlinkargs import getMethodsWithPositionalArgs, getArgsForMethod from .testlinkdecorators import decoApiCallAddAttachment,decoApiCallAddDevKey, decoApiCallWithoutArgs,decoMakerApiCallReplaceTLResponseError, decoMakerApiCallWithArgs,decoMakerApiCallChangePosToOptArg class TestlinkAPIGeneric(object): __slots__ = ['server', 'devKey', '_server_url', '_positionalArgNames'] __version__ = VERSION __author__ = 'Luiko Czub, TestLink-API-Python-client developers' def __init__(self, server_url, devKey, **args): transport=args.get('transport') encoding=args.get('encoding') verbose=args.get('verbose',0) allow_none=args.get('allow_none',0) self.server = xmlrpclib.Server(server_url, transport, encoding, verbose, allow_none) self.devKey = devKey self._server_url = server_url self._positionalArgNames = getMethodsWithPositionalArgs() @decoApiCallAddDevKey @decoMakerApiCallWithArgs(['testplanid']) def getLatestBuildForTestPlan(self): @decoApiCallAddDevKey @decoMakerApiCallWithArgs(['testplanid'], ['testcaseid', 'testcaseexternalid', 'platformid', 'platformname', 'buildid', 'buildname', 'options']) def getLastExecutionResult(self): @decoApiCallWithoutArgs def sayHello(self): def ping(self): return self.sayHello() @decoMakerApiCallWithArgs(['str']) def repeat(self): @decoApiCallWithoutArgs def about(self): @decoApiCallAddDevKey @decoMakerApiCallWithArgs(['testplanid', 'buildname'], ['buildnotes', 'active', 'open', 'releasedate', 'copytestersfrombuild'])
Apache License 2.0
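The method body lives in the decorators shown in the context, which build the XML-RPC call from the declared positional and optional arguments. A hypothetical usage sketch, assuming a reachable TestLink server, a valid developer key, and that the package exposes TestlinkAPIGeneric at the top level; the plan id and build details are made up.

from testlink import TestlinkAPIGeneric

tls = TestlinkAPIGeneric("http://localhost/testlink/lib/api/xmlrpc/v1/xmlrpc.php",
                         "<devkey>")
new_build = tls.createBuild(1234, "Build 1.0",
                            buildnotes="nightly", active=1, open=1)
print(new_build)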
supercodepoet/django-merlin
src/merlin/wizards/session.py
SessionWizard._show_form
python
def _show_form(self, request, step, form):
    context = self.process_show_form(request, step, form)
    return self.render_form(request, step, form, {
        'current_step': step,
        'form': form,
        'previous_step': self.get_before(request, step),
        'next_step': self.get_after(request, step),
        'url_base': self._get_URL_base(request, step),
        'extra_context': context
    })
Render the provided form for the provided step to the response stream.
https://github.com/supercodepoet/django-merlin/blob/7129d58638c78dcc56a0e2f2952220b201c3afd5/src/merlin/wizards/session.py#L108-L122
from functools import wraps from django.http import * from django.shortcuts import render_to_response from django.template.context import RequestContext from merlin.wizards import MissingStepException, MissingSlugException from merlin.wizards.utils import * def modifies_session(func): @wraps(func) def wrapper(self, request, *args, **kwargs): result = func(self, request, *args, **kwargs) request.session.modified = True return result return wrapper class SessionWizard(object): def __init__(self, steps): if not isinstance(steps, list): raise TypeError('steps must be an instance of or subclass of list') if [step for step in steps if not isinstance(step, Step)]: raise TypeError('All steps must be an instance of Step') slugs = set([step.slug for step in steps]) if len(slugs) != len(steps): raise ValueError('Step slugs must be unique.') clazz = self.__class__ self.id = '%s.%s' % (clazz.__module__, clazz.__name__,) self.base_steps = steps def __call__(self, request, *args, **kwargs): self._init_wizard(request) slug = kwargs.get('slug', None) if not slug: raise MissingSlugException("Slug not found.") step = self.get_step(request, slug) if not step: if slug == 'cancel': self.cancel(request) redirect = request.REQUEST.get('rd', '/') return HttpResponseRedirect(redirect) raise MissingStepException("Step for slug %s not found." % slug) method_name = 'process_%s' % request.method method = getattr(self, method_name) return method(request, step) def _init_wizard(self, request): if self.id not in request.session: request.session[self.id] = WizardState( steps=self.base_steps[:], current_step=self.base_steps[0], form_data={}) self.initialize(request, request.session[self.id]) def _get_state(self, request): return request.session[self.id]
BSD 3-Clause New or Revised License
unofficial-memsource/memsource-cli-client
memsource_cli/models/set_project_trans_memories_v2_dto.py
SetProjectTransMemoriesV2Dto.target_lang
python
def target_lang(self, target_lang):
    self._target_lang = target_lang
Sets the target_lang of this SetProjectTransMemoriesV2Dto.

Set translation memory only for the specific project target language  # noqa: E501

:param target_lang: The target_lang of this SetProjectTransMemoriesV2Dto.  # noqa: E501
:type: str
https://github.com/unofficial-memsource/memsource-cli-client/blob/a6639506b74e95476da87f4375953448b76ea90c/memsource_cli/models/set_project_trans_memories_v2_dto.py#L96-L105
import pprint import re import six from memsource_cli.models.id_reference import IdReference from memsource_cli.models.set_project_trans_memory_dto import SetProjectTransMemoryDto class SetProjectTransMemoriesV2Dto(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'trans_memories': 'list[SetProjectTransMemoryDto]', 'target_lang': 'str', 'workflow_step': 'IdReference' } attribute_map = { 'trans_memories': 'transMemories', 'target_lang': 'targetLang', 'workflow_step': 'workflowStep' } def __init__(self, trans_memories=None, target_lang=None, workflow_step=None): self._trans_memories = None self._target_lang = None self._workflow_step = None self.discriminator = None if trans_memories is not None: self.trans_memories = trans_memories if target_lang is not None: self.target_lang = target_lang if workflow_step is not None: self.workflow_step = workflow_step @property def trans_memories(self): return self._trans_memories @trans_memories.setter def trans_memories(self, trans_memories): self._trans_memories = trans_memories @property def target_lang(self): return self._target_lang @target_lang.setter
Apache License 2.0
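A hypothetical usage sketch of the generated DTO, assuming memsource_cli is installed; the language code is illustrative.

from memsource_cli.models.set_project_trans_memories_v2_dto import SetProjectTransMemoriesV2Dto

dto = SetProjectTransMemoriesV2Dto()
dto.target_lang = "de"      # goes through the setter shown above
print(dto.target_lang)      # "de"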
taleinat/fuzzysearch
src/fuzzysearch/generic_search.py
find_near_matches_generic
python
def find_near_matches_generic(subsequence, sequence, search_params):
    if not subsequence:
        raise ValueError('Given subsequence is empty!')
    if search_params.max_l_dist == 0:
        return [
            Match(start_index, start_index + len(subsequence), 0,
                  matched=sequence[start_index:start_index + len(subsequence)])
            for start_index in search_exact(subsequence, sequence)
        ]
    elif len(subsequence) // (search_params.max_l_dist + 1) >= 3:
        return find_near_matches_generic_ngrams(subsequence, sequence, search_params)
    else:
        return find_near_matches_generic_linear_programming(subsequence, sequence, search_params)
search for near-matches of subsequence in sequence

This searches for near-matches, where the nearly-matching parts of the
sequence must meet the following limitations (relative to the subsequence):

* the maximum allowed number of character substitutions
* the maximum allowed number of new characters inserted
* and the maximum allowed number of character deletions
* the total number of substitutions, insertions and deletions
https://github.com/taleinat/fuzzysearch/blob/faa0f4d3fb30c63a7fb8eea9778372bbb3243d90/src/fuzzysearch/generic_search.py#L26-L55
from collections import namedtuple from functools import wraps import attr from fuzzysearch.common import FuzzySearchBase, Match, consolidate_overlapping_matches from fuzzysearch.compat import xrange from fuzzysearch.search_exact import search_exact __all__ = [ 'find_near_matches_generic', 'find_near_matches_generic_linear_programming', 'find_near_matches_generic_ngrams', 'has_near_match_generic_ngrams', ] GenericSearchCandidate = namedtuple( 'GenericSearchCandidate', ['start', 'subseq_index', 'l_dist', 'n_subs', 'n_ins', 'n_dels'], )
MIT License
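A usage sketch through the package's public entry point, find_near_matches, which picks a concrete search strategy from the limits you pass; the pattern and text are illustrative.

from fuzzysearch import find_near_matches

# 'PATERN' is 'PATTERN' with one character deleted, so it matches with max_l_dist=1.
matches = find_near_matches('PATTERN', 'aaaPATERNzzz', max_l_dist=1)
print(matches)   # e.g. [Match(start=3, end=9, dist=1, matched='PATERN')]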
blacklight/platypush
platypush/plugins/stt/__init__.py
SttPlugin.detection_thread
python
def detection_thread(self) -> None:
    self._current_text = ''
    self.logger.debug('Detection thread started')
    self.on_detection_started()
    while self._audio_queue:
        try:
            frames = self._audio_queue.get()
            frames = self.convert_frames(frames)
        except Exception as e:
            self.logger.warning('Error while feeding audio to the model: {}'.format(str(e)))
            continue
        text = self.detect_speech(frames).strip()
        self.process_text(text)
    self.on_detection_ended()
    self.logger.debug('Detection thread terminated')
This thread reads frames from ``_audio_queue``, performs the speech-to-text detection and calls
https://github.com/blacklight/platypush/blob/a5f1dc2638d7c6308325e0ca39dc7d5e262836aa/platypush/plugins/stt/__init__.py#L183-L203
import queue import threading from abc import ABC, abstractmethod from typing import Optional, Union, List import sounddevice as sd from platypush.context import get_bus from platypush.message.event.stt import SpeechDetectionStartedEvent, SpeechDetectionStoppedEvent, SpeechStartedEvent, SpeechDetectedEvent, HotwordDetectedEvent, ConversationDetectedEvent from platypush.message.response.stt import SpeechDetectedResponse from platypush.plugins import Plugin, action class SttPlugin(ABC, Plugin): _thread_stop_timeout = 10.0 rate = 16000 channels = 1 def __init__(self, input_device: Optional[Union[int, str]] = None, hotword: Optional[str] = None, hotwords: Optional[List[str]] = None, conversation_timeout: Optional[float] = 10.0, block_duration: float = 1.0): super().__init__() self.input_device = input_device self.conversation_timeout = conversation_timeout self.block_duration = block_duration self.hotwords = set(hotwords or []) if hotword: self.hotwords = {hotword} self._conversation_event = threading.Event() self._input_stream: Optional[sd.InputStream] = None self._recording_thread: Optional[threading.Thread] = None self._detection_thread: Optional[threading.Thread] = None self._audio_queue: Optional[queue.Queue] = None self._current_text = '' def _get_input_device(self, device: Optional[Union[int, str]] = None) -> int: if not device: device = self.input_device if not device: return sd.query_hostapis()[0].get('default_input_device') if isinstance(device, int): assert device <= len(sd.query_devices()) return device for i, dev in enumerate(sd.query_devices()): if dev['name'] == device: return i raise AssertionError('Device {} not found'.format(device)) def on_speech_detected(self, speech: str) -> None: speech = speech.strip() if speech in self.hotwords: event = HotwordDetectedEvent(hotword=speech) if self.conversation_timeout: self._conversation_event.set() threading.Timer(self.conversation_timeout, lambda: self._conversation_event.clear()).start() elif self._conversation_event.is_set(): event = ConversationDetectedEvent(speech=speech) else: event = SpeechDetectedEvent(speech=speech) get_bus().post(event) @staticmethod def convert_frames(frames: bytes) -> bytes: return frames def on_detection_started(self) -> None: pass def on_detection_ended(self) -> None: pass def before_recording(self) -> None: pass def on_recording_started(self) -> None: pass def on_recording_ended(self) -> None: pass @abstractmethod def detect_speech(self, frames) -> str: raise NotImplementedError def process_text(self, text: str) -> None: if (not text and self._current_text) or (text and text == self._current_text): self.on_speech_detected(self._current_text) self._current_text = '' else: if text: if not self._current_text: get_bus().post(SpeechStartedEvent()) self.logger.info('Intermediate speech results: [{}]'.format(text)) self._current_text = text
MIT License
google/citest
citest/reporting/generate_html_report.py
build_index
python
def build_index(journal_list, output_dir):
    document_manager = HtmlDocumentManager(title='Journal Summary')
    document_manager.has_key = False
    document_manager.has_global_expand = False
    processor = HtmlIndexRenderer(document_manager)
    for journal in journal_list:
        processor.process(StreamJournalNavigator.new_from_path(journal))
    processor.terminate()
    tr_tag = document_manager.make_tag_container(
        'tr',
        [document_manager.make_tag_text('th', name)
         for name in processor.output_column_names])
    table = document_manager.make_tag_container(
        'table', [tr_tag], style='font-size:12pt')
    document_manager.wrap_tag(table)
    document_manager.build_to_path(os.path.join(output_dir, 'index.html'))
Create an index.html file for HTML output from journal list.

Args:
  journal_list: [array of path] Path to the journal files to put in the index.
      Assumes that there is a corresponding .html file for each to link to.
https://github.com/google/citest/blob/eda9171eed35b82ce6f048229bebd898edc25369/citest/reporting/generate_html_report.py#L97-L121
import argparse import os import resource import sys from citest.base import StreamJournalNavigator from citest.reporting.html_renderer import HtmlRenderer from citest.reporting.html_document_manager import HtmlDocumentManager from citest.reporting.html_index_renderer import HtmlIndexRenderer from citest.reporting.html_index_table_renderer import HtmlIndexTableRenderer def journal_to_html(input_path, prune=False): output_path = os.path.basename(os.path.splitext(input_path)[0]) + '.html' document_manager = HtmlDocumentManager( title='Report for {0}'.format(os.path.basename(input_path))) processor = HtmlRenderer(document_manager, prune=prune) processor.process(StreamJournalNavigator.new_from_path(input_path)) processor.terminate() document_manager.wrap_tag(document_manager.new_tag('table')) document_manager.build_to_path(output_path) def determine_columns(dir_names): if not dir_names: return [] path_to_parts = {path : path.split('/') for path in dir_names} all_path_parts = path_to_parts.values() first = all_path_parts[0] def match_position(index, expect): for path in all_path_parts: if len(path) <= index or path[index] != expect: return False return True index = 0 while index < len(first): if match_position(index, first[index]): for path in all_path_parts: del path[index] else: index += 1 return {name : '/'.join(path_to_parts[name]) for name in dir_names} def build_table(journal_list, output_dir): document_manager = HtmlDocumentManager(title='Journal Summary') document_manager.has_key = False document_manager.has_global_expand = False HtmlIndexTableRenderer.process_all(document_manager, journal_list, output_dir) document_manager.build_to_path(os.path.join(output_dir, 'table_index.html'))
Apache License 2.0
pygrowler/growler
growler/aio/http_protocol.py
GrowlerHTTPProtocol.begin_application
python
def begin_application(self, req, res):
    coro = self.http_application.handle_client_request(req, res)
    create_task(coro)
Entry point for the application middleware chain for an asyncio event loop.
https://github.com/pygrowler/growler/blob/5492466d8828115bb04c665917d6aeb4f4323f44/growler/aio/http_protocol.py#L153-L162
import traceback from sys import stderr try: from asyncio import create_task, Future except ImportError: from asyncio import ensure_future as create_task, Future from .protocol import GrowlerProtocol from growler.http.responder import GrowlerHTTPResponder from growler.http.response import HTTPResponse from growler.http.errors import ( HTTPError ) class GrowlerHTTPProtocol(GrowlerProtocol): def __init__(self, app, loop=None): self.http_application = app self.client_method = None self.client_query = None self.client_headers = None super().__init__(_loop=loop, responder_factory=self.http_responder_factory) @staticmethod def http_responder_factory(proto): return GrowlerHTTPResponder( proto, request_factory=proto.http_application._request_class, response_factory=proto.http_application._response_class, ) def handle_error(self, error): if isinstance(error, HTTPError): err_code = error.code err_msg = error.msg err_info = '' else: err_code = 500 err_msg = "Server Error" err_info = "%s" % error print("Unexpected Server Error", file=stderr) traceback.print_tb(error.__traceback__, file=stderr) err_str = ( "<html>" "<head></head>" "<body><h1>HTTP Error : {code} {message}</h1><p>{info}</p></body>" "</html>\n" ).format( code=err_code, message=err_msg, info=err_info ) header_info = { 'code': err_code, 'msg': err_msg, 'date': HTTPResponse.get_current_time(), 'length': len(err_str.encode()), 'contents': err_str } response = '\r\n'.join(( "HTTP/1.1 {code} {msg}", "Content-Type: text/html; charset=UTF-8", "Content-Length: {length}", "Date: {date}", "", "{contents}")).format(**header_info) self.transport.write(response.encode())
Apache License 2.0
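A minimal, self-contained sketch of the same pattern: scheduling a request-handling coroutine on the running event loop without awaiting it inline. The request/response stand-ins are made up; the real protocol fires the task and returns immediately.

import asyncio

async def handle_client_request(req, res):
    await asyncio.sleep(0)                # stand-in for the middleware chain
    print("handled", req, "->", res)

async def main():
    task = asyncio.create_task(handle_client_request("GET /", "200 OK"))
    await task                            # kept here only so the demo finishes cleanly

asyncio.run(main())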
nandaka/pixivutil2
PixivOAuthBrowser.py
refresh
python
def refresh(refresh_token):
    response = requests.post(
        AUTH_TOKEN_URL,
        data={
            "client_id": CLIENT_ID,
            "client_secret": CLIENT_SECRET,
            "grant_type": "refresh_token",
            "include_policy": "true",
            "refresh_token": refresh_token,
        },
        headers={"User-Agent": USER_AGENT},
    )
    login_data = print_auth_token_response(response)
    return login_data
:return new access token and refresh token
https://github.com/nandaka/pixivutil2/blob/bd2dd3ca34b1277042ee5f3d74a80800985aa4cc/PixivOAuthBrowser.py#L100-L114
from base64 import urlsafe_b64encode from hashlib import sha256 from pprint import pprint from secrets import token_urlsafe from sys import exit from urllib.parse import urlencode from webbrowser import open as open_url import requests from colorama import Fore, Style USER_AGENT = "PixivAndroidApp/5.0.234 (Android 11; Pixel 5)" REDIRECT_URI = "https://app-api.pixiv.net/web/v1/users/auth/pixiv/callback" LOGIN_URL = "https://app-api.pixiv.net/web/v1/login" AUTH_TOKEN_URL = "https://oauth.secure.pixiv.net/auth/token" CLIENT_ID = "MOBrBDS8blbauoSck0ZfDbtuzpyT" CLIENT_SECRET = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj" def s256(data): return urlsafe_b64encode(sha256(data).digest()).rstrip(b"=").decode("ascii") def oauth_pkce(transform): code_verifier = token_urlsafe(32) code_challenge = transform(code_verifier.encode("ascii")) return code_verifier, code_challenge def print_auth_token_response(response): data = response.json() try: access_token = data["access_token"] refresh_token = data["refresh_token"] except KeyError: print("error:") pprint(data) exit(1) print("access_token:", access_token) print("refresh_token:", refresh_token) print("expires_in:", data.get("expires_in", 0)) return data def login(): code_verifier, code_challenge = oauth_pkce(s256) login_params = { "code_challenge": code_challenge, "code_challenge_method": "S256", "client": "pixiv-android", } print(Style.BRIGHT + Fore.YELLOW + "Instructions: " + Style.RESET_ALL) print("1. This will open a new browser to login to Pixiv site to get the code.") print("1b. In case the browser will not open, or you are using an headless server, use this link: " + f"{LOGIN_URL}?{urlencode(login_params)}") print("2. Open dev console " + Fore.YELLOW + "(F12)" + Style.RESET_ALL + " and switch to network tab." + Style.RESET_ALL) print("3. Enable persistent logging (" + Fore.YELLOW + "\"Preserve log\"" + Style.RESET_ALL + "). " + Style.RESET_ALL) print("4. Type into the filter field: '" + Fore.YELLOW + "callback?" + Style.RESET_ALL + "'." + Style.RESET_ALL) print("5. Proceed with Pixiv login.") print("6. After logging in you should see a blank page and request that looks like this:" + Style.RESET_ALL) print(" 'https://app-api.pixiv.net/web/v1/users/auth/pixiv/callback?state=...&" + Fore.YELLOW + "code=..." + Style.RESET_ALL + "'" + Style.RESET_ALL) print("7. Copy value of the " + Fore.YELLOW + "code param" + Style.RESET_ALL + " into the prompt and hit the Enter key.") input("Press enter when you ready.") open_url(f"{LOGIN_URL}?{urlencode(login_params)}") try: code = input("code: ").strip() except (EOFError, KeyboardInterrupt): return response = requests.post( AUTH_TOKEN_URL, data={ "client_id": CLIENT_ID, "client_secret": CLIENT_SECRET, "code": code, "code_verifier": code_verifier, "grant_type": "authorization_code", "include_policy": "true", "redirect_uri": REDIRECT_URI, }, headers={"User-Agent": USER_AGENT}, ) return response
BSD 2-Clause Simplified License
geophysics-ubonn/reda
lib/reda/configs/configManager.py
ConfigManager.load_crmod_config
python
def load_crmod_config(self, filename):
    with open(filename, 'r') as fid:
        nr_of_configs = int(fid.readline().strip())
        configs = np.loadtxt(fid)
        print('loaded configs:', configs.shape)
        if nr_of_configs != configs.shape[0]:
            raise Exception(
                'indicated number of measurements does not equal ' +
                'to actual number of measurements')
        ABMN = self._crmod_to_abmn(configs[:, 0:2])
        self.configs = ABMN
Load a CRMod configuration file

Parameters
----------
filename: string
    absolute or relative path to a crmod config.dat file
https://github.com/geophysics-ubonn/reda/blob/5be52ecb184f45f0eabb23451f039fec3d9537c5/lib/reda/configs/configManager.py#L151-L169
import itertools import numpy as np import pandas as pd import reda.utils.mpl from reda.utils import opt_import plt, mpl = reda.utils.mpl.setup() class ConfigManager(object): def __init__(self, nr_of_electrodes=None): self.configs = None self.metadata = {} self.nr_electrodes = nr_of_electrodes def abmn_to_dataframe(self): abmn_df = pd.DataFrame(self.configs, columns=['a', 'b', 'm', 'n']) return abmn_df def _get_next_index(self): self.meas_counter += 1 return self.meas_counter def clear_configs(self): del (self.configs) self.configs = None @property def nr_of_configs(self): if self.configs is None: return 0 else: return self.configs.shape[0] def _crmod_to_abmn(self, configs): A = np.floor(configs[:, 0] / 1e4).astype(int) B = (configs[:, 0] % 1e4).astype(int) M = np.floor(configs[:, 1] / 1e4).astype(int) N = (configs[:, 1] % 1e4).astype(int) ABMN = np.hstack(( A[:, np.newaxis], B[:, np.newaxis], M[:, np.newaxis], N[:, np.newaxis] )).astype(int) return ABMN def load_configs(self, filename): configs = np.loadtxt(filename) self.add_to_configs(configs) def load_injections_from_mcf(self, filename): injections = [] with open(filename, encoding="latin-1") as mcf: for line in mcf: if line[:2] == "SE": injections.append((line[3:6], line[7:10])) injections = np.asarray(injections, dtype=int) mask = np.less(injections[:, 0], injections[:, 1]) injections = injections[mask] return(injections)
MIT License
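A self-contained sketch of the CRMod electrode decoding that _crmod_to_abmn (shown in the context) applies to the two loaded columns: each value packs two electrode numbers as 10000 * first + second. The sample values are made up.

import numpy as np

packed = np.array([[10002, 30004],    # A=1, B=2, M=3, N=4
                   [50006, 70008]])   # A=5, B=6, M=7, N=8

A = np.floor(packed[:, 0] / 1e4).astype(int)
B = (packed[:, 0] % 1e4).astype(int)
M = np.floor(packed[:, 1] / 1e4).astype(int)
N = (packed[:, 1] % 1e4).astype(int)

print(np.column_stack((A, B, M, N)))
# [[1 2 3 4]
#  [5 6 7 8]]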
packit/ogr
ogr/abstract.py
PullRequest.description
python
def description(self) -> str:
    return self._description
Description of the pull request.
https://github.com/packit/ogr/blob/2f2eec1a71b58efff0dc43cdbca28a4b8de8c38a/ogr/abstract.py#L569-L571
import datetime import functools import warnings from enum import Enum, IntEnum from typing import ( Optional, Match, List, Dict, Set, TypeVar, Any, Sequence, Union, Callable, ) from urllib.request import urlopen import github import gitlab import requests from ogr.deprecation import deprecate_and_set_removal from ogr.exceptions import ( OgrException, GitlabAPIException, GithubAPIException, OgrNetworkError, ) from ogr.parsing import parse_git_repo try: from functools import cached_property as _cached_property except ImportError: from functools import lru_cache def _cached_property(func): return property(lru_cache()(func)) AnyComment = TypeVar("AnyComment", bound="Comment") def catch_common_exceptions(function: Callable) -> Any: @functools.wraps(function) def wrapper(*args, **kwargs): try: return function(*args, **kwargs) except github.BadCredentialsException as ex: raise GithubAPIException("Invalid Github credentials") from ex except gitlab.GitlabAuthenticationError as ex: raise GitlabAPIException("Invalid Gitlab credentials") from ex except requests.exceptions.ConnectionError as ex: raise OgrNetworkError( "Could not perform the request due to a network error" ) from ex return wrapper class CatchCommonErrors(type): def __new__(cls, name, bases, namespace): for key, value in namespace.items(): if isinstance(value, staticmethod): namespace[key] = staticmethod(catch_common_exceptions(value.__func__)) elif isinstance(value, classmethod): namespace[key] = classmethod(catch_common_exceptions(value.__func__)) elif callable(namespace[key]): namespace[key] = catch_common_exceptions(namespace[key]) return super().__new__(cls, name, bases, namespace) class OgrAbstractClass(metaclass=CatchCommonErrors): def __repr__(self) -> str: return f"<{str(self)}>" class Reaction(OgrAbstractClass): def __init__(self, raw_reaction: Any) -> None: self._raw_reaction = raw_reaction def __str__(self): return f"Reaction(raw_reaction={self._raw_reaction})" def delete(self) -> None: raise NotImplementedError() class Comment(OgrAbstractClass): def __init__( self, raw_comment: Optional[Any] = None, parent: Optional[Any] = None, body: Optional[str] = None, author: Optional[str] = None, created: Optional[datetime.datetime] = None, edited: Optional[datetime.datetime] = None, ) -> None: if raw_comment: self._from_raw_comment(raw_comment) elif body and author: self._body = body self._author = author self._created = created self._edited = edited else: raise ValueError("cannot construct comment without body and author") self._parent = parent def __str__(self) -> str: body = f"{self.body[:10]}..." if self.body is not None else "None" return ( f"Comment(" f"comment='{body}', " f"author='{self.author}', " f"created='{self.created}', " f"edited='{self.edited}')" ) def _from_raw_comment(self, raw_comment: Any) -> None: raise NotImplementedError() @property def comment(self) -> str: warnings.warn( "Using deprecated property, that will be removed in 0.14.0" " (or 1.0.0 if it comes sooner). Please use body. 
" ) return self.body @property def body(self) -> str: return self._body @body.setter def body(self, new_body: str) -> None: self._body = new_body @property def author(self) -> str: return self._author @property def created(self) -> datetime.datetime: return self._created @property def edited(self) -> datetime.datetime: return self._edited def get_reactions(self) -> List[Reaction]: raise NotImplementedError() def add_reaction(self, reaction: str) -> Reaction: raise NotImplementedError() class IssueComment(Comment): @property def issue(self) -> "Issue": return self._parent def __str__(self) -> str: return "Issue" + super().__str__() class PRComment(Comment): @property def pull_request(self) -> "PullRequest": return self._parent def __str__(self) -> str: return "PR" + super().__str__() class IssueStatus(IntEnum): open = 1 closed = 2 all = 3 class Issue(OgrAbstractClass): def __init__(self, raw_issue: Any, project: "GitProject") -> None: self._raw_issue = raw_issue self.project = project @property def title(self) -> str: raise NotImplementedError() @property def private(self) -> bool: raise NotImplementedError() @property def id(self) -> int: raise NotImplementedError() @property def status(self) -> IssueStatus: raise NotImplementedError() @property def url(self) -> str: raise NotImplementedError() @property def description(self) -> str: raise NotImplementedError() @property def author(self) -> str: raise NotImplementedError() @property def created(self) -> datetime.datetime: raise NotImplementedError() @property def labels(self) -> List: raise NotImplementedError() def __str__(self) -> str: description = ( f"{self.description[:10]}..." if self.description is not None else "None" ) return ( f"Issue(" f"title='{self.title}', " f"id={self.id}, " f"status='{self.status.name}', " f"url='{self.url}', " f"description='{description}', " f"author='{self.author}', " f"created='{self.created}')" ) @staticmethod def create( project: Any, title: str, body: str, private: Optional[bool] = None, labels: Optional[List[str]] = None, assignees: Optional[List[str]] = None, ) -> "Issue": raise NotImplementedError() @staticmethod def get(project: Any, id: int) -> "Issue": raise NotImplementedError() @staticmethod def get_list( project: Any, status: IssueStatus = IssueStatus.open, author: Optional[str] = None, assignee: Optional[str] = None, labels: Optional[List[str]] = None, ) -> List["Issue"]: raise NotImplementedError() def _get_all_comments(self) -> List[IssueComment]: raise NotImplementedError() def get_comments( self, filter_regex: str = None, reverse: bool = False, author: str = None ) -> List[IssueComment]: raise NotImplementedError() def can_close(self, username: str) -> bool: raise NotImplementedError() def comment(self, body: str) -> IssueComment: raise NotImplementedError() def close(self) -> "Issue": raise NotImplementedError() def add_label(self, *labels: str) -> None: raise NotImplementedError() def add_assignee(self, *assignees: str) -> None: raise NotImplementedError() def get_comment(self, comment_id: int) -> IssueComment: raise NotImplementedError() class PRStatus(IntEnum): open = 1 closed = 2 merged = 3 all = 4 class CommitStatus(Enum): pending = 1 success = 2 failure = 3 error = 4 canceled = 5 running = 6 class MergeCommitStatus(Enum): can_be_merged = 1 cannot_be_merged = 2 unchecked = 3 checking = 4 cannot_be_merged_recheck = 5 class PullRequest(OgrAbstractClass): @deprecate_and_set_removal( since="0.9.0", remove_in="0.14.0 (or 1.0.0 if it comes sooner)", message="Use PullRequestReadOnly from 
ogr.read_only to use a static and offline " "representation of the pull-request. The subclasses of this class are not static anymore.", ) def __init__( self, title: str, description: str, target_branch: str, source_branch: str, id: int, status: PRStatus, url: str, author: str, created: datetime.datetime, ) -> None: self._title = title self._description = description self._target_branch = target_branch self._source_branch = source_branch self._id = id self._status = PRStatus.open self._url = url self._author = author self._created = created @property def title(self) -> str: return self._title @title.setter def title(self, new_title: str) -> None: raise NotImplementedError() @property def id(self) -> int: return self._id @property def status(self) -> PRStatus: return self._status @property def url(self) -> str: return self._url @property
MIT License
mrknow/filmkodi
plugin.video.mrknow/mylib/pydevd_attach_to_process/winappdbg/event.py
ExitProcessEvent.get_module
python
def get_module(self):
    return self.get_process().get_main_module()
@rtype:  L{Module}
@return: Main module of the process.
https://github.com/mrknow/filmkodi/blob/0162cde9ae25ddbf4a69330948714833ff2f78c9/plugin.video.mrknow/mylib/pydevd_attach_to_process/winappdbg/event.py#L944-L949
__revision__ = "$Id$" __all__ = [ 'EventFactory', 'EventDispatcher', 'EventHandler', 'EventSift', 'EventCallbackWarning', 'NoEvent', 'Event', 'CreateProcessEvent', 'CreateThreadEvent', 'ExitProcessEvent', 'ExitThreadEvent', 'LoadDLLEvent', 'UnloadDLLEvent', 'OutputDebugStringEvent', 'RIPEvent', 'ExceptionEvent' ] from winappdbg import win32 from winappdbg import compat from winappdbg.win32 import FileHandle, ProcessHandle, ThreadHandle from winappdbg.breakpoint import ApiHook from winappdbg.module import Module from winappdbg.thread import Thread from winappdbg.process import Process from winappdbg.textio import HexDump from winappdbg.util import StaticClass, PathOperations import sys import ctypes import warnings import traceback class EventCallbackWarning (RuntimeWarning): class Event (object): eventMethod = 'unknown_event' eventName = 'Unknown event' eventDescription = 'A debug event of an unknown type has occured.' def __init__(self, debug, raw): self.debug = debug self.raw = raw self.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED def get_event_name(self): return self.eventName def get_event_description(self): return self.eventDescription def get_event_code(self): return self.raw.dwDebugEventCode def get_pid(self): return self.raw.dwProcessId def get_tid(self): return self.raw.dwThreadId def get_process(self): pid = self.get_pid() system = self.debug.system if system.has_process(pid): process = system.get_process(pid) else: process = Process(pid) system._add_process(process) process.scan_modules() return process def get_thread(self): tid = self.get_tid() process = self.get_process() if process.has_thread(tid): thread = process.get_thread(tid) else: thread = Thread(tid) process._add_thread(thread) return thread class NoEvent (Event): eventMethod = 'no_event' eventName = 'No event' eventDescription = 'No debug event has occured.' def __init__(self, debug, raw = None): Event.__init__(self, debug, raw) def __len__(self): return 0 def get_event_code(self): return -1 def get_pid(self): return -1 def get_tid(self): return -1 def get_process(self): return Process(self.get_pid()) def get_thread(self): return Thread(self.get_tid()) class ExceptionEvent (Event): eventName = 'Exception event' eventDescription = 'An exception was raised by the debugee.' 
__exceptionMethod = { win32.EXCEPTION_ACCESS_VIOLATION : 'access_violation', win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'array_bounds_exceeded', win32.EXCEPTION_BREAKPOINT : 'breakpoint', win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'datatype_misalignment', win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'float_denormal_operand', win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'float_divide_by_zero', win32.EXCEPTION_FLT_INEXACT_RESULT : 'float_inexact_result', win32.EXCEPTION_FLT_INVALID_OPERATION : 'float_invalid_operation', win32.EXCEPTION_FLT_OVERFLOW : 'float_overflow', win32.EXCEPTION_FLT_STACK_CHECK : 'float_stack_check', win32.EXCEPTION_FLT_UNDERFLOW : 'float_underflow', win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'illegal_instruction', win32.EXCEPTION_IN_PAGE_ERROR : 'in_page_error', win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'integer_divide_by_zero', win32.EXCEPTION_INT_OVERFLOW : 'integer_overflow', win32.EXCEPTION_INVALID_DISPOSITION : 'invalid_disposition', win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'noncontinuable_exception', win32.EXCEPTION_PRIV_INSTRUCTION : 'privileged_instruction', win32.EXCEPTION_SINGLE_STEP : 'single_step', win32.EXCEPTION_STACK_OVERFLOW : 'stack_overflow', win32.EXCEPTION_GUARD_PAGE : 'guard_page', win32.EXCEPTION_INVALID_HANDLE : 'invalid_handle', win32.EXCEPTION_POSSIBLE_DEADLOCK : 'possible_deadlock', win32.EXCEPTION_WX86_BREAKPOINT : 'wow64_breakpoint', win32.CONTROL_C_EXIT : 'control_c_exit', win32.DBG_CONTROL_C : 'debug_control_c', win32.MS_VC_EXCEPTION : 'ms_vc_exception', } __exceptionName = { win32.EXCEPTION_ACCESS_VIOLATION : 'EXCEPTION_ACCESS_VIOLATION', win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'EXCEPTION_ARRAY_BOUNDS_EXCEEDED', win32.EXCEPTION_BREAKPOINT : 'EXCEPTION_BREAKPOINT', win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'EXCEPTION_DATATYPE_MISALIGNMENT', win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'EXCEPTION_FLT_DENORMAL_OPERAND', win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'EXCEPTION_FLT_DIVIDE_BY_ZERO', win32.EXCEPTION_FLT_INEXACT_RESULT : 'EXCEPTION_FLT_INEXACT_RESULT', win32.EXCEPTION_FLT_INVALID_OPERATION : 'EXCEPTION_FLT_INVALID_OPERATION', win32.EXCEPTION_FLT_OVERFLOW : 'EXCEPTION_FLT_OVERFLOW', win32.EXCEPTION_FLT_STACK_CHECK : 'EXCEPTION_FLT_STACK_CHECK', win32.EXCEPTION_FLT_UNDERFLOW : 'EXCEPTION_FLT_UNDERFLOW', win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'EXCEPTION_ILLEGAL_INSTRUCTION', win32.EXCEPTION_IN_PAGE_ERROR : 'EXCEPTION_IN_PAGE_ERROR', win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'EXCEPTION_INT_DIVIDE_BY_ZERO', win32.EXCEPTION_INT_OVERFLOW : 'EXCEPTION_INT_OVERFLOW', win32.EXCEPTION_INVALID_DISPOSITION : 'EXCEPTION_INVALID_DISPOSITION', win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'EXCEPTION_NONCONTINUABLE_EXCEPTION', win32.EXCEPTION_PRIV_INSTRUCTION : 'EXCEPTION_PRIV_INSTRUCTION', win32.EXCEPTION_SINGLE_STEP : 'EXCEPTION_SINGLE_STEP', win32.EXCEPTION_STACK_OVERFLOW : 'EXCEPTION_STACK_OVERFLOW', win32.EXCEPTION_GUARD_PAGE : 'EXCEPTION_GUARD_PAGE', win32.EXCEPTION_INVALID_HANDLE : 'EXCEPTION_INVALID_HANDLE', win32.EXCEPTION_POSSIBLE_DEADLOCK : 'EXCEPTION_POSSIBLE_DEADLOCK', win32.EXCEPTION_WX86_BREAKPOINT : 'EXCEPTION_WX86_BREAKPOINT', win32.CONTROL_C_EXIT : 'CONTROL_C_EXIT', win32.DBG_CONTROL_C : 'DBG_CONTROL_C', win32.MS_VC_EXCEPTION : 'MS_VC_EXCEPTION', } __exceptionDescription = { win32.EXCEPTION_ACCESS_VIOLATION : 'Access violation', win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'Array bounds exceeded', win32.EXCEPTION_BREAKPOINT : 'Breakpoint', win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'Datatype misalignment', win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'Float denormal operand', 
win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'Float divide by zero', win32.EXCEPTION_FLT_INEXACT_RESULT : 'Float inexact result', win32.EXCEPTION_FLT_INVALID_OPERATION : 'Float invalid operation', win32.EXCEPTION_FLT_OVERFLOW : 'Float overflow', win32.EXCEPTION_FLT_STACK_CHECK : 'Float stack check', win32.EXCEPTION_FLT_UNDERFLOW : 'Float underflow', win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'Illegal instruction', win32.EXCEPTION_IN_PAGE_ERROR : 'In-page error', win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'Integer divide by zero', win32.EXCEPTION_INT_OVERFLOW : 'Integer overflow', win32.EXCEPTION_INVALID_DISPOSITION : 'Invalid disposition', win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'Noncontinuable exception', win32.EXCEPTION_PRIV_INSTRUCTION : 'Privileged instruction', win32.EXCEPTION_SINGLE_STEP : 'Single step event', win32.EXCEPTION_STACK_OVERFLOW : 'Stack limits overflow', win32.EXCEPTION_GUARD_PAGE : 'Guard page hit', win32.EXCEPTION_INVALID_HANDLE : 'Invalid handle', win32.EXCEPTION_POSSIBLE_DEADLOCK : 'Possible deadlock', win32.EXCEPTION_WX86_BREAKPOINT : 'WOW64 breakpoint', win32.CONTROL_C_EXIT : 'Control-C exit', win32.DBG_CONTROL_C : 'Debug Control-C', win32.MS_VC_EXCEPTION : 'Microsoft Visual C++ exception', } @property def eventMethod(self): return self.__exceptionMethod.get( self.get_exception_code(), 'unknown_exception') def get_exception_name(self): code = self.get_exception_code() unk = HexDump.integer(code) return self.__exceptionName.get(code, unk) def get_exception_description(self): code = self.get_exception_code() description = self.__exceptionDescription.get(code, None) if description is None: try: description = 'Exception code %s (%s)' description = description % (HexDump.integer(code), ctypes.FormatError(code)) except OverflowError: description = 'Exception code %s' % HexDump.integer(code) return description def is_first_chance(self): return self.raw.u.Exception.dwFirstChance != 0 def is_last_chance(self): return not self.is_first_chance() def is_noncontinuable(self): return bool( self.raw.u.Exception.ExceptionRecord.ExceptionFlags & win32.EXCEPTION_NONCONTINUABLE ) def is_continuable(self): return not self.is_noncontinuable() def is_user_defined_exception(self): return self.get_exception_code() & 0x10000000 == 0 def is_system_defined_exception(self): return not self.is_user_defined_exception() def get_exception_code(self): return self.raw.u.Exception.ExceptionRecord.ExceptionCode def get_exception_address(self): address = self.raw.u.Exception.ExceptionRecord.ExceptionAddress if address is None: address = 0 return address def get_exception_information(self, index): if index < 0 or index > win32.EXCEPTION_MAXIMUM_PARAMETERS: raise IndexError("Array index out of range: %s" % repr(index)) info = self.raw.u.Exception.ExceptionRecord.ExceptionInformation value = info[index] if value is None: value = 0 return value def get_exception_information_as_list(self): info = self.raw.u.Exception.ExceptionRecord.ExceptionInformation data = list() for index in compat.xrange(0, win32.EXCEPTION_MAXIMUM_PARAMETERS): value = info[index] if value is None: value = 0 data.append(value) return data def get_fault_type(self): if self.get_exception_code() not in (win32.EXCEPTION_ACCESS_VIOLATION, win32.EXCEPTION_IN_PAGE_ERROR, win32.EXCEPTION_GUARD_PAGE): msg = "This method is not meaningful for %s." 
raise NotImplementedError(msg % self.get_exception_name()) return self.get_exception_information(0) def get_fault_address(self): if self.get_exception_code() not in (win32.EXCEPTION_ACCESS_VIOLATION, win32.EXCEPTION_IN_PAGE_ERROR, win32.EXCEPTION_GUARD_PAGE): msg = "This method is not meaningful for %s." raise NotImplementedError(msg % self.get_exception_name()) return self.get_exception_information(1) def get_ntstatus_code(self): if self.get_exception_code() != win32.EXCEPTION_IN_PAGE_ERROR: msg = "This method is only meaningful " "for in-page memory error exceptions." raise NotImplementedError(msg) return self.get_exception_information(2) def is_nested(self): return bool(self.raw.u.Exception.ExceptionRecord.ExceptionRecord) def get_raw_exception_record_list(self): nested = list() record = self.raw.u.Exception while True: record = record.ExceptionRecord if not record: break nested.append(record) return nested def get_nested_exceptions(self): nested = [ self ] raw = self.raw dwDebugEventCode = raw.dwDebugEventCode dwProcessId = raw.dwProcessId dwThreadId = raw.dwThreadId dwFirstChance = raw.u.Exception.dwFirstChance record = raw.u.Exception.ExceptionRecord while True: record = record.ExceptionRecord if not record: break raw = win32.DEBUG_EVENT() raw.dwDebugEventCode = dwDebugEventCode raw.dwProcessId = dwProcessId raw.dwThreadId = dwThreadId raw.u.Exception.ExceptionRecord = record raw.u.Exception.dwFirstChance = dwFirstChance event = EventFactory.get(self.debug, raw) nested.append(event) return nested class CreateThreadEvent (Event): eventMethod = 'create_thread' eventName = 'Thread creation event' eventDescription = 'A new thread has started.' def get_thread_handle(self): hThread = self.raw.u.CreateThread.hThread if hThread in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): hThread = None else: hThread = ThreadHandle(hThread, False, win32.THREAD_ALL_ACCESS) return hThread def get_teb(self): return self.raw.u.CreateThread.lpThreadLocalBase def get_start_address(self): return self.raw.u.CreateThread.lpStartAddress class CreateProcessEvent (Event): eventMethod = 'create_process' eventName = 'Process creation event' eventDescription = 'A new process has started.' 
def get_file_handle(self): try: hFile = self.__hFile except AttributeError: hFile = self.raw.u.CreateProcessInfo.hFile if hFile in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): hFile = None else: hFile = FileHandle(hFile, True) self.__hFile = hFile return hFile def get_process_handle(self): hProcess = self.raw.u.CreateProcessInfo.hProcess if hProcess in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): hProcess = None else: hProcess = ProcessHandle(hProcess, False, win32.PROCESS_ALL_ACCESS) return hProcess def get_thread_handle(self): hThread = self.raw.u.CreateProcessInfo.hThread if hThread in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): hThread = None else: hThread = ThreadHandle(hThread, False, win32.THREAD_ALL_ACCESS) return hThread def get_start_address(self): return self.raw.u.CreateProcessInfo.lpStartAddress def get_image_base(self): return self.raw.u.CreateProcessInfo.lpBaseOfImage def get_teb(self): return self.raw.u.CreateProcessInfo.lpThreadLocalBase def get_debug_info(self): raw = self.raw.u.CreateProcessInfo ptr = raw.lpBaseOfImage + raw.dwDebugInfoFileOffset size = raw.nDebugInfoSize data = self.get_process().peek(ptr, size) if len(data) == size: return data return None def get_filename(self): szFilename = None hFile = self.get_file_handle() if hFile: szFilename = hFile.get_filename() if not szFilename: aProcess = self.get_process() lpRemoteFilenamePtr = self.raw.u.CreateProcessInfo.lpImageName if lpRemoteFilenamePtr: lpFilename = aProcess.peek_uint(lpRemoteFilenamePtr) fUnicode = bool( self.raw.u.CreateProcessInfo.fUnicode ) szFilename = aProcess.peek_string(lpFilename, fUnicode) if not szFilename: szFilename = aProcess.get_image_name() return szFilename def get_module_base(self): return self.get_image_base() def get_module(self): return self.get_process().get_module( self.get_module_base() ) class ExitThreadEvent (Event): eventMethod = 'exit_thread' eventName = 'Thread termination event' eventDescription = 'A thread has finished executing.' def get_exit_code(self): return self.raw.u.ExitThread.dwExitCode class ExitProcessEvent (Event): eventMethod = 'exit_process' eventName = 'Process termination event' eventDescription = 'A process has finished executing.' def get_exit_code(self): return self.raw.u.ExitProcess.dwExitCode def get_filename(self): return self.get_module().get_filename() def get_image_base(self): return self.get_module_base() def get_module_base(self): return self.get_module().get_base()
Apache License 2.0
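A hedged usage sketch for ExitProcessEvent.get_module above: winappdbg dispatches events to an EventHandler whose method name matches the event's eventMethod ('exit_process' here, as shown in this record's context). The handler class name and the target executable path are assumptions, not part of the record.

from winappdbg import Debug, EventHandler

class ExitLogger(EventHandler):
    def exit_process(self, event):
        # event is an ExitProcessEvent; get_module() returns the main module
        module = event.get_module()
        print("process exited:", module.get_filename(),
              "exit code:", event.get_exit_code())

debug = Debug(ExitLogger())
try:
    debug.execv(["C:\\Windows\\System32\\notepad.exe"])  # hypothetical target
    debug.loop()
finally:
    debug.stop()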
scossu/lakesuperior
lakesuperior/util/toolbox.py
RequestUtils.globalize_triple
python
def globalize_triple(self, trp):
    s, p, o = trp
    if s.startswith(nsc['fcres']):
        s = self.globalize_term(s)
    if o.startswith(nsc['fcres']):
        o = self.globalize_term(o)
    return s, p, o
Globalize terms in a triple.

:param tuple(rdflib.URIRef) trp: The triple to be converted

:rtype: tuple(rdflib.URIRef)
https://github.com/scossu/lakesuperior/blob/e01d1999f7838b635f2955d2abdd5359c800baee/lakesuperior/util/toolbox.py#L310-L324
import logging import os import re from collections import defaultdict from hashlib import sha1 from rdflib import Graph from rdflib.term import URIRef, Variable from lakesuperior.dictionaries.namespaces import ns_collection as nsc from lakesuperior.store.ldp_rs import ROOT_RSRC_URI logger = logging.getLogger(__name__) __doc__ = ''' Utility to translate and generate strings and other objects. ''' def fsize_fmt(num, suffix='b'): for unit in ['','K','M','G','T','P','E','Z']: if abs(num) < 1024.0: return f'{num:3.1f} {unit}{suffix}' num /= 1024.0 return f'{num:.1f} Y{suffix}' def get_tree_size(path, follow_symlinks=True): total = 0 for entry in os.scandir(path): if entry.is_dir(follow_symlinks=follow_symlinks): total += get_tree_size(entry.path) else: total += entry.stat( follow_symlinks=follow_symlinks ).st_size return total def replace_term_domain(term, search, replace): s = str(term) if s.startswith(search): s = s.replace(search, replace) return URIRef(s) def parse_rfc7240(h_str): parsed_hdr = defaultdict(dict) hdr_list = [ x.strip() for x in h_str.split(',') ] for hdr in hdr_list: parsed_pref = defaultdict(dict) token_list = [ token.strip() for token in hdr.split(';') ] prefer_token = token_list.pop(0).split('=') prefer_name = prefer_token[0] if len(prefer_token)>1: parsed_pref['value'] = prefer_token[1].strip('"') for param_token in token_list: param_parts = [ prm.strip().strip('"') for prm in param_token.split('=') ] param_value = param_parts[1] if len(param_parts) > 1 else None parsed_pref['parameters'][param_parts[0]] = param_value parsed_hdr[prefer_name] = parsed_pref return parsed_hdr def split_uuid(uuid): path = '{}/{}/{}/{}/{}'.format(uuid[:2], uuid[2:4], uuid[4:6], uuid[6:8], uuid) return path def rel_uri_to_urn(uri, uid): return nsc['fcres'][uid] if str(uri) == '' else uri def rel_uri_to_urn_string(string, uid): urn = str(nsc['fcres'][uid]) return ( re.sub('<#([^>]+)>', f'<{urn}#\\1>', string).replace('<>', f'<{urn}>') ) class RequestUtils: def __init__(self): from flask import g self.webroot = g.webroot def uid_to_uri(self, uid): return URIRef(self.webroot + uid) def uri_to_uid(self, uri): if uri.startswith(nsc['fcres']): return str(uri).replace(nsc['fcres'], '') else: return '/' + str(uri).replace(self.webroot, '').strip('/') def localize_uri_string(self, s): if s.strip('/') == self.webroot: return str(ROOT_RSRC_URI) else: return s.rstrip('/').replace( self.webroot, str(nsc['fcres'])) def localize_term(self, uri): return URIRef(self.localize_uri_string(str(uri))) def localize_triple(self, trp): s, p, o = trp if s.startswith(self.webroot): s = self.localize_term(s) if o.startswith(self.webroot): o = self.localize_term(o) return s, p, o def localize_graph(self, gr): l_id = self.localize_term(gr.identifier) l_gr = Graph(identifier=l_id) for trp in gr: l_gr.add(self.localize_triple(trp)) return l_gr def localize_payload(self, data): return data.replace( (self.webroot + '/').encode('utf-8'), (nsc['fcres'] + '/').encode('utf-8') ).replace( self.webroot.encode('utf-8'), (nsc['fcres'] + '/').encode('utf-8') ) def localize_ext_str(self, s, urn): esc_webroot = self.webroot.replace('/', '\\/') loc_ptn1 = r'<{}\/?(.*?)>'.format(esc_webroot) loc_sub1 = '<{}/\\1>'.format(nsc['fcres']) s1 = re.sub(loc_ptn1, loc_sub1, s) loc_ptn2 = r'<([#?].*?)?>' loc_sub2 = '<{}\\1>'.format(urn) s2 = re.sub(loc_ptn2, loc_sub2, s1) loc_ptn3 = r'<{}([#?].*?)?>'.format(nsc['fcres']) loc_sub3 = '<{}\\1>'.format(ROOT_RSRC_URI) s3 = re.sub(loc_ptn3, loc_sub3, s2) return s3 def globalize_string(self, s): return 
s.replace(str(nsc['fcres']), self.webroot) def globalize_term(self, urn): return URIRef(self.globalize_string(str(urn)))
Apache License 2.0
iscre4m/pycarddeck
examples/blackjack.py
BlackjackGame.blackjack
python
def blackjack(self):
    print("Setting up...")
    print("Shuffling...")
    self.deck.shuffle()
    print("All shuffled!")
    print("Dealing...")
    self.deal()
    print("\nLet's play!")
    for player in self.players:
        print("{}'s turn...".format(player.name))
        self.play(player)
    else:
        print("That's the last turn. Determining the winner...")
        self.find_winner()
The main blackjack game sequence. Each player takes an entire turn before moving on. If each player gets a turn and no one has won, the player or players with the highest score below 21 are declared the winner.
https://github.com/iscre4m/pycarddeck/blob/2171ca53852aeb690019a27f2e774ffb5efaff5c/examples/blackjack.py#L31-L52
import sys import pyCardDeck from typing import List from pyCardDeck.cards import PokerCard class Player: def __init__(self, name: str): self.hand = [] self.name = name def __str__(self): return self.name class BlackjackGame: def __init__(self, players: List[Player]): self.deck = pyCardDeck.Deck() self.deck.load_standard_deck() self.players = players self.scores = {} print("Created a game with {} players.".format(len(self.players)))
MIT License
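A minimal way to drive the BlackjackGame.blackjack sequence above, using the Player and BlackjackGame classes shown in this record's context. The module name and player names are illustrative assumptions.

from blackjack import BlackjackGame, Player  # assumes examples/blackjack.py is importable as `blackjack`

players = [Player("Alice"), Player("Bob"), Player("Carol")]
game = BlackjackGame(players)

# Shuffles, deals, lets each player take a full turn, then picks the winner(s).
game.blackjack()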
nervanasystems/ngraph-python
ngraph/frontends/caffe/cf_importer/importer.py
parse_prototxt
python
def parse_prototxt(model_txt=None, solver_txt=None, caffemodel=None, verbose=False):
    ops_bridge = OpsBridge()
    data_layers = [l for l in supported_layers if "Data" in l]
    name_op_map = {}

    if model_txt is None and solver_txt is None:
        raise ValueError("Either model prototxt or solver prototxt is needed")

    model_def = caffe_pb2.NetParameter()
    solver_def = caffe_pb2.SolverParameter()

    if solver_txt is not None:
        with open(solver_txt, 'r') as fid:
            text_format.Merge(fid.read(), solver_def)
        if not solver_def.HasField("net"):
            raise ValueError('model prototxt is not available in the solver prototxt')
        else:
            model_txt = solver_def.net

    with open(model_txt, 'r') as fid:
        text_format.Merge(fid.read(), model_def)

    netLayers = model_def.layer

    for layer in netLayers:
        if verbose:
            print("\nLayer: ", layer.name, " Type: ", layer.type)
        if layer.type not in supported_layers:
            raise ValueError('layer type', layer.type, ' is not supported')
        if len(layer.top) > 1 and layer.type not in data_layers:
            raise ValueError('only "Data" layers can have more than one output (top)')

        input_ops = []
        for name in layer.bottom:
            if name in name_op_map:
                input_ops.append(name_op_map[name])
            elif layer.type not in data_layers:
                raise ValueError("Bottom layer:", name, " is missing in the prototxt")

        out_op = ops_bridge(layer, input_ops)
        if out_op is None:
            print("!!! Unknown Operation '{}' of type '{}' !!!"
                  .format(layer.name, layer.type))
        if verbose:
            print("input Ops:", input_ops)
            print("output Op:", [out_op])

        if layer.name in name_op_map:
            raise ValueError('Layer ', layer.name,
                             ' already exists. Layer name should be unique')
        name_op_map[layer.name] = out_op

        if layer.top == layer.bottom:
            if layer.top in name_op_map:
                name_op_map[layer.top] = out_op

    return name_op_map
This function parses and creates a graph of ngraph ops corresponding to each
layer in the prototxt.

Arguments:
    model_txt: prototxt file of the neural net topology
    solver_txt: prototxt file of the solver to train the neural net
    caffemodel: parameters (weights/biases) to be loaded into the model

return: Dictionary of the ngraph ops whose keys are the layer names of the prototxt
https://github.com/nervanasystems/ngraph-python/blob/ac032c83c7152b615a9ad129d54d350f9d6a2986/ngraph/frontends/caffe/cf_importer/importer.py#L44-L117
from __future__ import print_function import ngraph.transformers as ngt from ngraph.frontends.caffe.cf_importer.ops_bridge import OpsBridge import ngraph.frontends.caffe.cf_importer.ops_binary as OpsBinary import ngraph.frontends.caffe.cf_importer.ops_constant as OpsConstant import argparse from google.protobuf import text_format try: import caffe_pb2 except: raise ImportError('Must be able to import Caffe modules to use this module') supported_layers = ["Eltwise", "DummyData"]
Apache License 2.0
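An illustrative call to parse_prototxt above. The import path follows the record's function_path; the prototxt filename and layer name are placeholders, and only layer types listed in the importer's `supported_layers` ("Eltwise", "DummyData") are accepted.

from ngraph.frontends.caffe.cf_importer.importer import parse_prototxt

# Build ngraph ops for every layer in a (hypothetical) Caffe model definition.
name_op_map = parse_prototxt(model_txt="sum_model.prototxt", verbose=True)

# Look up the op produced for a layer named "sum" (hypothetical layer name).
out_op = name_op_map["sum"]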
felixchenfy/realtime-action-recognition
utils/lib_feature_proc.py
ProcFtr.remove_body_offset
python
def remove_body_offset(x):
    x = x.copy()
    px0, py0 = get_joint(x, NECK)
    x[0::2] = x[0::2] - px0
    x[1::2] = x[1::2] - py0
    return x
The origin is the neck. TODO: Deal with empty data.
https://github.com/felixchenfy/realtime-action-recognition/blob/96ed47bc04620f806a981d2962b4fe5ff10c86f6/utils/lib_feature_proc.py#L193-L201
import numpy as np import math from collections import deque if True: import sys import os ROOT = os.path.dirname(os.path.abspath(__file__))+"/../" CURR_PATH = os.path.dirname(os.path.abspath(__file__))+"/" sys.path.append(ROOT) from tools.an_example_skeleton_of_standing import get_a_normalized_standing_skeleton NOISE_INTENSITY = 0.05 PI = np.pi Inf = float("inf") NaN = 0 def retrain_only_body_joints(skeleton): return skeleton.copy()[2:2+13*2] TOTAL_JOINTS = 13 NECK = 0 L_ARMS = [1, 2, 3] R_ARMS = [4, 5, 6] L_KNEE = 8 L_ANKLE = 9 R_KNEE = 11 R_ANKLE = 12 L_LEGS = [8, 9] R_LEGS = [11, 12] ARMS_LEGS = L_ARMS + R_ARMS + L_LEGS + R_LEGS L_THIGH = 7 R_THIGH = 10 STAND_SKEL_NORMED = retrain_only_body_joints( get_a_normalized_standing_skeleton()) def extract_multi_frame_features( X, Y, video_indices, window_size, is_adding_noise=False, is_print=False): X_new = [] Y_new = [] N = len(video_indices) for i, _ in enumerate(video_indices): if i == 0 or video_indices[i] != video_indices[i-1]: fg = FeatureGenerator(window_size, is_adding_noise) success, features = fg.add_cur_skeleton(X[i, :]) if success: X_new.append(features) Y_new.append(Y[i]) if is_print and i % 1000 == 0: print(f"{i}/{N}", end=", ") if is_print: print("") X_new = np.array(X_new) Y_new = np.array(Y_new) return X_new, Y_new class Math(): @staticmethod def calc_dist(p1, p0): return math.sqrt((p1[0]-p0[0])**2+(p1[1]-p0[1])**2) @staticmethod def pi2pi(x): if x > PI: x -= 2*PI if x <= -PI: x += 2*PI return x @staticmethod def calc_relative_angle(x1, y1, x0, y0, base_angle): if (y1 == y0) and (x1 == x0): return 0 a1 = np.arctan2(y1-y0, x1-x0) return Math.pi2pi(a1 - base_angle) @staticmethod def calc_relative_angle_v2(p1, p0, base_angle): return Math.calc_relative_angle(p1[0], p1[1], p0[0], p0[1], base_angle) def get_joint(x, idx): px = x[2*idx] py = x[2*idx+1] return px, py def set_joint(x, idx, px, py): x[2*idx] = px x[2*idx+1] = py return def check_joint(x, idx): return x[2*idx] != NaN class ProcFtr(object): @staticmethod def drop_arms_and_legs_randomly(x, thresh=0.3): x = x.copy() N = len(ARMS_LEGS) rand_num = np.random.random() if rand_num < thresh: joint_idx = int((rand_num / thresh)*N) set_joint(x, joint_idx, NaN, NaN) return x @staticmethod def has_neck_and_thigh(x): return check_joint(x, NECK) and (check_joint(x, L_THIGH) or check_joint(x, R_THIGH)) @staticmethod def get_body_height(x): x0, y0 = get_joint(x, NECK) x11, y11 = get_joint(x, L_THIGH) x12, y12 = get_joint(x, R_THIGH) if y11 == NaN and y12 == NaN: return 1.0 if y11 == NaN: x1, y1 = x12, y12 elif y12 == NaN: x1, y1 = x11, y11 else: x1, y1 = (x11 + x12) / 2, (y11 + y12) / 2 height = ((x0-x1)**2 + (y0-y1)**2)**(0.5) return height @staticmethod
MIT License
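A small illustration of ProcFtr.remove_body_offset above: build a flattened (x0, y0, x1, y1, ...) skeleton of 13 joints and shift it so the neck (joint 0) becomes the origin. The joint values are arbitrary, and the import path is taken from the record's function_path, so it requires the repository's utils package on the Python path.

import numpy as np
from utils.lib_feature_proc import ProcFtr

# 13 joints -> 26 values, laid out as [x0, y0, x1, y1, ...]
skeleton = np.zeros(13 * 2)
skeleton[0], skeleton[1] = 0.5, 0.2      # neck
skeleton[2], skeleton[3] = 0.6, 0.3      # another joint, for example

centered = ProcFtr.remove_body_offset(skeleton)
print(centered[0], centered[1])          # 0.0 0.0 -- the neck is now the origin
print(centered[2], centered[3])          # roughly 0.1 0.1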
ynop/audiomate
audiomate/corpus/base.py
CorpusView.contains_issuer
python
def contains_issuer(self, issuer):
    if issuer.idx not in self.issuers.keys():
        return False

    if issuer != self.issuers[issuer.idx]:
        return False

    return True
Return ``True`` if the given issuer is in the corpus already, ``False`` otherwise.
https://github.com/ynop/audiomate/blob/080402eadaa81f77f64c8680510a2de64bc18e74/audiomate/corpus/base.py#L107-L122
import abc import collections import copy import math import numpy as np from audiomate.utils import stats class CorpusView(metaclass=abc.ABCMeta): @property @abc.abstractmethod def name(self): return 'undefined' @property @abc.abstractmethod def tracks(self): return {} @property def num_tracks(self): return len(self.tracks) def contains_track(self, track): if track.idx not in self.tracks.keys(): return False if track != self.tracks[track.idx]: return False return True @property @abc.abstractmethod def utterances(self): return {} @property def num_utterances(self): return len(self.utterances) @property @abc.abstractmethod def issuers(self): return {} @property def num_issuers(self): return len(self.issuers)
MIT License
neptune-ai/neptune-client
neptune/new/types/atoms/file.py
File.as_html
python
def as_html(chart) -> 'File':
    content = get_html_content(chart)
    return File.from_content(content if content is not None else "", extension="html")
Converts an object to an HTML File value object.

This way you can upload `Altair`, `Bokeh`, `Plotly`, `Matplotlib` interactive charts
or upload directly `Pandas` `DataFrame` objects to explore them in Neptune UI.

Args:
    chart: An object to be converted.
        Supported are `Altair`, `Bokeh`, `Plotly`, `Matplotlib` interactive charts,
        and `Pandas` `DataFrame` objects.

Returns:
    ``File``: value object with converted object.

Examples:
    >>> import neptune.new as neptune
    >>> from neptune.new.types import File
    >>> run = neptune.init()

    Convert Pandas DataFrame to File value object and upload it
    >>> run["train/results"].upload(File.as_html(df_predictions))

    Convert Altair interactive chart to File value object and upload it
    >>> altair_file = File.as_html(altair_chart)
    >>> run["dataset/data_sample/img1"].upload(altair_file)

    You can upload Altair interactive chart without explicit conversion
    >>> run["dataset/data_sample/img2"].upload(altair_chart)

You may also want to check `as_html docs page`_.

.. _as_html docs page:
   https://docs.neptune.ai/api-reference/field-types#as_html
https://github.com/neptune-ai/neptune-client/blob/e125f684a4526353c60a5f478c9b608d4fdb843c/neptune/new/types/atoms/file.py#L167-L205
import os from io import IOBase from typing import TypeVar, TYPE_CHECKING, Optional, Union from neptune.new.internal.utils.images import get_image_content, get_html_content, get_pickle_content, is_pil_image, is_matplotlib_figure, is_plotly_figure, is_altair_chart, is_bokeh_figure, is_numpy_array, is_pandas_dataframe from neptune.new.internal.utils import verify_type, get_stream_content from neptune.new.types.atoms.atom import Atom if TYPE_CHECKING: from neptune.new.types.value_visitor import ValueVisitor Ret = TypeVar('Ret') class File(Atom): def __init__(self, path: Optional[str] = None, content: Optional[bytes] = None, extension: Optional[str] = None): verify_type("path", path, (str, type(None))) verify_type("content", content, (bytes, type(None))) verify_type("extension", extension, (str, type(None))) if path is not None and content is not None: raise ValueError("path and content are mutually exclusive") if path is None and content is None: raise ValueError("path or content is required") self.path = path self.content = content if extension is None and path is not None: try: ext = os.path.splitext(path)[1] self.extension = ext[1:] if ext else "" except ValueError: self.extension = "" else: self.extension = extension or "" def accept(self, visitor: 'ValueVisitor[Ret]') -> Ret: return visitor.visit_file(self) def __str__(self): if self.path is not None: return "File(path={})".format(str(self.path)) else: return "File(content=...)" @staticmethod def from_content(content: Union[str, bytes], extension: Optional[str] = None) -> 'File': if isinstance(content, str): ext = "txt" content = content.encode("utf-8") else: ext = "bin" return File(content=content, extension=extension or ext) @staticmethod def from_stream(stream: IOBase, seek: Optional[int] = 0, extension: Optional[str] = None) -> 'File': verify_type("stream", stream, IOBase) content, stream_default_ext = get_stream_content(stream, seek) return File(content=content, extension=extension or stream_default_ext) @staticmethod def as_image(image) -> 'File': content_bytes = get_image_content(image) return File.from_content(content_bytes if content_bytes is not None else b"", extension="png") @staticmethod
Apache License 2.0
stanfordvl/robovat
robovat/envs/push/push_env.py
PushEnv._create_action_space
python
def _create_action_space(self):
    if self.num_goal_steps is None:
        action_shape = [4]
    else:
        action_shape = [self.num_goal_steps, 4]

    return gym.spaces.Box(
        low=-np.ones(action_shape, dtype=np.float32),
        high=np.ones(action_shape, dtype=np.float32),
        dtype=np.float32)
Create the action space.

Returns:
    The action space.
https://github.com/stanfordvl/robovat/blob/1141f09342849d339a4418d6db7376bb420e1f7e/robovat/envs/push/push_env.py#L253-L267
from __future__ import absolute_import from __future__ import division from __future__ import print_function import os.path import glob import random import socket import shutil import cv2 import gym import numpy as np from matplotlib import pyplot as plt from robovat.envs import arm_env from robovat.envs import robot_env from robovat.envs.push import layouts from robovat.observations import attribute_obs from robovat.observations import camera_obs from robovat.observations import pose_obs from robovat.reward_fns import push_reward from robovat.math import Pose from robovat.utils import time_utils from robovat.utils.logging import logger class PushEnv(arm_env.ArmEnv): def __init__(self, simulator=None, config=None, debug=True): self._simulator = simulator self._config = config or self.default_config self._debug = debug self.camera = self._create_camera( height=self.config.KINECT2.DEPTH.HEIGHT, width=self.config.KINECT2.DEPTH.WIDTH, intrinsics=self.config.KINECT2.DEPTH.INTRINSICS, translation=self.config.KINECT2.DEPTH.TRANSLATION, rotation=self.config.KINECT2.DEPTH.ROTATION) self.task_name = self.config.TASK_NAME self.layout_id = self.config.LAYOUT_ID if self.task_name is None: self.layouts = None self.num_layouts = 1 elif self.task_name == 'data_collection': self.layouts = None self.num_layouts = 1 else: self.layouts = layouts.TASK_NAME_TO_LAYOUTS[self.task_name] self.num_layouts = len(self.layouts) self.num_goal_steps = self.config.NUM_GOAL_STEPS self.cspace = gym.spaces.Box( low=np.array(self.config.ACTION.CSPACE.LOW), high=np.array(self.config.ACTION.CSPACE.HIGH), dtype=np.float32) start_low = np.array(self.config.ACTION.CSPACE.LOW, dtype=np.float32) start_high = np.array(self.config.ACTION.CSPACE.HIGH, dtype=np.float32) self.start_offset = 0.5 * (start_high + start_low) self.start_range = 0.5 * (start_high - start_low) self.start_z = self.config.ARM.FINGER_TIP_OFFSET + self.start_offset[2] table_x = self.config.SIM.TABLE.POSE[0][0] table_y = self.config.SIM.TABLE.POSE[0][1] self.table_workspace = gym.spaces.Box( low=np.array([table_x - 0.5 * self.config.TABLE.X_RANGE, table_y - 0.5 * self.config.TABLE.Y_RANGE]), high=np.array([table_x + 0.5 * self.config.TABLE.X_RANGE, table_y + 0.5 * self.config.TABLE.Y_RANGE]), dtype=np.float32) self.min_movable_bodies = self.config.MIN_MOVABLE_BODIES self.max_movable_bodies = self.config.MAX_MOVABLE_BODIES self.num_movable_bodies = None self.movable_body_mask = None if self.is_simulation: movable_name = self.config.MOVABLE_NAME.upper() self.movable_config = self.config.MOVABLE[movable_name] self.movable_bodies = [] self.movable_paths = [] for pattern in self.movable_config.PATHS: if not os.path.isabs(pattern): pattern = os.path.join(self.simulator.assets_dir, pattern) self.movable_paths += glob.glob(pattern) assert len(self.movable_paths) > 0 self.target_movable_paths = [] for pattern in self.movable_config.TARGET_PATHS: if not os.path.isabs(pattern): pattern = os.path.join(self.simulator.assets_dir, pattern) self.target_movable_paths += glob.glob(pattern) assert len(self.target_movable_paths) > 0 else: self.movable_config = None self.movable_bodies = None self.movable_paths = None self.target_movable_paths = None self.phase_list = ['initial', 'pre', 'start', 'motion', 'post', 'offstage', 'done'] self.attributes = None self.start_status = None self.end_status = None self.max_phase_steps = None self.num_total_steps = 0 self.num_unsafe = 0 self.num_ineffective = 0 self.num_useful = 0 self.num_successes = 0 self.num_successes_by_step = [0] * 
int(self.config.MAX_STEPS + 1) self.use_recording = self.config.RECORDING.USE if self.use_recording: self.recording_camera = None self.recording_output_dir = None self.video_writer = None if self.debug: fig = plt.figure(figsize=(8, 8)) ax = fig.add_subplot(111) plt.ion() plt.show() self.ax = ax super(PushEnv, self).__init__( simulator=self.simulator, config=self.config, debug=self.debug) def _create_observations(self): observations = [ attribute_obs.IntegerAttributeObs( 'num_episodes', max_value=int(2**16 - 1), name='num_episodes'), attribute_obs.IntegerAttributeObs( 'num_steps', max_value=int(2**16 - 1), name='num_steps'), attribute_obs.IntegerAttributeObs( 'layout_id', max_value=self.num_layouts, name='layout_id'), attribute_obs.ArrayAttributeObs( 'movable_body_mask', shape=[self.max_movable_bodies], name='body_mask'), ] if self.is_simulation: observations += [ camera_obs.SegmentedPointCloudObs( self.camera, num_points=self.config.OBS.NUM_POINTS, num_bodies=self.max_movable_bodies, name='point_cloud'), ] else: observations += [ camera_obs.SegmentedPointCloudObs( self.camera, num_points=self.config.OBS.NUM_POINTS, num_bodies=self.max_movable_bodies, crop_min=self.config.OBS.CROP_MIN, crop_max=self.config.OBS.CROP_MAX, confirm_target=True, name='point_cloud'), ] if self.is_simulation and self.config.USE_PRESTIGE_OBS: observations += [ pose_obs.PoseObs( num_bodies=self.max_movable_bodies, modality='position', name='position'), attribute_obs.FlagObs('is_safe', name='is_safe'), attribute_obs.FlagObs('is_effective', name='is_effective'), ] if self.config.USE_VISUALIZATION_OBS: observations += [ camera_obs.CameraObs( self.camera, modality='rgb', name='rgb'), camera_obs.CameraObs( self.camera, modality='depth', name='depth'), ] return observations def _create_reward_fns(self): return [ push_reward.PushReward( name='reward', task_name=self.task_name, layout_id=self.layout_id, is_planning=False ) ]
MIT License
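PushEnv._create_action_space above only wraps gym.spaces.Box; a standalone sketch of the space it produces when NUM_GOAL_STEPS is unset (shape [4], every component in [-1, 1]). This is an illustration of the returned space, not the environment itself.

import gym
import numpy as np

action_shape = [4]   # becomes [num_goal_steps, 4] when goal steps are configured
action_space = gym.spaces.Box(
    low=-np.ones(action_shape, dtype=np.float32),
    high=np.ones(action_shape, dtype=np.float32),
    dtype=np.float32)

action = action_space.sample()        # a float32 vector of 4 values in [-1, 1]
assert action_space.contains(action)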
pyansys/pymapdl
ansys/mapdl/core/_commands/post26_/setup.py
Setup.gssol
python
def gssol(self, nvar="", item="", comp="", name="", **kwargs):
    command = f"GSSOL,{nvar},{item},{comp},{name}"
    return self.run(command, **kwargs)
Specifies which results to store from the results file when using
generalized plane strain.

APDL Command: GSSOL

Parameters
----------
nvar
    Arbitrary reference number or name assigned to this variable.
    Variable numbers can be 2 to NV (NUMVAR) while the name can be an
    eight byte character string. Overwrites any existing results for
    this variable.

item
    Label identifying item to be stored.

    LENGTH - Change of fiber length at the ending point.

    ROT - Rotation of the ending plane during deformation.

    F - Reaction force at the ending point in the fiber direction.

    M - Reaction moment applied on the ending plane.

comp
    Component of the item, if Item = ROT or M.

    X - The rotation angle or reaction moment of the ending plane about X.

    Y - The rotation angle or reaction moment of the ending plane about Y.

name
    Thirty-two character name identifying the item on the printout and
    display. Defaults to the label formed by concatenating the first four
    characters of the Item and Comp labels.

Notes
-----
This command stores the results (new position of the ending plane after
deformation) for generalized plane strain. All outputs are in the global
Cartesian coordinate system. For more information about the generalized
plane strain feature, see Generalized Plane Strain Option of
Current-Technology Solid Elements in the Element Reference.
https://github.com/pyansys/pymapdl/blob/e5cc21471c3a8fcef1f7b88359e38aa89cd63f73/ansys/mapdl/core/_commands/post26_/setup.py#L470-L516
from typing import Optional from ansys.mapdl.core.mapdl_types import MapdlInt class Setup: def ansol( self, nvar="", node="", item="", comp="", name="", mat="", real="", ename="", **kwargs, ): command = f"ANSOL,{nvar},{node},{item},{comp},{name},{mat},{real},{ename}" return self.run(command, **kwargs) def cisol(self, n="", id_="", node="", cont="", dtype="", **kwargs): command = f"CISOL,{n},{id_},{node},{cont},{dtype}" return self.run(command, **kwargs) def data(self, ir="", lstrt="", lstop="", linc="", name="", kcplx="", **kwargs): command = f"DATA,{ir},{lstrt},{lstop},{linc},{name},{kcplx}" return self.run(command, **kwargs) def edread(self, nstart="", label="", num="", step1="", step2="", **kwargs): command = f"EDREAD,{nstart},{label},{num},{step1},{step2}" return self.run(command, **kwargs) def enersol(self, nvar="", item="", name="", **kwargs): command = f"ENERSOL,{nvar},{item},{name}" return self.run(command, **kwargs) def esol( self, nvar: MapdlInt = "", elem: MapdlInt = "", node: MapdlInt = "", item: str = "", comp: str = "", name: str = "", **kwargs, ) -> Optional[str]: command = f"ESOL,{nvar},{elem},{node},{item},{comp},{name}" return self.run(command, **kwargs) def file(self, fname="", ext="", **kwargs): command = f"FILE,{fname},{ext}" return self.run(command, **kwargs) def gapf(self, nvar="", num="", name="", **kwargs): command = f"GAPF,{nvar},{num},{name}" return self.run(command, **kwargs)
MIT License
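A hedged PyMAPDL sketch of Setup.gssol above. Launching MAPDL and the surrounding POST26 setup are assumptions about a typical session, not part of this record; a local MAPDL installation is required.

from ansys.mapdl.core import launch_mapdl

mapdl = launch_mapdl()   # assumes MAPDL is installed locally
mapdl.post26()           # enter the time-history postprocessor, where GSSOL applies
mapdl.numvar(200)        # allow up to 200 stored variables

# Store the change of fiber length for generalized plane strain as variable 2.
mapdl.gssol(nvar=2, item="LENGTH", name="LEN")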
sassoftware/python-sasctl
tests/unit/test_pageiterator.py
paging
python
def paging(request):
    import math
    import re

    num_items, start, limit = request.param

    with mock.patch('sasctl.core.request') as req:
        items = [{'name': str(i)} for i in range(num_items)]

        obj = RestObj(
            items=items[:start],
            count=len(items),
            links=[
                {'rel': 'next',
                 'href': '/moaritems?start=%d&limit=%d' % (start, limit)}
            ],
        )

        def side_effect(_, link, **kwargs):
            assert 'limit=%d' % limit in link
            start = int(re.search(r'(?<=start=)[\d]+', link).group())
            return RestObj(items=items[start:start + limit])

        req.side_effect = side_effect
        yield obj, items[:], req

        call_count = (num_items - start) / float(limit)
        assert req.call_count >= math.ceil(call_count)
Create a RestObj designed to page through a collection of items and the
collection itself.

Returns
-------
RestObj : initial RestObj that can be used to initialize a paging iterator
List[dict] : List of items being used as the "server-side" source
MagicMock : Mock of sasctl.request for performing additional validation
https://github.com/sassoftware/python-sasctl/blob/ab6387b86a26f6b0b08fbb36d0c94fe18be59b5f/tests/unit/test_pageiterator.py#L15-L53
from unittest import mock import pytest from sasctl.core import PageIterator, RestObj @pytest.fixture(params=[(6, 2, 2), (6, 1, 4), (6, 5, 4), (6, 6, 2), (100, 10, 20)])
Apache License 2.0
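A sketch of how the paging fixture above is meant to be consumed by a test: PageIterator follows the 'next' links served by the mocked sasctl.core.request and yields one page of items at a time. The test name and assertion are illustrative assumptions.

from sasctl.core import PageIterator

def test_iterates_all_items(paging):
    obj, items, _ = paging

    pager = PageIterator(obj)
    fetched = []
    for page in pager:
        fetched.extend(page)

    # Every item served by the mocked request should come back, in order.
    assert [i['name'] for i in fetched] == [i['name'] for i in items]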
disqus/django-db-utils
dbutils/helpers.py
distinct
python
def distinct(l):
    return list(set(l))
Given an iterable will return a list of all distinct values.
https://github.com/disqus/django-db-utils/blob/b4b6872804d297cf3485ec8ca7cf97068e7c344e/dbutils/helpers.py#L19-L23
from collections import defaultdict def queryset_to_dict(qs, key='pk', singular=True): if singular: result = {} for u in qs: result.setdefault(getattr(u, key), u) else: result = defaultdict(list) for u in qs: result[getattr(u, key)].append(u) return result
Apache License 2.0
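distinct above simply round-trips through a set, so duplicates are dropped but input order is not preserved; a tiny illustration with made-up values:

from dbutils.helpers import distinct

values = [3, 1, 2, 2, 3, 3]
unique = distinct(values)

print(sorted(unique))   # [1, 2, 3]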
openstack-archive/deb-python-proliantutils
proliantutils/rest/v1.py
RestConnectorBase._rest_post
python
def _rest_post(self, suburi, request_headers, request_body):
    return self._rest_op('POST', suburi, request_headers, request_body)
REST POST operation. The response body after the operation could be the new resource, or ExtendedError, or it could be empty.
https://github.com/openstack-archive/deb-python-proliantutils/blob/b9229a0ab3e7c7af0b9e59968a5c6c7fea53bd88/proliantutils/rest/v1.py#L247-L253
__author__ = 'HPE' import base64 import gzip import json import requests from requests.packages import urllib3 from requests.packages.urllib3 import exceptions as urllib3_exceptions import retrying import six from six.moves.urllib import parse as urlparse from proliantutils import exception from proliantutils import log REDIRECTION_ATTEMPTS = 5 LOG = log.get_logger(__name__) class RestConnectorBase(object): def __init__(self, host, login, password, bios_password=None, cacert=None): self.host = host self.login = login self.password = password self.bios_password = bios_password self.message_registries = {} self.cacert = cacert if self.cacert is None: urllib3.disable_warnings(urllib3_exceptions.InsecureRequestWarning) def _(self, msg): return "[iLO %s] %s" % (self.host, msg) def _get_response_body_from_gzipped_content(self, url, response): try: gzipper = gzip.GzipFile(fileobj=six.BytesIO(response.text)) LOG.debug(self._("Received compressed response for " "url %(url)s."), {'url': url}) uncompressed_string = (gzipper.read().decode('UTF-8')) response_body = json.loads(uncompressed_string) except Exception as e: LOG.debug( self._("Error occurred while decompressing body. " "Got invalid response '%(response)s' for " "url %(url)s: %(error)s"), {'url': url, 'response': response.text, 'error': e}) raise exception.IloError(e) return response_body def _rest_op(self, operation, suburi, request_headers, request_body): url = urlparse.urlparse('https://' + self.host + suburi) start_url = url.geturl() LOG.debug(self._("%(operation)s %(url)s"), {'operation': operation, 'url': start_url}) if request_headers is None or not isinstance(request_headers, dict): request_headers = {} if self.login is not None and self.password is not None: auth_data = self.login + ":" + self.password hr = "BASIC " + base64.b64encode( auth_data.encode('ascii')).decode("utf-8") request_headers['Authorization'] = hr if request_body is not None: if (isinstance(request_body, dict) or isinstance(request_body, list)): request_headers['Content-Type'] = 'application/json' else: request_headers['Content-Type'] = ('application/' 'x-www-form-urlencoded') """Helper methods to retry and keep retrying on redirection - START""" def retry_if_response_asks_for_redirection(response): if response.status_code == 301 and 'location' in response.headers: retry_if_response_asks_for_redirection.url = ( urlparse.urlparse(response.headers['location'])) LOG.debug(self._("Request redirected to %s."), retry_if_response_asks_for_redirection.url.geturl()) return True return False @retrying.retry( retry_on_result=retry_if_response_asks_for_redirection, retry_on_exception=( lambda e: not isinstance(e, exception.IloConnectionError)), stop_max_attempt_number=REDIRECTION_ATTEMPTS) def _fetch_response(): url = retry_if_response_asks_for_redirection.url kwargs = {'headers': request_headers, 'data': json.dumps(request_body)} if self.cacert is not None: kwargs['verify'] = self.cacert else: kwargs['verify'] = False LOG.debug(self._('\n\tHTTP REQUEST: %(restreq_method)s' '\n\tPATH: %(restreq_path)s' '\n\tBODY: %(restreq_body)s' '\n'), {'restreq_method': operation, 'restreq_path': url.geturl(), 'restreq_body': request_body}) request_method = getattr(requests, operation.lower()) try: response = request_method(url.geturl(), **kwargs) except Exception as e: LOG.debug(self._("Unable to connect to iLO. 
%s"), e) raise exception.IloConnectionError(e) return response """Helper methods to retry and keep retrying on redirection - END""" try: retry_if_response_asks_for_redirection.url = url response = _fetch_response() except retrying.RetryError as e: msg = (self._("URL Redirected %(times)s times continuously. " "URL used: %(start_url)s More info: %(error)s") % {'start_url': start_url, 'times': REDIRECTION_ATTEMPTS, 'error': str(e)}) LOG.debug(msg) raise exception.IloConnectionError(msg) response_body = {} if response.text: try: response_body = json.loads(response.text) except (TypeError, ValueError): response_body = ( self._get_response_body_from_gzipped_content(url, response)) LOG.debug(self._('\n\tHTTP RESPONSE for %(restreq_path)s:' '\n\tCode: %(status_code)s' '\n\tResponse Body: %(response_body)s' '\n'), {'restreq_path': url.geturl(), 'status_code': response.status_code, 'response_body': response_body}) return response.status_code, response.headers, response_body def _rest_get(self, suburi, request_headers=None): return self._rest_op('GET', suburi, request_headers, None) def _rest_patch(self, suburi, request_headers, request_body): return self._rest_op('PATCH', suburi, request_headers, request_body) def _rest_put(self, suburi, request_headers, request_body): return self._rest_op('PUT', suburi, request_headers, request_body)
Apache License 2.0
openstack/zun
zun/db/api.py
get_volume_by_id
python
def get_volume_by_id(context, vol_id):
    return _get_dbdriver_instance().get_volume_by_id(
        context, vol_id)
Return a volume

:param context: The security context
:param vol_id: The id of a volume.

:returns: A volume.
https://github.com/openstack/zun/blob/7ed094696b75d2971d1a6d467bb95e2a641ad9ae/zun/db/api.py#L233-L241
from oslo_db import api as db_api from zun.common import profiler import zun.conf CONF = zun.conf.CONF _BACKEND_MAPPING = {'sqlalchemy': 'zun.db.sqlalchemy.api'} IMPL = db_api.DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING, lazy=True) @profiler.trace("db") def _get_dbdriver_instance(): return IMPL @profiler.trace("db") def list_containers(context, container_type, filters=None, limit=None, marker=None, sort_key=None, sort_dir=None): return _get_dbdriver_instance().list_containers( context, container_type, filters, limit, marker, sort_key, sort_dir) @profiler.trace("db") def create_container(context, values): return _get_dbdriver_instance().create_container(context, values) @profiler.trace("db") def get_container_by_uuid(context, container_type, container_uuid): return _get_dbdriver_instance().get_container_by_uuid( context, container_type, container_uuid) @profiler.trace("db") def get_container_by_name(context, container_type, container_name): return _get_dbdriver_instance().get_container_by_name( context, container_type, container_name) @profiler.trace("db") def destroy_container(context, container_type, container_id): return _get_dbdriver_instance().destroy_container( context, container_type, container_id) @profiler.trace("db") def update_container(context, container_type, container_id, values): return _get_dbdriver_instance().update_container( context, container_type, container_id, values) @profiler.trace("db") def list_volume_mappings(context, filters=None, limit=None, marker=None, sort_key=None, sort_dir=None): return _get_dbdriver_instance().list_volume_mappings( context, filters, limit, marker, sort_key, sort_dir) @profiler.trace("db") def count_volume_mappings(context, **filters): return _get_dbdriver_instance().count_volume_mappings(context, **filters) @profiler.trace("db") def create_volume_mapping(context, values): return _get_dbdriver_instance().create_volume_mapping(context, values) @profiler.trace("db") def get_volume_mapping_by_uuid(context, vm_uuid): return _get_dbdriver_instance().get_volume_mapping_by_uuid( context, vm_uuid) @profiler.trace("db") def destroy_volume_mapping(context, vm_id): return _get_dbdriver_instance().destroy_volume_mapping(context, vm_id) @profiler.trace("db") def update_volume_mapping(context, vm_id, values): return _get_dbdriver_instance().update_volume_mapping( context, vm_id, values) @profiler.trace("db") def create_volume(context, values): return _get_dbdriver_instance().create_volume(context, values) @profiler.trace("db")
Apache License 2.0
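A hedged sketch of calling the zun DB API function above. The db_api import path follows the record's function_path; obtaining an admin context this way and the volume id are assumptions about the usual OpenStack pattern, not taken from this record.

from zun.common import context as zun_context   # assumed helper module
from zun.db import api as db_api

ctx = zun_context.get_admin_context()   # any RequestContext would do
volume = db_api.get_volume_by_id(ctx, 42)
print(volume)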
jacobkrantz/simple-hohmm
SimpleHOHMM/builder.py
HiddenMarkovModelBuilder.build_unsupervised
python
def build_unsupervised(self, single_states=None, all_obs=None,
                       distribution="random", highest_order=1):
    if(distribution not in ('random', 'uniform')):
        raise ValueError("parameter 'distribution must be either 'random' or 'uniform'")
    if(single_states is None):
        single_states = self._single_states
    if(all_obs is None):
        all_obs = self._all_obs

    single_states = list(set(single_states))
    all_obs = list(set(all_obs))
    all_states = self._make_permutations(single_states, highest_order)
    num_states = len(all_states)

    if(distribution == 'uniform'):
        trans_probs = init_matrix_uniform(num_states, num_states)
        emission_probs = init_matrix_uniform(num_states, len(all_obs))
        start_probs = self._init_uniform_start_probs(
            single_states,
            highest_order
        )
    else:
        trans_probs = init_matrix_random(num_states, num_states)
        emission_probs = init_matrix_random(num_states, len(all_obs))
        start_probs = self._init_random_start_probs(
            single_states,
            highest_order
        )

    return HMM(
        trans_probs,
        emission_probs,
        start_probs,
        all_obs,
        all_states,
        single_states=single_states,
        order=highest_order
    )
Builds a Hidden Markov Model based on a uniform probability distribution.

Args:
    single_states (list<>): list of unique elements detailing all possible
        hidden states the model should account for. If default, uses the
        values set previously through 'set_single_states'.
    all_obs (list<>): list of unique elements detailing all possible
        observation elements the model should account for. If default,
        uses the values set previously through 'set_all_obs'.
    distribution (string): either 'random' for a random probability
        distribution, or 'uniform' for a uniform probability distribution.
        defaults to 'random'.
    highest_order (int): History window of hidden states. Defaults to 1.

Returns:
    HiddenMarkovModel: capable of evaluating, decoding, and learning.
https://github.com/jacobkrantz/simple-hohmm/blob/73d0da85e2e06c7ec7683b2e28079fbf6991580e/SimpleHOHMM/builder.py#L118-L171
from copy import deepcopy from itertools import product import random as ran from .model import HiddenMarkovModel as HMM from .utility import init_matrix, init_matrix_uniform, init_matrix_random class HiddenMarkovModelBuilder: def __init__(self): self._obs_sequences = list() self._state_sequences = list() self._single_states = None self._all_obs = None def add_training_example(self, o, s): self._obs_sequences.append(o) self._state_sequences.append(s) def add_batch_training_examples(self, o_lst, s_lst): self._obs_sequences += o_lst self._state_sequences += s_lst def set_single_states(self, single_states): self._single_states = list(single_states) def set_all_obs(self, all_obs): self._all_obs = list(all_obs) def build(self, highest_order=1, k_smoothing=0.0, synthesize_states=False, include_pi=True): if(highest_order < 1): raise ValueError("highest order must be 1 or greater.") if(self._all_obs is None): all_obs = self._get_unique_elements(self._obs_sequences) else: all_obs = self._all_obs if(self._single_states is None): single_states = self._get_higher_order_states(self._state_sequences, 1) if(synthesize_states): all_states = self._make_permutations(single_states, highest_order) else: all_states = self._get_higher_order_states(self._state_sequences, highest_order) else: synthesize_states = True single_states = self._single_states all_states = self._make_permutations(single_states, highest_order) start_probs = list() for i in range(highest_order): start_probs.append(self._calculate_start_probs( state_sequences = self._state_sequences, single_states = single_states, order = i+1, k_smoothing = k_smoothing, synthesize_states = synthesize_states, set_to_1 = not include_pi )) trans_probs = self._calculate_transition_probs(all_states, highest_order, k_smoothing) emission_probs = self._calculate_emission_probs(single_states, all_obs, k_smoothing) return HMM( trans_probs, emission_probs, start_probs, all_obs, all_states, single_states=single_states, order=highest_order )
MIT License
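A usage sketch for HiddenMarkovModelBuilder.build_unsupervised above, grounded in the builder API shown in this record's context. The top-level import and the state/observation alphabets are assumptions.

from SimpleHOHMM import HiddenMarkovModelBuilder   # import path assumed from the package layout

builder = HiddenMarkovModelBuilder()
builder.set_single_states(['rainy', 'sunny'])
builder.set_all_obs(['walk', 'shop', 'clean'])

# Second-order model with uniformly initialized probabilities,
# ready to be refined by unsupervised learning afterwards.
hmm = builder.build_unsupervised(distribution='uniform', highest_order=2)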
oddlama/autokernel
autokernel/node_detector.py
ModuleNode.__init__
python
def __init__(self, line):
    self.nodes = [Subsystem.module.create_node({'name': line})]
Initialize module node
https://github.com/oddlama/autokernel/blob/cd165cbc9467385c356d4a3d31b524a40d574edd/autokernel/node_detector.py#L310-L314
from . import log from .subsystem import Subsystem import re import glob import subprocess class NodeParserException(Exception): pass class Node: node_type = None nodes = [] @classmethod def detect_nodes(cls): raise NodeParserException("missing implementation for detect_nodes() on derived class '{}'".format(cls.__name__)) @classmethod def log_nodes(cls, nodes): log.info(" {:3d} {} nodes".format(len(nodes), cls.node_type)) for n in nodes: log.verbose(" - {}".format(n)) def __str__(self): return '[' + ', '.join([str(i) for i in self.nodes]) + ']' class LineParserNode(Node): @classmethod def get_lines(cls): raise ValueError("Missing get_lines() method implementation on derived class {}".format(cls.__name__)) @classmethod def detect_nodes(cls): nodes = [] for line in cls.get_lines(): try: nodes.append(cls(line)) except NodeParserException as e: log.verbose(str(e)) cls.log_nodes(nodes) return nodes class SysfsNode(LineParserNode): sysfs_path = None @classmethod def get_sysfs_files(cls): if hasattr(cls, 'sysfs_path'): return glob.glob(cls.sysfs_path) raise ValueError("Missing sysfs_path or get_sysfs_files() implementation on derived class {}".format(cls.__name__)) @classmethod def get_lines(cls): lines = set() for file_name in cls.get_sysfs_files(): with open(file_name, 'r', encoding='utf-8') as file: for line in file: line = line.strip() if line: lines.add(line) return lines def create_modalias_token_parser(subsystem, subsystem_regex_str, options): class Parser: @staticmethod def parse(modalias): m = Parser._get_regex().match(modalias) if not m: raise NodeParserException("Could not parse sysfs line") data = {} for option in options: val = m.group(option[1]) if not val: raise NodeParserException("Could not match modalias for parser '{}'".format(subsystem_regex_str)) data[option[1]] = val return [subsystem.create_node(data)] @staticmethod def _get_regex(): if not hasattr(Parser, 'regex'): regex = '{}:'.format(subsystem_regex_str) for option in options: alias = option[0] optname = option[1] part_regex = "[0-9A-Z*]*" if len(option) <= 2 else option[2] regex += '{}(?P<{}>{})'.format(alias, optname, part_regex) Parser.regex = re.compile(regex) return Parser.regex return Parser() def create_modalias_split_parser(subsystem, subsystem_str, delim, attr_name='value'): class Parser: @staticmethod def parse(modalias): values = filter(None, modalias[len(subsystem_str) + 1:].split(delim)) return [subsystem.create_node({attr_name: v}) for v in values] return Parser() class ModaliasNode(SysfsNode): node_type = 'modalias' modalias_parsers = { 'acpi': create_modalias_split_parser(Subsystem.acpi, 'acpi', ':', attr_name='id'), 'hdaudio': create_modalias_token_parser(Subsystem.hda, 'hdaudio', [ ('v', 'vendor' ), ('r', 'revision' ), ('a', 'api_version'), ]), 'hid': create_modalias_token_parser(Subsystem.hid, 'hid', [ ('b', 'bus' ), ('v', 'vendor' ), ('p', 'product' ), ('d', 'driver_data'), ]), 'input': create_modalias_token_parser(Subsystem.input, 'input', [ ('b', 'bustype'), ('v', 'vendor' ), ('p', 'product'), ('e', 'version'), ('-e', 'list', '.*'), ]), 'pci': create_modalias_token_parser(Subsystem.pci, 'pci', [ ('v' , 'vendor' ), ('d' , 'device' ), ('sv', 'subvendor' ), ('sd', 'subdevice' ), ('bc', 'bus_class' ), ('sc', 'bus_subclass'), ('i' , 'interface' ), ]), 'pcmcia': create_modalias_token_parser(Subsystem.pcmcia, 'pcmcia', [ ('m' , 'manf_id' ), ('c' , 'card_id' ), ('f' , 'func_id' ), ('fn' , 'function' ), ('pfn', 'device_no'), ('pa' , 'prod_id_1'), ('pb' , 'prod_id_2'), ('pc' , 'prod_id_3'), ('pd' , 
'prod_id_4'), ]), 'platform': create_modalias_split_parser(Subsystem.platform, 'platform', ':', attr_name='name'), 'sdio': create_modalias_token_parser(Subsystem.sdio, 'sdio', [ ('c', 'class' ), ('v', 'vendor'), ('d', 'device'), ]), 'serio': create_modalias_token_parser(Subsystem.serio, 'serio', [ ('ty' , 'type' ), ('pr' , 'proto'), ('id' , 'id' ), ('ex' , 'extra'), ]), 'usb': create_modalias_token_parser(Subsystem.usb, 'usb', [ ('v' , 'device_vendor' ), ('p' , 'device_product' ), ('d' , 'bcddevice' ), ('dc' , 'device_class' ), ('dsc', 'device_subclass' ), ('dp' , 'device_protocol' ), ('ic' , 'interface_class' ), ('isc', 'interface_subclass'), ('ip' , 'interface_protocol'), ]), 'virtio': create_modalias_token_parser(Subsystem.virtio, 'virtio', [ ('v', 'vendor'), ('d', 'device'), ]), } def __init__(self, modalias): self.modalias_subsystem = modalias[:modalias.index(':')] if self.modalias_subsystem not in self.modalias_parsers: raise NodeParserException("No parser for modalias subsystem '{}'".format(self.modalias_subsystem)) self.nodes = self.modalias_parsers[self.modalias_subsystem].parse(modalias) @classmethod def get_sysfs_files(cls): return filter(None, [i.decode() for i in subprocess.run(['find', '/sys', '-type', 'f', '-name', 'modalias', '-print0'], check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.split(b'\0')]) class PnpNode(SysfsNode): node_type = 'pnp' sysfs_path = '/sys/bus/pnp/devices/*/id' def __init__(self, sysfs_line): self.nodes = [Subsystem.pnp.create_node({'id': sysfs_line})] class I2cNode(SysfsNode): node_type = 'i2c' sysfs_path = '/sys/bus/i2c/devices/*/name' def __init__(self, sysfs_line): self.nodes = [Subsystem.i2c.create_node({'name': sysfs_line})] class FsTypeNode(LineParserNode): node_type = 'filesystem' def __init__(self, line): self.nodes = [Subsystem.fs.create_node({'fstype': line})] @classmethod def get_lines(cls): fstypes = subprocess.run(['findmnt', '-A', '-n', '-o', 'FSTYPE'], check=True, stdout=subprocess.PIPE).stdout.decode().strip().splitlines() return set(fstypes) class ModuleNode(LineParserNode): node_type = 'module'
MIT License
lucacappelletti94/dictances
dictances/nth_variation.py
nth_variation
python
def nth_variation(a: Dict, b: Dict, exp: float = 2, overlap: bool = False) -> float:
    total = 0
    n = 0
    bget = b.__getitem__
    aget = a.__getitem__
    if exp % 2 == 0:
        nth = _even_nth
    else:
        nth = _odd_nth
    for k, a_val in a.items():
        try:
            total += nth(a_val, bget(k))**exp
        except KeyError:
            total += a_val**exp
        n += 1
    for k, b_val in b.items():
        try:
            aget(k)
        except KeyError:
            total += b_val**exp
            n += 1
    result = total
    if overlap:
        return result, n
    return result
Return the nth power distance between the given dictionaries.

Parameters
----------------------------
a: Dict,
    First dictionary to consider.
b: Dict,
    Second dictionary to consider.
exp: float,
    The exponent for the nth power distance.
overlap: bool,
    Whether to also return the overlap count.

Returns
----------------------------
The nth power distance between the given dictionaries.
https://github.com/lucacappelletti94/dictances/blob/9825f9bebadd9f98d4e8ff4623391554740a9cac/dictances/nth_variation.py#L13-L56
from typing import Dict def _even_nth(a_val: float, b_val: float) -> float: return a_val - b_val def _odd_nth(a_val: float, b_val: float) -> float: return abs(a_val - b_val)
MIT License
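A minimal usage sketch for the nth_variation record above. The two dictionaries and their values are invented for illustration, and the import assumes the package re-exports the function at the top level (otherwise `from dictances.nth_variation import nth_variation` mirrors the file path). With the default `exp=2`, the function accumulates squared differences over shared keys and squared values over keys unique to either dictionary.

from dictances import nth_variation

# Hypothetical distributions; keys and values are illustrative only.
a = {"x": 0.5, "y": 0.3, "z": 0.2}
b = {"x": 0.4, "y": 0.6}

# Plain nth power distance with the default exponent.
print(nth_variation(a, b))

# Asking for the overlap count as well returns a (distance, n) tuple.
print(nth_variation(a, b, exp=2, overlap=True))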
aolarchive/hydro
src/hydro/connectors/base_classes.py
DBBaseConnector._convert_results_to_dataframe
python
def _convert_results_to_dataframe(self, cursor):
    rows = cursor.fetchall()
    columns = [col[0] for col in cursor.description]
    if isinstance(rows, tuple):
        rows = list(rows)
    data = pd.DataFrame.from_records(rows, columns=columns)
    return data
This is deprecated - use SQLAlchemy and pandas' read_sql method instead
https://github.com/aolarchive/hydro/blob/8580aebc30694156c436e5ba7470d3fcbb46896b/src/hydro/connectors/base_classes.py#L68-L77
__author__ = 'moshebasanchig' import pandas as pd from hydro.exceptions import HydroException DSN = 'dsn' CONNECTION_STRING = 'connection string' class ConnectorBase(object): _conn = None def __init__(self): self.logger = None def _verify_connection_definitions(self): raise HydroException("Not implemented") def _connect(self): raise HydroException("Not implemented") def _close(self): raise HydroException('Not implemented') def execute(self): raise HydroException('Not implemented') def close(self): self.logger.debug('Closing connection') self._close() self._conn = None return True def connect(self): if not self._conn: self.logger.debug('Connection does not exist, Verify definitions of connection') self._verify_connection_definitions() self._connect() return True def execute(self, command): self.connect() try: self.logger.debug('Executing command: {0}'.format(command)) res = self._execute(command) return res except Exception, err: self.logger.error('Error: {0}'.format(err.message)) self.close() raise err def set_logger(self, logger): self.logger = logger class DBBaseConnector(ConnectorBase): def __init__(self, conn_definitions): self._conn = None self._conf_defs = conn_definitions super(DBBaseConnector, self).__init__()
MIT License
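The docstring above recommends SQLAlchemy plus pandas' read_sql in place of the hand-rolled cursor-to-DataFrame conversion. A minimal sketch of that suggested path; the connection URL and table name are placeholders, not taken from the hydro project.

import pandas as pd
from sqlalchemy import create_engine

# Placeholder connection string; substitute the real database URL.
engine = create_engine("sqlite:///example.db")

# pandas reads the cursor description and rows internally.
frame = pd.read_sql("SELECT * FROM some_table", engine)
print(frame.head())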
fpaupier/tensorflow-serving_sidecar
object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py
SSDMobileNetV2KerasFeatureExtractor.__init__
python
def __init__(self,
             is_training,
             depth_multiplier,
             min_depth,
             pad_to_multiple,
             conv_hyperparams,
             freeze_batchnorm,
             inplace_batchnorm_update,
             use_explicit_padding=False,
             use_depthwise=False,
             override_base_feature_extractor_hyperparams=False,
             name=None):
    super(SSDMobileNetV2KerasFeatureExtractor, self).__init__(
        is_training=is_training,
        depth_multiplier=depth_multiplier,
        min_depth=min_depth,
        pad_to_multiple=pad_to_multiple,
        conv_hyperparams=conv_hyperparams,
        freeze_batchnorm=freeze_batchnorm,
        inplace_batchnorm_update=inplace_batchnorm_update,
        use_explicit_padding=use_explicit_padding,
        use_depthwise=use_depthwise,
        override_base_feature_extractor_hyperparams=override_base_feature_extractor_hyperparams,
        name=name)
    self._feature_map_layout = {
        'from_layer': ['layer_15/expansion_output', 'layer_19', '', '', '', ''],
        'layer_depth': [-1, -1, 512, 256, 256, 128],
        'use_depthwise': self._use_depthwise,
        'use_explicit_padding': self._use_explicit_padding,
    }
    self.mobilenet_v2 = None
    self.feature_map_generator = None
MobileNetV2 Feature Extractor for SSD Models.

MobileNet v2 (experimental), designed by sandler@. More details can be found in
//knowledge/cerebra/brain/compression/mobilenet/mobilenet_experimental.py

Args:
    is_training: whether the network is in training mode.
    depth_multiplier: float depth multiplier for the feature extractor
        (functions as a width multiplier for the mobilenet_v2 network itself).
    min_depth: minimum feature extractor depth.
    pad_to_multiple: the nearest multiple to zero-pad the input height and
        width dimensions to.
    conv_hyperparams: `hyperparams_builder.KerasLayerHyperparams` object
        containing convolution hyperparameters for the layers added on top of
        the base feature extractor.
    freeze_batchnorm: whether to freeze batch norm parameters during training.
        When training with a small batch size (e.g. 1), it is desirable to
        freeze batch norm updates and use pretrained batch norm params.
    inplace_batchnorm_update: whether to update batch norm moving average
        values in place. When this is false, the train op must add a control
        dependency on the tf.GraphKeys.UPDATE_OPS collection in order to
        update batch norm statistics.
    use_explicit_padding: whether to use explicit padding when extracting
        features. Default is False.
    use_depthwise: whether to use depthwise convolutions. Default is False.
    override_base_feature_extractor_hyperparams: whether to override
        hyperparameters of the base feature extractor with the ones from
        `conv_hyperparams_fn`.
    name: a string name scope to assign to the model. If 'None', Keras will
        auto-generate one from the class name.
https://github.com/fpaupier/tensorflow-serving_sidecar/blob/40626a333285b269bc5f51ae335faba308bf8839/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py#L31-L96
import tensorflow as tf from object_detection.meta_architectures import ssd_meta_arch from object_detection.models import feature_map_generators from object_detection.models.keras_applications import mobilenet_v2 from object_detection.utils import ops from object_detection.utils import shape_utils class SSDMobileNetV2KerasFeatureExtractor( ssd_meta_arch.SSDKerasFeatureExtractor):
MIT License
coldfusion39/domi-owned
domi_owned/utilities.py
Utilities.parse_credentials
python
def parse_credentials(self, value):
    return '' if value is None else value
Normalize a credential value, returning an empty string when it is None.
https://github.com/coldfusion39/domi-owned/blob/583d0a5ade9305c40329916e0ecf1540a089c9be/domi_owned/utilities.py#L65-L69
import logging.handlers import re import sys import tqdm class Utilities(object): HEADERS = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5', 'Accept-Encoding': 'gzip, deflate', 'Connection': 'close' } URL_REGEX = re.compile(r'(https?:\/\/[\d\w.:-]+)', re.I) FORM_REGEX = re.compile(r'method[\'\"= ]{1,4}post[\'\"]?', re.I) OPEN_REGEX = re.compile(r'name[\'\"= ]{1,4}notesview[\'\"]?', re.I) ACCOUNT_REGEX = re.compile(r'/([a-f0-9]{32}/[a-f0-9]{32})', re.I) USER_FIELD_REGEX = re.compile(r'user.+', re.I) REDIRECT_FIELD_REGEX = re.compile(r'redirect.+', re.I) NAMES_REGEX = re.compile(r'name[\'\"= ]{1,4}notesview[\'\"]?', re.I) WEBADMIN_REGEX = re.compile(r'<title>.*administration</title>', re.I) RESTRICTED_REGEX = re.compile(r'(notes exception|not authorized)', re.I) VERSION_REGEX = re.compile(r'(?:version|domino administrator|domino|release)[=":\s]{0,4}([\d.]+)(?:\s|\")?', re.I) LINUX_USER_REGEX = re.compile(r'([a-z0-9-_].+):(.+)', re.I) WINDOWS_USER_REGEX = re.compile(r'(.+)\\(.+)', re.I) PATH_REGEX = re.compile(r'DataDirectory\s*=\s*\'(.+)\';', re.I) def set_logging(self): logger = logging.getLogger('DomiOwned') logger.setLevel(logging.DEBUG) custom_format = CustomLoggingFormatter() handler = logging.StreamHandler() handler.setFormatter(custom_format) logger.addHandler(handler) return logger
MIT License
opennetworkingfoundation/tapi
RI/flask_server/tapi_server/models/tapi_connectivity_connectivitycontext_connectivity_service.py
TapiConnectivityConnectivitycontextConnectivityService.service_type
python
def service_type(self):
    return self._service_type
Gets the service_type of this TapiConnectivityConnectivitycontextConnectivityService.

:return: The service_type of this TapiConnectivityConnectivitycontextConnectivityService.
:rtype: TapiConnectivityServiceType
https://github.com/opennetworkingfoundation/tapi/blob/1f3fd9483d5674552c5a31206c97399c8c151897/RI/flask_server/tapi_server/models/tapi_connectivity_connectivitycontext_connectivity_service.py#L520-L527
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from tapi_server.models.base_model_ import Model from tapi_server.models.tapi_common_admin_state_pac import TapiCommonAdminStatePac from tapi_server.models.tapi_common_administrative_state import TapiCommonAdministrativeState from tapi_server.models.tapi_common_capacity import TapiCommonCapacity from tapi_server.models.tapi_common_forwarding_direction import TapiCommonForwardingDirection from tapi_server.models.tapi_common_global_class import TapiCommonGlobalClass from tapi_server.models.tapi_common_layer_protocol_name import TapiCommonLayerProtocolName from tapi_server.models.tapi_common_lifecycle_state import TapiCommonLifecycleState from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue from tapi_server.models.tapi_common_operational_state import TapiCommonOperationalState from tapi_server.models.tapi_common_time_range import TapiCommonTimeRange from tapi_server.models.tapi_connectivity_connection_ref import TapiConnectivityConnectionRef from tapi_server.models.tapi_connectivity_connectivity_constraint import TapiConnectivityConnectivityConstraint from tapi_server.models.tapi_connectivity_connectivity_service_ref import TapiConnectivityConnectivityServiceRef from tapi_server.models.tapi_connectivity_connectivityservice_end_point import TapiConnectivityConnectivityserviceEndPoint from tapi_server.models.tapi_connectivity_coordinate_type import TapiConnectivityCoordinateType from tapi_server.models.tapi_connectivity_resilience_constraint import TapiConnectivityResilienceConstraint from tapi_server.models.tapi_connectivity_reversion_mode import TapiConnectivityReversionMode from tapi_server.models.tapi_connectivity_service_type import TapiConnectivityServiceType from tapi_server.models.tapi_path_computation_diversity_policy import TapiPathComputationDiversityPolicy from tapi_server.models.tapi_path_computation_path_ref import TapiPathComputationPathRef from tapi_server.models.tapi_path_computation_route_objective_function import TapiPathComputationRouteObjectiveFunction from tapi_server.models.tapi_path_computation_routing_constraint import TapiPathComputationRoutingConstraint from tapi_server.models.tapi_path_computation_topology_constraint import TapiPathComputationTopologyConstraint from tapi_server.models.tapi_topology_cost_characteristic import TapiTopologyCostCharacteristic from tapi_server.models.tapi_topology_latency_characteristic import TapiTopologyLatencyCharacteristic from tapi_server.models.tapi_topology_link_ref import TapiTopologyLinkRef from tapi_server.models.tapi_topology_node_ref import TapiTopologyNodeRef from tapi_server.models.tapi_topology_resilience_type import TapiTopologyResilienceType from tapi_server.models.tapi_topology_risk_characteristic import TapiTopologyRiskCharacteristic from tapi_server.models.tapi_topology_topology_ref import TapiTopologyTopologyRef from tapi_server import util class TapiConnectivityConnectivitycontextConnectivityService(Model): def __init__(self, operational_state=None, lifecycle_state=None, administrative_state=None, name=None, uuid=None, service_layer=None, schedule=None, connectivity_direction=None, requested_capacity=None, diversity_exclusion=None, service_level=None, service_type=None, coroute_inclusion=None, is_lock_out=False, max_switch_times=None, restoration_coordinate_type=None, is_coordinated_switching_both_ends=False, hold_off_time=None, is_frozen=False, wait_to_revert_time=15, 
resilience_type=None, preferred_restoration_layer=None, restore_priority=None, reversion_mode=None, is_exclusive=True, diversity_policy=None, route_objective_function=None, cost_characteristic=None, latency_characteristic=None, risk_diversity_characteristic=None, route_direction=None, include_node=None, exclude_link=None, avoid_topology=None, exclude_path=None, include_link=None, preferred_transport_layer=None, exclude_node=None, include_topology=None, include_path=None, end_point=None, connection=None): self.openapi_types = { 'operational_state': TapiCommonOperationalState, 'lifecycle_state': TapiCommonLifecycleState, 'administrative_state': TapiCommonAdministrativeState, 'name': List[TapiCommonNameAndValue], 'uuid': str, 'service_layer': TapiCommonLayerProtocolName, 'schedule': TapiCommonTimeRange, 'connectivity_direction': TapiCommonForwardingDirection, 'requested_capacity': TapiCommonCapacity, 'diversity_exclusion': List[TapiConnectivityConnectivityServiceRef], 'service_level': str, 'service_type': TapiConnectivityServiceType, 'coroute_inclusion': TapiConnectivityConnectivityServiceRef, 'is_lock_out': bool, 'max_switch_times': int, 'restoration_coordinate_type': TapiConnectivityCoordinateType, 'is_coordinated_switching_both_ends': bool, 'hold_off_time': int, 'is_frozen': bool, 'wait_to_revert_time': int, 'resilience_type': TapiTopologyResilienceType, 'preferred_restoration_layer': List[TapiCommonLayerProtocolName], 'restore_priority': int, 'reversion_mode': TapiConnectivityReversionMode, 'is_exclusive': bool, 'diversity_policy': TapiPathComputationDiversityPolicy, 'route_objective_function': TapiPathComputationRouteObjectiveFunction, 'cost_characteristic': List[TapiTopologyCostCharacteristic], 'latency_characteristic': List[TapiTopologyLatencyCharacteristic], 'risk_diversity_characteristic': List[TapiTopologyRiskCharacteristic], 'route_direction': TapiCommonForwardingDirection, 'include_node': List[TapiTopologyNodeRef], 'exclude_link': List[TapiTopologyLinkRef], 'avoid_topology': List[TapiTopologyTopologyRef], 'exclude_path': List[TapiPathComputationPathRef], 'include_link': List[TapiTopologyLinkRef], 'preferred_transport_layer': List[TapiCommonLayerProtocolName], 'exclude_node': List[TapiTopologyNodeRef], 'include_topology': List[TapiTopologyTopologyRef], 'include_path': List[TapiPathComputationPathRef], 'end_point': List[TapiConnectivityConnectivityserviceEndPoint], 'connection': List[TapiConnectivityConnectionRef] } self.attribute_map = { 'operational_state': 'operational-state', 'lifecycle_state': 'lifecycle-state', 'administrative_state': 'administrative-state', 'name': 'name', 'uuid': 'uuid', 'service_layer': 'service-layer', 'schedule': 'schedule', 'connectivity_direction': 'connectivity-direction', 'requested_capacity': 'requested-capacity', 'diversity_exclusion': 'diversity-exclusion', 'service_level': 'service-level', 'service_type': 'service-type', 'coroute_inclusion': 'coroute-inclusion', 'is_lock_out': 'is-lock-out', 'max_switch_times': 'max-switch-times', 'restoration_coordinate_type': 'restoration-coordinate-type', 'is_coordinated_switching_both_ends': 'is-coordinated-switching-both-ends', 'hold_off_time': 'hold-off-time', 'is_frozen': 'is-frozen', 'wait_to_revert_time': 'wait-to-revert-time', 'resilience_type': 'resilience-type', 'preferred_restoration_layer': 'preferred-restoration-layer', 'restore_priority': 'restore-priority', 'reversion_mode': 'reversion-mode', 'is_exclusive': 'is-exclusive', 'diversity_policy': 'diversity-policy', 'route_objective_function': 
'route-objective-function', 'cost_characteristic': 'cost-characteristic', 'latency_characteristic': 'latency-characteristic', 'risk_diversity_characteristic': 'risk-diversity-characteristic', 'route_direction': 'route-direction', 'include_node': 'include-node', 'exclude_link': 'exclude-link', 'avoid_topology': 'avoid-topology', 'exclude_path': 'exclude-path', 'include_link': 'include-link', 'preferred_transport_layer': 'preferred-transport-layer', 'exclude_node': 'exclude-node', 'include_topology': 'include-topology', 'include_path': 'include-path', 'end_point': 'end-point', 'connection': 'connection' } self._operational_state = operational_state self._lifecycle_state = lifecycle_state self._administrative_state = administrative_state self._name = name self._uuid = uuid self._service_layer = service_layer self._schedule = schedule self._connectivity_direction = connectivity_direction self._requested_capacity = requested_capacity self._diversity_exclusion = diversity_exclusion self._service_level = service_level self._service_type = service_type self._coroute_inclusion = coroute_inclusion self._is_lock_out = is_lock_out self._max_switch_times = max_switch_times self._restoration_coordinate_type = restoration_coordinate_type self._is_coordinated_switching_both_ends = is_coordinated_switching_both_ends self._hold_off_time = hold_off_time self._is_frozen = is_frozen self._wait_to_revert_time = wait_to_revert_time self._resilience_type = resilience_type self._preferred_restoration_layer = preferred_restoration_layer self._restore_priority = restore_priority self._reversion_mode = reversion_mode self._is_exclusive = is_exclusive self._diversity_policy = diversity_policy self._route_objective_function = route_objective_function self._cost_characteristic = cost_characteristic self._latency_characteristic = latency_characteristic self._risk_diversity_characteristic = risk_diversity_characteristic self._route_direction = route_direction self._include_node = include_node self._exclude_link = exclude_link self._avoid_topology = avoid_topology self._exclude_path = exclude_path self._include_link = include_link self._preferred_transport_layer = preferred_transport_layer self._exclude_node = exclude_node self._include_topology = include_topology self._include_path = include_path self._end_point = end_point self._connection = connection @classmethod def from_dict(cls, dikt) -> 'TapiConnectivityConnectivitycontextConnectivityService': return util.deserialize_model(dikt, cls) @property def operational_state(self): return self._operational_state @operational_state.setter def operational_state(self, operational_state): self._operational_state = operational_state @property def lifecycle_state(self): return self._lifecycle_state @lifecycle_state.setter def lifecycle_state(self, lifecycle_state): self._lifecycle_state = lifecycle_state @property def administrative_state(self): return self._administrative_state @administrative_state.setter def administrative_state(self, administrative_state): self._administrative_state = administrative_state @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def uuid(self): return self._uuid @uuid.setter def uuid(self, uuid): self._uuid = uuid @property def service_layer(self): return self._service_layer @service_layer.setter def service_layer(self, service_layer): self._service_layer = service_layer @property def schedule(self): return self._schedule @schedule.setter def schedule(self, schedule): self._schedule = schedule 
@property def connectivity_direction(self): return self._connectivity_direction @connectivity_direction.setter def connectivity_direction(self, connectivity_direction): self._connectivity_direction = connectivity_direction @property def requested_capacity(self): return self._requested_capacity @requested_capacity.setter def requested_capacity(self, requested_capacity): self._requested_capacity = requested_capacity @property def diversity_exclusion(self): return self._diversity_exclusion @diversity_exclusion.setter def diversity_exclusion(self, diversity_exclusion): self._diversity_exclusion = diversity_exclusion @property def service_level(self): return self._service_level @service_level.setter def service_level(self, service_level): self._service_level = service_level @property
Apache License 2.0
texttheater/produce
t/prodtest.py
ProduceTestCase.qmtime
python
def qmtime(self, path):
    try:
        return self.mtime(path)
    except FileNotFoundError:
        return 0
Returns the modification time of the file at path, or 0 if it doesn't exist.
https://github.com/texttheater/produce/blob/202e3196daf7ac53c1998ac2ee9b0f8cbb1c6615/t/prodtest.py#L100-L108
import os import produce import subprocess import time import unittest def dict2opts(d): result = [] for k, v in d.items(): result.append(k) if v != None: result.append(v) return result class ProduceTestCase(unittest.TestCase): def setUp(self): self.workdir = self.__module__ + '.working' try: self.runCommand(['rm', '-rf', self.workdir]) self.runCommand(['cp', '-r', self.__module__, self.workdir]) except Exception as e: self.skipTest('setup failed') os.chdir(self.workdir) def tearDown(self): os.chdir('..') def assertDirectoryContents(self, filelist, directory='.'): self.assertEqual(set(filelist), set(os.listdir(directory))) def produce(self, *args, **kwargs): produce.produce(dict2opts(kwargs) + list(args)) def assertFileExists(self, path): self.assertTrue(os.path.exists(path)) def assertFileDoesNotExist(self, path): self.assertFalse(os.path.exists(path)) def assertState(self, existentFiles, nonExistentFiles): for f in existentFiles: self.assertFileExists(f) for f in nonExistentFiles: self.assertFileDoesNotExist(f) def assertNewer(self, newFile, oldFile): self.assertGreater(self.mtime(newFile), self.mtime(oldFile)) def assertNewerEqual(self, newFile, oldFile): self.assertGreaterEqual(self.mtime(newFile), self.mtime(oldFile)) def assertUpdates(self, changed, function, updated, notUpdated): for f in changed: self.sleep(0.1) self.touch(f) times = {} for f in updated + notUpdated: times[f] = self.qmtime(f) self.sleep() function() for f in updated: self.assertGreater(self.qmtime(f), times[f]) for f in notUpdated: self.assertLessEqual(self.qmtime(f), times[f]) def assertTakesLessThan(self, seconds): return _TakesLessThan(seconds, self) def assertTakesMoreThan(self, seconds): return _TakesMoreThan(seconds, self) def assertFileContents(self, fileName, expectedContents): with open(fileName) as f: actualContents = f.read() self.assertEqual(expectedContents, actualContents) def mtime(self, path): return os.stat(path).st_mtime
MIT License
erigones/esdc-ce
api/node/utils.py
get_nodes
python
def get_nodes(request, sr=(), pr=(), order_by=('hostname',), annotate=None, extra=None, **kwargs):
    if not request.user.is_staff:
        kwargs['dc'] = request.dc

    if sr:
        qs = Node.objects.select_related(*sr)
    else:
        qs = Node.objects

    if pr:
        qs = qs.prefetch_related(*pr)

    if annotate:
        qs = qs.annotate(**annotate)

    if extra:
        qs = qs.extra(**extra)

    if kwargs:
        return qs.filter(**kwargs).order_by(*order_by)

    return qs.order_by(*order_by)
Return a queryset of nodes available to the current admin user.
https://github.com/erigones/esdc-ce/blob/f83a62d0d430e3c8f9aac23d958583b0efce4312/api/node/utils.py#L35-L57
from api.utils.db import get_object from vms.models import Node def get_node(request, hostname, attrs=None, where=None, exists_ok=True, noexists_fail=True, sr=(), pr=(), dc=False, api=True, extra=None, annotate=None): if attrs is None: attrs = {} if not request.user.is_staff: attrs['dc'] = request.dc if dc: attrs['dc'] = dc attrs['hostname'] = hostname if api: return get_object(request, Node, attrs, where=where, exists_ok=exists_ok, noexists_fail=noexists_fail, sr=sr, pr=pr, extra=extra, annotate=annotate) if sr: qs = Node.objects.select_related(*sr) else: qs = Node.objects if where: return qs.filter(where).get(**attrs) else: return qs.get(**attrs)
Apache License 2.0
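A hedged sketch of how get_nodes from the record above might be called from a view. The wrapper function name and the hostname filter are illustrative, not taken from the project; extra keyword arguments are simply passed through as Django queryset filters.

# Illustrative only: `request` is the incoming Django/DRF request object.
def list_matching_nodes(request):
    # Hypothetical filter: narrow to nodes whose hostname starts with "node".
    nodes = get_nodes(request, order_by=('hostname',), hostname__startswith='node')
    return [node.hostname for node in nodes]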
drexly/openhgsenti
lib/django/contrib/gis/db/models/query.py
GeoQuerySet._geomset_attribute
python
def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs):
    s = {
        'geom_args': ('geom',),
        'select_field': GeomField(),
        'procedure_fmt': '%(geo_col)s,%(geom)s',
        'procedure_args': {'geom': geom},
    }
    if connections[self.db].ops.oracle:
        s['procedure_fmt'] += ',%(tolerance)s'
        s['procedure_args']['tolerance'] = tolerance
    return self._spatial_attribute(func, s, **kwargs)
DRY routine for setting up a GeoQuerySet method that attaches a Geometry attribute and takes a Geometry parameter. This is used for geometry set-like operations (e.g., intersection, difference, union, sym_difference).
https://github.com/drexly/openhgsenti/blob/d7806f58c81127d32091d9875a99ac13aef94a8a/lib/django/contrib/gis/db/models/query.py#L745-L761
import warnings from django.contrib.gis.db.models import aggregates from django.contrib.gis.db.models.fields import ( GeometryField, LineStringField, PointField, get_srid_info, ) from django.contrib.gis.db.models.lookups import GISLookup from django.contrib.gis.db.models.sql import ( AreaField, DistanceField, GeomField, GMLField, ) from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Area, Distance from django.db import connections from django.db.models.expressions import RawSQL from django.db.models.fields import Field from django.db.models.query import QuerySet from django.utils import six from django.utils.deprecation import ( RemovedInDjango20Warning, RemovedInDjango110Warning, ) class GeoQuerySet(QuerySet): def area(self, tolerance=0.05, **kwargs): procedure_args, geo_field = self._spatial_setup( 'area', field_name=kwargs.get('field_name')) s = {'procedure_args': procedure_args, 'geo_field': geo_field, 'setup': False, } connection = connections[self.db] backend = connection.ops if backend.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args']['tolerance'] = tolerance s['select_field'] = AreaField('sq_m') elif backend.postgis or backend.spatialite: if backend.geography: s['select_field'] = AreaField('sq_m') elif not geo_field.geodetic(connection): s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name(connection))) else: raise Exception('Area on geodetic coordinate systems not supported.') return self._spatial_attribute('area', s, **kwargs) def centroid(self, **kwargs): return self._geom_attribute('centroid', **kwargs) def collect(self, **kwargs): warnings.warn( "The collect GeoQuerySet method is deprecated. Use the Collect() " "aggregate in an aggregate() or annotate() method.", RemovedInDjango110Warning, stacklevel=2 ) return self._spatial_aggregate(aggregates.Collect, **kwargs) def difference(self, geom, **kwargs): return self._geomset_attribute('difference', geom, **kwargs) def distance(self, geom, **kwargs): return self._distance_attribute('distance', geom, **kwargs) def envelope(self, **kwargs): return self._geom_attribute('envelope', **kwargs) def extent(self, **kwargs): warnings.warn( "The extent GeoQuerySet method is deprecated. Use the Extent() " "aggregate in an aggregate() or annotate() method.", RemovedInDjango110Warning, stacklevel=2 ) return self._spatial_aggregate(aggregates.Extent, **kwargs) def extent3d(self, **kwargs): warnings.warn( "The extent3d GeoQuerySet method is deprecated. 
Use the Extent3D() " "aggregate in an aggregate() or annotate() method.", RemovedInDjango110Warning, stacklevel=2 ) return self._spatial_aggregate(aggregates.Extent3D, **kwargs) def force_rhr(self, **kwargs): return self._geom_attribute('force_rhr', **kwargs) def geojson(self, precision=8, crs=False, bbox=False, **kwargs): backend = connections[self.db].ops if not backend.geojson: raise NotImplementedError('Only PostGIS 1.3.4+ and SpatiaLite 3.0+ ' 'support GeoJSON serialization.') if not isinstance(precision, six.integer_types): raise TypeError('Precision keyword must be set with an integer.') options = 0 if crs and bbox: options = 3 elif bbox: options = 1 elif crs: options = 2 s = {'desc': 'GeoJSON', 'procedure_args': {'precision': precision, 'options': options}, 'procedure_fmt': '%(geo_col)s,%(precision)s,%(options)s', } return self._spatial_attribute('geojson', s, **kwargs) def geohash(self, precision=20, **kwargs): s = {'desc': 'GeoHash', 'procedure_args': {'precision': precision}, 'procedure_fmt': '%(geo_col)s,%(precision)s', } return self._spatial_attribute('geohash', s, **kwargs) def gml(self, precision=8, version=2, **kwargs): backend = connections[self.db].ops s = {'desc': 'GML', 'procedure_args': {'precision': precision}} if backend.postgis: s['procedure_fmt'] = '%(version)s,%(geo_col)s,%(precision)s' s['procedure_args'] = {'precision': precision, 'version': version} if backend.oracle: s['select_field'] = GMLField() return self._spatial_attribute('gml', s, **kwargs) def intersection(self, geom, **kwargs): return self._geomset_attribute('intersection', geom, **kwargs) def kml(self, **kwargs): s = {'desc': 'KML', 'procedure_fmt': '%(geo_col)s,%(precision)s', 'procedure_args': {'precision': kwargs.pop('precision', 8)}, } return self._spatial_attribute('kml', s, **kwargs) def length(self, **kwargs): return self._distance_attribute('length', None, **kwargs) def make_line(self, **kwargs): warnings.warn( "The make_line GeoQuerySet method is deprecated. 
Use the MakeLine() " "aggregate in an aggregate() or annotate() method.", RemovedInDjango110Warning, stacklevel=2 ) return self._spatial_aggregate(aggregates.MakeLine, geo_field_type=PointField, **kwargs) def mem_size(self, **kwargs): return self._spatial_attribute('mem_size', {}, **kwargs) def num_geom(self, **kwargs): return self._spatial_attribute('num_geom', {}, **kwargs) def num_points(self, **kwargs): return self._spatial_attribute('num_points', {}, **kwargs) def perimeter(self, **kwargs): return self._distance_attribute('perimeter', None, **kwargs) def point_on_surface(self, **kwargs): return self._geom_attribute('point_on_surface', **kwargs) def reverse_geom(self, **kwargs): s = {'select_field': GeomField()} kwargs.setdefault('model_att', 'reverse_geom') if connections[self.db].ops.oracle: s['geo_field_type'] = LineStringField return self._spatial_attribute('reverse', s, **kwargs) def scale(self, x, y, z=0.0, **kwargs): if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D scaling.') s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s', 'procedure_args': {'x': x, 'y': y}, 'select_field': GeomField(), } else: s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args': {'x': x, 'y': y, 'z': z}, 'select_field': GeomField(), } return self._spatial_attribute('scale', s, **kwargs) def snap_to_grid(self, *args, **kwargs): if False in [isinstance(arg, (float,) + six.integer_types) for arg in args]: raise TypeError('Size argument(s) for the grid must be a float or integer values.') nargs = len(args) if nargs == 1: size = args[0] procedure_fmt = '%(geo_col)s,%(size)s' procedure_args = {'size': size} elif nargs == 2: xsize, ysize = args procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize': xsize, 'ysize': ysize} elif nargs == 4: xsize, ysize, xorigin, yorigin = args procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize': xsize, 'ysize': ysize, 'xorigin': xorigin, 'yorigin': yorigin} else: raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.') s = {'procedure_fmt': procedure_fmt, 'procedure_args': procedure_args, 'select_field': GeomField(), } return self._spatial_attribute('snap_to_grid', s, **kwargs) def svg(self, relative=False, precision=8, **kwargs): relative = int(bool(relative)) if not isinstance(precision, six.integer_types): raise TypeError('SVG precision keyword argument must be an integer.') s = { 'desc': 'SVG', 'procedure_fmt': '%(geo_col)s,%(rel)s,%(precision)s', 'procedure_args': { 'rel': relative, 'precision': precision, } } return self._spatial_attribute('svg', s, **kwargs) def sym_difference(self, geom, **kwargs): return self._geomset_attribute('sym_difference', geom, **kwargs) def translate(self, x, y, z=0.0, **kwargs): if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D translation.') s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s', 'procedure_args': {'x': x, 'y': y}, 'select_field': GeomField(), } else: s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args': {'x': x, 'y': y, 'z': z}, 'select_field': GeomField(), } return self._spatial_attribute('translate', s, **kwargs) def transform(self, srid=4326, **kwargs): if not isinstance(srid, six.integer_types): raise TypeError('An integer SRID must be provided.') field_name = kwargs.get('field_name') self._spatial_setup('transform', field_name=field_name) self.query.add_context('transformed_srid', srid) 
return self._clone() def union(self, geom, **kwargs): return self._geomset_attribute('union', geom, **kwargs) def unionagg(self, **kwargs): warnings.warn( "The unionagg GeoQuerySet method is deprecated. Use the Union() " "aggregate in an aggregate() or annotate() method.", RemovedInDjango110Warning, stacklevel=2 ) return self._spatial_aggregate(aggregates.Union, **kwargs) def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None): connection = connections[self.db] func = getattr(connection.ops, att, False) if desc is None: desc = att if not func: raise NotImplementedError('%s stored procedure not available on ' 'the %s backend.' % (desc, connection.ops.name)) procedure_args = {'function': func} geo_field = self._geo_field(field_name) if not geo_field: raise TypeError('%s output only available on GeometryFields.' % func) if geo_field_type is not None and not isinstance(geo_field, geo_field_type): raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__)) procedure_args['geo_col'] = self._geocol_select(geo_field, field_name) return procedure_args, geo_field def _spatial_aggregate(self, aggregate, field_name=None, geo_field_type=None, tolerance=0.05): geo_field = self._geo_field(field_name) if not geo_field: raise TypeError('%s aggregate only available on GeometryFields.' % aggregate.name) if geo_field_type is not None and not isinstance(geo_field, geo_field_type): raise TypeError('%s aggregate may only be called on %ss.' % (aggregate.name, geo_field_type.__name__)) agg_col = field_name or geo_field.name agg_kwargs = {} if connections[self.db].ops.oracle: agg_kwargs['tolerance'] = tolerance return self.aggregate(geoagg=aggregate(agg_col, **agg_kwargs))['geoagg'] def _spatial_attribute(self, att, settings, field_name=None, model_att=None): warnings.warn( "The %s GeoQuerySet method is deprecated. See GeoDjango Functions " "documentation to find the expression-based replacement." 
% att, RemovedInDjango20Warning, stacklevel=2 ) settings.setdefault('desc', None) settings.setdefault('geom_args', ()) settings.setdefault('geom_field', None) settings.setdefault('procedure_args', {}) settings.setdefault('procedure_fmt', '%(geo_col)s') settings.setdefault('select_params', []) connection = connections[self.db] if settings.get('setup', True): default_args, geo_field = self._spatial_setup( att, desc=settings['desc'], field_name=field_name, geo_field_type=settings.get('geo_field_type')) for k, v in six.iteritems(default_args): settings['procedure_args'].setdefault(k, v) else: geo_field = settings['geo_field'] if not isinstance(model_att, six.string_types): model_att = att for name in settings['geom_args']: geom = geo_field.get_prep_value(settings['procedure_args'][name]) params = geo_field.get_db_prep_lookup('contains', geom, connection=connection) geom_placeholder = geo_field.get_placeholder(geom, None, connection) old_fmt = '%%(%s)s' % name new_fmt = geom_placeholder % '%%s' settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt) settings['select_params'].extend(params) fmt = '%%(function)s(%s)' % settings['procedure_fmt'] if settings.get('select_field'): select_field = settings['select_field'] if connection.ops.oracle: select_field.empty_strings_allowed = False else: select_field = Field() self.query.add_annotation( RawSQL(fmt % settings['procedure_args'], settings['select_params'], select_field), model_att) return self def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs): procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name')) connection = connections[self.db] geodetic = geo_field.geodetic(connection) geography = geo_field.geography if geodetic: dist_att = 'm' else: dist_att = Distance.unit_attname(geo_field.units_name(connection)) distance = func == 'distance' length = func == 'length' perimeter = func == 'perimeter' if not (distance or length or perimeter): raise ValueError('Unknown distance function: %s' % func) geom_3d = geo_field.dim == 3 lookup_params = [geom or 'POINT (0 0)', 0] backend = connection.ops if spheroid or (backend.postgis and geodetic and (not geography) and length): lookup_params.append('spheroid') lookup_params = geo_field.get_prep_value(lookup_params) params = geo_field.get_db_prep_lookup('distance_lte', lookup_params, connection=connection) geom_args = bool(geom) if backend.oracle: if distance: procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s' elif length or perimeter: procedure_fmt = '%(geo_col)s,%(tolerance)s' procedure_args['tolerance'] = tolerance else: srid = self.query.get_context('transformed_srid') if srid: u, unit_name, s = get_srid_info(srid, connection) geodetic = unit_name.lower() in geo_field.geodetic_units if geodetic and not connection.features.supports_distance_geodetic: raise ValueError( 'This database does not support linear distance ' 'calculations on geodetic coordinate systems.' 
) if distance: if srid: geom_args = False procedure_fmt = '%s(%%(geo_col)s, %s)' % (backend.transform, srid) if geom.srid is None or geom.srid == srid: if backend.spatialite: procedure_fmt += ', %s(%%%%s, %s)' % (backend.from_text, srid) else: procedure_fmt += ', %%s' else: if backend.spatialite: procedure_fmt += (', %s(%s(%%%%s, %s), %s)' % ( backend.transform, backend.from_text, geom.srid, srid)) else: procedure_fmt += ', %s(%%%%s, %s)' % (backend.transform, srid) else: procedure_fmt = '%(geo_col)s,%(geom)s' if not geography and geodetic: if not backend.geography: if not isinstance(geo_field, PointField): raise ValueError('Spherical distance calculation only supported on PointFields.') if not str(Geometry(six.memoryview(params[0].ewkb)).geom_type) == 'Point': raise ValueError( 'Spherical distance calculation only supported with ' 'Point Geometry parameters' ) if spheroid: procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function': backend.distance_spheroid, 'spheroid': params[1]}) else: procedure_args.update({'function': backend.distance_sphere}) elif length or perimeter: procedure_fmt = '%(geo_col)s' if not geography and geodetic and length: procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function': backend.length_spheroid, 'spheroid': params[1]}) elif geom_3d and connection.features.supports_3d_functions: if perimeter: procedure_args.update({'function': backend.perimeter3d}) elif length: procedure_args.update({'function': backend.length3d}) s = {'select_field': DistanceField(dist_att), 'setup': False, 'geo_field': geo_field, 'procedure_args': procedure_args, 'procedure_fmt': procedure_fmt, } if geom_args: s['geom_args'] = ('geom',) s['procedure_args']['geom'] = geom elif geom: s['select_params'] = [backend.Adapter(geom)] return self._spatial_attribute(func, s, **kwargs) def _geom_attribute(self, func, tolerance=0.05, **kwargs): s = {'select_field': GeomField()} if connections[self.db].ops.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args'] = {'tolerance': tolerance} return self._spatial_attribute(func, s, **kwargs)
Apache License 2.0
osmr/imgclsmob
tensorflow2/tf2cv/models/densenet.py
densenet169
python
def densenet169(**kwargs):
    return get_densenet(blocks=169, model_name="densenet169", **kwargs)
DenseNet-169 model from 'Densely Connected Convolutional Networks,' https://arxiv.org/abs/1608.06993.

Parameters:
----------
pretrained : bool, default False
    Whether to load the pretrained weights for the model.
root : str, default '~/.tensorflow/models'
    Location for keeping the model parameters.
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/tensorflow2/tf2cv/models/densenet.py#L282-L293
__all__ = ['DenseNet', 'densenet121', 'densenet161', 'densenet169', 'densenet201', 'DenseUnit', 'TransitionBlock'] import os import tensorflow as tf import tensorflow.keras.layers as nn from .common import pre_conv1x1_block, pre_conv3x3_block, AvgPool2d, SimpleSequential, get_channel_axis, flatten from .preresnet import PreResInitBlock, PreResActivation class DenseUnit(nn.Layer): def __init__(self, in_channels, out_channels, dropout_rate, data_format="channels_last", **kwargs): super(DenseUnit, self).__init__(**kwargs) self.data_format = data_format self.use_dropout = (dropout_rate != 0.0) bn_size = 4 inc_channels = out_channels - in_channels mid_channels = inc_channels * bn_size self.conv1 = pre_conv1x1_block( in_channels=in_channels, out_channels=mid_channels, data_format=data_format, name="conv1") self.conv2 = pre_conv3x3_block( in_channels=mid_channels, out_channels=inc_channels, data_format=data_format, name="conv2") if self.use_dropout: self.dropout = nn.Dropout( rate=dropout_rate, name="dropout") def call(self, x, training=None): identity = x x = self.conv1(x, training=training) x = self.conv2(x, training=training) if self.use_dropout: x = self.dropout(x, training=training) x = tf.concat([identity, x], axis=get_channel_axis(self.data_format)) return x class TransitionBlock(nn.Layer): def __init__(self, in_channels, out_channels, data_format="channels_last", **kwargs): super(TransitionBlock, self).__init__(**kwargs) self.conv = pre_conv1x1_block( in_channels=in_channels, out_channels=out_channels, data_format=data_format, name="conv") self.pool = AvgPool2d( pool_size=2, strides=2, padding=0) def call(self, x, training=None): x = self.conv(x, training=training) x = self.pool(x) return x class DenseNet(tf.keras.Model): def __init__(self, channels, init_block_channels, dropout_rate=0.0, in_channels=3, in_size=(224, 224), classes=1000, data_format="channels_last", **kwargs): super(DenseNet, self).__init__(**kwargs) self.in_size = in_size self.classes = classes self.data_format = data_format self.features = SimpleSequential(name="features") self.features.add(PreResInitBlock( in_channels=in_channels, out_channels=init_block_channels, data_format=data_format, name="init_block")) in_channels = init_block_channels for i, channels_per_stage in enumerate(channels): stage = SimpleSequential(name="stage{}".format(i + 1)) if i != 0: stage.add(TransitionBlock( in_channels=in_channels, out_channels=(in_channels // 2), data_format=data_format, name="trans{}".format(i + 1))) in_channels = in_channels // 2 for j, out_channels in enumerate(channels_per_stage): stage.add(DenseUnit( in_channels=in_channels, out_channels=out_channels, dropout_rate=dropout_rate, data_format=data_format, name="unit{}".format(j + 1))) in_channels = out_channels self.features.add(stage) self.features.add(PreResActivation( in_channels=in_channels, data_format=data_format, name="post_activ")) self.features.add(nn.AveragePooling2D( pool_size=7, strides=1, data_format=data_format, name="final_pool")) self.output1 = nn.Dense( units=classes, input_dim=in_channels, name="output1") def call(self, x, training=None): x = self.features(x, training=training) x = flatten(x, self.data_format) x = self.output1(x) return x def get_densenet(blocks, model_name=None, pretrained=False, root=os.path.join("~", ".tensorflow", "models"), **kwargs): if blocks == 121: init_block_channels = 64 growth_rate = 32 layers = [6, 12, 24, 16] elif blocks == 161: init_block_channels = 96 growth_rate = 48 layers = [6, 12, 36, 24] elif blocks == 169: 
init_block_channels = 64 growth_rate = 32 layers = [6, 12, 32, 32] elif blocks == 201: init_block_channels = 64 growth_rate = 32 layers = [6, 12, 48, 32] else: raise ValueError("Unsupported DenseNet version with number of layers {}".format(blocks)) from functools import reduce channels = reduce(lambda xi, yi: xi + [reduce(lambda xj, yj: xj + [xj[-1] + yj], [growth_rate] * yi, [xi[-1][-1] // 2])[1:]], layers, [[init_block_channels * 2]])[1:] net = DenseNet( channels=channels, init_block_channels=init_block_channels, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file in_channels = kwargs["in_channels"] if ("in_channels" in kwargs) else 3 input_shape = (1,) + (in_channels,) + net.in_size if net.data_format == "channels_first" else (1,) + net.in_size + (in_channels,) net.build(input_shape=input_shape) net.load_weights( filepath=get_model_file( model_name=model_name, local_model_store_dir_path=root)) return net def densenet121(**kwargs): return get_densenet(blocks=121, model_name="densenet121", **kwargs) def densenet161(**kwargs): return get_densenet(blocks=161, model_name="densenet161", **kwargs)
MIT License
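A minimal instantiation sketch for the densenet169 factory above. The import path simply mirrors the file path in the record, `pretrained=False` avoids any weight download, and the 224x224 channels-last dummy input matches the model's default input size; the expected output shape is an assumption based on the 1000-class head in the context.

import tensorflow as tf
from tf2cv.models.densenet import densenet169

# Build the model without pretrained weights (nothing is fetched).
net = densenet169(pretrained=False)

# One dummy RGB image in channels-last layout.
x = tf.zeros((1, 224, 224, 3))
y = net(x, training=False)
print(y.shape)  # expected: (1, 1000) class logits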
chainer/chainer-benchmark
benchmarks/utils/backend.py
backends
python
def backends(*modes):
    assert all([m in _backend_modes for m in modes])

    def _wrap_class(klass):
        assert isinstance(klass, type)
        return _inject_backend_mode(klass, modes)

    return _wrap_class
Class decorator to parameterize the benchmark class with backends.

This is a special form of :func:`parameterize` to parameterize the backend
variation. For all `time_*` functions and the `setup` function in the class,
this decorator:

* wraps the function to be called with the Chainer configuration
  (`use_cudnn` and `use_ideep`) set to the current backend variation.
* wraps the function to perform CPU/GPU synchronization after the benchmark
  when the current backend variation uses GPU. The time taken for
  synchronization is counted as elapsed time in the benchmark.
* injects the array module (`cupy` or `numpy`, depending on the current
  variation) as `self.xp` so that benchmark code can use it to work with the
  array module of each backend.
* provides access to the `is_backend_gpu()` and `is_backend_ideep()` methods
  so that benchmark code can use them to change behavior depending on the
  backend variation (e.g., `if is_backend_gpu(): model.to_gpu()`).

This decorator adds a parameter axis named `backend`.

Note that the `cpu-ideep` mode will automatically be skipped if the current
benchmark setup does not support it, e.g., when running the benchmark against
an older Chainer version that does not support iDeep.

You cannot apply the `parameterize` decorator to a class already decorated by
this decorator. If you want to use `parameterize` along with this decorator,
make `parameterize` the innermost (i.e., the closest to the class declaration)
decorator.

Example of usage is as follows:

>>> @backends('gpu', 'gpu-cudnn', 'cpu', 'cpu-ideep')
... class ConvolutionBenchmark(object):
...     def time_benchmark(self):
...         ...

You can temporarily limit the backend variations by setting a list of
comma-separated backend names to the CHAINER_BENCHMARK_BACKENDS environment
variable. For example, ``CHAINER_BENCHMARK_BACKENDS=gpu-cudnn,cpu-ideep`` can
be used to skip running the benchmark for ``gpu`` and ``cpu``.
https://github.com/chainer/chainer-benchmark/blob/8d0c8f5052b5e2a85ad522ff48899ffc9a2bfafb/benchmarks/utils/backend.py#L38-L87
from functools import wraps import inspect import os import warnings import chainer import cupy import numpy from benchmarks.utils.helper import _is_func from benchmarks.utils.helper import parameterize from benchmarks.utils.helper import sync _backend_modes = [ 'gpu', 'gpu-cudnn', 'cpu', 'cpu-ideep', ] _enabled_backend_modes = ( os.environ['CHAINER_BENCHMARK_BACKENDS'].split(',') if 'CHAINER_BENCHMARK_BACKENDS' in os.environ else _backend_modes ) assert all([x in _backend_modes for x in _enabled_backend_modes])
MIT License
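`_inject_backend_mode` is not shown in the record, so the following is only a simplified stand-in for the pattern the docstring describes: a class decorator that records the allowed backend names as an asv-style parameter axis on the benchmark class. The names `params` and `param_names` follow asv conventions and are assumptions here, not the project's actual implementation.

def simple_backends(*modes):
    """Illustrative class decorator: expose backend names as a parameter axis."""
    def _wrap_class(klass):
        # asv-style parameterization: one axis named "backend" with the given modes.
        klass.params = [list(modes)]
        klass.param_names = ["backend"]
        return klass
    return _wrap_class


@simple_backends('cpu', 'gpu')
class DummyBenchmark:
    def time_something(self, backend):
        pass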
lianghongzhuo/pointnetgpd
dex-net/src/dexnet/grasping/grasp.py
ParallelJawPtGrasp3D.distance
python
def distance(g1, g2, alpha=0.05):
    center_dist = np.linalg.norm(g1.center - g2.center)
    axis_dist = (2.0 / np.pi) * np.arccos(np.abs(g1.axis.dot(g2.axis)))
    return center_dist + alpha * axis_dist
Evaluates the distance between two grasps.

Parameters
----------
g1 : :obj:`ParallelJawPtGrasp3D`
    the first grasp to use
g2 : :obj:`ParallelJawPtGrasp3D`
    the second grasp to use
alpha : float
    parameter weighting rotational versus spatial distance

Returns
-------
float
    distance between grasps g1 and g2
https://github.com/lianghongzhuo/pointnetgpd/blob/c61ab1111d08007ecb41972142acdb67ed6a496d/dex-net/src/dexnet/grasping/grasp.py#L213-L232
from abc import ABCMeta, abstractmethod from copy import deepcopy import IPython import logging import matplotlib.pyplot as plt import numpy as np from numpy.linalg import inv, norm import time from autolab_core import Point, RigidTransform from meshpy import Sdf3D, StablePose try: from gqcnn import Grasp2D except: pass from dexnet import abstractstatic from dexnet.grasping import Contact3D, GraspableObject3D class Grasp(object): __metaclass__ = ABCMeta samples_per_grid = 2 @abstractmethod def close_fingers(self, obj): pass @abstractmethod def configuration(self): pass @abstractmethod def frame(self): pass @abstractstatic def params_from_configuration(configuration): pass @abstractstatic def configuration_from_params(*params): pass class PointGrasp(Grasp): __metaclass__ = ABCMeta @abstractmethod def create_line_of_action(g, axis, width, obj, num_samples): pass class ParallelJawPtGrasp3D(PointGrasp): def __init__(self, configuration, frame='object', grasp_id=None): grasp_center, grasp_axis, grasp_width, grasp_angle, jaw_width, min_grasp_width = ParallelJawPtGrasp3D.params_from_configuration(configuration) self.center_ = grasp_center self.axis_ = grasp_axis / np.linalg.norm(grasp_axis) self.max_grasp_width_ = grasp_width self.jaw_width_ = jaw_width self.min_grasp_width_ = min_grasp_width self.approach_angle_ = grasp_angle self.frame_ = frame self.grasp_id_ = grasp_id @property def center(self): return self.center_ @center.setter def center(self, x): self.center_ = x @property def axis(self): return self.axis_ @property def open_width(self): return self.max_grasp_width_ @property def close_width(self): return self.min_grasp_width_ @property def jaw_width(self): return self.jaw_width_ @property def approach_angle(self): return self.approach_angle_ @property def configuration(self): return ParallelJawPtGrasp3D.configuration_from_params(self.center_, self.axis_, self.max_grasp_width_, self.approach_angle_, self.jaw_width_, self.min_grasp_width_) @property def frame(self): return self.frame_ @property def id(self): return self.grasp_id_ @frame.setter def frame(self, f): self.frame_ = f @approach_angle.setter def approach_angle(self, angle): self.approach_angle_ = angle @property def endpoints(self): return self.center_ - (self.max_grasp_width_ / 2.0) * self.axis_, self.center_ + ( self.max_grasp_width_ / 2.0) * self.axis_, @staticmethod
MIT License
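A worked numeric sketch of the distance formula in the record above, ||c1 - c2|| + alpha * (2/pi) * arccos(|a1 . a2|). The grasp centers and axes are invented values, packed into a tiny stand-in class so the formula can be evaluated without the full dex-net grasp object.

import numpy as np

class FakeGrasp:
    """Stand-in with just the attributes the distance formula needs."""
    def __init__(self, center, axis):
        self.center = np.asarray(center, dtype=float)
        axis = np.asarray(axis, dtype=float)
        self.axis = axis / np.linalg.norm(axis)

g1 = FakeGrasp([0.0, 0.0, 0.0], [1.0, 0.0, 0.0])
g2 = FakeGrasp([0.0, 0.1, 0.0], [0.0, 1.0, 0.0])

center_dist = np.linalg.norm(g1.center - g2.center)               # 0.1
axis_dist = (2.0 / np.pi) * np.arccos(abs(g1.axis.dot(g2.axis)))   # 1.0 for perpendicular axes
print(center_dist + 0.05 * axis_dist)                              # 0.1 + 0.05 * 1.0 = 0.15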
ericssonresearch/calvin-base
calvin/csparser/parser.py
CalvinParser.p_values
python
def p_values(self, p):
    p[0] = p[1] + [p[2].value] if p[1] is not None else []
values : values value COMMA
       | values value
       | empty
https://github.com/ericssonresearch/calvin-base/blob/bc4645c2061c30ca305a660e48dc86e3317f5b6f/calvin/csparser/parser.py#L385-L389
import os import ply.lex as lex import ply.yacc as yacc import calvin_rules from calvin_rules import tokens as calvin_tokens import astnode as ast from astprint import BraceFormatter from calvin.utilities.issuetracker import IssueTracker _parser_instance = None def get_parser(): global _parser_instance if _parser_instance is None: _parser_instance = CalvinParser() return _parser_instance class CalvinParser(object): def __init__(self, lexer=None): super(CalvinParser, self).__init__() if lexer: self.lexer = lexer else: self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=False) this_file = os.path.realpath(__file__) containing_dir = os.path.dirname(this_file) self.parser = yacc.yacc(module=self, debug=True, optimize=False, outputdir=containing_dir) tokens = calvin_tokens precedence = ( ('left', 'OR'), ('left', 'AND'), ('right', 'UNOT'), ) def p_script(self, p): root = ast.Node() root.add_children(p[1] + p[2] + p[3]) p[0] = root def p_empty(self, p): pass def p_opt_constdefs(self, p): p[0] = p[1] or [] def p_constdefs(self, p): if len(p) == 3: p[0] = p[1] + [p[2]] else: p[0] = [p[1]] def p_constdef(self, p): constdef = ast.Constant(ident=p[2], arg=p[4], debug_info=self.debug_info(p, 1)) p[0] = constdef def p_opt_compdefs(self, p): p[0] = p[1] or [] def p_compdefs(self, p): if len(p) == 3: p[0] = p[1] + [p[2]] else: p[0] = [p[1]] def p_compdef(self, p): p[0] = ast.Component(name=p[2], arg_names=p[4], inports=p[6], outports=p[8], docstring=p[10], program=p[11], debug_info=self.debug_info(p, 1)) def p_docstring(self, p): p[0] = p[1] or "Someone(TM) should write some documentation for this component." def p_comp_statements(self, p): if len(p) == 2: p[0] = [p[1]] else: p[0] = p[1] + [p[2]] def p_comp_statement(self, p): p[0] = p[1] def p_opt_program(self, p): p[0] = [] if p[1] is None else [ast.Block(program=p[1], namespace='__scriptname__', debug_info=self.debug_info(p, 1))] def p_program(self, p): if len(p) == 2: p[0] = [p[1]] else: p[0] = p[1] + [p[2]] def p_statement(self, p): p[0] = p[1] def p_assignment(self, p): p[0] = ast.Assignment(ident=p[1], actor_type=p[3], args=p[5], debug_info=self.debug_info(p, 1)) def p_opt_direction(self, p): if p[1] is None: p[0] = None else: if p[2] not in ['in', 'out']: info = { 'line': p.lineno(2), 'col': self._find_column(p.lexpos(2)) } self.issuetracker.add_error('Invalid direction ({}).'.format(p[2]), info) p[0] = p[2] def p_port_property(self, p): _, (actor, port), direction, _, args, _ = p[:] p[0] = ast.PortProperty(actor=actor, port=port, direction=direction, args=args, debug_info=self.debug_info(p, 3)) def p_internal_port_property(self, p): _, (actor, port), direction, _, args, _ = p[:] p[0] = ast.PortProperty(actor=actor, port=port, direction=direction, args=args, debug_info=self.debug_info(p, 3)) def p_link_error(self, p): info = { 'line': p.lineno(2), 'col': self._find_column(p.lexpos(2)) } self.issuetracker.add_error('Pointless construct.', info) def p_link(self, p): p[0] = ast.Link(outport=p[1], inport=p[3], debug_info=self.debug_info(p, 1)) def p_void(self, p): p[0] = ast.Void(debug_info=self.debug_info(p, 1)) def p_inport_list(self, p): if len(p) == 2: p[0] = ast.PortList() p[0].add_child(p[1]) else: p[1].add_child(p[3]) p[0] = p[1] def p_real_inport_list(self, p): if len(p) == 2: p[0] = ast.PortList() p[0].add_child(p[1]) else: p[1].add_child(p[3]) p[0] = p[1] def p_inport(self, p): p[0]=p[1] def p_transformed_inport(self, p): arg, label = p[1] p[0] = ast.TransformedPort(port=p[2], value=arg, label=label, debug_info=self.debug_info(p, 
2)) def p_implicit_outport(self, p): arg, label = (p[1], None) if len(p) == 2 else (p[2], p[1]) p[0] = ast.ImplicitPort(arg=arg, label=label, debug_info=self.debug_info(p, 1)) def p_real_or_internal_inport(self, p): p[0] = p[1] def p_opt_tag(self, p): p[0] = p[1] if p[1] is None else p[2] def p_tag_value(self, p): p[0] = p[1] def p_real_inport(self, p): _, tag, (actor, port) = p[:] p[0] = ast.InPort(actor=actor, port=port, tag=tag, debug_info=self.debug_info(p, 2)) def p_real_outport(self, p): actor, port = p[1] p[0] = ast.OutPort(actor=actor, port=port, debug_info=self.debug_info(p, 1)) def p_internal_inport(self, p): _, port = p[1] p[0] = ast.InternalInPort(port=port, debug_info=self.debug_info(p, 1)) def p_internal_outport(self, p): _, port = p[1] p[0] = ast.InternalOutPort(port=port, debug_info=self.debug_info(p, 1)) def p_port_transform(self, p): p[0] = (p[2], None) if len(p) == 4 else (p[3], p[2]) def p_qualified_port(self, p): p[0] = (p[1], p[3]) def p_unqualified_port(self, p): p[0] = (None, p[2]) def p_label(self, p): p[0] = p[2] def p_named_args(self, p): p[0] = p[1] + [p[2]] if p[1] is not None else [] def p_named_arg(self, p): p[0] = ast.NamedArg(ident=p[1], arg=p[3], debug_info=self.debug_info(p, 1)) def p_argument(self, p): p[0] = p[1] def p_opt_argnames(self, p): p[0] = p[1] if p[1] is not None else [] def p_argnames(self, p): p[0] = [p[1]] if len(p) == 2 else p[1]+ [p[3]] def p_identifiers(self, p): p[0] = [p[1]] if len(p) == 2 else p[1]+ [p[3]] def p_identifier(self, p): p[0] = ast.Id(ident=p[1], debug_info=self.debug_info(p, 1)) def p_string(self, p): p[0] = p[1] if len(p) == 2 else p[1] + p[2] def p_value(self, p): p[0] = ast.Value(value=p[1], debug_info=self.debug_info(p, 1)) def p_bool(self, p): p[0] = bool(p.slice[1].type == 'TRUE') def p_null(self, p): p[0] = None def p_dictionary(self, p): p[0] = dict(p[2]) def p_members(self, p): p[0] = p[1] + [p[2]] if p[1] is not None else [] def p_member(self, p): p[0] = (p[1], p[3].value)
Apache License 2.0
amadeusitgroup/graphdash
graphdash/flask_utils.py
dns_resolve
python
def dns_resolve(ip_addr):
    try:
        name = socket.gethostbyaddr(ip_addr)[0]
    except (socket.herror, socket.gaierror):
        name = None
    return name
Safe DNS query.
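A minimal standalone sketch of the same safe reverse-DNS pattern, independent of the graphdash module; the sample addresses are only illustrative.

import socket

def reverse_dns(ip_addr):
    # Reverse-resolve an IP address, returning None instead of raising
    # when the lookup fails (no PTR record, bad address, ...).
    try:
        return socket.gethostbyaddr(ip_addr)[0]
    except (socket.herror, socket.gaierror):
        return None

print(reverse_dns("127.0.0.1"))    # typically 'localhost'
print(reverse_dns("203.0.113.7"))  # documentation range, usually None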
https://github.com/amadeusitgroup/graphdash/blob/20e3c69e6209b4f6427d6cca078808bf20ba7709/graphdash/flask_utils.py#L38-L46
from functools import wraps
from datetime import datetime
import socket

from flask import current_app, request, make_response


def after_request_log(response):
    name = dns_resolve(request.remote_addr)

    current_app.logger.warn(u"""[client {ip} {host}] {http} "{method} {path}" {status}
    Request: {method} {path}
    Version: {http}
    Status: {status}
    Url: {url}
    IP: {ip}
    Hostname: {host}
    Agent: {agent_platform} | {agent_browser} | {agent_browser_version}
    Raw Agent: {agent}
    """.format(method=request.method,
               path=request.path,
               url=request.url,
               ip=request.remote_addr,
               host=name if name is not None else '?',
               agent_platform=request.user_agent.platform,
               agent_browser=request.user_agent.browser,
               agent_browser_version=request.user_agent.version,
               agent=request.user_agent.string,
               http=request.environ.get('SERVER_PROTOCOL'),
               status=response.status))
    return response
Apache License 2.0
voxel51/eta
eta/core/module.py
ModuleMetadata.has_output
python
def has_output(self, name):
    return name in self.outputs
Returns True/False if the module has an output `name`.
https://github.com/voxel51/eta/blob/e51510fda0722ac7cadb17b109bad413a6602ed3/eta/core/module.py#L607-L609
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from builtins import * from future.utils import iteritems from collections import OrderedDict from glob import glob import logging import os import eta from eta.core.config import Config, ConfigError, Configurable from eta.core.diagram import HasBlockDiagram, BlockdiagModule import eta.core.logging as etal import eta.core.serial as etas import eta.core.types as etat import eta.core.utils as etau logger = logging.getLogger(__name__) def run(module_name, module_config_or_path): if etau.is_str(module_config_or_path): return _run(module_name, module_config_or_path) with etau.TempDir() as d: module_config_path = os.path.join(d, "config.json") etas.write_json(module_config_or_path, module_config_path) return _run(module_name, module_config_path) def _run(module_name, module_config_path): module_exe = find_exe(module_name) args = ["python", module_exe, module_config_path] return etau.call(args) def load_all_metadata(): return {k: _load_metadata(v) for k, v in iteritems(find_all_metadata())} def load_metadata(module_name): return _load_metadata(find_metadata(module_name)) def _load_metadata(config): metadata = ModuleMetadata.from_json(config) name = os.path.splitext(os.path.basename(config))[0] if metadata.info.name != name: raise ModuleMetadataError( "Name '%s' from ModuleMetadata must match module name '%s'" % (metadata.info.name, name) ) return metadata def find_all_metadata(): d = {} mdirs = etau.make_search_path(eta.config.module_dirs) for mdir in mdirs: for path in glob(os.path.join(mdir, "*.json")): name = os.path.splitext(os.path.basename(path))[0] if name not in d: d[name] = path else: logger.debug( "Module '%s' already exists; ignoring %s", name, path ) return d def find_metadata(module_name): try: return find_all_metadata()[module_name] except KeyError: raise ModuleMetadataError("Could not find module '%s'" % module_name) def find_exe(module_name=None, module_metadata=None): if module_metadata is None: module_metadata = load_metadata(module_name) meta_path = find_metadata(module_metadata.info.name) exe_path = os.path.join( os.path.dirname(meta_path), module_metadata.info.exe ) if not os.path.isfile(exe_path): raise ModuleMetadataError( "Could not find module executable '%s'" % exe_path ) return exe_path def setup(module_config, pipeline_config_path=None): if pipeline_config_path: from eta.core.pipeline import PipelineConfig pipeline_config = PipelineConfig.from_json(pipeline_config_path) module_config.base.eta_config.update(pipeline_config.eta_config) module_config.base.logging_config = pipeline_config.logging_config etal.custom_setup(module_config.base.logging_config) eta.set_config_settings(**module_config.base.eta_config) class BaseModuleConfig(Config): def __init__(self, d): self.base = self.parse_object( d, "base", BaseModuleConfigSettings, default=None ) if self.base is None: self.base = BaseModuleConfigSettings.default() class BaseModuleConfigSettings(Config): def __init__(self, d): self.eta_config = self.parse_dict(d, "eta_config", default={}) self.logging_config = self.parse_object( d, "logging_config", etal.LoggingConfig, default=etal.LoggingConfig.default(), ) class ModuleMetadataConfig(Config): def __init__(self, d): self.info = self.parse_object(d, "info", ModuleInfoConfig) self.inputs = self.parse_object_array(d, "inputs", ModuleInputConfig) self.outputs = self.parse_object_array( d, "outputs", ModuleOutputConfig ) 
self.parameters = self.parse_object_array( d, "parameters", ModuleParameterConfig ) def attributes(self): return ["info", "inputs", "outputs", "parameters"] class ModuleInfoConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.version = self.parse_string(d, "version") self.description = self.parse_string(d, "description") self.exe = self.parse_string(d, "exe") def attributes(self): return ["name", "type", "version", "description", "exe"] class ModuleInputConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) def attributes(self): return ["name", "type", "description", "required"] class ModuleOutputConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) def attributes(self): return ["name", "type", "description", "required"] class ModuleParameterConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) if not self.required: self.default = self.parse_raw(d, "default") elif "default" in d: raise ConfigError( "Module parameter '%s' is required, so it should not have a " "default value" % self.name ) def attributes(self): attrs = ["name", "type", "description", "required"] if not self.required: attrs.append("default") return attrs class ModuleInfo(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.version = config.version self.description = config.description self.exe = config.exe @staticmethod def _parse_type(type_str): type_ = etat.parse_type(type_str) if not etat.is_module(type_): raise ModuleMetadataError( "'%s' is not a valid module type" % type_ ) return type_ class ModuleInput(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.description = config.description self.required = config.required def is_valid_path(self, path): return self.type.is_valid_path(path) @property def is_required(self): return self.required def _parse_type(self, type_str): type_ = etat.parse_type(type_str) if not etat.is_data(type_): raise ModuleMetadataError( ( "Module input '%s' has type '%s' but must be a subclass " "of Data" ) % (self.name, type_) ) return type_ class ModuleOutput(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.description = config.description self.required = config.required def is_valid_path(self, path): return self.type.is_valid_path(path) @property def is_required(self): return self.required def _parse_type(self, type_str): type_ = etat.parse_type(type_str) if not etat.is_concrete_data(type_): raise ModuleMetadataError( ( "Module output '%s' has type '%s' but must be a subclass " "of ConcreteData" ) % (self.name, type_) ) return type_ class ModuleParameter(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.name, config.type) self.description = config.description 
self.required = config.required if not self.required: self._default = config.default self._validate_default() def is_valid_value(self, val): if self.is_builtin: return self.type.is_valid_value(val) return self.type.is_valid_path(val) @property def is_required(self): return self.required @property def is_builtin(self): return etat.is_builtin(self.type) @property def is_data(self): return etat.is_data(self.type) @property def default_value(self): if self.is_required: raise ModuleMetadataError( "Module parameter '%s' is required, so it has no default " "value" % self.name ) return self._default @staticmethod def _parse_type(name, type_str): type_ = etat.parse_type(type_str) if not etat.is_builtin(type_) and not etat.is_concrete_data(type_): raise ModuleMetadataError( "Module parameter '%s' has type '%s' but must be a subclass " "of Builtin or ConcreteData" % (name, type_) ) return type_ def _validate_default(self): if self._default is None: is_valid = True elif self.is_builtin: is_valid = self.type.is_valid_value(self._default) else: is_valid = self.type.is_valid_path(self._default) if not is_valid: raise ModuleMetadataError( "Default value '%s' is invalid for module parameter '%s' of " "'%s'" % (self._default, self.name, self.type) ) class ModuleMetadata(Configurable, HasBlockDiagram): def __init__(self, config): self.validate(config) self.config = config self.info = None self.inputs = OrderedDict() self.outputs = OrderedDict() self.parameters = OrderedDict() self._parse_metadata(config) def has_input(self, name): return name in self.inputs
Apache License 2.0
purestorage-openconnect/py-pure-client
pypureclient/flasharray/FA_2_2/configuration.py
Configuration.to_debug_report
python
def to_debug_report(self):
    return "Python SDK Debug Report:\n"\
           "OS: {env}\n"\
           "Python Version: {pyversion}\n"\
           "Version of the API: 2.2\n"\
           "SDK Package Version: 2.2".\
           format(env=sys.platform, pyversion=sys.version)
Gets the essential information for debugging.

:return: The report for debugging.
https://github.com/purestorage-openconnect/py-pure-client/blob/2d9fdef0b73321cea9613e7d1eb881b42845099b/pypureclient/flasharray/FA_2_2/configuration.py#L225-L235
from __future__ import absolute_import import copy import logging import multiprocessing import sys import urllib3 import six from six.moves import http_client as httplib class TypeWithDefault(type): def __init__(cls, name, bases, dct): super(TypeWithDefault, cls).__init__(name, bases, dct) cls._default = None def __call__(cls): if cls._default is None: cls._default = type.__call__(cls) return copy.copy(cls._default) def set_default(cls, default): cls._default = copy.copy(default) class Configuration(six.with_metaclass(TypeWithDefault, object)): def __init__(self): self.host = "https://localhost" self.temp_folder_path = None self.api_key = {} self.api_key_prefix = {} self.username = "" self.password = "" self.logger = {} self.logger["package_logger"] = logging.getLogger("pypureclient.flasharray.FA_2_2") self.logger["urllib3_logger"] = logging.getLogger("urllib3") self.logger_format = '%(asctime)s %(levelname)s %(message)s' self.logger_stream_handler = None self.logger_file_handler = None self.logger_file = None self.debug = False self.verify_ssl = True self.ssl_ca_cert = None self.cert_file = None self.key_file = None self.assert_hostname = None self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 self.proxy = None self.safe_chars_for_path_param = '' @property def logger_file(self): return self.__logger_file @logger_file.setter def logger_file(self, value): self.__logger_file = value if self.__logger_file: self.logger_file_handler = logging.FileHandler(self.__logger_file) self.logger_file_handler.setFormatter(self.logger_formatter) for _, logger in six.iteritems(self.logger): logger.addHandler(self.logger_file_handler) if self.logger_stream_handler: logger.removeHandler(self.logger_stream_handler) else: self.logger_stream_handler = logging.StreamHandler() self.logger_stream_handler.setFormatter(self.logger_formatter) for _, logger in six.iteritems(self.logger): logger.addHandler(self.logger_stream_handler) if self.logger_file_handler: logger.removeHandler(self.logger_file_handler) @property def debug(self): return self.__debug @debug.setter def debug(self, value): self.__debug = value if self.__debug: for _, logger in six.iteritems(self.logger): logger.setLevel(logging.DEBUG) httplib.HTTPConnection.debuglevel = 1 else: for _, logger in six.iteritems(self.logger): logger.setLevel(logging.WARNING) httplib.HTTPConnection.debuglevel = 0 @property def logger_format(self): return self.__logger_format @logger_format.setter def logger_format(self, value): self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format) def get_api_key_with_prefix(self, identifier): if (self.api_key.get(identifier) and self.api_key_prefix.get(identifier)): return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] elif self.api_key.get(identifier): return self.api_key[identifier] def get_basic_auth_token(self): return urllib3.util.make_headers( basic_auth=self.username + ':' + self.password ).get('authorization') def auth_settings(self): return { }
BSD 2-Clause Simplified License
bvbohnen/x4_customizer
Plugins/GUI/VFS_Window/VFS_Tree_Model.py
VFS_Tree_Model.Gen_Child_Q_Items
python
def Gen_Child_Q_Items(self, vfs_or_q_item):
    if isinstance(vfs_or_q_item, VFS_Item):
        vfs_item = vfs_or_q_item
        q_item = None
    else:
        vfs_item = vfs_or_q_item.vfs_item
        q_item = vfs_or_q_item

    if vfs_item in self.q_expanded_vfs_items:
        return []
    self.q_expanded_vfs_items.append(vfs_item)

    child_q_items = vfs_item.Get_Child_Q_Items(
        include_folders = True,
        base_q_item = q_item)

    for child in child_q_items:
        self.path_q_item_dict[child.vfs_item.virtual_path] = child

    return child_q_items
For a given vfs_item or q_item, tell it to generate its q_item children,
and record the generated nodes. Does nothing if the vfs_item has already
been expanded. (Used to delay child q item generation, to avoid startup
slowdown.) Returns any new child q_items.
https://github.com/bvbohnen/x4_customizer/blob/6f865008690916a66a44c97331d9a2692baedb35/Plugins/GUI/VFS_Window/VFS_Tree_Model.py#L57-L90
from collections import OrderedDict from time import time from Framework import Settings from PyQt5 import QtWidgets from PyQt5.QtGui import QStandardItemModel, QStandardItem from PyQt5.QtCore import QItemSelectionModel from .VFS_Item import VFS_Item class VFS_Tree_Model(QStandardItemModel): def __init__(self, window, qt_view): super().__init__(window) self.path_item_dict = None self.path_q_item_dict = {} self.q_expanded_vfs_items = [] self.last_selected_virtual_path = None self.qt_view = qt_view self.window = window self.qt_view .setModel(self) self.qt_view.selectionModel().selectionChanged.connect( self.Handle_selectionChanged) self.qt_view.expanded.connect( self.Handle_expanded) return
MIT License
pytlab/catplot
catplot/canvas.py
Canvas._set_axes
python
def _set_axes(self):
    self.axes = self.figure.add_subplot(111, facecolor=self.facecolor)

    if self.edgecolor:
        for child in self.axes.get_children():
            if isinstance(child, Spine):
                child.set_color(self.edgecolor)

    if self.x_ticks is not None:
        self.axes.set_xticks(self.x_ticks)

    if self.y_ticks is not None:
        self.axes.set_yticks(self.y_ticks)
Set some essential attributes of axes in canvas. We put these attribute settings here for code reuse.
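As a plain-matplotlib illustration of what this helper configures (not catplot itself), the sketch below applies the same facecolor, spine edgecolor, and explicit tick settings to a fresh axes; the colors and tick values are invented.

import matplotlib.pyplot as plt
from matplotlib.spines import Spine

fig = plt.figure(figsize=(6, 4))
ax = fig.add_subplot(111, facecolor="#f0f0f0")

# Color every spine (the axes border), as _set_axes does when edgecolor is set.
for child in ax.get_children():
    if isinstance(child, Spine):
        child.set_color("#3355aa")

# Explicit tick positions, analogous to the x_ticks / y_ticks attributes.
ax.set_xticks([0.0, 0.5, 1.0])
ax.set_yticks([0, 1, 2, 3])

fig.savefig("canvas_demo.png")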
https://github.com/pytlab/catplot/blob/63ad46218b17d5cdffdd026dad7d775cf4caa50b/catplot/canvas.py#L72-L90
import logging from collections import namedtuple import uuid import numpy as np import matplotlib.pyplot as plt from matplotlib.spines import Spine import catplot.descriptors as dc from catplot.grid_components.edges import GridEdge, Arrow2D from catplot.grid_components.nodes import GridNode class Canvas(object): margin_ratio = dc.MarginRatio("margin_ratio") def __init__(self, **kwargs): self.margin_ratio = kwargs.pop("margin_ratio", 0.1) self.figsize = kwargs.pop("figsize", None) self.dpi = kwargs.pop("dpi", None) self.facecolor = kwargs.pop("facecolor", None) self.edgecolor = kwargs.pop("edgecolor", None) self.x_ticks = kwargs.pop("x_ticks", None) self.y_ticks = kwargs.pop("y_ticks", None) self.figure = plt.figure(figsize=self.figsize, dpi=self.dpi) self._logger = logging.getLogger(self.__class__.__name__) self._logger.setLevel(logging.INFO) formatter = logging.Formatter("%(name)s %(levelname)-8s %(message)s") handler = logging.StreamHandler() handler.setLevel(logging.INFO) handler.setFormatter(formatter) self._logger.addHandler(handler)
MIT License
cmberryau/pypowerbi
pypowerbi/datasets.py
Datasets.tables_from_get_tables_response
python
def tables_from_get_tables_response(cls, response):
    response_dict = json.loads(response.text)

    tables = []
    for entry in response_dict[cls.get_datasets_value_key]:
        tables.append(Table.from_dict(entry))

    return tables
Creates a list of tables from a http response object

:param response: The http response object
:return: A list of tables created from the given http response object
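A small standalone sketch of the same pattern, pulling the 'value' array out of a Power BI style JSON reply, with a plain dict standing in for the library's Table objects; the sample payload is invented.

import json

# A fake HTTP response body shaped like the Power BI "get tables" reply.
sample_body = '{"value": [{"name": "Sales"}, {"name": "Customers"}]}'

response_dict = json.loads(sample_body)
tables = [entry["name"] for entry in response_dict["value"]]
print(tables)  # ['Sales', 'Customers']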
https://github.com/cmberryau/pypowerbi/blob/dbee379775fb2889cf3f1dff4267cc91af623e1a/pypowerbi/datasets.py#L542-L555
import requests import json from pypowerbi.utils import convert_datetime_fields from requests.exceptions import HTTPError from .dataset import * class Datasets: groups_snippet = 'groups' datasets_snippet = 'datasets' tables_snippet = 'tables' rows_snippet = 'rows' parameters_snippet = 'parameters' set_parameters_snippet = 'Default.UpdateParameters' bind_gateway_snippet = 'Default.BindToGateway' refreshes_snippet = 'refreshes' refresh_schedule_snippet = 'refreshSchedule' get_datasets_value_key = 'value' def __init__(self, client): self.client = client self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' def count(self, group_id=None): return len(self.get_datasets(group_id)) def has_dataset(self, dataset_id, group_id=None): datasets = self.get_datasets(group_id) for dataset in datasets: if dataset.id == str(dataset_id): return True return False def get_datasets(self, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') return self.datasets_from_get_datasets_response(response) def get_dataset(self, dataset_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') return Dataset.from_dict(json.loads(response.text)) def post_dataset(self, dataset, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}' headers = self.client.auth_header json_dict = DatasetEncoder().default(dataset) response = requests.post(url, headers=headers, json=json_dict) if response.status_code != 201: raise HTTPError(response, f'Post Datasets request returned http code: {response.json()}') return Dataset.from_dict(json.loads(response.text)) def delete_dataset(self, dataset_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}' headers = self.client.auth_header response = requests.delete(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Delete Dataset request returned http error: {response.json()}') def delete_all_datasets(self, group_id=None): datasets = self.get_datasets(group_id) for dataset in datasets: self.delete_dataset(group_id, dataset.id) def get_tables(self, dataset_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.tables_snippet}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') return self.tables_from_get_tables_response(response) def put_table(self, dataset_id, table_name, table, group_id=None): if group_id is None: groups_part = 
'/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' f'{self.tables_snippet}/{table_name}' headers = self.client.auth_header json_dict = TableEncoder().default(table) response = requests.post(url, headers=headers, json=json_dict) if response.status_code != 200: raise HTTPError(response, f'Post row request returned http error: {response.json()}') def post_rows(self, dataset_id, table_name, rows, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' f'{self.tables_snippet}/{table_name}/{self.rows_snippet}' headers = self.client.auth_header row_encoder = RowEncoder() json_dict = { 'rows': [row_encoder.default(x) for x in rows] } response = requests.post(url, headers=headers, json=json_dict) if response.status_code != 200: raise HTTPError(response, f'Post row request returned http error: {response.json()}') def delete_rows(self, dataset_id, table_name, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' f'{self.tables_snippet}/{table_name}/{self.rows_snippet}' headers = self.client.auth_header response = requests.delete(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Post row request returned http error: {response.json()}') def get_dataset_parameters(self, dataset_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.parameters_snippet}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Get Dataset parameters request returned http error: {response.json()}') return json.loads(response.text) def set_dataset_parameters(self, dataset_id, params, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.set_parameters_snippet}' update_details = [{"name": k, "newValue": str(v)} for k, v in params.items()] body = {"updateDetails": update_details} headers = self.client.auth_header response = requests.post(url, headers=headers, json=body) if response.status_code != 200: raise HTTPError(response, f'Setting dataset parameters failed with http error: {response.json()}') def refresh_dataset(self, dataset_id, notify_option=None, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refreshes_snippet}' headers = self.client.auth_header if notify_option is not None: json_dict = { 'notifyOption': notify_option } else: json_dict = None response = requests.post(url, headers=headers, json=json_dict) if response.status_code != 202: raise HTTPError(response, f'Refresh dataset request returned http error: {response.json()}') def get_dataset_gateway_datasources(self, dataset_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}{self.datasets_snippet}/{dataset_id}/datasources' headers = self.client.auth_header response = requests.get(url, 
headers=headers) if response.status_code != 200: print(url) raise HTTPError(response, f'Dataset gateway datasources request returned http error: {response.json()}') data_sources = json.loads(response.text)["value"] return data_sources def bind_dataset_gateway(self, dataset_id, gateway_id, group_id=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.bind_gateway_snippet}' body = {"gatewayObjectId": gateway_id} headers = self.client.auth_header response = requests.post(url, headers=headers, json=body) if response.status_code != 200: raise HTTPError(response, f'Binding gateway to dataset failed with http error: {response.json()}') def get_dataset_refresh_history(self, dataset_id, group_id=None, top=None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refreshes_snippet}' if top is not None: url = f'{url}?$top={top}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(response, f'Dataset refresh history request returned http error: {response.json()}') refresh_data = json.loads(response.text)["value"] time_fields = ['startTime', 'endTime'] refresh_data = convert_datetime_fields(refresh_data, time_fields) return refresh_data def update_refresh_schedule( self, dataset_id: str, refresh_schedule: RefreshSchedule, group_id: Optional[str] = None ): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refresh_schedule_snippet}' headers = self.client.auth_header body = RefreshScheduleRequest(refresh_schedule).as_dict() response = requests.patch(url, headers=headers, json=body) if response.status_code != 200: raise HTTPError(f'Update refresh schedule request returned the following http error:{response.json()}') def get_refresh_schedule(self, dataset_id: str, group_id: Optional[str] = None): if group_id is None: groups_part = '/' else: groups_part = f'/{self.groups_snippet}/{group_id}/' url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refresh_schedule_snippet}' headers = self.client.auth_header response = requests.get(url, headers=headers) if response.status_code != 200: raise HTTPError(f'Get refresh schedule request returned the following http error:{response.json()}') return self.refresh_schedule_from_get_refresh_schedule_response(response) @classmethod def datasets_from_get_datasets_response(cls, response): response_dict = json.loads(response.text) datasets = [] for entry in response_dict[cls.get_datasets_value_key]: datasets.append(Dataset.from_dict(entry)) return datasets @classmethod
MIT License
buriburisuri/sugartensor
sugartensor/sg_layer.py
sg_aconv
python
def sg_aconv(tensor, opt):
    opt += tf.sg_opt(size=(3, 3), rate=2, pad='SAME')
    opt.size = opt.size if isinstance(opt.size, (tuple, list)) else [opt.size, opt.size]

    w = tf.sg_initializer.he_uniform('W', (opt.size[0], opt.size[1], opt.in_dim, opt.dim),
                                     regularizer=opt.regularizer, summary=opt.summary)
    b = tf.sg_initializer.constant('b', opt.dim, summary=opt.summary) if opt.bias else 0

    out = tf.nn.atrous_conv2d(tensor, w, rate=opt.rate, padding=opt.pad) + b

    return out
r"""Applies a 2-D atrous (or dilated) convolution. Args: tensor: A 4-D `Tensor` (automatically passed by decorator). opt: size: A tuple/list of positive integers of length 2 representing `[kernel height, kernel width]`. Can be an integer if both values are the same. If not specified, (3, 3) is set automatically. rate: A positive integer. The stride with which we sample input values across the `height` and `width` dimensions. Default is 2. in_dim: A positive `integer`. The size of input dimension. dim: A positive `integer`. The size of output dimension. pad: Either `SAME` (Default) or `VALID`. bias: Boolean. If True, biases are added. regularizer: A (Tensor -> Tensor or None) function; the result of applying it on a newly created variable will be added to the collection tf.GraphKeys.REGULARIZATION_LOSSES and can be used for regularization summary: If True, summaries are added. The default is True. Returns: A `Tensor` with the same type as `tensor`.
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_layer.py#L141-L175
from __future__ import absolute_import import sugartensor as tf __author__ = 'namju.kim@kakaobrain.com' @tf.sg_layer_func def sg_bypass(tensor, opt): return tensor @tf.sg_layer_func def sg_dense(tensor, opt): w = tf.sg_initializer.he_uniform('W', (opt.in_dim, opt.dim), regularizer=opt.regularizer, summary=opt.summary) b = tf.sg_initializer.constant('b', opt.dim, summary=opt.summary) if opt.bias else 0 out = tf.matmul(tensor, w) + b return out @tf.sg_layer_func def sg_conv(tensor, opt): opt += tf.sg_opt(size=(3, 3), stride=(1, 1, 1, 1), pad='SAME') opt.size = opt.size if isinstance(opt.size, (tuple, list)) else [opt.size, opt.size] opt.stride = opt.stride if isinstance(opt.stride, (tuple, list)) else [1, opt.stride, opt.stride, 1] opt.stride = [1, opt.stride[0], opt.stride[1], 1] if len(opt.stride) == 2 else opt.stride w = tf.sg_initializer.he_uniform('W', (opt.size[0], opt.size[1], opt.in_dim, opt.dim), regularizer=opt.regularizer, summary=opt.summary) b = tf.sg_initializer.constant('b', opt.dim, summary=opt.summary) if opt.bias else 0 out = tf.nn.conv2d(tensor, w, strides=opt.stride, padding=opt.pad) + b return out @tf.sg_layer_func def sg_conv1d(tensor, opt): opt += tf.sg_opt(size=2, stride=1, pad='SAME') w = tf.sg_initializer.he_uniform('W', (opt.size, opt.in_dim, opt.dim), regularizer=opt.regularizer, summary=opt.summary) b = tf.sg_initializer.constant('b', opt.dim, summary=opt.summary) if opt.bias else 0 out = tf.nn.conv1d(tensor, w, stride=opt.stride, padding=opt.pad) + b return out @tf.sg_layer_func
MIT License
geoscienceaustralia/pyrate
utils/plot_sbas_network.py
epoch_baselines
python
def epoch_baselines(epochs, bperp, masidx, slvidx, supermaster):
    nifgs = len(bperp)
    nepochs = len(epochs)
    print(nifgs, "interferograms and", nepochs, "epochs in the network.")

    A = np.zeros((nifgs+1, nepochs))
    A[0, supermaster] = 1
    b = np.zeros(nifgs+1)
    b[1:nifgs+1] = bperp

    for i in range(nifgs):
        imas = masidx[i]
        islv = slvidx[i]
        A[i+1, imas] = -1
        A[i+1, islv] = 1

    x = np.linalg.lstsq(A, b, rcond=None)
    return x[:][0]
Determine relative perpendicular baselines of epochs from
interferometric baselines

INPUT:
    epochs      list of epoch dates
    bperp       list of interferogram absolute perpendicular baselines
    masidx      list of master indices from get_index()
    slvidx      list of slave indices from get_index()
    supermaster epoch to set relative bperp to zero (integer)

OUTPUT:
    epochbperp  list of epoch relative perpendicular baselines
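A self-contained sketch of the same least-squares inversion with invented numbers: three epochs, three interferograms, and the first epoch pinned to a relative baseline of zero. It only uses NumPy, so it can be run without the PyRate utilities.

import numpy as np

# Invented example: 3 epochs, 3 interferograms between them.
bperp  = [30.0, -50.0, -20.0]   # ifg perpendicular baselines (master -> slave)
masidx = [0, 1, 0]              # master epoch index of each ifg
slvidx = [1, 2, 2]              # slave epoch index of each ifg
supermaster = 0                 # epoch whose relative baseline is fixed to 0

nifgs, nepochs = len(bperp), 3
A = np.zeros((nifgs + 1, nepochs))
b = np.zeros(nifgs + 1)
A[0, supermaster] = 1           # constraint row: baseline(supermaster) = 0
b[1:] = bperp
for i in range(nifgs):
    A[i + 1, masidx[i]] = -1
    A[i + 1, slvidx[i]] = 1

epochbperp = np.linalg.lstsq(A, b, rcond=None)[0]
print(epochbperp)               # approximately [0., 30., -20.]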
https://github.com/geoscienceaustralia/pyrate/blob/c2260b9fddaa86d2e561dca24fc422ac19faf64f/utils/plot_sbas_network.py#L121-L159
import rasterio import glob import numpy as np import matplotlib.pyplot as plt import matplotlib.dates as mdates import os, sys from mpl_toolkits.axes_grid1 import make_axes_locatable from datetime import datetime, timedelta print('') if len(sys.argv) != 2: print('Exiting: Provide path to <PyRate outdir> as command line argument') print('') print('Usage: python3 utils/plot_time_series.py <path to PyRate outdir>') exit() else: path = sys.argv[1] print(f"Looking for PyRate products in: {path}") def readtif(tifname: str): print(f"Reading file: {tifname}") with rasterio.open(tifname) as src: md = src.tags() return md def plot_baseline_time_sbas(epochs, Bperps, epoch1, epoch2, filename): fig = plt.figure() ax1 = fig.add_subplot(111) divider = make_axes_locatable(ax1) for n, m in zip(epoch1, epoch2): x = [epochs[n], epochs[m]] y = [Bperps[n], Bperps[m]] ax1.plot_date(x, y, xdate=True, ydate=False, linestyle='-', color = 'r', linewidth=1.0) ax1.plot_date(epochs, Bperps, xdate=True, ydate=False, marker="o", markersize=14, markerfacecolor="black", linestyle="None") labels = [i+1 for i in range(len(Bperps))] for a, b, c in zip(epochs, Bperps, labels): ax1.text(a, b, c, color="white", ha="center", va="center", size=9, weight="bold") years = mdates.MonthLocator(bymonth=[1, 7]) months = mdates.MonthLocator() yearsFmt = mdates.DateFormatter("%Y-%m-%d") ax1.xaxis.set_major_locator(years) ax1.xaxis.set_major_formatter(yearsFmt) ax1.xaxis.set_minor_locator(months) date_min = epochs.min() date_max = epochs.max() date_range = date_max - date_min date_add = date_range.days/15 ax1.set_xlim(date_min - timedelta(days=date_add), date_max + timedelta(days=date_add)) Bperp_min = min(Bperps) Bperp_max = max(Bperps) Bperp_range = Bperp_max - Bperp_min ax1.set_ylim(Bperp_min - Bperp_range/15, Bperp_max + Bperp_range/15) ax1.set_xlabel("Date (YYYY-MM-DD)") ax1.set_ylabel("Perpendicular Baseline (m)") ax1.grid(True) fig.autofmt_xdate() plt.savefig(filename, orientation="landscape", transparent=False, format="png") return
Apache License 2.0
mlbench/mlbench-core
mlbench_core/dataset/nlp/pytorch/wmt16/wmt16_tokenizer.py
WMT16Tokenizer.segment
python
def segment(self, line):
    line = line.strip().split()
    entry = [self.tok2idx[i] for i in line]
    entry = [wmt16_config.BOS] + entry + [wmt16_config.EOS]
    return entry
Tokenizes single sentence and adds special BOS and EOS tokens.

:param line: sentence

returns: list representing tokenized sentence
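The core of the method is a dictionary lookup plus BOS/EOS framing. A toy sketch with an invented vocabulary and invented special-token ids (the real values live in wmt16_config):

from collections import defaultdict
from functools import partial

# Invented special-token ids; the real ones come from wmt16_config.
PAD, UNK, BOS, EOS = 0, 1, 2, 3

tok2idx = defaultdict(partial(int, UNK), {"hello": 4, "world": 5})

def segment(line):
    tokens = line.strip().split()
    return [BOS] + [tok2idx[t] for t in tokens] + [EOS]

print(segment("hello world"))   # [2, 4, 5, 3]
print(segment("hello there"))   # [2, 4, 1, 3] -- unknown word maps to UNK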
https://github.com/mlbench/mlbench-core/blob/4fd3c7e6f1a5be69e52383ab2eb64cad257218c2/mlbench_core/dataset/nlp/pytorch/wmt16/wmt16_tokenizer.py#L77-L88
import os from collections import defaultdict from functools import partial import torch from mlbench_core.dataset.nlp.pytorch.wmt16 import wmt16_config def _pad_vocabulary(vocab, math): if math == "fp16": pad = 8 elif math == "fp32": pad = 1 else: raise NotImplementedError() vocab_size = len(vocab) padded_vocab_size = (vocab_size + pad - 1) // pad * pad for i in range(0, padded_vocab_size - vocab_size): token = f"madeupword{i:04d}" vocab.append(token) assert len(vocab) % pad == 0 return vocab class WMT16Tokenizer: def __init__( self, base_dir, math_precision=None, separator="@@", ): self.separator = separator vocab = [ wmt16_config.PAD_TOKEN, wmt16_config.UNK_TOKEN, wmt16_config.BOS_TOKEN, wmt16_config.EOS_TOKEN, ] vocab_fname = os.path.join(base_dir, wmt16_config.VOCAB_FNAME) with open(vocab_fname, encoding="utf-8") as vfile: for line in vfile: vocab.append(line.strip()) vocab = _pad_vocabulary(vocab, math_precision) self.vocab_size = len(vocab) self.tok2idx = defaultdict(partial(int, wmt16_config.UNK)) for idx, token in enumerate(vocab): self.tok2idx[token] = idx self.idx2tok = {} for key, value in self.tok2idx.items(): self.idx2tok[value] = key
Apache License 2.0
rwl/muntjac
muntjac/ui/tree_table.py
TreeTable.setAnimationsEnabled
python
def setAnimationsEnabled(self, animationsEnabled):
    self._animationsEnabled = animationsEnabled
    self.requestRepaint()
Animations can be enabled by passing true to this method. Currently
expanding rows slide in from the top and collapsing rows slide out the
same way.

NOTE! not supported in Internet Explorer 6 or 7.

@param animationsEnabled
       true or false whether to enable animations or not.
https://github.com/rwl/muntjac/blob/8db97712edd81b4d25deaaa48587d2a08010f2c8/muntjac/ui/tree_table.py#L465-L474
import logging from muntjac.ui.tree import ExpandEvent, IExpandListener, CollapseEvent, ICollapseListener, COLLAPSE_METHOD, EXPAND_METHOD from muntjac.ui.treetable.hierarchical_container_ordered_wrapper import HierarchicalContainerOrderedWrapper from muntjac.data.container import IOrdered, IHierarchical from muntjac.terminal.gwt.client.ui.v_tree_table import VTreeTable from muntjac.ui.table import Table from muntjac.data.util.container_hierarchical_wrapper import ContainerHierarchicalWrapper from muntjac.ui.treetable.collapsible import ICollapsible from muntjac.data.util.hierarchical_container import HierarchicalContainer logger = logging.getLogger(__name__) class TreeTable(Table, IHierarchical): def __init__(self, caption=None, dataSource=None): if dataSource is None: dataSource = HierarchicalContainer() super(TreeTable, self).__init__(caption, dataSource) self._cStrategy = None self._focusedRowId = None self._hierarchyColumnId = None self._toggledItemId = None self._animationsEnabled = None self._clearFocusedRowPending = None def getContainerStrategy(self): if self._cStrategy is None: if isinstance(self.getContainerDataSource(), ICollapsible): self._cStrategy = CollapsibleStrategy(self) else: self._cStrategy = HierarchicalStrategy(self) return self._cStrategy def paintRowAttributes(self, target, itemId): super(TreeTable, self).paintRowAttributes(target, itemId) depth = self.getContainerStrategy().getDepth(itemId) target.addAttribute('depth', depth) if self.getContainerDataSource().areChildrenAllowed(itemId): target.addAttribute('ca', True) isOpen = self.getContainerStrategy().isNodeOpen(itemId) target.addAttribute('open', isOpen) def paintRowIcon(self, target, cells, indexInRowbuffer): if self.getRowHeaderMode() == self.ROW_HEADER_MODE_HIDDEN: cell = cells[self.CELL_ITEMID][indexInRowbuffer] itemIcon = self.getItemIcon(cell) if itemIcon is not None: target.addAttribute('icon', itemIcon) elif cells[self.CELL_ICON][indexInRowbuffer] is not None: cell = cells[self.CELL_ICON][indexInRowbuffer] target.addAttribute('icon', cell) def changeVariables(self, source, variables): super(TreeTable, self).changeVariables(source, variables) if 'toggleCollapsed' in variables: obj = variables.get('toggleCollapsed') itemId = self.itemIdMapper.get(obj) self._toggledItemId = itemId self.toggleChildVisibility(itemId) if 'selectCollapsed' in variables: if self.isSelectable(): self.select(itemId) elif 'focusParent' in variables: key = variables.get('focusParent') refId = self.itemIdMapper.get(key) itemId = self.getParent(refId) self.focusParent(itemId) def focusParent(self, itemId): inView = False inPageId = self.getCurrentPageFirstItemId() i = 0 while inPageId is not None and i < self.getPageLength(): if inPageId == itemId: inView = True break inPageId = self.nextItemId(inPageId) i += 1 if not inView: self.setCurrentPageFirstItemId(itemId) if self.isSelectable(): if self.isMultiSelect(): self.setValue([itemId]) else: self.setValue(itemId) self.setFocusedRow(itemId) def setFocusedRow(self, itemId): self._focusedRowId = itemId if self._focusedRowId is None: self._clearFocusedRowPending = True self.requestRepaint() def paintContent(self, target): if self._focusedRowId is not None: row = self.itemIdMapper.key(self._focusedRowId) target.addAttribute('focusedRow', row) self._focusedRowId = None elif self._clearFocusedRowPending: target.addAttribute('clearFocusPending', True) self._clearFocusedRowPending = False target.addAttribute('animate', self._animationsEnabled) if self._hierarchyColumnId is not None: 
visibleColumns2 = self.getVisibleColumns() for i in range(len(visibleColumns2)): obj = visibleColumns2[i] if self._hierarchyColumnId == obj: ahci = VTreeTable.ATTRIBUTE_HIERARCHY_COLUMN_INDEX target.addAttribute(ahci, i) break super(TreeTable, self).paintContent(target) self._toggledItemId = None def isPartialRowUpdate(self): return self._toggledItemId is not None def getFirstAddedItemIndex(self): return self.indexOfId(self._toggledItemId) + 1 def getAddedRowCount(self): ds = self.getContainerDataSource() return self.countSubNodesRecursively(ds, self._toggledItemId) def countSubNodesRecursively(self, hc, itemId): count = 0 if (self.getContainerStrategy().isNodeOpen(itemId) or (itemId == self._toggledItemId)): children = hc.getChildren(itemId) if children is not None: count += len(children) if children is not None else 0 for idd in children: count += self.countSubNodesRecursively(hc, idd) return count def getFirstUpdatedItemIndex(self): return self.indexOfId(self._toggledItemId) def getUpdatedRowCount(self): return 1 def shouldHideAddedRows(self): return not self.getContainerStrategy().isNodeOpen(self._toggledItemId) def toggleChildVisibility(self, itemId): self.getContainerStrategy().toggleChildVisibility(itemId) idx = self.getCurrentPageFirstItemIndex() self.setCurrentPageFirstItemIndex(idx, False) self.requestRepaint() if self.isCollapsed(itemId): self.fireCollapseEvent(itemId) else: self.fireExpandEvent(itemId) def size(self): return len(self.getContainerStrategy()) def __len__(self): return self.size() def getContainerDataSource(self): return super(TreeTable, self).getContainerDataSource() def setContainerDataSource(self, newDataSource): self._cStrategy = None if not isinstance(newDataSource, IHierarchical): newDataSource = ContainerHierarchicalWrapper(newDataSource) if not isinstance(newDataSource, IOrdered): newDataSource = HierarchicalContainerOrderedWrapper(newDataSource) super(TreeTable, self).setContainerDataSource(newDataSource) def containerItemSetChange(self, event): self._toggledItemId = None self.getContainerStrategy().containerItemSetChange(event) super(TreeTable, self).containerItemSetChange(event) def getIdByIndex(self, index): return self.getContainerStrategy().getIdByIndex(index) def indexOfId(self, itemId): return self.getContainerStrategy().indexOfId(itemId) def nextItemId(self, itemId): return self.getContainerStrategy().nextItemId(itemId) def lastItemId(self): return self.getContainerStrategy().lastItemId() def prevItemId(self, itemId): return self.getContainerStrategy().prevItemId(itemId) def isLastId(self, itemId): return self.getContainerStrategy().isLastId(itemId) def getItemIds(self): return self.getContainerStrategy().getItemIds() def areChildrenAllowed(self, itemId): return self.getContainerDataSource().areChildrenAllowed(itemId) def getChildren(self, itemId): return self.getContainerDataSource().getChildren(itemId) def getParent(self, itemId=None): if itemId is not None: return self.getContainerDataSource().getParent(itemId) else: super(TreeTable, self).getParent() def hasChildren(self, itemId): return self.getContainerDataSource().hasChildren(itemId) def isRoot(self, itemId): return self.getContainerDataSource().isRoot(itemId) def rootItemIds(self): return self.getContainerDataSource().rootItemIds() def setChildrenAllowed(self, itemId, areChildrenAllowed): return self.getContainerDataSource().setChildrenAllowed(itemId, areChildrenAllowed) def setParent(self, itemId, newParentId): return self.getContainerDataSource().setParent(itemId, newParentId) def 
setCollapsed(self, itemId, collapsed): if self.isCollapsed(itemId) != collapsed: self.toggleChildVisibility(itemId) def isCollapsed(self, itemId): return not self.getContainerStrategy().isNodeOpen(itemId) def setHierarchyColumn(self, hierarchyColumnId): self._hierarchyColumnId = hierarchyColumnId def getHierarchyColumnId(self): return self._hierarchyColumnId def addListener(self, listener, iface=None): if (isinstance(listener, ICollapseListener) and (iface is None or issubclass(iface, ICollapseListener))): self.registerListener(CollapseEvent, listener, COLLAPSE_METHOD) if (isinstance(listener, IExpandListener) and (iface is None or issubclass(iface, IExpandListener))): self.registerListener(ExpandEvent, listener, EXPAND_METHOD) super(TreeTable, self).addListener(listener, iface) def addCallback(self, callback, eventType=None, *args): if eventType is None: eventType = callback._eventType if issubclass(eventType, CollapseEvent): self.registerCallback(CollapseEvent, callback, None, *args) elif issubclass(eventType, ExpandEvent): self.registerCallback(ExpandEvent, callback, None, *args) else: super(TreeTable, self).addCallback(callback, eventType, *args) def removeListener(self, listener, iface=None): if (isinstance(listener, ICollapseListener) and (iface is None or issubclass(iface, ICollapseListener))): self.withdrawListener(CollapseEvent, listener, COLLAPSE_METHOD) if (isinstance(listener, IExpandListener) and (iface is None or issubclass(iface, IExpandListener))): self.withdrawListener(ExpandEvent, listener, EXPAND_METHOD) super(TreeTable, self).removeListener(listener, iface) def removeCallback(self, callback, eventType=None): if eventType is None: eventType = callback._eventType if issubclass(eventType, CollapseEvent): self.withdrawCallback(CollapseEvent, callback) elif issubclass(eventType, ExpandEvent): self.withdrawCallback(ExpandEvent, callback) else: super(TreeTable, self).removeCallback(callback, eventType) def fireExpandEvent(self, itemId): evt = ExpandEvent(self, itemId) self.fireEvent(evt) def fireCollapseEvent(self, itemId): evt = CollapseEvent(self, itemId) self.fireEvent(evt) def isAnimationsEnabled(self): return self._animationsEnabled
Apache License 2.0
kokeshii/rsvpbot
rsvp.py
RSVP.get_this_event
python
def get_this_event(self, message):
    event_id = self.event_id(message)
    return self.events.get(event_id)
Returns the event relevant to this Zulip thread.
https://github.com/kokeshii/rsvpbot/blob/bb7f02c62cee6155f46829d834330a767ec9b945/rsvp.py#L49-L52
from __future__ import with_statement import re import json import rsvp_commands from strings import ERROR_INVALID_COMMAND class RSVP(object): def __init__(self, key_word, backend): self.backend = backend self.key_word = key_word self.command_list = ( rsvp_commands.RSVPInitCommand(key_word), rsvp_commands.RSVPHelpCommand(key_word), rsvp_commands.RSVPCancelCommand(key_word), rsvp_commands.RSVPMoveCommand(key_word), rsvp_commands.RSVPSetLimitCommand(key_word), rsvp_commands.RSVPSetDateCommand(key_word), rsvp_commands.RSVPSetTimeCommand(key_word), rsvp_commands.RSVPSetTimeAllDayCommand(key_word), rsvp_commands.RSVPSetStringAttributeCommand(key_word), rsvp_commands.RSVPSummaryCommand(key_word), rsvp_commands.RSVPPingCommand(key_word), rsvp_commands.RSVPCreditsCommand(key_word), rsvp_commands.RSVPCreateCalendarEventCommand(key_word), rsvp_commands.RSVPSetDurationCommand(key_word), rsvp_commands.RSVPConfirmCommand(key_word) ) self.events = self.backend.get_all_events() def commit_events(self): self.backend.commit_events(self.events) def __exit__(self, type, value, traceback): self.commit_events()
MIT License
probcomp/cgpm
src/utils/sampling.py
mh_sample
python
def mh_sample(
        x, logpdf_target, jump_std, D, num_samples=1, burn=1, lag=1,
        rng=None):
    assert D[0] <= x <= D[1]
    if rng is None:
        rng = gu.gen_rng()

    num_collected = 0
    iters = 0
    samples = []

    t_samples = num_samples * lag + burn
    checkevery = max(20, int(t_samples/100.0))
    accepted = 0.0
    acceptance_rate = 0.0
    iters = 1.0
    aiters = 1.0

    # Default proposal correction (symmetric proposal).
    log_correction = lambda x, x_prime, jstd: 0

    # Choose a jump function matched to the support D.
    if D[0] == 0 and D[1] == 1:
        def jumpfun(x, jstd):
            x_prime = fabs(rng.normal(x, jstd))
            if x_prime > 1.0:
                x_prime = x_prime % 1
            return x_prime
    elif 0 <= D[0] and D[1] == float('inf'):
        jumpfun = lambda x, jstd: fabs(x + rng.normal(0.0, jstd))
    else:
        def jumpfun(x, jstd):
            MAX_TRIALS = 1000
            for _ in xrange(MAX_TRIALS):
                x_prime = rng.normal(x, jstd)
                if D[0] < x_prime < D[1]:
                    return x_prime
            raise RuntimeError('MH failed to rejection sample the proposal.')

        def log_correction(x, x_prime, jstd):
            from scipy.stats import norm
            if D[0] == float('inf') and D[1] == float('inf'):
                return 0
            return norm.logcdf((D[1]-x)/jump_std-(D[0]-x)/jump_std) \
                - norm.logcdf((D[1]-x_prime)/jump_std-(D[0]-x_prime)/jump_std)

    logp = logpdf_target(x)

    while num_collected < num_samples:

        x_prime = jumpfun(x, jump_std)
        assert D[0] < x_prime < D[1]
        logp_prime = logpdf_target(x_prime)

        # Metropolis accept/reject step.
        if log(rng.rand()) < logp_prime - logp:
            x = x_prime
            logp = logp_prime
            accepted += 1.0
            acceptance_rate = accepted/aiters

        if iters > burn and iters % lag == 0:
            num_collected += 1
            samples.append(x)

        # Auto-tune the jump standard deviation from the acceptance rate.
        if iters % checkevery == 0:
            if acceptance_rate >= .4:
                jump_std *= 1.1
            elif acceptance_rate <= .2:
                jump_std *= .9019
            accepted = 0.0
            acceptance_rate = 0.0
            aiters = 0.0

        iters += 1.0
        aiters += 1.0

    if num_samples == 1:
        return samples[0]
    else:
        return samples
Uses MH to sample from logpdf_target.

Parameters
----------
x : float
    Seed point.
logpdf_target : function(x)
    Evaluates the log pdf of the target distribution at x.
jump_std : float
    Standard deviation of jump distance, auto-tunes.
D : tuple<float, float>
    Support of the target distribution.
num_samples : int, optional
    Number of samples to return, default 1.
burn : int, optional
    Number of samples to discard before any are collected, default 1.
lag : int, optional
    Number of moves between successive samples, default 1.

Returns
-------
samples : int or list
    If num_samples == 1 returns a float. Otherwise returns a
    `num_samples` length list.

Example
-------
>>> # Sample from posterior of CRP(x) with exponential(1) prior
>>> x = 1.0
>>> logpdf_target = lambda x : gu.logp_crp(10, [5,3,2] , x) - x
>>> jump_std = 0.5
>>> D = (0.0, float('Inf'))
>>> sample = mh_sample(x, logpdf_target, jump_std, D)
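For readers without cgpm installed, here is a stripped-down, self-contained Metropolis-Hastings loop in the same spirit (fixed jump width, no support handling or auto-tuning), targeting a standard normal; all numbers are illustrative.

import numpy as np

def mh_sketch(x0, logpdf, jump_std, num_samples, burn=100, rng=None):
    # Plain random-walk Metropolis with a symmetric Gaussian proposal.
    rng = np.random.default_rng() if rng is None else rng
    x, logp = x0, logpdf(x0)
    samples = []
    for it in range(num_samples + burn):
        x_prime = rng.normal(x, jump_std)
        logp_prime = logpdf(x_prime)
        if np.log(rng.random()) < logp_prime - logp:   # accept/reject
            x, logp = x_prime, logp_prime
        if it >= burn:
            samples.append(x)
    return np.array(samples)

target = lambda x: -0.5 * x**2          # log-density of N(0, 1), up to a constant
draws = mh_sketch(0.0, target, jump_std=1.0, num_samples=5000)
print(draws.mean(), draws.std())        # roughly 0 and 1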
https://github.com/probcomp/cgpm/blob/56a481829448bddc9cdfebd42f65023287d5b7c7/src/utils/sampling.py#L25-L144
from math import fabs
from math import log

import numpy as np

from cgpm.utils import general as gu
Apache License 2.0
ufkapano/graphs-dict
graphtheory/planarity/halin.py
HalinGraph.other_neighbor
python
def other_neighbor(self, a, b, c):
    neighbors = list(node for node in self._graph_copy.iteradjacent(b)
        if node != a and node != c)
    return neighbors.pop()
Return a neighbor of b which is not included in the triangle.
https://github.com/ufkapano/graphs-dict/blob/ab42c51b8fa2c4cdb8d5cfd0b7de4702824f22c9/graphtheory/planarity/halin.py#L55-L64
from graphtheory.structures.edges import Edge class HalinGraph: def __init__(self, graph): if graph.is_directed(): raise ValueError("the graph is directed") self.graph = graph self._graph_copy = self.graph.copy() self.outer = set() self.degree3 = set(node for node in self.graph.iternodes() if self.graph.degree(node) == 3) self._calls = [] def run(self): while self.degree3: node = self.degree3.pop() if (self._graph_copy.has_node(node) and self._graph_copy.v() > 4 and self._graph_copy.degree(node) == 3): a, b, c = tuple(self._graph_copy.iteradjacent(node)) self._reduce(a, node, b) self._reduce(a, node, c) self._reduce(b, node, c) if not self.is_outer_k4(): raise ValueError("not a Halin graph") self._reconstruct_cycle()
BSD 3-Clause New or Revised License
learningequality/kolibri
kolibri/core/tasks/worker.py
Worker.cancel
python
def cancel(self, job_id):
    try:
        future = self.future_job_mapping[job_id]
        is_future_cancelled = future.cancel()
    except KeyError:
        is_future_cancelled = True

    if is_future_cancelled:
        return True

    if future.running():
        setattr(future, "_is_cancelled", True)
        return False

    return False
Request a cancellation from the futures executor pool.
If that didn't work (because it's already running), then mark a special
variable inside the future that we can check inside a special
check_for_cancel function passed to the job.

:param job_id:
:return:
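The behaviour relied on here is standard concurrent.futures semantics: Future.cancel() only succeeds while the task is still queued. A minimal standalone demonstration (sleep lengths are arbitrary):

import time
from concurrent.futures import ThreadPoolExecutor

with ThreadPoolExecutor(max_workers=1) as pool:
    running = pool.submit(time.sleep, 1)   # starts immediately
    queued = pool.submit(time.sleep, 1)    # waits behind the first task
    time.sleep(0.1)                        # give the first task time to start

    print(queued.cancel())    # True  -- still pending, cancellation succeeds
    print(running.cancel())   # False -- already running, must be flagged instead

    if not running.cancel():
        # Mirror of the worker's fallback: mark the future so the job body
        # can poll the flag and stop itself cooperatively.
        setattr(running, "_is_cancelled", True)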
https://github.com/learningequality/kolibri/blob/1658e8407839cc9f20bfd14ecfa10b2996b2847d/kolibri/core/tasks/worker.py#L186-L210
import logging import traceback from concurrent.futures import CancelledError from kolibri.core.tasks.compat import PoolExecutor from kolibri.core.tasks.job import execute_job from kolibri.core.tasks.job import Priority from kolibri.core.tasks.storage import Storage from kolibri.core.tasks.utils import InfiniteLoopThread logger = logging.getLogger(__name__) class Worker(object): def __init__(self, connection, regular_workers=2, high_workers=1): self.job_future_mapping = {} self.future_job_mapping = {} self.storage = Storage(connection) self.regular_workers = regular_workers self.max_workers = regular_workers + high_workers self.workers = self.start_workers() self.job_checker = self.start_job_checker() def shutdown_workers(self, wait=True): job_ids = list(self.future_job_mapping.keys()) for job_id in job_ids: logger.info("Canceling job id {}.".format(job_id)) self.cancel(job_id) self.workers.shutdown(wait=wait) def start_workers(self): pool = PoolExecutor(max_workers=self.max_workers) return pool def handle_finished_future(self, future): job = self.job_future_mapping[future] del self.job_future_mapping[future] del self.future_job_mapping[job.job_id] try: result = future.result() except CancelledError: self.report_cancelled(job.job_id) return except Exception as e: if hasattr(e, "traceback"): traceback = e.traceback else: traceback = "" self.report_error(job.job_id, e, traceback) return self.report_success(job.job_id, result) def shutdown(self, wait=True): logger.info("Asking job schedulers to shut down.") self.job_checker.stop() self.shutdown_workers(wait=wait) if wait: self.job_checker.join() def start_job_checker(self): t = InfiniteLoopThread( self.check_jobs, thread_name="JOBCHECKER", wait_between_runs=0.2 ) t.start() return t def check_jobs(self): job_to_start = self.get_next_job() while job_to_start: self.start_next_job(job_to_start) job_to_start = self.get_next_job() for job in self.storage.get_canceling_jobs(): job_id = job.job_id if job_id in self.future_job_mapping: self.cancel(job_id) else: self.report_cancelled(job_id) def report_cancelled(self, job_id): self.storage.mark_job_as_canceled(job_id) def report_success(self, job_id, result): self.storage.complete_job(job_id, result=result) def report_error(self, job_id, exc, trace): trace = traceback.format_exc() logger.error("Job {} raised an exception: {}".format(job_id, trace)) self.storage.mark_job_as_failed(job_id, exc, trace) def update_progress(self, job_id, progress, total_progress, stage=""): self.storage.update_job_progress(job_id, progress, total_progress) def get_next_job(self): job = None workers_currently_busy = len(self.future_job_mapping) if workers_currently_busy < self.regular_workers: job = self.storage.get_next_queued_job() elif workers_currently_busy < self.max_workers: job = self.storage.get_next_queued_job(priority_order=[Priority.HIGH]) else: logger.debug("All workers busy.") return None return job def start_next_job(self, job): self.storage.mark_job_as_running(job.job_id) db_type_lookup = { "sqlite": "sqlite", "postgresql": "postgres", } db_type = db_type_lookup[self.storage.engine.dialect.name] future = self.workers.submit( execute_job, job_id=job.job_id, db_type=db_type, db_url=self.storage.engine.url, ) self.job_future_mapping[future] = job self.future_job_mapping[job.job_id] = future future.add_done_callback(self.handle_finished_future) return future
MIT License
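For illustration, a stdlib-only sketch of the two-step cancellation pattern the docstring above describes: ask the executor to drop the future first, then fall back to a cooperative flag the job polls. The job, submit and cancel names below are made up for the sketch and are not Kolibri's API.

import time
from concurrent.futures import ThreadPoolExecutor

cancel_flags = {}
executor = ThreadPoolExecutor(max_workers=1)

def job(check_for_cancel):
    # The job cooperatively polls the flag that cancel() sets when future.cancel() comes too late.
    for _ in range(100):
        if check_for_cancel():
            return "cancelled"
        time.sleep(0.05)
    return "finished"

def submit(job_id):
    cancel_flags[job_id] = False
    return executor.submit(job, lambda: cancel_flags[job_id])

def cancel(job_id, future):
    if not future.cancel():          # step 1: ask the executor to drop the future
        cancel_flags[job_id] = True  # step 2: already running, so flip the cooperative flag

future = submit("job-1")
time.sleep(0.1)                      # let the job start so future.cancel() fails
cancel("job-1", future)
print(future.result())               # -> "cancelled"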
qdata/textattack
textattack/search_methods/greedy_word_swap_wir.py
GreedyWordSwapWIR._get_index_order
python
def _get_index_order(self, initial_text): len_text = len(initial_text.words) if self.wir_method == "unk": leave_one_texts = [ initial_text.replace_word_at_index(i, "[UNK]") for i in range(len_text) ] leave_one_results, search_over = self.get_goal_results(leave_one_texts) index_scores = np.array([result.score for result in leave_one_results]) elif self.wir_method == "weighted-saliency": leave_one_texts = [ initial_text.replace_word_at_index(i, "[UNK]") for i in range(len_text) ] leave_one_results, search_over = self.get_goal_results(leave_one_texts) saliency_scores = np.array([result.score for result in leave_one_results]) softmax_saliency_scores = softmax( torch.Tensor(saliency_scores), dim=0 ).numpy() delta_ps = [] for idx in range(len_text): transformed_text_candidates = self.get_transformations( initial_text, original_text=initial_text, indices_to_modify=[idx], ) if not transformed_text_candidates: delta_ps.append(0.0) continue swap_results, _ = self.get_goal_results(transformed_text_candidates) score_change = [result.score for result in swap_results] if not score_change: delta_ps.append(0.0) continue max_score_change = np.max(score_change) delta_ps.append(max_score_change) index_scores = softmax_saliency_scores * np.array(delta_ps) elif self.wir_method == "delete": leave_one_texts = [ initial_text.delete_word_at_index(i) for i in range(len_text) ] leave_one_results, search_over = self.get_goal_results(leave_one_texts) index_scores = np.array([result.score for result in leave_one_results]) elif self.wir_method == "gradient": victim_model = self.get_victim_model() index_scores = np.zeros(initial_text.num_words) grad_output = victim_model.get_grad(initial_text.tokenizer_input) gradient = grad_output["gradient"] word2token_mapping = initial_text.align_with_model_tokens(victim_model) for i, word in enumerate(initial_text.words): matched_tokens = word2token_mapping[i] if not matched_tokens: index_scores[i] = 0.0 else: agg_grad = np.mean(gradient[matched_tokens], axis=0) index_scores[i] = np.linalg.norm(agg_grad, ord=1) search_over = False elif self.wir_method == "random": index_order = np.arange(len_text) np.random.shuffle(index_order) search_over = False else: raise ValueError(f"Unsupported WIR method {self.wir_method}") if self.wir_method != "random": index_order = (-index_scores).argsort() return index_order, search_over
Returns word indices of ``initial_text`` in descending order of importance.
https://github.com/qdata/textattack/blob/3f0d5290bebc8436a60869576bede9138ea34cda/textattack/search_methods/greedy_word_swap_wir.py#L37-L116
import numpy as np import torch from torch.nn.functional import softmax from textattack.goal_function_results import GoalFunctionResultStatus from textattack.search_methods import SearchMethod from textattack.shared.validators import ( transformation_consists_of_word_swaps_and_deletions, ) class GreedyWordSwapWIR(SearchMethod): def __init__(self, wir_method="unk"): self.wir_method = wir_method
MIT License
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1_job_spec.py
V1JobSpec.active_deadline_seconds
python
def active_deadline_seconds(self):
        return self._active_deadline_seconds
Gets the active_deadline_seconds of this V1JobSpec. # noqa: E501 Specifies the duration in seconds relative to the startTime that the job may be active before the system tries to terminate it; value must be positive integer # noqa: E501 :return: The active_deadline_seconds of this V1JobSpec. # noqa: E501 :rtype: int
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1_job_spec.py#L90-L98
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1JobSpec(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'active_deadline_seconds': 'int', 'backoff_limit': 'int', 'completions': 'int', 'manual_selector': 'bool', 'parallelism': 'int', 'selector': 'V1LabelSelector', 'template': 'V1PodTemplateSpec', 'ttl_seconds_after_finished': 'int' } attribute_map = { 'active_deadline_seconds': 'activeDeadlineSeconds', 'backoff_limit': 'backoffLimit', 'completions': 'completions', 'manual_selector': 'manualSelector', 'parallelism': 'parallelism', 'selector': 'selector', 'template': 'template', 'ttl_seconds_after_finished': 'ttlSecondsAfterFinished' } def __init__(self, active_deadline_seconds=None, backoff_limit=None, completions=None, manual_selector=None, parallelism=None, selector=None, template=None, ttl_seconds_after_finished=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._active_deadline_seconds = None self._backoff_limit = None self._completions = None self._manual_selector = None self._parallelism = None self._selector = None self._template = None self._ttl_seconds_after_finished = None self.discriminator = None if active_deadline_seconds is not None: self.active_deadline_seconds = active_deadline_seconds if backoff_limit is not None: self.backoff_limit = backoff_limit if completions is not None: self.completions = completions if manual_selector is not None: self.manual_selector = manual_selector if parallelism is not None: self.parallelism = parallelism if selector is not None: self.selector = selector self.template = template if ttl_seconds_after_finished is not None: self.ttl_seconds_after_finished = ttl_seconds_after_finished @property
Apache License 2.0
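A minimal usage sketch, assuming the kubernetes_asyncio package is installed; the deadline and backoff values are arbitrary.

from kubernetes_asyncio.client import V1JobSpec, V1PodTemplateSpec

spec = V1JobSpec(active_deadline_seconds=3600, backoff_limit=4,
                 template=V1PodTemplateSpec())   # template is the model's only required field
print(spec.active_deadline_seconds)              # -> 3600
spec.active_deadline_seconds = 7200              # the generated model also defines the matching setter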
porimol/countryinfo
countryinfo/countryinfo.py
CountryInfo.all
python
def all(self):
        _all = self.__countries
        return _all
Return all of the countries' information. :return: dict
https://github.com/porimol/countryinfo/blob/05cdf7522eff3567ae743f90042a4e1b3891a1be/countryinfo/countryinfo.py#L315-L323
from glob import glob from os.path import isfile, realpath, dirname import json from pprint import pprint class CountryInfo: def __init__(self, country_name=None): self.__country_name = country_name.lower() if country_name else '' __file_dir_path = dirname(realpath(__file__)) __country_files = __file_dir_path + '/data/' __files_path = [files for files in glob(__country_files + '*.json')] self.__countries = {} for file_path in __files_path: if isfile(file_path): with open(file_path, encoding='utf-8') as file: country_info = json.load(file) if country_info.get('name', None): self.__countries[country_info['name'].lower()] = country_info if self.__country_name in map(lambda an: an.lower(), country_info.get('altSpellings', [])): self.__country_name = country_info['name'].lower() def info(self): if self.__country_name: _all = self.__countries[self.__country_name] return _all def provinces(self): if self.__country_name: _provinces = self.__countries[self.__country_name]['provinces'] return _provinces def iso(self, alpha=None): if self.__country_name: _iso = self.__countries[self.__country_name]['ISO'] if alpha == 2: return _iso.get('alpha2') elif alpha == 3: return _iso.get('alpha3') return _iso def alt_spellings(self): if self.__country_name: try: _alt_spellings = self.__countries[self.__country_name]['altSpellings'] return _alt_spellings except KeyError: return [] def area(self): if self.__country_name: _area = self.__countries[self.__country_name]['area'] return _area def borders(self): if self.__country_name: _borders = self.__countries[self.__country_name]['borders'] return _borders def calling_codes(self): if self.__country_name: _calling_codes = self.__countries[self.__country_name]['callingCodes'] return _calling_codes def capital(self): if self.__country_name: _capital = self.__countries[self.__country_name]['capital'] return _capital def capital_latlng(self): if self.__country_name: _capital_latlng = self.__countries[self.__country_name]['capital_latlng'] return _capital_latlng def currencies(self): if self.__country_name: _currencies = self.__countries[self.__country_name]['currencies'] return _currencies def demonym(self): if self.__country_name: _demonym = self.__countries[self.__country_name]['demonym'] return _demonym def flag(self): if self.__country_name: _flag = self.__countries[self.__country_name]['flag'] return _flag def geo_json(self): if self.__country_name: _geo_json = self.__countries[self.__country_name]['geoJSON'] return _geo_json def languages(self): if self.__country_name: _languages = self.__countries[self.__country_name]['languages'] return _languages def latlng(self): if self.__country_name: _latlng = self.__countries[self.__country_name]['latlng'] return _latlng def name(self): return self.__country_name def native_name(self): if self.__country_name: _native_name = self.__countries[self.__country_name].get('nativeName') return _native_name def population(self): if self.__country_name: _population = self.__countries[self.__country_name]['population'] return _population def region(self): if self.__country_name: _region = self.__countries[self.__country_name]['region'] return _region def subregion(self): if self.__country_name: _subregion = self.__countries[self.__country_name]['subregion'] return _subregion def timezones(self): if self.__country_name: _timezones = self.__countries[self.__country_name]['timezones'] return _timezones def tld(self): if self.__country_name: _tld = self.__countries[self.__country_name]['tld'] return _tld def translations(self): if 
self.__country_name: try: _translations = self.__countries[self.__country_name]['translations'] return _translations except KeyError: return [] def wiki(self): if self.__country_name: _wiki = self.__countries[self.__country_name]['wiki'] return _wiki
MIT License
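A short usage sketch, assuming the countryinfo package is installed; the "singapore" key is only an example of a country name expected in the bundled data.

from countryinfo import CountryInfo

country = CountryInfo()              # no name is needed just to enumerate the bundled data
everything = country.all()           # dict keyed by lowercase country name
print(len(everything))
print(everything["singapore"]["capital"])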
clawpack/visclaw
src/python/visclaw/data.py
ClawPlotData.iplotclaw
python
def iplotclaw(self):
        return (self._mode == 'iplotclaw')
Return True if interactive plotting with iplotclaw is being done.
https://github.com/clawpack/visclaw/blob/b5c137e86642d155e4112bf1c270ebb6f1452731/src/python/visclaw/data.py#L362-L366
from __future__ import absolute_import from __future__ import print_function import os import copy import numpy as np import re import logging import clawpack.clawutil.data as clawdata import time import clawpack.pyclaw.controller class ClawPlotData(clawdata.ClawData): def __init__(self, controller=None): super(ClawPlotData,self).__init__() if controller: controller.plotdata = self self.add_attribute('rundir',copy.copy(controller.rundir)) self.add_attribute('outdir',copy.copy(controller.outdir)) if len(controller.frames)>0: for i,frame in enumerate(controller.frames): self.framesoln_dict[str(i)] = frame self.add_attribute('format',copy.copy(controller.output_format)) else: self.add_attribute('rundir',os.getcwd()) self.add_attribute('outdir',os.getcwd()) self.add_attribute('format','ascii') self.add_attribute('output_controller', None) self.output_controller = clawpack.pyclaw.controller.OutputController( self.outdir, file_format=self.format) self.add_attribute('plotdir',os.getcwd()) self.add_attribute('overwrite',True) self.add_attribute('plotter','matplotlib') self.add_attribute('msgfile','') self.add_attribute('verbose',True) self.add_attribute('ion',False) self.add_attribute('printfigs',True) self.add_attribute('print_format','png') self.add_attribute('print_framenos','all') self.add_attribute('print_gaugenos','all') self.add_attribute('print_fignos','all') self.add_attribute('iplotclaw_fignos','all') self.add_attribute('latex',True) self.add_attribute('latex_fname','plots') self.add_attribute('latex_title','Clawpack Results') self.add_attribute('latex_framesperpage','all') self.add_attribute('latex_framesperline',2) self.add_attribute('latex_figsperline','all') self.add_attribute('latex_makepdf',False) self.add_attribute('html',True) self.add_attribute('html_index_fname','_PlotIndex.html') self.add_attribute('html_index_title','Plot Index') self.add_attribute('html_homelink',None) self.add_attribute('html_movie','JSAnimation') self.add_attribute('html_movie_width', 500) self.add_attribute('html_eagle',False) self.add_attribute('kml',False) self.add_attribute('kml_index_fname','_GoogleEarth') self.add_attribute('kml_publish',None) self.add_attribute('kml_name',"GeoClaw") self.add_attribute('kml_starttime',None) self.add_attribute('kml_tz_offset',None) self.add_attribute('kml_time_scale',1.0) self.add_attribute('kml_map_topo_to_latlong',None) self.add_attribute('kml_user_files',[]) self.add_attribute('gif_movie',False) self.add_attribute('setplot',False) self.add_attribute('mapc2p',None) self.add_attribute('beforeframe',None) self.add_attribute('afterframe',None) self.add_attribute('plotfigure_dict',{}) try: from collections import OrderedDict d = OrderedDict() except: d = {} self.add_attribute('otherfigure_dict',d) self.add_attribute('framesoln_dict',{}) self.add_attribute('gaugesoln_dict',{}) self.add_attribute('save_frames',True) self.add_attribute('save_figures',True) self.add_attribute('refresh_gauges',False) self.add_attribute('timeframes_framenos',None) self.add_attribute('timeframes_frametimes',None) self.add_attribute('timeframes_fignos',None) self.add_attribute('timeframes_fignames',None) self.add_attribute('gauges_gaugenos',None) self.add_attribute('gauges_fignos',None) self.add_attribute('gauges_fignames',None) self.add_attribute('parallel', False) self.add_attribute('num_procs', None) self.add_attribute('proc_frames', None) self.add_attribute('_parallel_todo', None) self._next_FIG = 1000 self._fignames = [] self._fignos = [] self._mode = 'unknown' self._figname_from_num = {} 
self._otherfignames = [] def new_plotfigure(self, name=None, figno=None, type='each_frame'): if (self._mode != 'iplotclaw') and (name in self._fignames): print('*** Warning, figure named %s has already been created' % name) if (self._mode != 'iplotclaw') and (figno in self._fignos): print('*** Warning, figure number %s has already been created' % figno) if figno is None: self._next_FIG += 1 figno = self._next_FIG if name is None: name = "FIG%s" % figno if name in self._fignames: print("*** Error in new_plotfigure: Figure name already used... ",name) raise Exception("Figure name already used") elif figno in self._fignos: print("*** Error in new_plotfigure: Figure number already used... ",figno) raise Exception("Figure number already used") self._fignames.append(name) self._fignos.append(figno) plotfigure = ClawPlotFigure(name, figno, type, self) if not self.save_figures: self.plotfigure_dict.clear() self.plotfigure_dict[name] = plotfigure self._figname_from_num[figno] = name return plotfigure def getframe(self,frameno,outdir=None,refresh=False): from clawpack.pyclaw import solution framesoln_dict = self.framesoln_dict if 0: if outdir: key = (frameno, outdir) else: key = frameno outdir = self.outdir if outdir is None: outdir = self.outdir outdir = os.path.abspath(outdir) key = (frameno, outdir) if refresh or (key not in framesoln_dict): framesoln = solution.Solution(frameno,path=outdir,file_format=self.format) if not self.save_frames: framesoln_dict.clear() framesoln_dict[key] = framesoln if key != frameno: print(' Reading Frame %s at t = %g from outdir = %s' % (frameno,framesoln.t,outdir)) else: print(' Reading Frame %s at t = %g ' % (frameno,framesoln.t)) else: framesoln = self.framesoln_dict[key] return framesoln def clearfigures(self): self.plotfigure_dict.clear() self._fignames = [] self._fignos = [] self._next_FIG = 1000 self._otherfignames = [] def clearframes(self, framenos='all'): if isinstance(framenos, int): framenos = [framenos] if framenos=='all': self.framesoln_dict.clear() print('Cleared all frames') else: for frameno in framenos: xxx = self.plotdata.framesoln_dict.pop(frameno,None) if xxx is None: print('No frame data to clear for frame ',frameno) else: print('Cleared data for frame ',frameno) def getgauge(self, gauge_id, outdir=None, verbose=True): if outdir is None: outdir = self.outdir outdir = os.path.abspath(outdir) key = (gauge_id, outdir) if self.refresh_gauges or (key not in self.gaugesoln_dict): try: import clawpack.pyclaw.gauges as gauges self.gaugesoln_dict[key] = gauges.GaugeSolution( gauge_id=gauge_id, path=outdir) if verbose: print("Read in gauge %s." % gauge_id) except Exception as e: import warnings warnings.warn(str(e)) return None return self.gaugesoln_dict[key] def plotframe(self, frameno): from clawpack.visclaw import frametools frametools.plotframe(frameno, self) def printframes(self, verbose=True): print("*** printframes is deprecated. Use plotpages.plotclaw_driver") print("*** for added capabilities.") raise DeprecationWarning("The method 'printframes' is deprecated.") def fignos(self): return self._fignos def mode(self): return self._mode
BSD 3-Clause New or Revised License
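A small sketch of the check, assuming Clawpack/visclaw is installed. The private _mode flag is toggled by hand here purely to show what iplotclaw() reports; in normal use the iplotclaw tool sets it.

from clawpack.visclaw.data import ClawPlotData

plotdata = ClawPlotData()
print(plotdata.iplotclaw())      # -> False, _mode starts as 'unknown'
plotdata._mode = 'iplotclaw'     # normally set by the iplotclaw tool, done by hand only for the demo
print(plotdata.iplotclaw())      # -> True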
cisco-en-programmability/dnacentersdk
dnacentersdk/api/v1_3_3/software_image_management_swim.py
SoftwareImageManagementSwim.get_software_image_details
python
def get_software_image_details(self, application_type=None, created_time=None, family=None, image_integrity_status=None, image_name=None, image_series=None, image_size_greater_than=None, image_size_lesser_than=None, image_uuid=None, is_cco_latest=None, is_cco_recommended=None, is_tagged_golden=None, limit=None, name=None, offset=None, sort_by=None, sort_order='asc', version=None, headers=None, **request_parameters): check_type(headers, dict) check_type(image_uuid, basestring) check_type(name, basestring) check_type(family, basestring) check_type(application_type, basestring) check_type(image_integrity_status, basestring) check_type(version, basestring) check_type(image_series, basestring) check_type(image_name, basestring) check_type(is_tagged_golden, bool) check_type(is_cco_recommended, bool) check_type(is_cco_latest, bool) check_type(created_time, int) check_type(image_size_greater_than, int) check_type(image_size_lesser_than, int) check_type(sort_by, basestring) check_type(sort_order, basestring) check_type(limit, int) check_type(offset, int) if headers is not None: if 'X-Auth-Token' in headers: check_type(headers.get('X-Auth-Token'), basestring, may_be_none=False) _params = { 'imageUuid': image_uuid, 'name': name, 'family': family, 'applicationType': application_type, 'imageIntegrityStatus': image_integrity_status, 'version': version, 'imageSeries': image_series, 'imageName': image_name, 'isTaggedGolden': is_tagged_golden, 'isCCORecommended': is_cco_recommended, 'isCCOLatest': is_cco_latest, 'createdTime': created_time, 'imageSizeGreaterThan': image_size_greater_than, 'imageSizeLesserThan': image_size_lesser_than, 'sortBy': sort_by, 'sortOrder': sort_order, 'limit': limit, 'offset': offset, } _params.update(request_parameters) _params = dict_from_items_with_values(_params) path_params = { } with_custom_headers = False _headers = self._session.headers or {} if headers: _headers.update(dict_of_str(headers)) with_custom_headers = True e_url = ('/dna/intent/api/v1/image/importation') endpoint_full_url = apply_path_params(e_url, path_params) if with_custom_headers: json_data = self._session.get(endpoint_full_url, params=_params, headers=_headers) else: json_data = self._session.get(endpoint_full_url, params=_params) return self._object_factory('bpm_0c8f7a0b49b9aedd_v1_3_3', json_data)
Returns software image list based on a filter criteria. For example: "filterbyName = cat3k%". Args: image_uuid(basestring): imageUuid query parameter. name(basestring): name query parameter. family(basestring): family query parameter. application_type(basestring): applicationType query parameter. image_integrity_status(basestring): imageIntegrityStatus FAILURE, UNKNOWN, VERIFIED. version(basestring): software Image Version. image_series(basestring): image Series. image_name(basestring): image Name. is_tagged_golden(bool): is Tagged Golden. is_cco_recommended(bool): is recommended from cisco.com. is_cco_latest(bool): is latest from cisco.com. created_time(int): time in milliseconds (epoch format). image_size_greater_than(int): size in bytes. image_size_lesser_than(int): size in bytes. sort_by(basestring): sort results by this field. sort_order(basestring): sort order 'asc' or 'des'. Default is asc. limit(int): limit query parameter. offset(int): offset query parameter. headers(dict): Dictionary of HTTP Headers to send with the Request . **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: MyDict: JSON response. Access the object's properties by using the dot notation or the bracket notation. Raises: TypeError: If the parameter types are incorrect. MalformedRequest: If the request body created is invalid. ApiError: If the DNA Center cloud returns an error.
https://github.com/cisco-en-programmability/dnacentersdk/blob/ef2adde6113e7a6acd28a287007eb470fa39d31f/dnacentersdk/api/v1_3_3/software_image_management_swim.py#L75-L214
from __future__ import ( absolute_import, division, print_function, unicode_literals, ) from builtins import * from past.builtins import basestring from ...restsession import RestSession from ...utils import ( check_type, dict_from_items_with_values, apply_path_params, dict_of_str, ) class SoftwareImageManagementSwim(object): def __init__(self, session, object_factory, request_validator): check_type(session, RestSession) super(SoftwareImageManagementSwim, self).__init__() self._session = session self._object_factory = object_factory self._request_validator = request_validator
MIT License
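A hedged usage sketch: the host and credentials are placeholders, the section attribute follows the SDK's snake_case wrapper convention, and the .response list is assumed from DNA Center's usual response envelope.

from dnacentersdk import DNACenterAPI

api = DNACenterAPI(base_url="https://dnac.example.com",
                   username="admin", password="secret", verify=False)
images = api.software_image_management_swim.get_software_image_details(
    family="cat9k", is_tagged_golden=True,
    sort_by="imageName", sort_order="asc", limit=10)
for image in images.response:          # .response assumed from the usual envelope
    print(image.name, image.version)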
vmware-samples/automotive-iot-samples
python_sample/car_edge/auto.py
Automobile.read_sensors
python
def read_sensors(self):
        data_dict = None
        if self.gps_conn:
            data_dict = {}
            line = ""
            while line == "":
                temp_line = str(self.gps_conn.readline())
                if "$GPRMC" in temp_line:
                    line = temp_line
            GPS_data = self.parse_gps_data(line)
            if not data_dict:
                data_dict = GPS_data
        if (self.obd_conn.is_connected()):
            if not data_dict:
                data_dict = {}
            for key in SENSOR_CMDS:
                try:
                    val = str(self.obd_conn.query(SENSOR_CMDS[key]).value.magnitude)
                except:
                    val = "Error"
                data_dict[key] = val
        if data_dict:
            data_dict["TIME"] = time.time()
        if (constants.DEBUG):
            print(data_dict)
        return data_dict
If an OBD or GPS connection exists, retrieves the requested sensor data and returns it as a dictionary.
https://github.com/vmware-samples/automotive-iot-samples/blob/5fa52ca6e11907579598568f6c67c15dcfbb1025/python_sample/car_edge/auto.py#L136-L179
from config import config import constants import obd import serial import time import sys DEBUG = config["COMMON"]["DEBUG"] SENSOR_CMDS = { "SPEED" : obd.commands.SPEED, "RPM" : obd.commands.RPM, "FUEL_LEVEL" : obd.commands.FUEL_LEVEL, "RELATIVE_ACCEL_POS" : obd.commands.RELATIVE_ACCEL_POS, "ABSOLUTE_LOAD" : obd.commands.ABSOLUTE_LOAD, "ENGINE_LOAD" : obd.commands.ENGINE_LOAD, "RELATIVE_THROTTLE_POS" : obd.commands.RELATIVE_THROTTLE_POS, "THROTTLE_POS_B" : obd.commands.THROTTLE_POS_B, "THROTTLE_POS_C" : obd.commands.THROTTLE_POS_C } GPS_METRICS = [ 'fix_time', 'validity', 'latitude', 'latitude_hemisphere' , 'longitude' , 'longitude_hemisphere' , 'speed', 'true_course', 'fix_date', 'variation', 'variation_e_w', 'checksum', 'decimal_latitude', 'decimal_longitude' ] class AutoID () : driverName = "Someone" driverID = "1234" vehicleModel = "E24 M6 BMW" vehicleID= "some_vehicle" def __init__(self, driverName, driverID, vehicleModel, vehicleID): self.driverName = driverName self.driverID = driverID self.vehicleModel = vehicleModel self.vehicleID = vehicleID def clean_keys(keys_str): pre_keys = keys_str.split(",") keys = [] for key in pre_keys: if (key != ""): keys.append(key.strip()) return keys class DataParser(): sensors_str = None keys = None def __init__(self, sensors_str): self.sensors_str = sensors_str self.keys = clean_keys(sensors_str) print( self.keys) def parse(self, data_str): if (constants.DEBUG): print("Data str to be parsed: " + data_str) values = data_str.split(",") if (constants.DEBUG): print(values) return dict(zip(self.keys, values)) class Automobile(): autoID = None obd_conn = None gps_conn = None sensors = [] sensors_connections = [] metrics = {} def __init__(self, autoID, sensors): self.autoID = autoID self.sensors = sensors self.metrics = None if "OBD" in self.sensors: try : self.obd_conn = obd.OBD() if self.obd_conn.status() != "Not Connected": self.sensors_connections.append(self.obd_conn) except: print("Unexpected error: unable to connect to OBD", sys.exc_info()[0]) self.obd_conn = None if "GPS" in self.sensors: for i in range(11): try : print("Trying top connect to GPS on serial /dev/ttyUSB" + str(i)) self.gps_conn = serial.Serial("/dev/ttyUSB" + str(i), constants.GPS_BAUD_RATE, timeout=constants.SAMPLING_FREQUENCY) self.sensors_connections.append(self.gps_conn) print("Connected to serial /dev/ttyUSB" + str(i)) break except: print("Unexpected error: unable to GPS", sys.exc_info()[0]) self.gps_conn = None def get_tracked_metrics(self): metric_list = [] if self.gps_conn: metric_list += GPS_METRICS metric_list.append("TIME") if self.obd_conn: metric_list += SENSOR_CMDS.keys() if not self.gps_conn and not self.obd_conn: return None return metric_list
Apache License 2.0
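A usage sketch that assumes it runs next to the repository's car_edge modules (config, constants) with the python-obd package installed; the identity values are placeholders, and only the OBD sensor set is requested so no GPS serial port is needed.

from auto import Automobile, AutoID

auto_id = AutoID("Jane Doe", "42", "Some Model", "vehicle-1")   # placeholder identity values
car = Automobile(auto_id, sensors=["OBD"])
reading = car.read_sensors()     # None when no connection could be opened
if reading:
    print(reading.get("SPEED"), reading.get("RPM"), reading.get("TIME"))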
xuru/pyvisdk
pyvisdk/mo/file_manager.py
FileManager.CopyDatastoreFile_Task
python
def CopyDatastoreFile_Task(self, sourceName, destinationName, sourceDatacenter=None, destinationDatacenter=None, force=None):
        return self.delegate("CopyDatastoreFile_Task")(sourceName, sourceDatacenter, destinationName, destinationDatacenter, force)
Copies the source file or folder to the destination. :param sourceName: The name of the source, either a URL or a datastore path referring to the file or folder to be copied. :param sourceDatacenter: If sourceName is a datastore path, the datacenter for that datastore path. Not needed when invoked directly on ESX. If not specified on a call to VirtualCenter, sourceName must be a URL. :param destinationName: The name of the destination, either a URL or a datastore path referring to the destination file or folder. :param destinationDatacenter: If destinationName is a datastore path, the datacenter for that datastore path. Not needed when invoked directly on ESX. If not specified on a call to VirtualCenter, it is assumed that the destination path belongs to the source datacenter. :param force: If true, overwrite any identically named file at the destination. If not specified, it is assumed to be false.
https://github.com/xuru/pyvisdk/blob/de24eb4426eb76233dc2e57640d3274ffd304eb3/pyvisdk/mo/file_manager.py#L43-L63
from pyvisdk.base.managed_object_types import ManagedObjectTypes from pyvisdk.base.base_entity import BaseEntity import logging log = logging.getLogger(__name__) class FileManager(BaseEntity): def __init__(self, core, name=None, ref=None, type=ManagedObjectTypes.FileManager): super(FileManager, self).__init__(core, name=name, ref=ref, type=type) def ChangeOwner(self, name, owner, datacenter=None): return self.delegate("ChangeOwner")(name, datacenter, owner)
MIT License
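A call sketch only, not a standalone script: fm is assumed to be a FileManager obtained from an authenticated pyvisdk connection and dc a Datacenter reference from the same session; the datastore paths are made up.

# fm: FileManager from an authenticated pyvisdk connection (not shown); dc: a Datacenter reference.
task = fm.CopyDatastoreFile_Task(
    sourceName="[datastore1] vm1/vm1.vmdk",
    destinationName="[datastore1] backups/vm1-copy.vmdk",
    sourceDatacenter=dc,
    destinationDatacenter=dc,
    force=True)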
google/uncertainty-baselines
uncertainty_baselines/models/wide_resnet_hetsngp.py
wide_resnet_hetsngp
python
def wide_resnet_hetsngp(input_shape, batch_size, depth, width_multiplier, num_classes, l2, use_mc_dropout, use_filterwise_dropout, dropout_rate, use_gp_layer, gp_input_dim, gp_hidden_dim, gp_scale, gp_bias, gp_input_normalization, gp_random_feature_type, gp_cov_discount_factor, gp_cov_ridge_penalty, use_spec_norm, spec_norm_iteration, spec_norm_bound, temperature, num_factors=10, num_mc_samples=5000, eps=1e-5, sngp_var_weight=1., het_var_weight=1.): Conv2D = make_conv2d_layer(use_spec_norm, spec_norm_iteration, spec_norm_bound) OutputLayer = functools.partial( ed.layers.HeteroscedasticSNGPLayer, num_factors=num_factors, num_inducing=gp_hidden_dim, gp_kernel_scale=gp_scale, gp_output_bias=gp_bias, normalize_input=gp_input_normalization, gp_cov_momentum=gp_cov_discount_factor, gp_cov_ridge_penalty=gp_cov_ridge_penalty, use_custom_random_features=True, custom_random_features_initializer=make_random_feature_initializer( gp_random_feature_type), temperature=temperature, train_mc_samples=num_mc_samples, test_mc_samples=num_mc_samples, share_samples_across_batch=True, logits_only=True, eps=eps, dtype=tf.float32, sngp_var_weight=sngp_var_weight, het_var_weight=het_var_weight) if (depth - 4) % 6 != 0: raise ValueError('depth should be 6n+4 (e.g., 16, 22, 28, 40).') if use_mc_dropout and not use_filterwise_dropout: raise ValueError('cannot use mc dropout with filterwise dropout disabled.') num_blocks = (depth - 4) // 6 inputs = tf.keras.layers.Input(shape=input_shape, batch_size=batch_size) x = Conv2D(16, strides=1, kernel_regularizer=tf.keras.regularizers.l2(l2))(inputs) if use_filterwise_dropout: x = apply_dropout(x, dropout_rate, use_mc_dropout) for strides, filters in zip([1, 2, 2], [16, 32, 64]): x = group(x, filters=filters * width_multiplier, strides=strides, num_blocks=num_blocks, l2=l2, use_mc_dropout=use_mc_dropout, use_filterwise_dropout=use_filterwise_dropout, dropout_rate=dropout_rate, use_spec_norm=use_spec_norm, spec_norm_iteration=spec_norm_iteration, spec_norm_bound=spec_norm_bound) x = BatchNormalization(beta_regularizer=tf.keras.regularizers.l2(l2), gamma_regularizer=tf.keras.regularizers.l2(l2))(x) x = tf.keras.layers.Activation('relu')(x) x = tf.keras.layers.AveragePooling2D(pool_size=8)(x) x = tf.keras.layers.Flatten()(x) if use_gp_layer: if gp_input_dim > 0: x = tf.keras.layers.Dense( gp_input_dim, kernel_initializer='random_normal', use_bias=False, trainable=False)(x) outputs = OutputLayer(num_classes)(x) else: outputs = tf.keras.layers.Dense( num_classes, kernel_initializer='he_normal', kernel_regularizer=tf.keras.regularizers.l2(l2), bias_regularizer=tf.keras.regularizers.l2(l2))(x) return tf.keras.Model(inputs=inputs, outputs=outputs)
Builds Wide ResNet HetSNGP. Following Zagoruyko and Komodakis (2016), it accepts a width multiplier on the number of filters. Using three groups of residual blocks, the network maps spatial features of size 32x32 -> 16x16 -> 8x8. Args: input_shape: tf.Tensor. batch_size: The batch size of the input layer. Required by the spectral normalization. depth: Total number of convolutional layers. "n" in WRN-n-k. It differs from He et al. (2015)'s notation which uses the maximum depth of the network counting non-conv layers like dense. width_multiplier: Integer to multiply the number of typical filters by. "k" in WRN-n-k. num_classes: Number of output classes. l2: L2 regularization coefficient. use_mc_dropout: Whether to apply Monte Carlo dropout. use_filterwise_dropout: Whether to apply filterwise dropout. dropout_rate: Dropout rate. use_gp_layer: Whether to use Gaussian process layer as the output layer. gp_input_dim: The input dimension to GP layer. gp_hidden_dim: The hidden dimension of the GP layer, which corresponds to the number of random features used for the approximation. gp_scale: The length-scale parameter for the RBF kernel of the GP layer. gp_bias: The bias term for GP layer. gp_input_normalization: Whether to normalize the input using LayerNorm for GP layer. This is similar to automatic relevance determination (ARD) in the classic GP learning. gp_random_feature_type: The type of random feature to use for `RandomFeatureGaussianProcess`. gp_cov_discount_factor: The discount factor to compute the moving average of precision matrix. gp_cov_ridge_penalty: Ridge penalty parameter for GP posterior covariance. use_spec_norm: Whether to apply spectral normalization. spec_norm_iteration: Number of power iterations to perform for estimating the spectral norm of weight matrices. spec_norm_bound: Upper bound to spectral norm of weight matrices. temperature: Float or scalar `Tensor` representing the softmax temperature. num_factors: Int. Number of factors for the heteroscedastic variance. num_mc_samples: The number of Monte-Carlo samples used to estimate the predictive distribution. eps: Float. Clip probabilities into [eps, 1.0] softmax or [eps, 1.0 - eps] sigmoid before applying log (softmax), or inverse sigmoid. sngp_var_weight: Weight in [0,1] for the SNGP variance in the output. het_var_weight: Weight in [0,1] for the het. variance in the output. Returns: tf.keras.Model.
https://github.com/google/uncertainty-baselines/blob/d37c17c4b08a88d6546bbf299b59127a03398404/uncertainty_baselines/models/wide_resnet_hetsngp.py#L147-L300
import functools from absl import logging import edward2 as ed import tensorflow as tf BatchNormalization = functools.partial( tf.keras.layers.BatchNormalization, epsilon=1e-5, momentum=0.9) def make_random_feature_initializer(random_feature_type): if random_feature_type == 'orf': return ed.initializers.OrthogonalRandomFeatures(stddev=0.05) elif random_feature_type == 'rff': return tf.keras.initializers.RandomNormal(stddev=0.05) else: return random_feature_type def make_conv2d_layer(use_spec_norm, spec_norm_iteration, spec_norm_bound): Conv2DBase = functools.partial( tf.keras.layers.Conv2D, kernel_size=3, padding='same', use_bias=False, kernel_initializer='he_normal') def Conv2DNormed(*conv_args, **conv_kwargs): return ed.layers.SpectralNormalizationConv2D( Conv2DBase(*conv_args, **conv_kwargs), iteration=spec_norm_iteration, norm_multiplier=spec_norm_bound) return Conv2DNormed if use_spec_norm else Conv2DBase def apply_dropout(inputs, dropout_rate, use_mc_dropout): logging.info('apply_dropout input shape %s', inputs.shape) dropout_layer = tf.keras.layers.Dropout( dropout_rate, noise_shape=[inputs.shape[0], 1, 1, inputs.shape[3]]) if use_mc_dropout: return dropout_layer(inputs, training=True) return dropout_layer(inputs) def basic_block(inputs, filters, strides, l2, use_mc_dropout, use_filterwise_dropout, dropout_rate, use_spec_norm, spec_norm_iteration, spec_norm_bound): Conv2D = make_conv2d_layer(use_spec_norm, spec_norm_iteration, spec_norm_bound) x = inputs y = inputs y = BatchNormalization(beta_regularizer=tf.keras.regularizers.l2(l2), gamma_regularizer=tf.keras.regularizers.l2(l2))(y) y = tf.keras.layers.Activation('relu')(y) if use_filterwise_dropout: y = apply_dropout(y, dropout_rate, use_mc_dropout) y = Conv2D(filters, strides=strides, kernel_regularizer=tf.keras.regularizers.l2(l2))(y) y = BatchNormalization(beta_regularizer=tf.keras.regularizers.l2(l2), gamma_regularizer=tf.keras.regularizers.l2(l2))(y) y = tf.keras.layers.Activation('relu')(y) if use_filterwise_dropout: y = apply_dropout(y, dropout_rate, use_mc_dropout) y = Conv2D(filters, strides=1, kernel_regularizer=tf.keras.regularizers.l2(l2))(y) if not x.shape.is_compatible_with(y.shape): x = Conv2D(filters, kernel_size=1, strides=strides, kernel_regularizer=tf.keras.regularizers.l2(l2))(x) if use_filterwise_dropout: y = apply_dropout(y, dropout_rate, use_mc_dropout) x = tf.keras.layers.add([x, y]) return x def group(inputs, filters, strides, num_blocks, **kwargs): x = basic_block(inputs, filters=filters, strides=strides, **kwargs) for _ in range(num_blocks - 1): x = basic_block(x, filters=filters, strides=1, **kwargs) return x
Apache License 2.0
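A construction sketch with illustrative, untuned hyperparameters for CIFAR-10-shaped inputs; it assumes the builder is exported under ub.models like the repository's other wide_resnet variants, otherwise import it from uncertainty_baselines.models.wide_resnet_hetsngp.

import uncertainty_baselines as ub

# Hyperparameters below are illustrative, not tuned; shapes follow CIFAR-10.
model = ub.models.wide_resnet_hetsngp(
    input_shape=(32, 32, 3), batch_size=128, depth=28, width_multiplier=10,
    num_classes=10, l2=3e-4, use_mc_dropout=False, use_filterwise_dropout=False,
    dropout_rate=0.1, use_gp_layer=True, gp_input_dim=128, gp_hidden_dim=1024,
    gp_scale=2.0, gp_bias=0.0, gp_input_normalization=True,
    gp_random_feature_type='orf', gp_cov_discount_factor=-1.0,
    gp_cov_ridge_penalty=1.0, use_spec_norm=True, spec_norm_iteration=1,
    spec_norm_bound=6.0, temperature=1.0)
model.summary()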
tmancal74/quantarhei
quantarhei/spectroscopy/linear_dichroism.py
LinDichSpectrumCalculator._calculate_aggregate
python
def _calculate_aggregate(self, relaxation_tensor=None, relaxation_hamiltonian=None, rate_matrix=None): ta = self.TimeAxis if relaxation_hamiltonian is None: HH = self.system.get_Hamiltonian() else: HH = relaxation_hamiltonian SS = HH.diagonalize() DD = self.system.get_TransitionDipoleMoment() DD.transform(SS) tr = {"ta":ta} if relaxation_tensor is not None: RR = relaxation_tensor RR.transform(SS) gg = [] if isinstance(RR, TimeDependent): for ii in range(HH.dim): gg.append(RR.data[:,ii,ii,ii,ii]) else: for ii in range(HH.dim): gg.append([RR.data[ii,ii,ii,ii]]) tr["gg"] = gg[1] elif rate_matrix is not None: RR = rate_matrix gg = [] if isinstance(RR, TimeDependent): for ii in range(HH.dim): gg.append(RR.data[:,ii,ii]) else: for ii in range(HH.dim): gg.append([RR.data[ii,ii]]) tr["gg"] = gg[1] else: tr["gg"] = [0.0] if not self.system._has_lindich_axes: self.system.set_lindich_axes(self.vector_perp_to_membrane) q = self.system.get_lindich_axes() else: try: q = self.system.get_lindich_axes() except: raise Exception('No orthogonal axis system provided for\ calculation of linear dichroism.') tr["dd_vec"] = numpy.dot(DD.data[0][1], q) tr["om"] = HH.data[1,1]-HH.data[0,0]-self.rwa ct = self._excitonic_coft(SS,self.system,0) tr["ct"] = ct self.system._has_system_bath_coupling = True data = numpy.real(self.one_transition_spectrum(tr)) for ii in range(2,HH.dim): if relaxation_tensor is not None: tr["gg"] = gg[ii] else: tr["gg"] = [0.0] tr["dd_vec"] = numpy.dot(DD.data[0][ii], q) tr["om"] = HH.data[ii,ii]-HH.data[0,0]-self.rwa tr["ct"] = self._excitonic_coft(SS,self.system,ii-1) data += numpy.real(self.one_transition_spectrum(tr)) Nt = len(self.frequencyAxis.data)//2 do = self.frequencyAxis.data[1]-self.frequencyAxis.data[0] st = self.frequencyAxis.data[Nt//2] axis = FrequencyAxis(st,Nt,do) S1 = numpy.linalg.inv(SS) HH.transform(S1) DD.transform(S1) if relaxation_tensor is not None: RR.transform(S1) spect = LinDichSpectrum(axis=axis, data=data) return spect
Calculates the linear dichroism spectrum of a molecular aggregate
https://github.com/tmancal74/quantarhei/blob/54a40cc55cdedf86bf04a5d705227fe69461d408/quantarhei/spectroscopy/linear_dichroism.py#L769-L883
import numpy import scipy import matplotlib.pyplot as plt from ..utils import derived_type from ..builders import Molecule from ..builders import Aggregate from ..core.time import TimeAxis from ..core.frequency import FrequencyAxis from ..core.dfunction import DFunction from ..core.managers import energy_units from ..core.managers import EnergyUnitsManaged from ..core.time import TimeDependent from ..core.units import cm2int from ..core.saveable import Saveable class LinDichSpectrumBase(DFunction, EnergyUnitsManaged): def __init__(self, axis=None, data=None): super().__init__() self.axis = axis self.data = data def set_axis(self, axis): self.axis = axis def set_data(self, data): self.data = data def set_by_interpolation(self, x, y, xaxis="frequency"): from scipy import interpolate if xaxis == "frequency": om = self.convert_2_internal_u(x) elif xaxis == "wavelength": om = 1.0e-7*x om = 1.0/om om = om*cm2int if om[1] > om[2]: om = numpy.flip(om,0) y = numpy.flip(y,0) omin = numpy.amin(om) omax = numpy.amax(om) length = om.shape[0] step = (omax-omin)/length waxis = FrequencyAxis(omin, length, step) tck = interpolate.splrep(om, y, s=0) ynew = interpolate.splev(waxis.data, tck, der=0) self.axis = waxis self.data = ynew def clear_data(self): shp = self.data.shape self.data = numpy.zeros(shp, dtype=numpy.float64) def normalize2(self,norm=1.0): mx = numpy.max(self.data) self.data = norm*self.data/mx def normalize(self): self.normalize2(norm=1.0) def subtract(self, val): self.data -= val def add_to_data(self, spect): if self.axis is None: self.axis = spect.axis.copy() if not numpy.allclose(spect.axis.data, self.axis.data): numpy.savetxt("spect_data_wrong.dat", spect.axis.data) numpy.savetxt("self_data_wrong.dat", self.axis.data) raise Exception("Incompatible axis") if self.data is None: self.data = numpy.zeros(len(spect.data), dtype=spect.axis.data.dtype) self.data += spect.data def load_data(self, filename, ext=None, replace=False): super().load_data(filename, ext=ext, axis='frequency', replace=replace) def plot(self, **kwargs): if "ylabel" not in kwargs: ylabel = r'$\alpha(\omega)$ [a.u.]' kwargs["ylabel"] = ylabel fig = super().plot(**kwargs) if fig is not None: return fig def gaussian_fit(self, N=1, guess=None, plot=False, Nsvf=251): from scipy.signal import savgol_filter from scipy.interpolate import UnivariateSpline x = self.axis.data y = self.data if guess is None: raise Exception("Guess is required at this time") guess = [1.0, 11000.0, 300.0, 0.2, 11800, 400, 0.2, 12500, 300] if not self._splines_initialized: self._set_splines() der = self._spline_r.derivative() y1 = der(x) y1sm = savgol_filter(y1,Nsvf,polyorder=3) y1sm_spl_der = UnivariateSpline(x,y1sm,s=0).derivative()(x) y2sm = savgol_filter(y1sm_spl_der,Nsvf,polyorder=3) plt.plot(x, y2sm) plt.show() def funcf(x, *p): return _n_gaussians(x, N, *p) from scipy.optimize import curve_fit popt, pcov = curve_fit(funcf, x, y, p0=guess) if plot: plt.plot(x,y) plt.plot(x,_n_gaussians(x, N, *popt)) for i in range(N): a = popt[3*i] print(i, a) b = popt[3*i+1] c = popt[3*i+2] y = _gaussian(x, a, b, c) plt.plot(x, y,'-r') plt.show() return popt, pcov def _gaussian(x, height, center, fwhm, offset=0.0): return height*numpy.exp(-(((x - center)**2)*4.0*numpy.log(2.0))/ (fwhm**2)) + offset def _n_gaussians(x, N, *params): n = len(params) k = n//3 if (k*3 == n) and (k == N): res = 0.0 pp = numpy.zeros(3) for i in range(k): pp[0:3] = params[3*i:3*i+3] arg = tuple(pp) res += _gaussian(x, *arg) res += params[n-1] return res else: raise Exception("Inconsistend 
number of parameters") class LinDichSpectrum(LinDichSpectrumBase): pass class LinDichSpectrumContainer(Saveable): def __init__(self, axis=None): self.axis = axis self.count = 0 self.spectra = {} def set_axis(self, axis): self.axis = axis def set_spectrum(self, spect, tag=None): frq = spect.axis if self.axis is None: self.axis = frq if self.axis.is_equal_to(frq): if tag is None: tag1 = str(self.count) else: tag1 = str(tag) self.spectra[tag1] = spect self.count += 1 else: raise Exception("Incompatible time axis (equal axis required)") def get_spectrum(self, tag): if not isinstance(tag, str): tag = str(tag) if tag in self.spectra.keys(): return self.spectra[tag] else: raise Exception("Unknown spectrum") def get_spectra(self): ven = [value for (key, value) in sorted(self.spectra.items())] return ven class LinDichSpectrumCalculator(EnergyUnitsManaged): TimeAxis = derived_type("TimeAxis",TimeAxis) system = derived_type("system",[Molecule,Aggregate]) def __init__(self, timeaxis, system=None, dynamics="secular", relaxation_tensor=None, rate_matrix=None, effective_hamiltonian=None, vector_perp_to_membrane = numpy.array([-1, -4, 1])): self.TimeAxis = timeaxis self.system = system self.dynamics = dynamics self._relaxation_tensor = None self._rate_matrix = None self._relaxation_hamiltonian = None self._has_relaxation_tensor = False if relaxation_tensor is not None: self._relaxation_tensor = relaxation_tensor self._has_relaxation_tensor = True if effective_hamiltonian is not None: self._relaxation_hamiltonian = effective_hamiltonian if rate_matrix is not None: self._rate_matrix = rate_matrix self._has_rate_matrix = True self.vector_perp_to_membrane = vector_perp_to_membrane self.vector_perp_to_membrane = self.vector_perp_to_membrane*(1/numpy.linalg.norm(vector_perp_to_membrane)) self.rwa = 0.0 def bootstrap(self,rwa=0.0): self.rwa = self.convert_2_internal_u(rwa) with energy_units("int"): self.frequencyAxis = self.TimeAxis.get_FrequencyAxis() self.frequencyAxis.data += self.rwa def calculate(self): with energy_units("int"): if self.system is not None: if isinstance(self.system,Molecule): spect = self._calculate_monomer() elif isinstance(self.system,Aggregate): spect = self._calculate_aggregate( relaxation_tensor= self._relaxation_tensor, rate_matrix= self._rate_matrix, relaxation_hamiltonian= self._relaxation_hamiltonian) else: raise Exception("System to calculate spectrum for not defined") return spect def _calculateMolecule(self,rwa): if self.system._has_system_bath_coupling: raise Exception("Not yet implemented") else: stick_width = 1.0/0.1 def _c2g(self,timeaxis,coft): ta = timeaxis rr = numpy.real(coft) ri = numpy.imag(coft) sr = scipy.interpolate.UnivariateSpline(ta.data, rr,s=0).antiderivative()(ta.data) sr = scipy.interpolate.UnivariateSpline(ta.data, sr,s=0).antiderivative()(ta.data) si = scipy.interpolate.UnivariateSpline(ta.data, ri,s=0).antiderivative()(ta.data) si = scipy.interpolate.UnivariateSpline(ta.data, si,s=0).antiderivative()(ta.data) gt = sr + 1j*si return gt def one_transition_spectrum(self,tr): ta = tr["ta"] dd = tr["dd_vec"] om = tr["om"] gg = tr["gg"] if self.system._has_system_bath_coupling: ct = tr["ct"] gt = self._c2g(ta,ct.data) at = numpy.exp(-gt -1j*om*ta.data) else: at = numpy.exp(-1j*om*ta.data) if len(gg) == 1: gam = gg[0] rt = numpy.exp(gam*ta.data) at *= rt else: rt = numpy.exp((gg)*ta.data) at *= rt ft = numpy.fft.hfft(at)*ta.step ft = numpy.fft.fftshift(ft) ft = numpy.flipud(ft) ft = (-dd[0]**2 + 0.5*dd[1]**2 + 0.5*dd[2]**2)*ft Nt = ta.length return 
ft[Nt//2:Nt+Nt//2] def _excitonic_coft(self,SS,AG,n): c0 = AG.monomers[0].get_egcf((0,1)) Nt = len(c0) sbi = AG.get_SystemBathInteraction() cfm = sbi.CC ct = numpy.zeros((Nt),dtype=numpy.complex128) Na = AG.nmono for kk in range(Na): for ll in range(Na): ct += ((SS[kk+1,n+1]**2)*(SS[ll+1,n+1]**2)*cfm.get_coft(kk,ll)) return ct def _calculate_monomer(self): raise Exception('Not yet implemented. Usually, LD is calculated\ for aggregates.')
MIT License
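A schematic of the calculator's call sequence; building a real Aggregate with transition dipoles and a bath correlation function is outside this sketch, so agg stands for an aggregate prepared elsewhere, and the axis and rotating-wave values are illustrative.

from quantarhei import TimeAxis, energy_units
from quantarhei.spectroscopy.linear_dichroism import LinDichSpectrumCalculator

time_axis = TimeAxis(0.0, 1000, 1.0)        # femtosecond grid for the response function
# agg: an Aggregate built elsewhere, with dipoles and a system-bath interaction set up.
calc = LinDichSpectrumCalculator(time_axis, system=agg)
with energy_units("1/cm"):
    calc.bootstrap(rwa=12000.0)             # rotating-wave frequency near the absorption band
spectrum = calc.calculate()
spectrum.plot()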
phuks-co/throat
app/views/errors.py
not_found
python
def not_found(_):
    if request.path.startswith("/api"):
        if request.path.startswith("/api/v3"):
            return jsonify(msg="Method not found or not implemented"), 404
        return jsonify(status="error", error="Method not found or not implemented"), 404
    return render_error_template("errors/404.html"), 404
404 Not found error
https://github.com/phuks-co/throat/blob/27c7c18faa371def668bdbe6f7e95c6bf32a1829/app/views/errors.py#L22-L28
from flask import Blueprint, request, redirect, url_for, jsonify from ..misc import engine, logger, ensure_locale_loaded, get_locale from ..caching import cache bp = Blueprint("errors", __name__) @bp.app_errorhandler(401) def unauthorized(_): return redirect(url_for("auth.login")) @bp.app_errorhandler(403) def forbidden_error(_): return render_error_template("errors/403.html"), 403 @bp.app_errorhandler(404)
MIT License
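A reduced sketch of the same handler shape, runnable with plain Flask: the app's render_error_template call is swapped for a string so the branch on /api paths can be exercised with the test client.

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.errorhandler(404)
def not_found(_):
    if request.path.startswith("/api"):
        if request.path.startswith("/api/v3"):
            return jsonify(msg="Method not found or not implemented"), 404
        return jsonify(status="error", error="Method not found or not implemented"), 404
    return "404 - page not found", 404      # stands in for render_error_template("errors/404.html")

with app.test_client() as client:
    print(client.get("/api/v3/missing").get_json())   # {'msg': 'Method not found or not implemented'}
    print(client.get("/missing").status_code)         # 404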
stan-dev/httpstan
httpstan/cache.py
fit_path
python
def fit_path(fit_name: str) -> Path:
    fit_directory, fit_id = fit_name.rsplit("/", maxsplit=1)
    fit_filename = fit_id + ".jsonlines.gz"
    return cache_directory() / fit_directory / fit_filename
Get the path to a fit file. File may not exist.
https://github.com/stan-dev/httpstan/blob/8da756d8f6c6bb001b219e4a3acb8364d6b4d3ee/httpstan/cache.py#L29-L34
import logging import shutil import typing from importlib.machinery import EXTENSION_SUFFIXES from pathlib import Path import appdirs import httpstan logger = logging.getLogger("httpstan") def cache_directory() -> Path: return Path(appdirs.user_cache_dir("httpstan", version=httpstan.__version__)) def model_directory(model_name: str) -> Path: model_id = model_name.split("/")[1] return cache_directory() / "models" / model_id
ISC License
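A tiny usage sketch; the fit name follows the models/<model_id>/fits/<fit_id> shape httpstan uses, with made-up identifiers.

from httpstan import cache

fit_name = "models/abc123/fits/def456"     # made-up identifiers
path = cache.fit_path(fit_name)
print(path)   # <user cache dir>/models/abc123/fits/def456.jsonlines.gz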
fsecurelabs/leonidas
generator/lib/definitions.py
Definitions._generate_definitions
python
def _generate_definitions(self, case):
        self.categories.add(case["category"])
        if case["platform"] == "aws":
            for permission in case["permissions"]:
                self.permissions["aws"].add(permission)
            if "leonidas_aws" in case["executors"].keys():
                if case["executors"]["leonidas_aws"]["implemented"]:
                    case["executors"]["leonidas_aws"][
                        "rendered"
                    ] = self._generate_aws_code(case)
                case["sigma"] = self.templates["aws_sigma"].render(case)
        self.case_set.append(case)
Construct the definitions given a specific file's data
https://github.com/fsecurelabs/leonidas/blob/e9db5870ca3b5f63e19937680400f8c9cd49f50c/generator/lib/definitions.py#L159-L173
import ast from collections import defaultdict import os import traceback import sys import time import jinja2 import yaml class Definitions: def __init__(self, config, env): self.definitions_path = config["definitions_path"] self.config = config self.env = env self.case_set = [] self.categories = set() self.permissions = defaultdict(set) self.casecount = 0 self.templates = {} self.templates["aws_sigma"] = env.get_template( os.path.join("aws", "sigma.jinja2") ) self._file_list = [] self._validator = defaultdict(dict) self._validator["case"] = { "name": "Case {} is missing a name", "description": "Case {} is missing a description", "category": "Case {} has no category name", "mitre_ids": "Case {} has no associated MITRE IDs", "platform": "Case {} does not define a platform", "permissions": "Case {} does not define the permissions required for execution", "input_arguments": "Case {} does not define input arguments. Note that this key must exist, even if it is empty", "executors": "Case {} has no executors", "detection": "Case {} has no defined detections", } self._validator["aws"] = { "implemented": "Case {} is missing the implemented field in the leonidas_aws executor", "code": "Case number {} has a python executor but either no code, or the code present does not parse as valid Python", "clients": "Case number {} has a python executor but no clients defined", } self._validator["arg"] = { "description": "Argument number {} in case {} has no description", "type": "Argument number {} in case {} has no type", "value": "Argument number {} in case {} has no default value", } self._validator["detection"] = { "sigma_id": "Detection for case {} has no sigma_id field. This should be a unique GUID value", "status": 'Detection for case {} has no status set. This should be either "experimental", "test" or "production"', "level": 'Detection for case {} has no level set. 
This should be either "high", "medium" or "low"', "sources": "Detection for case {} needs at least one source", } def construct_definitions(self): self._construct_filelist() for item in self._file_list: filedata = yaml.safe_load(open(item, "r").read()) try: filedata["last_modified"] = time.strftime( "%Y-%m-%d", time.gmtime(os.path.getmtime(item)) ) self._generate_definitions(filedata) except Exception: traceback.print_exc(file=sys.stdout) continue def validate(self): self._construct_filelist() validation_success = True for item in self._file_list: print("validating {}".format(item)) filedata = yaml.safe_load(open(item, "r").read()) if not self._validate_file(filedata, item): validation_success = False self.casecount = self.casecount + 1 return validation_success def get_aws_policy(self): policy_template = self.env.get_template("iam-policy.jinja2") rendered = policy_template.render({"permissions": self.permissions["aws"]}) return rendered def _validate_file(self, case, filename): validation_success = True for key in self._validator["case"]: if key not in case.keys(): validation_success = False print(self._validator["case"][key].format(filename)) if "input_arguments" in case.keys(): if case["input_arguments"]: argcount = 0 for arg in case["input_arguments"]: argcount = argcount + 1 for key in self._validator["arg"]: if key not in case["input_arguments"][arg]: print( self._validator["arg"][key].format(argcount, filename) ) if "executors" in case.keys(): if "leonidas_aws" in case["executors"].keys(): if case["executors"]["leonidas_aws"]["implemented"]: try: ast.parse(case["executors"]["leonidas_aws"]["code"]) except Exception: print(self._validator["aws"]["code"].format(filename)) validation_success = False try: clients = case["executors"]["leonidas_aws"]["clients"] if len(clients) == 0: print(self._validator["aws"]["clients"].format(filename)) validation_success = False except Exception: print(self._validator["aws"]["clients"].format(filename)) validation_success = False else: print("No executors defined in {}".format(filename)) validation_success = False if "detection" in case.keys(): for key in self._validator["detection"]: if key not in case["detection"].keys(): print(self._validator["detection"][key].format(filename)) validation_success = False return validation_success def _construct_filelist(self): for root, subdirs, files in os.walk(self.definitions_path): for item in files: if item.split(".")[-1] == "yml": filepath = os.path.join(root, item) if filepath not in self._file_list: self._file_list.append(filepath)
MIT License
nilearn/nistats
nistats/reporting/_get_clusters_table.py
get_clusters_table
python
def get_clusters_table(stat_img, stat_threshold, cluster_threshold=None, min_distance=8.): cols = ['Cluster ID', 'X', 'Y', 'Z', 'Peak Stat', 'Cluster Size (mm3)'] stat_map = get_data(stat_img) conn_mat = np.zeros((3, 3, 3), int) conn_mat[1, 1, :] = 1 conn_mat[1, :, 1] = 1 conn_mat[:, 1, 1] = 1 voxel_size = np.prod(stat_img.header.get_zooms()) binarized = stat_map > stat_threshold binarized = binarized.astype(int) if np.sum(binarized) == 0: warnings.warn('Attention: No clusters with stat higher than %f' % stat_threshold) return pd.DataFrame(columns=cols) label_map = ndimage.measurements.label(binarized, conn_mat)[0] clust_ids = sorted(list(np.unique(label_map)[1:])) for c_val in clust_ids: if cluster_threshold is not None and np.sum( label_map == c_val) < cluster_threshold: stat_map[label_map == c_val] = 0 binarized[label_map == c_val] = 0 if np.sum(stat_map > stat_threshold) == 0: warnings.warn('Attention: No clusters with more than %d voxels' % cluster_threshold) return pd.DataFrame(columns=cols) label_map = ndimage.measurements.label(binarized, conn_mat)[0] clust_ids = sorted(list(np.unique(label_map)[1:])) peak_vals = np.array( [np.max(stat_map * (label_map == c)) for c in clust_ids]) clust_ids = [clust_ids[c] for c in (-peak_vals).argsort()] rows = [] for c_id, c_val in enumerate(clust_ids): cluster_mask = label_map == c_val masked_data = stat_map * cluster_mask cluster_size_mm = int(np.sum(cluster_mask) * voxel_size) subpeak_ijk, subpeak_vals = _local_max(masked_data, stat_img.affine, min_distance=min_distance) subpeak_xyz = np.asarray(coord_transform(subpeak_ijk[:, 0], subpeak_ijk[:, 1], subpeak_ijk[:, 2], stat_img.affine)).tolist() subpeak_xyz = np.array(subpeak_xyz).T n_subpeaks = np.min((len(subpeak_vals), 4)) for subpeak in range(n_subpeaks): if subpeak == 0: row = [c_id + 1, subpeak_xyz[subpeak, 0], subpeak_xyz[subpeak, 1], subpeak_xyz[subpeak, 2], subpeak_vals[subpeak], cluster_size_mm] else: sp_id = '{0}{1}'.format(c_id + 1, ascii_lowercase[subpeak - 1]) row = [sp_id, subpeak_xyz[subpeak, 0], subpeak_xyz[subpeak, 1], subpeak_xyz[subpeak, 2], subpeak_vals[subpeak], ''] rows += [row] df = pd.DataFrame(columns=cols, data=rows) return df
Creates pandas dataframe with img cluster statistics. Parameters ---------- stat_img : Niimg-like object, Statistical image (presumably in z- or p-scale). stat_threshold: `float` Cluster forming threshold in same scale as `stat_img` (either a p-value or z-scale value). cluster_threshold : `int` or `None`, optional Cluster size threshold, in voxels. min_distance: `float`, optional Minimum distance between subpeaks in mm. Default is 8 mm. Returns ------- df : `pandas.DataFrame` Table with peaks and subpeaks from thresholded `stat_img`. For binary clusters (clusters with >1 voxel containing only one value), the table reports the center of mass of the cluster, rather than any peaks/subpeaks.
https://github.com/nilearn/nistats/blob/bd4e27fdd5487b1ff920d4ae2473a7671153d2b3/nistats/reporting/_get_clusters_table.py#L94-L194
import warnings from string import ascii_lowercase import numpy as np import pandas as pd import nibabel as nib from scipy import ndimage from nilearn.image.resampling import coord_transform from nistats.utils import get_data def _local_max(data, affine, min_distance): ijk, vals = _identify_subpeaks(data) xyz, ijk, vals = _sort_subpeaks(ijk, vals, affine) ijk, vals = _pare_subpeaks(xyz, ijk, vals, min_distance) return ijk, vals def _identify_subpeaks(data): data_max = ndimage.filters.maximum_filter(data, 3) maxima = (data == data_max) data_min = ndimage.filters.minimum_filter(data, 3) diff = ((data_max - data_min) > 0) maxima[diff == 0] = 0 labeled, n_subpeaks = ndimage.label(maxima) labels_index = range(1, n_subpeaks + 1) ijk = np.array(ndimage.center_of_mass(data, labeled, labels_index)) ijk = np.round(ijk).astype(int) vals = np.apply_along_axis(arr=ijk, axis=1, func1d=_get_val, input_arr=data) return ijk, vals def _sort_subpeaks(ijk, vals, affine): order = (-vals).argsort() vals = vals[order] ijk = ijk[order, :] xyz = nib.affines.apply_affine(affine, ijk) return xyz, ijk, vals def _pare_subpeaks(xyz, ijk, vals, min_distance): keep_idx = np.ones(xyz.shape[0]).astype(bool) for i in range(xyz.shape[0]): for j in range(i + 1, xyz.shape[0]): if keep_idx[i] == 1: dist = np.linalg.norm(xyz[i, :] - xyz[j, :]) keep_idx[j] = dist > min_distance ijk = ijk[keep_idx, :] vals = vals[keep_idx] return ijk, vals def _get_val(row, input_arr): i, j, k = row return input_arr[i, j, k]
BSD 3-Clause New or Revised License
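A minimal usage sketch for the get_clusters_table record above. It assumes nistats and nibabel are installed and that the function is re-exported from nistats.reporting; the synthetic image, its shape, and the threshold values are illustrative, not part of the original record.

# Hypothetical example: build a small synthetic z-map and summarise its clusters.
import numpy as np
import nibabel as nib
from nistats.reporting import get_clusters_table  # assumed import path

data = np.zeros((20, 20, 20))
data[8:12, 8:12, 8:12] = 5.0          # one artificial "cluster" of z = 5
stat_img = nib.Nifti1Image(data, affine=np.eye(4))

# Peaks above z > 3 in clusters of at least 10 voxels, subpeaks >= 8 mm apart.
table = get_clusters_table(stat_img, stat_threshold=3.0,
                           cluster_threshold=10, min_distance=8.)
print(table[['Cluster ID', 'Peak Stat', 'Cluster Size (mm3)']])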
gabstopper/smc-python
smc/core/collection.py
InterfaceCollection.get
python
def get(self, interface_id): return self.items.get(interface_id)
Get the interface by id, if known. The interface is retrieved from the top level Physical or Tunnel Interface. If the interface is an inline interface, you can specify only one of the two inline pairs and the same interface will be returned. If interface type is unknown, use engine.interface for retrieving:: >>> engine = Engine('sg_vm') >>> intf = engine.interface.get(0) >>> print(intf, intf.addresses) (PhysicalInterface(name=Interface 0), [('172.18.1.254', '172.18.1.0/24', '0')]) Get an inline interface:: >>> intf = engine.interface.get('2-3') .. note:: For the inline interface example, you could also just specify '2' or '3' and the fetch will return the pair. :param str,int interface_id: interface ID to retrieve :raises InterfaceNotFound: invalid interface specified :return: interface object by type (Physical, Tunnel, VlanInterface)
https://github.com/gabstopper/smc-python/blob/54386c8a710727cc1acf69334a57b155d2f5408c/smc/core/collection.py#L223-L248
from smc.core.interfaces import TunnelInterface, InterfaceEditor, Layer3PhysicalInterface, ClusterPhysicalInterface, Layer2PhysicalInterface, VirtualPhysicalInterface from smc.core.sub_interfaces import LoopbackClusterInterface, LoopbackInterface from smc.base.structs import BaseIterable from smc.api.exceptions import UnsupportedInterfaceType, InterfaceNotFound def get_all_loopbacks(engine): data = [] if 'fw_cluster' in engine.type: for cvi in engine.data.get('loopback_cluster_virtual_interface', []): data.append( LoopbackClusterInterface(cvi, engine)) for node in engine.nodes: for lb in node.data.get('loopback_node_dedicated_interface', []): data.append(LoopbackInterface(lb, engine)) return data class LoopbackCollection(BaseIterable): def __init__(self, engine): self._engine = engine loopbacks = get_all_loopbacks(engine) super(LoopbackCollection, self).__init__(loopbacks) def get(self, address): loopback = super(LoopbackCollection, self).get(address=address) if loopback: return loopback raise InterfaceNotFound('Loopback address specified was not found') def __getattr__(self, key): if key.startswith('add_'): if 'fw_cluster' not in self._engine.type: return getattr(LoopbackInterface(None, self._engine), key) else: return getattr(LoopbackClusterInterface(None, self._engine), key) raise AttributeError('Cannot proxy to given method: %s for the ' 'following engine type: %s' % (key, self._engine.type)) class InterfaceCollection(BaseIterable): def __init__(self, engine, rel='interfaces'): self._engine = engine self._rel = rel self.href = engine.get_relation(rel, UnsupportedInterfaceType) super(InterfaceCollection, self).__init__(InterfaceEditor(engine))
Apache License 2.0
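A hedged sketch of the InterfaceCollection.get record above, following the lookups already shown in its docstring. It assumes an authenticated smc-python session has been established beforehand and that Engine lives in smc.core.engine; the engine name 'sg_vm' is only illustrative.

# Look up an interface by id and handle the miss case.
from smc.core.engine import Engine          # assumed module path
from smc.api.exceptions import InterfaceNotFound

engine = Engine('sg_vm')
try:
    intf = engine.interface.get(0)          # physical interface 0
    print(intf, intf.addresses)
except InterfaceNotFound:
    print('No interface with that id on this engine')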
ax330d/functions-plus
functions_plus.py
FunctionsTree.get
python
def get(self):
    functions_list = self.get_list_of_functions()
    functions_tree = self.build_functions_tree(functions_list)
    return functions_tree
Returns functions tree.
https://github.com/ax330d/functions-plus/blob/a0eac3333fbd1a8df86fcdbb0eb0fbbc7a92990f/functions_plus.py#L183-L191
import re from collections import OrderedDict import idaapi import idc import idautils from idaapi import PluginForm from PyQt5 import QtWidgets, QtGui __author__ = 'xxxzsx, Arthur Gerkis' __version__ = '1.0.1' class FunctionState(object): def __init__(self): self._args = '' self._flags = 0 self._addr = 0 @property def args(self): return self._args @args.setter def args(self, value): self._args = value @property def flags(self): return self._flags @flags.setter def flags(self, value): self._flags = value @property def addr(self): return self._addr @addr.setter def addr(self, value): self._addr = value class FunctionData(object): def __init__(self, state): self._args = state.args self._flags = state.flags self._addr = state.addr @property def args(self): return self._args @property def flags(self): return self._flags @property def addr(self): return self._addr class Cols(object): def __init__(self, show_extra_fields): self.addr = None self.flags = None self.show_extra_fields = show_extra_fields self.names = [ 'Name', 'Address', 'Segment', 'Length', 'Locals', 'Arguments' ] self.handlers = { 0: lambda: None, 1: lambda: self.fmt(self.addr), 2: lambda: '{}'.format(idc.get_segm_name(self.addr)), 3: lambda: self.fmt(idc.get_func_attr(self.addr, idc.FUNCATTR_END) - self.addr), 4: lambda: self.fmt(idc.get_func_attr(self.addr, idc.FUNCATTR_FRSIZE)), 5: lambda: self.fmt(idc.get_func_attr(self.addr, idc.FUNCATTR_ARGSIZE)) } if self.show_extra_fields: self.names.extend(['R', 'F', 'L', 'S', 'B', 'T', '=']) self.handlers.update({ 6: lambda: self.is_true(not self.flags & idc.FUNC_NORET, 'R'), 7: lambda: self.is_true(self.flags & idc.FUNC_FAR, 'F'), 8: lambda: self.is_true(self.flags & idc.FUNC_LIB, 'L'), 9: lambda: self.is_true(self.flags & idc.FUNC_STATIC, 'S'), 10: lambda: self.is_true(self.flags & idc.FUNC_FRAME, 'B'), 11: lambda: self.is_true(idc.get_type(self.addr), 'T'), 12: lambda: self.is_true(self.flags & idc.FUNC_BOTTOMBP, '=') }) def set_data(self, addr, flags): self.addr = addr self.flags = flags def item(self, index): return self.handlers[index]() def is_true(self, flag, char): if flag: return char return '.' def fmt(self, value): return '{:08X}'.format(value) class FunctionsTree(object): def __init__(self): self.chunks_regexp = re.compile(r'(.*?)(?:|\((.*?)\))$') self.simple_regexp = re.compile(r'^[a-zA-Z0-9_]*$')
MIT License
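The plugin above only runs inside IDA Pro, so the record cannot be exercised directly. The following standalone sketch illustrates the general pattern a "functions tree" builder follows (grouping namespaced names into a nested mapping); it is not the plugin's own build_functions_tree, and all names in it are made up.

# Standalone illustration: group '::'-separated function names into a tree.
from collections import OrderedDict

def build_tree(names, sep='::'):
    tree = OrderedDict()
    for name in names:
        node = tree
        parts = name.split(sep)
        for part in parts[:-1]:           # descend/create namespace nodes
            node = node.setdefault(part, OrderedDict())
        node[parts[-1]] = None            # leaf = the function itself
    return tree

print(build_tree(['std::vector::push_back', 'std::sort', 'main']))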
quarkslab/sspam
sspam/pattern_matcher.py
match
python
def match(target_str, pattern_str):
    target_ast = ast.parse(target_str, mode="eval").body
    target_ast = pre_processing.all_preprocessings(target_ast)
    target_ast = Flattening(ast.Add).visit(target_ast)
    pattern_ast = ast.parse(pattern_str, mode="eval").body
    pattern_ast = pre_processing.all_preprocessings(pattern_ast)
    pattern_ast = Flattening(ast.Add).visit(pattern_ast)
    return PatternMatcher(target_ast).visit(target_ast, pattern_ast)
Apply all pre-processing steps, then run the pattern matcher.
https://github.com/quarkslab/sspam/blob/6784e1c06157c6984ef04b67382e584d4b5316e0/sspam/pattern_matcher.py#L449-L457
import ast from copy import deepcopy import itertools import astunparse try: import z3 except ImportError: raise Exception("z3 module is needed to use this pattern matcher") from sspam.tools import asttools from sspam.tools.flattening import Flattening, Unflattening from sspam import pre_processing FLEXIBLE = True class EvalPattern(ast.NodeTransformer): def __init__(self, wildcards): self.wildcards = wildcards def visit_Name(self, node): if node.id in self.wildcards: return deepcopy(self.wildcards[node.id]) return node class PatternMatcher(asttools.Comparator): def __init__(self, root, nbits=0): super(PatternMatcher, self).__init__() self.wildcards = {} self.no_solution = [] if isinstance(root, ast.Module): self.root = root.body[0].value elif isinstance(root, ast.Expression): self.root = root.body else: self.root = root if not nbits: self.nbits = asttools.get_default_nbits(self.root) else: self.nbits = nbits getid = asttools.GetIdentifiers() getid.visit(self.root) self.variables = getid.variables self.functions = getid.functions @staticmethod def is_wildcard(node): return isinstance(node, ast.Name) and node.id.isupper() def check_eq_z3(self, target, pattern): getid = asttools.GetIdentifiers() getid.visit(target) if getid.functions: return False for var in self.variables: exec("%s = z3.BitVec('%s', %d)" % (var, var, self.nbits)) target_ast = deepcopy(target) target_ast = Unflattening().visit(target_ast) ast.fix_missing_locations(target_ast) code1 = compile(ast.Expression(target_ast), '<string>', mode='eval') eval_pattern = deepcopy(pattern) EvalPattern(self.wildcards).visit(eval_pattern) eval_pattern = Unflattening().visit(eval_pattern) ast.fix_missing_locations(eval_pattern) getid.reset() getid.visit(eval_pattern) if getid.functions: return False gvar = asttools.GetIdentifiers() gvar.visit(eval_pattern) if any(var.isupper() for var in gvar.variables): return False code2 = compile(ast.Expression(eval_pattern), '<string>', mode='eval') sol = z3.Solver() if isinstance(eval(code1), int) and eval(code1) == 0: return False sol.add(eval(code1) != eval(code2)) return sol.check().r == -1 def check_wildcard(self, target, pattern): if pattern.id in self.wildcards: wild_value = self.wildcards[pattern.id] exact_comp = asttools.Comparator().visit(wild_value, target) if exact_comp: return True if FLEXIBLE: return self.check_eq_z3(target, self.wildcards[pattern.id]) else: return False else: self.wildcards[pattern.id] = target return True def get_model(self, target, pattern): if target.n == 0: return False getwild = asttools.GetIdentifiers() getwild.visit(pattern) if getwild.functions: return False wilds = getwild.variables if len(wilds) > 1: return False wil = wilds.pop() if wil in self.wildcards: if not isinstance(self.wildcards[wil], ast.Num): return False folded = deepcopy(pattern) folded = Unflattening().visit(folded) EvalPattern(self.wildcards).visit(folded) folded = asttools.ConstFolding(folded, self.nbits).visit(folded) return folded.n == target.n else: exec("%s = z3.BitVec('%s', %d)" % (wil, wil, self.nbits)) eval_pattern = deepcopy(pattern) eval_pattern = Unflattening().visit(eval_pattern) ast.fix_missing_locations(eval_pattern) code = compile(ast.Expression(eval_pattern), '<string>', mode='eval') sol = z3.Solver() sol.add(target.n == eval(code)) if sol.check().r == 1: model = sol.model() for inst in model.decls(): self.wildcards[str(inst)] = ast.Num(int(model[inst].as_long())) return True return False def check_not(self, target, pattern): if self.is_wildcard(pattern.operand): wkey = 
pattern.operand.id if isinstance(target, ast.Num): if wkey not in self.wildcards: mod = 2**self.nbits self.wildcards[wkey] = ast.Num((~target.n) % mod) return True else: wilds2 = self.wildcards[pattern.operand.id] num = ast.Num((~target.n) % 2**self.nbits) return asttools.Comparator().visit(wilds2, num) else: if wkey not in self.wildcards: self.wildcards[wkey] = ast.UnaryOp(ast.Invert(), target) return True return self.check_eq_z3(target, pattern) else: subpattern = pattern.operand newtarget = ast.UnaryOp(ast.Invert(), target) return self.check_eq_z3(newtarget, subpattern) def check_neg(self, target, pattern): if self.is_wildcard(pattern.right): wkey = pattern.right.id if isinstance(target, ast.Num): if wkey not in self.wildcards: mod = 2**self.nbits self.wildcards[wkey] = ast.Num((-target.n) % mod) return True else: wilds2 = self.wildcards[pattern.right.id] num = ast.Num((-target.n) % 2**self.nbits) return asttools.Comparator().visit(wilds2, num) else: if wkey not in self.wildcards: self.wildcards[wkey] = ast.BinOp(ast.Num(-1), ast.Mult(), target) return True return self.check_eq_z3(target, pattern) def check_twomult(self, target, pattern): if isinstance(pattern.left, ast.Num) and pattern.left.n == 2: operand = pattern.right elif isinstance(pattern.right, ast.Num) and pattern.right.n == 2: operand = pattern.left else: return False if isinstance(target, ast.Num) and isinstance(operand, ast.Name): conds = (operand.id in self.wildcards and isinstance(self.wildcards[operand.id], ast.Num)) if conds: eva = (self.wildcards[operand.id].n)*2 % 2**(self.nbits) if eva == target.n: return True else: if target.n % 2 == 0: self.wildcards[operand.id] = ast.Num(target.n / 2) return True return False getwild = asttools.GetIdentifiers() getwild.visit(operand) wilds = getwild.variables for wil in wilds: if wil not in self.wildcards: return False return self.check_eq_z3(target, pattern) def general_check(self, target, pattern): getwild = asttools.GetIdentifiers() getwild.visit(pattern) wilds = list(getwild.variables) if all(wil in self.wildcards for wil in wilds): eval_pattern = deepcopy(pattern) eval_pattern = EvalPattern(self.wildcards).visit(eval_pattern) return self.check_eq_z3(target, eval_pattern) return False def check_pattern(self, target, pattern): if asttools.CheckConstExpr().visit(pattern): if isinstance(target, ast.Num): pattcopy = deepcopy(pattern) eval_pat = asttools.ConstFolding(pattcopy, self.nbits).visit(pattcopy) return self.visit(target, eval_pat) if isinstance(target, ast.Num): return self.get_model(target, pattern) notnode = (isinstance(pattern, ast.UnaryOp) and isinstance(pattern.op, ast.Invert)) if notnode: return self.check_not(target, pattern) negnode = (isinstance(pattern, ast.BinOp) and isinstance(pattern.op, ast.Mult) and isinstance(pattern.left, ast.Num) and pattern.left.n == -1) if negnode: return self.check_neg(target, pattern) multnode = (isinstance(pattern, ast.BinOp) and isinstance(pattern.op, ast.Mult)) if multnode: return self.check_twomult(target, pattern) return False def visit(self, target, pattern): if self.is_wildcard(pattern): return self.check_wildcard(target, pattern) if type(target) != type(pattern): if FLEXIBLE: return self.check_pattern(target, pattern) else: return False nodetype = target.__class__.__name__ comp = getattr(self, "visit_%s" % nodetype, None) if not comp: raise Exception("no comparison function for %s" % nodetype) return comp(target, pattern) def visit_Num(self, target, pattern): mod = 2**self.nbits return (target.n % mod) == (pattern.n % mod) def 
visit_BinOp(self, target, pattern): if type(target.op) != type(pattern.op): if FLEXIBLE: return self.check_pattern(target, pattern) else: return False previous_state = deepcopy(self.wildcards) cond1 = (self.visit(target.left, pattern.left) and self.visit(target.right, pattern.right)) state = asttools.apply_hooks() nos = self.wildcards in self.no_solution asttools.restore_hooks(state) if cond1 and not nos: return True if nos: self.wildcards = deepcopy(previous_state) if not cond1 and not nos: wildsbackup = deepcopy(self.wildcards) self.wildcards = deepcopy(previous_state) cond1_prime = (self.visit(target.right, pattern.right) and self.visit(target.left, pattern.left)) if cond1_prime: return True else: self.wildcards = deepcopy(wildsbackup) if isinstance(target.op, (ast.Add, ast.Mult, ast.BitAnd, ast.BitOr, ast.BitXor)): cond2 = (self.visit(target.left, pattern.right) and self.visit(target.right, pattern.left)) if cond2: return True wildsbackup = deepcopy(self.wildcards) self.wildcards = deepcopy(previous_state) cond2_prime = (self.visit(target.right, pattern.left) and self.visit(target.left, pattern.right)) if cond2_prime: return True else: self.wildcards = deepcopy(wildsbackup) if target == self.root: self.no_solution.append(self.wildcards) self.wildcards = deepcopy(previous_state) cond1 = (self.visit(target.left, pattern.left) and self.visit(target.right, pattern.right)) if cond1: return True cond2 = (self.visit(target.left, pattern.right) and self.visit(target.right, pattern.left)) return cond1 or cond2 self.wildcards = deepcopy(previous_state) return False def visit_BoolOp(self, target, pattern): conds = (type(target.op) == type(pattern.op) and len(target.values) == len(pattern.values)) if not conds: return False old_context = deepcopy(self.wildcards) for perm in itertools.permutations(target.values): self.wildcards = deepcopy(old_context) res = True i = 0 for i in range(len(pattern.values)): res &= self.visit(perm[i], pattern.values[i]) if res: return res return False def visit_UnaryOp(self, target, pattern): if type(target.op) != type(pattern.op): return False return self.visit(target.operand, pattern.operand) def visit_Call(self, target, pattern): if (not self.visit(target.func, pattern.func) or len(target.args) != len(pattern.args)): return False if (not all([self.visit(t_arg, p_arg) for t_arg, p_arg in zip(target.args, pattern.args)]) or not all([self.visit(t_key, p_key) for t_key, p_key in zip(target.keywords, pattern.keywords)])): return False if (not (target.starargs is None and pattern.starargs is None) or not (target.kwargs is None and pattern.kwargs is None)): return False return True
BSD 3-Clause New or Revised License
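A hedged usage sketch for the match record above. It assumes sspam and the z3 solver bindings are installed; the concrete expressions are illustrative. Per the PatternMatcher shown in the context, upper-case names in the pattern act as wildcards that may bind arbitrary sub-expressions.

# Does the target contain an instance of the pattern "(A ^ B) + 2*(A & B)"?
from sspam.pattern_matcher import match

print(match("(x ^ y) + 2*(x & y)", "(A ^ B) + 2*(A & B)"))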
aur3lius-dev/spydir
SpyDir.py
Config.update_box
python
def update_box(self, event):
    for plug in self.loaded_p_list:
        if plug.get_name() == event.getActionCommand():
            plug.enabled = not plug.enabled
            if plug.enabled:
                self.update_scroll("[^] Enabled: %s" % event.getActionCommand())
            else:
                self.update_scroll("[^] Disabled: %s" % event.getActionCommand())
Handles the check/uncheck event for the plugin's box.
https://github.com/aur3lius-dev/spydir/blob/5bf1be49c7721092cac343fb2e28ccad0e7ba6fd/SpyDir.py#L709-L721
from os import walk, path from json import loads, dumps from imp import load_source from burp import (IBurpExtender, IBurpExtenderCallbacks, ITab, IContextMenuFactory) from javax.swing import (JPanel, JTextField, GroupLayout, JTabbedPane, JButton, JLabel, JScrollPane, JTextArea, JFileChooser, JCheckBox, JMenuItem, JFrame, JViewport) from java.net import URL, MalformedURLException from java.awt import GridLayout, GridBagLayout, GridBagConstraints, Dimension VERSION = "0.8.7" class BurpExtender(IBurpExtender, IBurpExtenderCallbacks, IContextMenuFactory): def __init__(self): self.config_tab = None self.messages = [] self._callbacks = None def registerExtenderCallbacks(self, callbacks): self.config_tab = SpyTab(callbacks) self._callbacks = callbacks callbacks.addSuiteTab(self.config_tab) callbacks.registerContextMenuFactory(self) def createMenuItems(self, invocation): context = invocation.getInvocationContext() if context == invocation.CONTEXT_MESSAGE_EDITOR_REQUEST or context == invocation.CONTEXT_MESSAGE_VIEWER_REQUEST or context == invocation.CONTEXT_PROXY_HISTORY or context == invocation.CONTEXT_TARGET_SITE_MAP_TABLE: self.messages = invocation.getSelectedMessages() if len(self.messages) == 1: return [JMenuItem('Send URL to SpyDir', actionPerformed=self.pass_url)] else: return None def pass_url(self, event): self.config_tab.update_url(self.messages) class SpyTab(JPanel, ITab): def __init__(self, callbacks): super(SpyTab, self).__init__(GroupLayout(self)) self._callbacks = callbacks config = Config(self._callbacks, self) about = About(self._callbacks) self.tabs = [config, about] self.j_tabs = self.build_ui() self.add(self.j_tabs) def build_ui(self): ui_tab = JTabbedPane() for tab in self.tabs: ui_tab.add(tab.getTabCaption(), tab.getUiComponent()) return ui_tab def switch_focus(self): self.j_tabs.setSelectedIndex(1) self.j_tabs.setSelectedIndex(0) def update_url(self, host): service = host[0].getHttpService() url = "%s://%s:%s" % (service.getProtocol(), service.getHost(), service.getPort()) self.tabs[0].set_url(url) @staticmethod def getTabCaption(): return "SpyDir" def getUiComponent(self): return self class Config(ITab): def __init__(self, callbacks, parent): self._callbacks = callbacks self.config = {} self.ext_stats = {} self.url_reqs = [] self.parse_files = False self.tab = JPanel(GridBagLayout()) self.view_port_text = JTextArea("===SpyDir===") self.delim = JTextField(30) self.ext_white_list = JTextField(30) self.url = JTextField(30) self.parent_window = parent self.plugins = {} self.loaded_p_list = set() self.loaded_plugins = False self.config['Plugin Folder'] = None self.double_click = False self.source_input = "" self.print_stats = True self.curr_conf = JLabel() self.window = JFrame("Select plugins", preferredSize=(200, 250), windowClosing=self.p_close) self.window.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE) self.window.setVisible(False) self.path_vars = JTextField(30) tab_constraints = GridBagConstraints() status_field = JScrollPane(self.view_port_text) self.view_port_text.setEditable(False) labels = self.build_ui() tab_constraints.anchor = GridBagConstraints.FIRST_LINE_END tab_constraints.gridx = 1 tab_constraints.gridy = 0 tab_constraints.fill = GridBagConstraints.HORIZONTAL self.tab.add(JButton( "Resize screen", actionPerformed=self.resize), tab_constraints) tab_constraints.gridx = 0 tab_constraints.gridy = 1 tab_constraints.anchor = GridBagConstraints.FIRST_LINE_START self.tab.add(labels, tab_constraints) tab_constraints.gridx = 1 tab_constraints.gridy = 1 
tab_constraints.fill = GridBagConstraints.BOTH tab_constraints.weightx = 1.0 tab_constraints.weighty = 1.0 tab_constraints.anchor = GridBagConstraints.FIRST_LINE_END self.tab.add(status_field, tab_constraints) try: self._callbacks.customizeUiComponent(self.tab) except Exception: pass def build_ui(self): labels = JPanel(GridLayout(21, 1)) checkbox = JCheckBox("Attempt to parse files for URL patterns?", False, actionPerformed=self.set_parse) stats_box = JCheckBox("Show stats?", True, actionPerformed=self.set_show_stats) plug_butt = JButton("Specify plugins location", actionPerformed=self.set_plugin_loc) load_plug_butt = JButton("Select plugins", actionPerformed=self.p_build_ui) parse_butt = JButton("Parse directory", actionPerformed=self.parse) clear_butt = JButton("Clear text", actionPerformed=self.clear) spider_butt = JButton("Send to Spider", actionPerformed=self.scan) save_butt = JButton("Save config", actionPerformed=self.save) rest_butt = JButton("Restore config", actionPerformed=self.restore) source_butt = JButton("Input Source File/Directory", actionPerformed=self.get_source_input) labels.add(source_butt) labels.add(self.curr_conf) labels.add(JLabel("String Delimiter:")) labels.add(self.delim) labels.add(JLabel("Extension Whitelist:")) labels.add(self.ext_white_list) labels.add(JLabel("URL:")) labels.add(self.url) labels.add(JLabel("Path Variables")) labels.add(self.path_vars) labels.add(checkbox) labels.add(stats_box) labels.add(plug_butt) labels.add(parse_butt) labels.add(JButton("Show all endpoints", actionPerformed=self.print_endpoints)) labels.add(clear_butt) labels.add(spider_butt) labels.add(JLabel("")) labels.add(save_butt) labels.add(rest_butt) labels.add(load_plug_butt) self.delim.setToolTipText("Use to manipulate the final URL. " "See About tab for example.") self.ext_white_list.setToolTipText("Define a comma delimited list of" " file extensions to parse. Use *" " to parse all files.") self.url.setToolTipText("Enter the target URL") checkbox.setToolTipText("Parse files line by line using plugins" " to enumerate language/framework specific" " endpoints") parse_butt.setToolTipText("Attempt to enumerate application endpoints") clear_butt.setToolTipText("Clear status window and the parse results") spider_butt.setToolTipText("Process discovered endpoints") save_butt.setToolTipText("Saves the current config settings") rest_butt.setToolTipText("<html>Restores previous config settings:" "<br/>-Input Directory<br/>-String Delim" "<br/>-Ext WL<br/>-URL<br/>-Plugins") source_butt.setToolTipText("Select the application's " "source directory or file to parse") self.path_vars.setToolTipText("Supply a JSON object with values" "for dynamically enumerated query" "string variables") return labels def set_url(self, menu_url): self.url.setText(menu_url) def set_parse(self, event): self.parse_files = not self.parse_files if self.parse_files: if not self.loaded_plugins: self._plugins_missing_warning() def restore(self, event): jdump = None try: jdump = loads(self._callbacks.loadExtensionSetting("config")) except Exception as exc: self.update_scroll( "[!!] 
Error during restore!\n\tException: %s" % str(exc)) if jdump is not None: self.url.setText(jdump.get('URL')) ewl = "" for ext in jdump.get('Extension Whitelist'): ewl += ext + ", " self.ext_white_list.setText(ewl[:-2]) self.delim.setText(jdump.get('String Delimiter')) self.source_input = jdump.get("Input Directory") self.config['Plugin Folder'] = jdump.get("Plugin Folder") if (self.config['Plugin Folder'] is not None and (len(self.plugins.values()) < 1)): self._load_plugins(self.config['Plugin Folder']) self._update() self.update_scroll("[^] Restore complete!") else: self.update_scroll("[!!] Restore failed!") def save(self, event=None): self._update() try: if not self._callbacks.isInScope(URL(self.url.getText())): self.update_scroll("[!!] URL provided is NOT in Burp Scope!") except MalformedURLException: pass try: self._callbacks.saveExtensionSetting("config", dumps(self.config)) self.update_scroll("[^] Settings saved!") except Exception: self.update_scroll("[!!] Error saving settings to Burp Suite!") def parse(self, event): self._update() file_set = set() fcount = 0 other_dirs = set() self.ext_stats = {} if self.loaded_plugins: self.update_scroll("[^] Attempting to parse files" + " for URL patterns. This might take a minute.") if path.isdir(self.source_input): for dirname, _, filenames in walk(self.source_input): for filename in filenames: fcount += 1 ext = path.splitext(filename)[1] count = self.ext_stats.get(ext, 0) + 1 filename = "%s/%s" % (dirname, filename) self.ext_stats.update({ext: count}) if self.parse_files and self._ext_test(ext): file_set.update(self._code_as_endpoints(filename, ext)) elif self._ext_test(ext): r_files, oths = self._files_as_endpoints(filename, ext) file_set.update(r_files) other_dirs.update(oths) elif path.isfile(self.source_input): ext = path.splitext(self.source_input)[1] file_set.update(self._code_as_endpoints(self.source_input, ext)) else: self.update_scroll("[!!] Input Directory is not valid!") if len(other_dirs) > 0: self.update_scroll("[*] Found files matching file extension in:\n") for other_dir in other_dirs: self.update_scroll(" " * 4 + "%s\n" % other_dir) self._handle_path_vars(file_set) self._print_parsed_status(fcount) return (other_dirs, self.url_reqs) def _handle_path_vars(self, file_set): proto = 'http://' for item in file_set: if item.startswith("http://") or item.startswith("https://"): proto = item.split("//")[0] + '//' item = item.replace(proto, "") item = self._path_vars(item) self.url_reqs.append(proto + item.replace('//', '/')) def _path_vars(self, item): p_vars = None if self.path_vars.getText(): try: p_vars = loads(str(self.path_vars.getText())) except: self.update_scroll("[!] Error reading supplied Path Variables!") if p_vars is not None: rep_str = "" try: for k in p_vars.keys(): rep_str += "[^] Replacing %s with %s!\n" % (k, str(p_vars.get(k))) self.update_scroll(rep_str) for k in p_vars.keys(): if str(k) in item: item = item.replace(k, str(p_vars.get(k))) except AttributeError: self.update_scroll("[!] Error reading supplied Path Variables! This needs to be a JSON dictionary!") return item def scan(self, event): temp_url = self.url.getText() if not self._callbacks.isInScope(URL(temp_url)): if not self.double_click: self.update_scroll("[!!] URL is not in scope! 
Press Send to " "Spider again to add to scope and scan!") self.double_click = True return else: self._callbacks.sendToSpider(URL(temp_url)) self.update_scroll( "[^] Sending %d requests to Spider" % len(self.url_reqs)) for req in self.url_reqs: self._callbacks.sendToSpider(URL(req)) def clear(self, event): self.view_port_text.setText("===SpyDir===") self.ext_stats = {} def print_endpoints(self, event): req_str = "" if len(self.url_reqs) > 0: self.update_scroll("[*] Printing all discovered endpoints:") for req in sorted(self.url_reqs): req_str += " %s\n" % req else: req_str = "[!!] No endpoints discovered" self.update_scroll(req_str) def set_show_stats(self, event): self.print_stats = not self.print_stats def get_source_input(self, event): source_chooser = JFileChooser() source_chooser.setFileSelectionMode( JFileChooser.FILES_AND_DIRECTORIES) source_chooser.showDialog(self.tab, "Choose Source Location") chosen_source = source_chooser.getSelectedFile() try: self.source_input = chosen_source.getAbsolutePath() except AttributeError: pass if self.source_input is not None: self.update_scroll("[*] Source location: %s" % self.source_input) self.curr_conf.setText(self.source_input) def _parse_file(self, filename, file_url): file_set = set() with open(filename, 'r') as plug_in: lines = plug_in.readlines() ext = path.splitext(filename)[1].upper() if ext in self.plugins.keys() and self._ext_test(ext): for plug in self.plugins.get(ext): if plug.enabled: res = plug.run(lines) if len(res) > 0: for i in res: i = file_url + i file_set.add(i) elif ext == '.TXT' and self._ext_test(ext): for i in lines: i = file_url + i file_set.add(i.strip()) return file_set def set_plugin_loc(self, event): if self.config['Plugin Folder'] is not None: choose_plugin_location = JFileChooser(self.config['Plugin Folder']) else: choose_plugin_location = JFileChooser() choose_plugin_location.setFileSelectionMode( JFileChooser.DIRECTORIES_ONLY) choose_plugin_location.showDialog(self.tab, "Choose Folder") chosen_folder = choose_plugin_location.getSelectedFile() self.config['Plugin Folder'] = chosen_folder.getAbsolutePath() self._load_plugins(self.config['Plugin Folder']) def _load_plugins(self, folder): report = "" if len(self.plugins.keys()) > 0: report = "[^] Plugins reloaded!" for _, _, filenames in walk(folder): for p_name in filenames: n_e = path.splitext(p_name) if n_e[1] == ".py": f_loc = "%s/%s" % (folder, p_name) loaded_plug = self._validate_plugin(n_e[0], f_loc) if loaded_plug: for p in self.loaded_p_list: if p.get_name() == loaded_plug.get_name(): self.loaded_p_list.discard(p) self.loaded_p_list.add(loaded_plug) if not report.startswith("[^]"): report += "%s loaded\n" % loaded_plug.get_name() self._dictify(self.loaded_p_list) if len(self.plugins.keys()) > 0: self.loaded_plugins = True else: report = "[!!] Plugins load failure" self.loaded_plugins = False self.update_scroll(report) return report def _validate_plugin(self, p_name, f_loc): try: plug = load_source(p_name, f_loc) except Exception as exc: self.update_scroll( "[!!] Error loading: %s\n\tType:%s Error: %s" % (f_loc, type(exc), str(exc))) funcs = dir(plug) err = [] if "get_name" not in funcs: err.append("get_name()") if "get_ext" not in funcs: err.append("get_ext()") if "run" not in funcs: err.append("run()") if len(err) < 1: return Plugin(plug, True) for issue in err: self.update_scroll("[!!] 
%s is missing: %s func" % (p_name, issue)) return None def _dictify(self, plist): for p in plist: exts = p.get_ext().upper() for ext in exts.split(","): prev_load = self.plugins.get(ext, []) prev_load.append(p) self.plugins[ext] = prev_load def _print_parsed_status(self, fcount): if self.parse_files and not self.loaded_plugins: self._plugins_missing_warning() if len(self.url_reqs) > 0: self.update_scroll("[*] Example URL: %s" % self.url_reqs[0]) if self.print_stats: report = (("[*] Found: %r files to be requested.\n\n" + "[*] Stats: \n " + "Found: %r files.\n") % (len(self.url_reqs), fcount)) if len(self.ext_stats) > 0: report += ("[*] Extensions found: %s" % str(dumps(self.ext_stats, sort_keys=True, indent=4))) else: report = ("[*] Found: %r files to be requested.\n" % len(self.url_reqs)) self.update_scroll(report) return report def _plugins_missing_warning(self): self.update_scroll("[!!] No plugins loaded!") def update_scroll(self, text): temp = self.view_port_text.getText().strip() if text not in temp or text[0:4] == "[!!]": self.view_port_text.setText("%s\n%s" % (temp, text)) elif not temp.endswith("[^] Status unchanged"): self.view_port_text.setText("%s\n[^] Status unchanged" % temp) def _code_as_endpoints(self, filename, ext): file_set = set() file_url = self.config.get("URL") if self.loaded_plugins or ext == '.txt': if self._ext_test(ext): file_set.update( self._parse_file(filename, file_url)) else: file_set.update( self._parse_file(filename, file_url)) return file_set def _files_as_endpoints(self, filename, ext): file_url = self.config.get("URL") broken_splt = "" other_dirs = set() file_set = set() str_del = self.config.get("String Delimiter") if not str_del: self.update_scroll("[!!] No available String Delimiter!") return spl_str = filename.split(str_del) try: if len(spl_str) > 1: file_url += ((spl_str[1]) .replace('\\', '/')) else: broken_splt = filename.split(self.source_input)[1] other_dirs.add(broken_splt) except Exception as exc: self.update_scroll("[!!] Error parsing: " + "%s\n\tException: %s" % (filename, str(exc))) if self._ext_test(ext): if file_url != self.config.get("URL"): file_set.add(file_url) else: other_dirs.discard(broken_splt) return file_set, other_dirs def _ext_test(self, ext): val = False if "*" in self.config.get("Extension Whitelist"): val = True else: val = (len(ext) > 0 and (ext.strip().upper() in self.config.get("Extension Whitelist"))) return val def _update(self): self.config["Input Directory"] = self.source_input self.config["String Delimiter"] = self.delim.getText() white_list_text = self.ext_white_list.getText() self.config["Extension Whitelist"] = white_list_text.upper().split(',') file_url = self.url.getText() if not (file_url.startswith('https://') or file_url.startswith('http://')): self.update_scroll("[!] Assuming protocol! Default value: 'http://'") file_url = 'http://' + file_url self.url.setText(file_url) if not file_url.endswith('/') and file_url != "": file_url += '/' self.config["URL"] = file_url del self.url_reqs[:] self.curr_conf.setText(self.source_input) def resize(self, event): if self.parent_window is not None: par_size = self.parent_window.getSize() par_size.setSize(par_size.getWidth() * .99, par_size.getHeight() * .9) self.tab.setPreferredSize(par_size) self.parent_window.validate() self.parent_window.switch_focus() def p_close(self, event): self.window.setVisible(False) self.window.dispose() def p_build_ui(self, event): if not self.loaded_p_list: self.update_scroll("[!!] 
No plugins loaded!") return scroll_pane = JScrollPane() scroll_pane.setPreferredSize(Dimension(200, 250)) check_frame = JPanel(GridBagLayout()) constraints = GridBagConstraints() constraints.fill = GridBagConstraints.HORIZONTAL constraints.gridy = 0 constraints.anchor = GridBagConstraints.FIRST_LINE_START for plug in self.loaded_p_list: check_frame.add(JCheckBox(plug.get_name(), plug.enabled, actionPerformed=self.update_box), constraints) constraints.gridy += 1 vport = JViewport() vport.setView(check_frame) scroll_pane.setViewport(vport) self.window.contentPane.add(scroll_pane) self.window.pack() self.window.setVisible(True)
MIT License
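The update_box record above belongs to a Burp Suite extension running under Jython, so it cannot be called outside Burp. The plain-Python sketch below only illustrates the toggle-by-name pattern the handler implements; the plugin names and FakePlugin class are made up.

# Illustration of the pattern: an event carries a plugin name, and the
# matching plugin's 'enabled' flag is flipped.
class FakePlugin:
    def __init__(self, name):
        self.name = name
        self.enabled = False
    def get_name(self):
        return self.name

plugins = {FakePlugin('php_endpoints'), FakePlugin('jsp_endpoints')}

def toggle(plugin_name):
    for plug in plugins:
        if plug.get_name() == plugin_name:
            plug.enabled = not plug.enabled
            state = 'Enabled' if plug.enabled else 'Disabled'
            print('[^] %s: %s' % (state, plugin_name))

toggle('php_endpoints')   # [^] Enabled: php_endpoints
toggle('php_endpoints')   # [^] Disabled: php_endpoints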
atomlinter/linter-pylama
bin/deps/pylint/checkers/imports.py
_make_tree_defs
python
def _make_tree_defs(mod_files_list):
    tree_defs = {}
    for mod, files in mod_files_list:
        node = (tree_defs, ())
        for prefix in mod.split('.'):
            node = node[0].setdefault(prefix, [{}, []])
        node[1] += files
    return tree_defs
Given a list of 2-tuples (module, list_of_files_which_import_this_module), return a dictionary representing this as a tree.
https://github.com/atomlinter/linter-pylama/blob/9157f7f84083007161814c93b537a712984f3c86/bin/deps/pylint/checkers/imports.py#L125-L135
import collections from distutils import sysconfig import os import sys import copy import six import astroid from astroid import are_exclusive from astroid.modutils import (get_module_part, is_standard_module) import isort from pylint.interfaces import IAstroidChecker from pylint.utils import get_global_option from pylint.exceptions import EmptyReportError from pylint.checkers import BaseChecker from pylint.checkers.utils import ( check_messages, node_ignores_exception, is_from_fallback_block ) from pylint.graph import get_cycles, DotBackend from pylint.reporters.ureports.nodes import VerbatimText, Paragraph def _qualified_names(modname): names = modname.split('.') return ['.'.join(names[0:i+1]) for i in range(len(names))] def _get_import_name(importnode, modname): if isinstance(importnode, astroid.ImportFrom): if importnode.level: root = importnode.root() if isinstance(root, astroid.Module): modname = root.relative_to_absolute_name( modname, level=importnode.level) return modname def _get_first_import(node, context, name, base, level, alias): fullname = '%s.%s' % (base, name) if base else name first = None found = False for first in context.body: if first is node: continue if first.scope() is node.scope() and first.fromlineno > node.fromlineno: continue if isinstance(first, astroid.Import): if any(fullname == iname[0] for iname in first.names): found = True break elif isinstance(first, astroid.ImportFrom): if level == first.level: for imported_name, imported_alias in first.names: if fullname == '%s.%s' % (first.modname, imported_name): found = True break if name != '*' and name == imported_name and not (alias or imported_alias): found = True break if found: break if found and not are_exclusive(first, node): return first return None def _ignore_import_failure(node, modname, ignored_modules): for submodule in _qualified_names(modname): if submodule in ignored_modules: return True return node_ignores_exception(node, ImportError)
MIT License
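A worked example of the tree structure _make_tree_defs builds, based only on the function body shown above. The helper is private to pylint, so the import path is an assumption that may change between versions; the module and file names are made up.

# Each tree node is [children_dict, files_list], keyed by dotted-name prefix.
from pylint.checkers.imports import _make_tree_defs  # assumed (private helper)

mod_files_list = [
    ('os.path', ['checker_a.py']),
    ('os', ['checker_b.py']),
]
# Expected shape: {'os': [{'path': [{}, ['checker_a.py']]}, ['checker_b.py']]}
print(_make_tree_defs(mod_files_list))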
jpwsutton/instax_api
instax/packet.py
Packet.__init__
python
def __init__(self, mode=None): pass
Init for Packet.
https://github.com/jpwsutton/instax_api/blob/51db4a49426b18511101d925534540718f45d0ad/instax/packet.py#L124-L126
import logging class PacketFactory(object): MESSAGE_TYPE_SPECIFICATIONS = 79 MESSAGE_TYPE_RESET = 80 MESSAGE_TYPE_PREP_IMAGE = 81 MESSAGE_TYPE_SEND_IMAGE = 82 MESSAGE_TYPE_83 = 83 MESSAGE_TYPE_SET_LOCK_STATE = 176 MESSAGE_TYPE_LOCK_DEVICE = 179 MESSAGE_TYPE_CHANGE_PASSWORD = 182 MESSAGE_TYPE_PRINTER_VERSION = 192 MESSAGE_TYPE_PRINT_COUNT = 193 MESSAGE_TYPE_MODEL_NAME = 194 MESSAGE_TYPE_195 = 195 MESSAGE_TYPE_PRE_PRINT = 196 MESSAGE_MODE_COMMAND = 36 MESSAGE_MODE_RESPONSE = 42 def __init__(self): pass def printRawByteArray(self, byteArray): hexString = ''.join('%02x' % i for i in byteArray) return(' '.join(hexString[i:i + 4] for i in range( 0, len(hexString), 4))) def decode(self, byteArray): self.byteArray = byteArray self.mode = byteArray[0] pType = byteArray[1] if pType == self.MESSAGE_TYPE_SPECIFICATIONS: return(SpecificationsCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_PRINTER_VERSION: return(VersionCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_PRINT_COUNT: return(PrintCountCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_RESET: return(ResetCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_PREP_IMAGE: return(PrepImageCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_SEND_IMAGE: return(SendImageCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_MODEL_NAME: return(ModelNameCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_PRE_PRINT: return(PrePrintCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_LOCK_DEVICE: return(PrinterLockCommand(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_83: return(Type83Command(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_195: return(Type195Command(mode=self.mode, byteArray=byteArray)) elif pType == self.MESSAGE_TYPE_SET_LOCK_STATE: return(LockStateCommand(mode=self.mode, byteArray=byteArray)) else: logging.debug("Unknown Packet Type: " + str(pType)) logging.debug("Packet Bytes: [" + self.printRawByteArray(byteArray) + "]") class Packet(object): MESSAGE_TYPE_SPECIFICATIONS = 79 MESSAGE_TYPE_RESET = 80 MESSAGE_TYPE_PREP_IMAGE = 81 MESSAGE_TYPE_SEND_IMAGE = 82 MESSAGE_TYPE_83 = 83 MESSAGE_TYPE_SET_LOCK_STATE = 176 MESSAGE_TYPE_LOCK_DEVICE = 179 MESSAGE_TYPE_CHANGE_PASSWORD = 182 MESSAGE_TYPE_PRINTER_VERSION = 192 MESSAGE_TYPE_PRINT_COUNT = 193 MESSAGE_TYPE_MODEL_NAME = 194 MESSAGE_TYPE_195 = 195 MESSAGE_TYPE_PRE_PRINT = 196 MESSAGE_MODE_COMMAND = 36 MESSAGE_MODE_RESPONSE = 42 RTN_E_RCV_FRAME = 0 RTN_E_PI_SENSOR = 248 RTN_E_UNMATCH_PASS = 247 RTN_E_MOTOR = 246 RTN_E_CAM_POINT = 245 RTN_E_FILM_EMPTY = 244 RTN_E_RCV_FRAME_1 = 243 RTN_E_RCV_FRAME_2 = 242 RTN_E_RCV_FRAME_3 = 241 RTN_E_RCV_FRAME_4 = 240 RTN_E_CONNECT = 224 RTN_E_CHARGE = 180 RTN_E_TESTING = 165 RTN_E_EJECTING = 164 RTN_E_PRINTING = 163 RTN_E_BATTERY_EMPTY = 162 RTN_E_NOT_IMAGE_DATA = 161 RTN_E_OTHER_USED = 160 RTN_ST_UPDATE = 127 strings = { MESSAGE_MODE_COMMAND: "Command", MESSAGE_MODE_RESPONSE: "Response" }
MIT License
dbcli/pgcli
tests/features/fixture_utils.py
read_fixture_files
python
def read_fixture_files():
    current_dir = os.path.dirname(__file__)
    fixture_dir = os.path.join(current_dir, "fixture_data/")
    print(f"reading fixture data: {fixture_dir}")
    fixture_dict = {}
    for filename in os.listdir(fixture_dir):
        if filename not in [".", ".."]:
            fullname = os.path.join(fixture_dir, filename)
            fixture_dict[filename] = read_fixture_lines(fullname)
    return fixture_dict
Read all files inside fixture_data directory.
https://github.com/dbcli/pgcli/blob/123e00a086d91281a2ed8cfd84452a796f22ad91/tests/features/fixture_utils.py#L17-L28
import os import codecs def read_fixture_lines(filename): lines = [] for line in codecs.open(filename, "rb", encoding="utf-8"): lines.append(line.strip()) return lines
BSD 3-Clause New or Revised License
roseou/flasky
venv/lib/python2.7/site-packages/pip/_vendor/distlib/index.py
PackageIndex.__init__
python
def __init__(self, url=None):
    self.url = url or DEFAULT_INDEX
    self.read_configuration()
    scheme, netloc, path, params, query, frag = urlparse(self.url)
    if params or query or frag or scheme not in ('http', 'https'):
        raise DistlibException('invalid repository: %s' % self.url)
    self.password_handler = None
    self.ssl_verifier = None
    self.gpg = None
    self.gpg_home = None
    with open(os.devnull, 'w') as sink:
        for s in ('gpg2', 'gpg'):
            try:
                rc = subprocess.check_call([s, '--version'], stdout=sink, stderr=sink)
                if rc == 0:
                    self.gpg = s
                    break
            except OSError:
                pass
Initialise an instance. :param url: The URL of the index. If not specified, the URL for PyPI is used.
https://github.com/roseou/flasky/blob/0ebf366ddfe9604acfba99756f69a6d63063b3f9/venv/lib/python2.7/site-packages/pip/_vendor/distlib/index.py#L36-L61
import hashlib import logging import os import shutil import subprocess import tempfile try: from threading import Thread except ImportError: from dummy_threading import Thread from distlib import DistlibException from distlib.compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, urlparse, build_opener) from distlib.util import cached_property, zip_dir logger = logging.getLogger(__name__) DEFAULT_INDEX = 'https://pypi.python.org/pypi' DEFAULT_REALM = 'pypi' class PackageIndex(object): boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
MIT License
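A minimal sketch for the PackageIndex constructor above, assuming distlib is available on its own (rather than as the pip-vendored copy shown here). It demonstrates the default PyPI URL and the scheme check that rejects non-http(s) repositories; the ftp URL is illustrative.

from distlib.index import PackageIndex
from distlib import DistlibException

index = PackageIndex()            # defaults to DEFAULT_INDEX (PyPI)
print(index.url, index.gpg)       # gpg is the detected gpg/gpg2 binary, or None

try:
    PackageIndex('ftp://example.invalid/simple')
except DistlibException as exc:
    print('rejected:', exc)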
dotwaffle/pinder
pinder/users/models.py
User.get_username
python
def get_username(self): return self.email
Required by Django to use as a key for authentication.
https://github.com/dotwaffle/pinder/blob/dfe2f17ebde34ff48de3a1c5c0c10dd2b19929b4/pinder/users/models.py#L52-L56
from django.contrib.gis.db import models from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin from .managers import UserManager class Isp(models.Model): asn = models.PositiveIntegerField(primary_key=True) name = models.CharField(max_length=128) def __str__(self): return self.name class User(AbstractBaseUser, PermissionsMixin): USERNAME_FIELD = "email" REQUIRED_FIELDS = ["name"] name = models.CharField(max_length=128) email = models.EmailField(unique=True) isp = models.ForeignKey(Isp) is_staff = models.BooleanField( "Staff status", default=False, help_text="Designates whether the user can log into the admin site." ) is_active = models.BooleanField( "Active", default=True, help_text="Designates whether this user should be treated as active." ) date_joined = models.DateTimeField(auto_now_add=True) date_modified = models.DateTimeField(auto_now=True) objects = UserManager() def __str__(self): return self.name def get_absolute_url(self): return "/users/%s/" % self.asn def get_short_name(self): return self.asn
Apache License 2.0
catalyst-team/catalyst-rl
catalyst_rl/contrib/nn/modules/lama.py
LamaPooling.forward
python
def forward(self, x: torch.Tensor, mask: torch.Tensor = None):
    batch_size, history_len, feature_size = x.shape
    x_ = []
    for pooling_fn in self.groups.values():
        features_ = pooling_fn(x, mask)
        x_.append(features_)
    x = torch.cat(x_, dim=1)
    x = x.view(batch_size, -1)
    return x
:param x: [batch_size, history_len, feature_size] :return: pooled features of shape [batch_size, feature_size * number_of_groups]
https://github.com/catalyst-team/catalyst-rl/blob/75ffa808e2bbb9071a169a1a9c813deb6a69a797/catalyst_rl/contrib/nn/modules/lama.py#L152-L166
import torch from torch import nn from catalyst_rl.utils import outer_init class TemporalLastPooling(nn.Module): def forward(self, x: torch.Tensor, mask: torch.Tensor = None): x_out = x[:, -1:, :] return x_out class TemporalAvgPooling(nn.Module): def forward(self, x: torch.Tensor, mask: torch.Tensor = None): if mask is None: x_out = x.mean(1, keepdim=True) else: x_ = torch.sum(x * mask.float(), dim=1, keepdim=True) mask_ = torch.sum(mask.float(), dim=1, keepdim=True) x_out = x_ / mask_ return x_out class TemporalMaxPooling(nn.Module): def forward(self, x: torch.Tensor, mask: torch.Tensor = None): if mask is not None: mask_ = (~mask.bool()).float() * (-x.max()).float() x = torch.sum(x + mask_, dim=1, keepdim=True) x_out = x.max(1, keepdim=True)[0] return x_out class TemporalAttentionPooling(nn.Module): name2activation = { "softmax": nn.Softmax(dim=1), "tanh": nn.Tanh(), "sigmoid": nn.Sigmoid() } def __init__(self, in_features, activation=None, kernel_size=1, **params): super().__init__() self.in_features = in_features activation = activation or "softmax" self.attention_pooling = nn.Sequential( nn.Conv1d( in_channels=in_features, out_channels=1, kernel_size=kernel_size, **params ), TemporalAttentionPooling.name2activation[activation] ) self.attention_pooling.apply(outer_init) def forward(self, x: torch.Tensor, mask: torch.Tensor = None): batch_size, history_len, feature_size = x.shape x = x.view(batch_size, history_len, -1) x_a = x.transpose(1, 2) x_attn = (self.attention_pooling(x_a) * x_a).transpose(1, 2) x_attn = x_attn.sum(1, keepdim=True) return x_attn class TemporalConcatPooling(nn.Module): def __init__(self, in_features, history_len=1): super().__init__() self.in_features = in_features self.out_features = in_features * history_len def forward(self, x: torch.Tensor, mask: torch.Tensor = None): x = x.view(x.shape[0], -1) return x class TemporalDropLastWrapper(nn.Module): def __init__(self, net): super().__init__() self.net = net def forward(self, x: torch.Tensor, mask: torch.Tensor = None): x = x[:, :-1, :] x_out = self.net(x) return x_out def get_pooling(key, in_features, **params): key_ = key.split("_", 1)[0] if key_ == "last": return TemporalLastPooling() elif key_ == "avg": layer = TemporalAvgPooling() elif key_ == "max": layer = TemporalMaxPooling() elif key_ in ["softmax", "tanh", "sigmoid"]: layer = TemporalAttentionPooling( in_features=in_features, activation=key_, **params ) else: raise NotImplementedError() if "droplast" in key: layer = TemporalDropLastWrapper(layer) return layer class LamaPooling(nn.Module): available_groups = [ "last", "avg", "avg_droplast", "max", "max_droplast", "sigmoid", "sigmoid_droplast", "softmax", "softmax_droplast", "tanh", "tanh_droplast", ] def __init__(self, in_features, groups=None): super().__init__() self.in_features = in_features self.groups = groups or ["last", "avg_droplast", "max_droplast", "softmax_droplast"] self.out_features = in_features * len(self.groups) groups = {} for key in self.groups: if isinstance(key, str): groups[key] = get_pooling(key, self.in_features) elif isinstance(key, dict): key_ = key.pop("key") groups[key_] = get_pooling(key_, in_features, **key) else: raise NotImplementedError() self.groups = nn.ModuleDict(groups)
Apache License 2.0
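A hedged usage sketch for LamaPooling.forward above: with the default four pooling groups, out_features is in_features * 4, so the history dimension is collapsed into a fixed-width feature vector. The shapes are arbitrary example values and the import path is assumed from the file location.

import torch
from catalyst_rl.contrib.nn.modules.lama import LamaPooling  # assumed import path

pool = LamaPooling(in_features=16)
x = torch.randn(8, 10, 16)        # [batch_size, history_len, feature_size]
out = pool(x)
print(out.shape)                  # torch.Size([8, 64]) with the default groups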
devopshq/teamcity
dohq_teamcity/api/user_api.py
UserApi.get_permissions
python
def get_permissions(self, user_locator, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.__get_permissions_with_http_info(user_locator, **kwargs)
    else:
        (data) = self.__get_permissions_with_http_info(user_locator, **kwargs)
        return data
get_permissions # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_permissions(user_locator, async_req=True) >>> result = thread.get() :param async_req: bool :param str user_locator: (required) :return: str If the method is called asynchronously, returns the request thread.
https://github.com/devopshq/teamcity/blob/84f1757ec1fddef27d39246a75739d047be0e831/dohq_teamcity/api/user_api.py#L288-L307
from __future__ import absolute_import from dohq_teamcity.custom.base_model import TeamCityObject import re import six from dohq_teamcity.models.group import Group from dohq_teamcity.models.groups import Groups from dohq_teamcity.models.permission_assignments import PermissionAssignments from dohq_teamcity.models.properties import Properties from dohq_teamcity.models.role import Role from dohq_teamcity.models.roles import Roles from dohq_teamcity.models.user import User from dohq_teamcity.models.users import Users class UserApi(object): base_name = 'User' def __init__(self, api_client=None): self.api_client = api_client def add_group(self, user_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_group_with_http_info(user_locator, **kwargs) else: (data) = self.__add_group_with_http_info(user_locator, **kwargs) return data def add_role(self, user_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_role_with_http_info(user_locator, **kwargs) else: (data) = self.__add_role_with_http_info(user_locator, **kwargs) return data def add_role_simple(self, user_locator, role_id, scope, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_role_simple_with_http_info(user_locator, role_id, scope, **kwargs) else: (data) = self.__add_role_simple_with_http_info(user_locator, role_id, scope, **kwargs) return data def add_role_simple_post(self, user_locator, role_id, scope, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_role_simple_post_with_http_info(user_locator, role_id, scope, **kwargs) else: (data) = self.__add_role_simple_post_with_http_info(user_locator, role_id, scope, **kwargs) return data def create_user(self, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__create_user_with_http_info(**kwargs) else: (data) = self.__create_user_with_http_info(**kwargs) return data def delete_remember_me(self, user_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__delete_remember_me_with_http_info(user_locator, **kwargs) else: (data) = self.__delete_remember_me_with_http_info(user_locator, **kwargs) return data def delete_role(self, user_locator, role_id, scope, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__delete_role_with_http_info(user_locator, role_id, scope, **kwargs) else: (data) = self.__delete_role_with_http_info(user_locator, role_id, scope, **kwargs) return data def delete_user(self, user_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__delete_user_with_http_info(user_locator, **kwargs) else: (data) = self.__delete_user_with_http_info(user_locator, **kwargs) return data def delete_user_field(self, user_locator, field, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__delete_user_field_with_http_info(user_locator, field, **kwargs) else: (data) = self.__delete_user_field_with_http_info(user_locator, field, **kwargs) return data def get_group(self, user_locator, group_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__get_group_with_http_info(user_locator, group_locator, **kwargs) else: (data) = self.__get_group_with_http_info(user_locator, group_locator, **kwargs) return data def get_groups(self, user_locator, **kwargs): 
kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__get_groups_with_http_info(user_locator, **kwargs) else: (data) = self.__get_groups_with_http_info(user_locator, **kwargs) return data
MIT License
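A hedged sketch following the generated docstring above: synchronous by default, or async_req=True to get a thread. The TeamCity entry point, the tc.users attribute, the server URL, credentials and user locator are all assumptions about the client wiring and should be checked against the project's README.

from dohq_teamcity import TeamCity  # assumed top-level client

tc = TeamCity('https://teamcity.example.com', auth=('user', 'password'))
api = tc.users                      # assumed attribute exposing UserApi

perms = api.get_permissions('username:jdoe')                    # blocking call
thread = api.get_permissions('username:jdoe', async_req=True)   # returns a thread
print(perms, thread.get())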
bouni/ard-mediathek
ard_media_downloader.py
ArdMediathekDownloader.validate_url
python
def validate_url(self, url):
    regex = re.compile(
        r'^(?:http)s?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
        r'(?::\d+)?'
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    try:
        return re.match(regex, url).group(0)
    except:
        raise ValueError("The given URL is not valid.")
Method to check whether the given URL is valid. Returns the URL if it is valid, otherwise raises a ValueError.
https://github.com/bouni/ard-mediathek/blob/9d042d9b578316d09c8ec4ad0e821974fd26f24c/ard_media_downloader.py#L27-L42
import os import re import requests from requests_html import HTMLSession from slugify import slugify from tqdm import tqdm class ArdMediathekDownloader(object): def __init__(self, url): self.url = self.validate_url(url) self.subtitle_url = None self._filename = None self.default_filename = "video.mp4" self._derive_filename = False self._quality = 3
MIT License
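A minimal sketch for the validate_url record above: the constructor runs the validation, so an invalid URL fails immediately with ValueError. The module name is assumed from the file ard_media_downloader.py, the repository's dependencies (requests, requests_html, slugify, tqdm) must be installed, and both URLs are illustrative.

from ard_media_downloader import ArdMediathekDownloader  # assumed module name

dl = ArdMediathekDownloader('https://www.ardmediathek.de/video/example')
print(dl.url)

try:
    ArdMediathekDownloader('not-a-url')
except ValueError as err:
    print(err)    # The given URL is not valid.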
panoptes/pocs
src/panoptes/pocs/camera/sbigudrv.py
SBIGDriver._bcd_to_int
python
def _bcd_to_int(self, bcd, int_type='ushort'):
    if int_type == 'ushort':
        bcd = bcd.to_bytes(ctypes.sizeof(ctypes.c_ushort), byteorder='big')
    elif int_type == 'ulong':
        bcd = bcd.to_bytes(ctypes.sizeof(ctypes.c_ulong), byteorder='big')
    else:
        self.logger.error('Unknown integer type {}!'.format(int_type))
        return
    return int(bcd.hex())
Function to decode the Binary Coded Decimals returned by the Get CCD Info command. These will be integers of C types ushort or ulong, encoding decimal numbers of the form XX.XX or XXXXXX.XX, i.e. when converting to a numerical value they will need dividing by 100.
https://github.com/panoptes/pocs/blob/fd8c4034c6d2d9a4da46e6a1d5aa5a7b9ed68112/src/panoptes/pocs/camera/sbigudrv.py#L623-L644
import ctypes import enum import threading import time import numpy as np from astropy import units as u from numpy.ctypeslib import as_ctypes from panoptes.pocs.camera.sdk import AbstractSDKDriver from panoptes.utils import error from panoptes.utils.time import CountdownTimer from panoptes.utils.utils import get_quantity_value class SBIGDriver(AbstractSDKDriver): def __init__(self, library_path=None, retries=1, **kwargs): self._command_lock = threading.Lock() self._retries = retries super().__init__(name='sbigudrv', library_path=library_path, **kwargs) @property def retries(self): return self._retries @retries.setter def retries(self, retries): retries = int(retries) if retries < 1: raise ValueError("retries should be 1 or greater, got {}!".format(retries)) self._retries = retries def get_SDK_version(self, request_type='DRIVER_STD'): driver_info_params = GetDriverInfoParams(driver_request_codes[request_type]) driver_info_results = GetDriverInfoResults0() self.open_driver() with self._command_lock: self._send_command('CC_GET_DRIVER_INFO', driver_info_params, driver_info_results) version_string = "{}, {}".format(driver_info_results.name.decode('ascii'), self._bcd_to_string(driver_info_results.version)) return version_string def get_devices(self): camera_info = QueryUSBResults2() with self._command_lock: self._send_command('CC_QUERY_USB2', results=camera_info) if not camera_info.camerasFound: raise error.PanError("No SBIG camera devices found.") cameras = {} for i in range(camera_info.camerasFound): serial_number = camera_info.usbInfo[i].serialNumber.decode('ascii') device_type = "DEV_USB{}".format(i + 1) cameras[serial_number] = device_type return cameras def open_driver(self): with self._command_lock: self._send_command('CC_OPEN_DRIVER') def open_device(self, device_type): odp = OpenDeviceParams(device_type_codes[device_type], 0, 0) with self._command_lock: self._send_command('CC_OPEN_DEVICE', params=odp) def establish_link(self): elp = EstablishLinkParams() elr = EstablishLinkResults() with self._command_lock: self._send_command('CC_ESTABLISH_LINK', params=elp, results=elr) def get_link_status(self): lsr = GetLinkStatusResults() with self._command_lock: self._send_command('CC_GET_LINK_STATUS', results=lsr) link_status = {'established': bool(lsr.linkEstablished), 'base_address': int(lsr.baseAddress), 'camera_type': camera_types[lsr.cameraType], 'com_total': int(lsr.comTotal), 'com_failed': int(lsr.comFailed)} return link_status def get_driver_handle(self): ghr = GetDriverHandleResults() with self._command_lock: self._send_command('CC_GET_DRIVER_HANDLE', results=ghr) return ghr.handle def set_handle(self, handle): set_handle_params = SetDriverHandleParams(handle) self._send_command('CC_SET_DRIVER_HANDLE', params=set_handle_params) def get_ccd_info(self, handle): ccd_info_params0 = GetCCDInfoParams(ccd_info_request_codes['CCD_INFO_IMAGING']) ccd_info_results0 = GetCCDInfoResults0() ccd_info_params2 = GetCCDInfoParams(ccd_info_request_codes['CCD_INFO_EXTENDED']) ccd_info_results2 = GetCCDInfoResults2() ccd_info_params4 = GetCCDInfoParams(ccd_info_request_codes['CCD_INFO_EXTENDED2_IMAGING']) ccd_info_results4 = GetCCDInfoResults4() ccd_info_params6 = GetCCDInfoParams(ccd_info_request_codes['CCD_INFO_EXTENDED3']) ccd_info_results6 = GetCCDInfoResults6() with self._command_lock: self.set_handle(handle) self._send_command('CC_GET_CCD_INFO', params=ccd_info_params0, results=ccd_info_results0) self._send_command('CC_GET_CCD_INFO', params=ccd_info_params2, results=ccd_info_results2) 
self._send_command('CC_GET_CCD_INFO', params=ccd_info_params4, results=ccd_info_results4) self._send_command('CC_GET_CCD_INFO', params=ccd_info_params6, results=ccd_info_results6) ccd_info = {'firmware version': self._bcd_to_string(ccd_info_results0.firmwareVersion), 'camera type': camera_types[ccd_info_results0.cameraType], 'camera name': str(ccd_info_results0.name, encoding='ascii'), 'bad columns': ccd_info_results2.columns[0:ccd_info_results2.badColumns], 'imaging ABG': bool(ccd_info_results2.imagingABG), 'serial number': str(ccd_info_results2.serialNumber, encoding='ascii'), 'frame transfer': bool(ccd_info_results4.capabilities_b0), 'electronic shutter': bool(ccd_info_results4.capabilities_b1), 'remote guide head support': bool(ccd_info_results4.capabilities_b2), 'Biorad TDI support': bool(ccd_info_results4.capabilities_b3), 'AO8': bool(ccd_info_results4.capabilities_b4), 'frame buffer': bool(ccd_info_results4.capabilities_b5), 'dump extra': ccd_info_results4.dumpExtra, 'STXL': bool(ccd_info_results6.camera_b0), 'mechanical shutter': not bool(ccd_info_results6.camera_b1), 'colour': bool(ccd_info_results6.ccd_b0), 'Truesense': bool(ccd_info_results6.ccd_b1)} readout_mode_info = self._parse_readout_info( ccd_info_results0.readoutInfo[0:ccd_info_results0.readoutModes]) ccd_info['readout modes'] = readout_mode_info return ccd_info def disable_vdd_optimized(self, handle): set_driver_control_params = SetDriverControlParams( driver_control_codes['DCP_VDD_OPTIMIZED'], 0) self.logger.debug('Disabling DCP_VDD_OPTIMIZE on {}'.format(handle)) with self._command_lock: self.set_handle(handle) self._send_command('CC_SET_DRIVER_CONTROL', params=set_driver_control_params) def query_temp_status(self, handle): qtp = QueryTemperatureStatusParams(temp_status_request_codes['TEMP_STATUS_ADVANCED2']) qtr = QueryTemperatureStatusResults2() with self._command_lock: self.set_handle(handle) self._send_command('CC_QUERY_TEMPERATURE_STATUS', qtp, qtr) temp_status = {'cooling_enabled': bool(qtr.coolingEnabled), 'fan_enabled': bool(qtr.fanEnabled), 'ccd_set_point': qtr.ccdSetpoint * u.Celsius, 'imaging_ccd_temperature': qtr.imagingCCDTemperature * u.Celsius, 'tracking_ccd_temperature': qtr.trackingCCDTemperature * u.Celsius, 'external_ccd_temperature': qtr.externalTrackingCCDTemperature * u.Celsius, 'ambient_temperature': qtr.ambientTemperature * u.Celsius, 'imaging_ccd_power': qtr.imagingCCDPower * u.percent, 'tracking_ccd_power': qtr.trackingCCDPower * u.percent, 'external_ccd_power': qtr.externalTrackingCCDPower * u.percent, 'heatsink_temperature': qtr.heatsinkTemperature * u.Celsius, 'fan_power': qtr.fanPower * u.percent, 'fan_speed': qtr.fanSpeed / u.minute, 'tracking_ccd_set_point': qtr.trackingCCDSetpoint * u.Celsius} return temp_status def set_temp_regulation(self, handle, target_temperature, enabled): target_temperature = get_quantity_value(target_temperature, unit=u.Celsius) if enabled: enable_code = temperature_regulation_codes['REGULATION_ON'] else: enable_code = temperature_regulation_codes['REGULATION_OFF'] set_temp_params = SetTemperatureRegulationParams2(enable_code, target_temperature) autofreeze_code = temperature_regulation_codes['REGULATION_ENABLE_AUTOFREEZE'] set_freeze_params = SetTemperatureRegulationParams2(autofreeze_code, target_temperature) with self._command_lock: self.set_handle(handle) self._send_command('CC_SET_TEMPERATURE_REGULATION2', params=set_temp_params) self._send_command('CC_SET_TEMPERATURE_REGULATION2', params=set_freeze_params) def get_exposure_status(self, handle): 
query_status_params = QueryCommandStatusParams(command_codes['CC_START_EXPOSURE2']) query_status_results = QueryCommandStatusResults() with self._command_lock: self.set_handle(handle) self._send_command('CC_QUERY_COMMAND_STATUS', params=query_status_params, results=query_status_results) return statuses[query_status_results.status] def start_exposure(self, handle, seconds, dark, antiblooming, readout_mode, top, left, height, width): centiseconds = int(get_quantity_value(seconds, unit=u.second) * 100) if antiblooming: abg_command_code = abg_state_codes['ABG_CLK_MED7'] else: abg_command_code = abg_state_codes['ABG_LOW7'] if not dark: shutter_command_code = shutter_command_codes['SC_OPEN_SHUTTER'] else: shutter_command_code = shutter_command_codes['SC_CLOSE_SHUTTER'] start_exposure_params = StartExposureParams2(ccd_codes['CCD_IMAGING'], centiseconds, abg_command_code, shutter_command_code, readout_mode_codes[readout_mode], int(get_quantity_value(top, u.pixel)), int(get_quantity_value(left, u.pixel)), int(get_quantity_value(height, u.pixel)), int(get_quantity_value(width, u.pixel))) with self._command_lock: self.set_handle(handle) self._send_command('CC_START_EXPOSURE2', params=start_exposure_params) def readout(self, handle, readout_mode, top, left, height, width): readout_mode_code = readout_mode_codes[readout_mode] top = int(get_quantity_value(top, unit=u.pixel)) left = int(get_quantity_value(left, unit=u.pixel)) height = int(get_quantity_value(height, unit=u.pixel)) width = int(get_quantity_value(width, unit=u.pixel)) end_exposure_params = EndExposureParams(ccd_codes['CCD_IMAGING']) start_readout_params = StartReadoutParams(ccd_codes['CCD_IMAGING'], readout_mode_code, top, left, height, width) readout_line_params = ReadoutLineParams(ccd_codes['CCD_IMAGING'], readout_mode_code, left, width) end_readout_params = EndReadoutParams(ccd_codes['CCD_IMAGING']) image_data = np.zeros((height, width), dtype=np.uint16) rows_got = 0 with self._command_lock: self.set_handle(handle) self._send_command('CC_END_EXPOSURE', params=end_exposure_params) self._send_command('CC_START_READOUT', params=start_readout_params) try: for i in range(height): self._send_command('CC_READOUT_LINE', params=readout_line_params, results=as_ctypes(image_data[i])) rows_got += 1 except RuntimeError as err: message = 'expected {} rows, got {}: {}'.format(height, rows_got, err) raise RuntimeError(message) try: self._send_command('CC_END_READOUT', params=end_readout_params) except RuntimeError as err: message = "error ending readout: {}".format(err) raise RuntimeError(message) return image_data def cfw_init(self, handle, model='AUTO', timeout=10 * u.second): self.logger.debug("Initialising filter wheel on {}".format(handle)) cfw_init = self._cfw_params(handle, model, CFWCommand.INIT) init_event = threading.Event() poll_thread = threading.Thread(target=self._cfw_poll, args=(handle, 1, model, init_event, timeout), daemon=True) poll_thread.start() init_event.wait() return self._cfw_parse_results(cfw_init) def cfw_query(self, handle, model='AUTO'): cfw_query = self._cfw_command(handle, model, CFWCommand.QUERY) return self._cfw_parse_results(cfw_query) def cfw_get_info(self, handle, model='AUTO'): cfw_info = self._cfw_command(handle, model, CFWCommand.GET_INFO, CFWGetInfoSelect.FIRMWARE_VERSION) results = {'model': CFWModelSelect(cfw_info.cfwModel).name, 'firmware_version': int(cfw_info.cfwResults1), 'n_positions': int(cfw_info.cfwResults2)} msg = "Filter wheel on {}, model: {}, firmware version: {}, number of positions: {}".format( 
handle, results['model'], results['firmware_version'], results['n_positions']) self.logger.debug(msg) return results def cfw_goto(self, handle, position, model='AUTO', cfw_event=None, timeout=10 * u.second): self.logger.debug("Moving filter wheel on {} to position {}".format(handle, position)) info = self.cfw_get_info(handle, model) if position < 1 or position > info['n_positions']: msg = "Position must be between 1 and {}, got {}".format( info['n_positions'], position) self.logger.error(msg) raise RuntimeError(msg) query = self.cfw_query(handle, model) if query['status'] == CFWStatus.BUSY: msg = "Attempt to move filter wheel when already moving" self.logger.error(msg) raise RuntimeError(msg) cfw_goto_results = self._cfw_command(handle, model, CFWCommand.GOTO, position) poll_thread = threading.Thread(target=self._cfw_poll, args=(handle, position, model, cfw_event, timeout), daemon=True) poll_thread.start() return self._cfw_parse_results(cfw_goto_results) def _cfw_poll(self, handle, position, model='AUTO', cfw_event=None, timeout=None): if timeout is not None: timer = CountdownTimer(duration=timeout) try: query = self.cfw_query(handle, model) while query['status'] == 'BUSY': if timeout is not None and timer.expired(): msg = "Timeout waiting for filter wheel {} to move to {}".format( handle, position) raise error.Timeout(msg) time.sleep(0.1) query = self.cfw_query(handle, model) except RuntimeError as err: self.logger.error('Error while moving filter wheel on {} to {}: {}'.format( handle, position, err)) raise err else: if query['status'] == 'IDLE' and query['position'] == position: self.logger.debug('Filter wheel on {} moved to position {}'.format( handle, query['position'])) else: msg = 'Problem moving filter wheel on {} to {} - status: {}, position: {}'.format( handle, position, query['status'], query['position']) self.logger.error(msg) raise RuntimeError(msg) finally: if cfw_event is not None: cfw_event.set() def _cfw_parse_results(self, cfw_results): results = {'model': CFWModelSelect(cfw_results.cfwModel).name, 'position': int(cfw_results.cfwPosition), 'status': CFWStatus(cfw_results.cfwStatus).name, 'error': CFWError(cfw_results.cfwError).name} if results['position'] == 0: results['position'] = float('nan') return results def _cfw_command(self, handle, model, *args): cfw_params = CFWParams(CFWModelSelect[model], *args) cfw_results = CFWResults() with self._command_lock: self.set_handle(handle) self._send_command('CC_CFW', cfw_params, cfw_results) return cfw_results
MIT License
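The SBIGDriver context above shows the usual connection sequence (query USB devices, open a device, establish a link, fetch a driver handle, then issue commands under the shared lock). The sketch below walks that sequence for a single attached camera; the library path and the printed temperature field are assumptions for illustration, not values taken from this record.

# Minimal usage sketch, assuming the SBIG universal driver library is installed locally.
driver = SBIGDriver(library_path='/usr/local/lib/libsbigudrv.so')
cameras = driver.get_devices()                  # e.g. {'<serial>': 'DEV_USB1'}
serial_number, device_type = next(iter(cameras.items()))
driver.open_device(device_type)
driver.establish_link()
handle = driver.get_driver_handle()
temps = driver.query_temp_status(handle)        # dict of astropy Quantities
print(serial_number, temps['imaging_ccd_temperature'])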
wangpinggl/treqs
LeafNATS/data/utils.py
create_batch_memory
python
def create_batch_memory(path_, file_, is_shuffle, batch_size, is_lower=True):
    file_name = os.path.join(path_, file_)
    corpus_arr = []
    fp = open(file_name, 'r', encoding="iso-8859-1")
    for line in fp:
        if is_lower:
            line = line.lower()
        corpus_arr.append(line)
    fp.close()
    if is_shuffle:
        random.shuffle(corpus_arr)

    data_split = []
    for itm in corpus_arr:
        try:
            arr.append(itm)
        except:
            arr = [itm]
        if len(arr) == batch_size:
            data_split.append(arr)
            arr = []
    if len(arr) > 0:
        data_split.append(arr)
        arr = []

    return data_split
Users cannot rewrite this function unless they want to rewrite the engine.
Used when the data is relatively small: this will store the data in memory.
Advantage: fast and easy to handle.
https://github.com/wangpinggl/treqs/blob/1c3bda58b08fce0015ae1c6680cc928b3c2580cf/LeafNATS/data/utils.py#L198-L237
import glob import os import random import re import shutil import numpy as np import torch from torch.autograd import Variable def construct_vocab(file_, max_size=200000, mincount=5): vocab2id = {'<s>': 2, '</s>': 3, '<pad>': 1, '<unk>': 0, '<stop>': 4} id2vocab = {2: '<s>', 3: '</s>', 1: '<pad>', 0: '<unk>', 4: '<stop>'} word_pad = {'<s>': 2, '</s>': 3, '<pad>': 1, '<unk>': 0, '<stop>': 4} cnt = len(vocab2id) with open(file_, 'r') as fp: for line in fp: arr = re.split(' ', line[:-1]) if len(arr) == 1: arr = re.split('<sec>', line[:-1]) if arr[0] == ' ': continue if arr[0] in word_pad: continue if int(arr[1]) >= mincount: vocab2id[arr[0]] = cnt id2vocab[cnt] = arr[0] cnt += 1 if len(vocab2id) == max_size: break return vocab2id, id2vocab def load_vocab_pretrain(file_pretrain_vocab, file_pretrain_vec): vocab2id = {'<s>': 2, '</s>': 3, '<pad>': 1, '<unk>': 0, '<stop>': 4} id2vocab = {2: '<s>', 3: '</s>', 1: '<pad>', 0: '<unk>', 4: '<stop>'} word_pad = {'<s>': 2, '</s>': 3, '<pad>': 1, '<unk>': 0, '<stop>': 4} pad_cnt = len(vocab2id) cnt = len(vocab2id) with open(file_pretrain_vocab, 'r') as fp: for line in fp: arr = re.split(' ', line[:-1]) if len(arr) == 1: arr = re.split('<sec>', line[:-1]) if arr[0] == ' ': continue if arr[0] in word_pad: continue vocab2id[arr[0]] = cnt id2vocab[cnt] = arr[0] cnt += 1 pretrain_vec = np.load(file_pretrain_vec) pad_vec = np.zeros([pad_cnt, pretrain_vec.shape[1]]) pretrain_vec = np.vstack((pad_vec, pretrain_vec)) return vocab2id, id2vocab, pretrain_vec def construct_pos_vocab(file_): vocab2id = {'<pad>': 0} id2vocab = {0: '<pad>'} word_pad = {'<pad>': 0} cnt = len(vocab2id) with open(file_, 'r') as fp: for line in fp: arr = re.split(' ', line[:-1]) if len(arr) == 1: arr = re.split('<sec>', line[:-1]) if arr[0] == ' ': continue if arr[0] in word_pad: continue vocab2id[arr[0]] = cnt id2vocab[cnt] = arr[0] cnt += 1 return vocab2id, id2vocab def construct_char_vocab(file_): vocab2id = {'<pad>': 0} id2vocab = {0: '<pad>'} word_pad = {'<pad>': 0} cnt = len(vocab2id) with open(file_, 'r') as fp: for line in fp: arr = re.split(' ', line[:-1]) if len(arr) == 1: arr = re.split('<sec>', line[:-1]) if arr[0] == ' ': continue if arr[0] in word_pad: continue vocab2id[arr[0]] = cnt id2vocab[cnt] = arr[0] cnt += 1 return vocab2id, id2vocab def create_batch_file(path_data, path_work, is_shuffle, fkey_, file_, batch_size, is_lower=True): file_name = os.path.join(path_data, file_) folder = os.path.join(path_work, 'batch_'+fkey_+'_'+str(batch_size)) try: shutil.rmtree(folder) os.mkdir(folder) except: os.mkdir(folder) corpus_arr = [] fp = open(file_name, 'r', encoding="iso-8859-1") for line in fp: if is_lower: line = line.lower() corpus_arr.append(line) fp.close() if is_shuffle: random.shuffle(corpus_arr) cnt = 0 for itm in corpus_arr: try: arr.append(itm) except: arr = [itm] if len(arr) == batch_size: fout = open(os.path.join(folder, str(cnt)), 'w') for sen in arr: fout.write(sen) fout.close() arr = [] cnt += 1 if len(arr) > 0: fout = open(os.path.join(folder, str(cnt)), 'w') for sen in arr: fout.write(sen) fout.close() arr = [] cnt += 1 return cnt
MIT License
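As a quick illustration of how the batching above behaves, the sketch below loads a text file into memory and iterates over the resulting batches. The directory, file name, batch size, and the process() step are placeholders, not values taken from this record.

# Minimal usage sketch; paths, batch size and process() are assumptions.
batches = create_batch_memory(
    path_='../data/', file_='train.txt',
    is_shuffle=True, batch_size=16, is_lower=True)
print('{} batches loaded'.format(len(batches)))
for batch in batches:
    # each batch is a list of at most 16 raw lines; the last one may be shorter
    process(batch)  # hypothetical downstream step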
hyperledger/aries-cloudagent-python
aries_cloudagent/messaging/responder.py
BaseResponder.send_outbound
python
async def send_outbound(self, message: OutboundMessage) -> OutboundSendStatus:
Send an outbound message. Args: message: The `OutboundMessage` to be sent
https://github.com/hyperledger/aries-cloudagent-python/blob/fec69f1a2301e4745fc9d40cea190050e3f595fa/aries_cloudagent/messaging/responder.py#L115-L121
from abc import ABC, abstractmethod import json from typing import Sequence, Union from ..connections.models.connection_target import ConnectionTarget from ..core.error import BaseError from ..transport.outbound.message import OutboundMessage from .base_message import BaseMessage from ..transport.outbound.status import OutboundSendStatus class ResponderError(BaseError): class BaseResponder(ABC): def __init__( self, *, connection_id: str = None, reply_session_id: str = None, reply_to_verkey: str = None, ): self.connection_id = connection_id self.reply_session_id = reply_session_id self.reply_to_verkey = reply_to_verkey async def create_outbound( self, message: Union[BaseMessage, str, bytes], *, connection_id: str = None, reply_session_id: str = None, reply_thread_id: str = None, reply_to_verkey: str = None, reply_from_verkey: str = None, target: ConnectionTarget = None, target_list: Sequence[ConnectionTarget] = None, to_session_only: bool = False, ) -> OutboundMessage: if isinstance(message, BaseMessage): serialized = message.serialize() payload = json.dumps(serialized) enc_payload = None if not reply_thread_id: reply_thread_id = message._thread_id else: payload = None enc_payload = message return OutboundMessage( connection_id=connection_id, enc_payload=enc_payload, payload=payload, reply_session_id=reply_session_id, reply_thread_id=reply_thread_id, reply_to_verkey=reply_to_verkey, reply_from_verkey=reply_from_verkey, target=target, target_list=target_list, to_session_only=to_session_only, ) async def send( self, message: Union[BaseMessage, str, bytes], **kwargs ) -> OutboundSendStatus: outbound = await self.create_outbound(message, **kwargs) return await self.send_outbound(outbound) async def send_reply( self, message: Union[BaseMessage, str, bytes], *, connection_id: str = None, target: ConnectionTarget = None, target_list: Sequence[ConnectionTarget] = None, ) -> OutboundSendStatus: outbound = await self.create_outbound( message, connection_id=connection_id or self.connection_id, reply_session_id=self.reply_session_id, reply_to_verkey=self.reply_to_verkey, target=target, target_list=target_list, ) return await self.send_outbound(outbound) @abstractmethod
Apache License 2.0
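Because send_outbound() is abstract, a concrete responder has to supply the actual delivery mechanism while the base class's send()/send_reply() helpers build the OutboundMessage. The sketch below is one hypothetical implementation that parks messages on an asyncio queue; the returned OutboundSendStatus member name is an assumption, not something shown in this record.

import asyncio

class QueueResponder(BaseResponder):
    """Hypothetical responder that parks outbound messages on an asyncio queue."""

    def __init__(self, queue: asyncio.Queue, **kwargs):
        super().__init__(**kwargs)
        self._queue = queue

    async def send_outbound(self, message: OutboundMessage) -> OutboundSendStatus:
        # Delivery is deferred to whatever consumes the queue.
        await self._queue.put(message)
        return OutboundSendStatus.QUEUED_FOR_DELIVERY  # assumed enum member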
lscsoft/bilby
bilby/gw/detector/interferometer.py
Interferometer.vertex_position_geocentric
python
def vertex_position_geocentric(self):
    return gwutils.get_vertex_position_geocentric(self.geometry.latitude_radians,
                                                  self.geometry.longitude_radians,
                                                  self.geometry.elevation)
Calculate the position of the IFO vertex in geocentric coordinates in meters.

Based on arXiv:gr-qc/0008066 Eqs. B11-B13 except for the typo in the definition
of the local radius. See Section 2.1 of LIGO-T980044-10 for the correct expression.

Returns
=======
array_like: A 3D array representation of the vertex
https://github.com/lscsoft/bilby/blob/b1e02f1dfae03d4939cae9c95eff300c22919689/bilby/gw/detector/interferometer.py#L484-L497
import os import sys import numpy as np from ...core import utils from ...core.utils import docstring, logger, PropertyAccessor from .. import utils as gwutils from .calibration import Recalibrate from .geometry import InterferometerGeometry from .strain_data import InterferometerStrainData class Interferometer(object): length = PropertyAccessor('geometry', 'length') latitude = PropertyAccessor('geometry', 'latitude') latitude_radians = PropertyAccessor('geometry', 'latitude_radians') longitude = PropertyAccessor('geometry', 'longitude') longitude_radians = PropertyAccessor('geometry', 'longitude_radians') elevation = PropertyAccessor('geometry', 'elevation') x = PropertyAccessor('geometry', 'x') y = PropertyAccessor('geometry', 'y') xarm_azimuth = PropertyAccessor('geometry', 'xarm_azimuth') yarm_azimuth = PropertyAccessor('geometry', 'yarm_azimuth') xarm_tilt = PropertyAccessor('geometry', 'xarm_tilt') yarm_tilt = PropertyAccessor('geometry', 'yarm_tilt') vertex = PropertyAccessor('geometry', 'vertex') detector_tensor = PropertyAccessor('geometry', 'detector_tensor') duration = PropertyAccessor('strain_data', 'duration') sampling_frequency = PropertyAccessor('strain_data', 'sampling_frequency') start_time = PropertyAccessor('strain_data', 'start_time') frequency_array = PropertyAccessor('strain_data', 'frequency_array') time_array = PropertyAccessor('strain_data', 'time_array') minimum_frequency = PropertyAccessor('strain_data', 'minimum_frequency') maximum_frequency = PropertyAccessor('strain_data', 'maximum_frequency') frequency_mask = PropertyAccessor('strain_data', 'frequency_mask') frequency_domain_strain = PropertyAccessor('strain_data', 'frequency_domain_strain') time_domain_strain = PropertyAccessor('strain_data', 'time_domain_strain') def __init__(self, name, power_spectral_density, minimum_frequency, maximum_frequency, length, latitude, longitude, elevation, xarm_azimuth, yarm_azimuth, xarm_tilt=0., yarm_tilt=0., calibration_model=Recalibrate()): self.geometry = InterferometerGeometry(length, latitude, longitude, elevation, xarm_azimuth, yarm_azimuth, xarm_tilt, yarm_tilt) self.name = name self.power_spectral_density = power_spectral_density self.calibration_model = calibration_model self.strain_data = InterferometerStrainData( minimum_frequency=minimum_frequency, maximum_frequency=maximum_frequency) self.meta_data = dict() def __eq__(self, other): if self.name == other.name and self.geometry == other.geometry and self.power_spectral_density.__eq__(other.power_spectral_density) and self.calibration_model == other.calibration_model and self.strain_data == other.strain_data: return True return False def __repr__(self): return self.__class__.__name__ + '(name=\'{}\', power_spectral_density={}, minimum_frequency={}, ' 'maximum_frequency={}, length={}, latitude={}, longitude={}, elevation={}, ' 'xarm_azimuth={}, yarm_azimuth={}, xarm_tilt={}, yarm_tilt={})' .format(self.name, self.power_spectral_density, float(self.strain_data.minimum_frequency), float(self.strain_data.maximum_frequency), float(self.geometry.length), float(self.geometry.latitude), float(self.geometry.longitude), float(self.geometry.elevation), float(self.geometry.xarm_azimuth), float(self.geometry.yarm_azimuth), float(self.geometry.xarm_tilt), float(self.geometry.yarm_tilt)) def set_strain_data_from_gwpy_timeseries(self, time_series): self.strain_data.set_from_gwpy_timeseries(time_series=time_series) def set_strain_data_from_frequency_domain_strain( self, frequency_domain_strain, sampling_frequency=None, 
duration=None, start_time=0, frequency_array=None): self.strain_data.set_from_frequency_domain_strain( frequency_domain_strain=frequency_domain_strain, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time, frequency_array=frequency_array) def set_strain_data_from_power_spectral_density( self, sampling_frequency, duration, start_time=0): self.strain_data.set_from_power_spectral_density( self.power_spectral_density, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time) def set_strain_data_from_frame_file( self, frame_file, sampling_frequency, duration, start_time=0, channel=None, buffer_time=1): self.strain_data.set_from_frame_file( frame_file=frame_file, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time, channel=channel, buffer_time=buffer_time) def set_strain_data_from_channel_name( self, channel, sampling_frequency, duration, start_time=0): self.strain_data.set_from_channel_name( channel=channel, sampling_frequency=sampling_frequency, duration=duration, start_time=start_time) def set_strain_data_from_csv(self, filename): self.strain_data.set_from_csv(filename) def set_strain_data_from_zero_noise( self, sampling_frequency, duration, start_time=0): self.strain_data.set_from_zero_noise( sampling_frequency=sampling_frequency, duration=duration, start_time=start_time) def antenna_response(self, ra, dec, time, psi, mode): polarization_tensor = gwutils.get_polarization_tensor(ra, dec, time, psi, mode) return np.einsum('ij,ij->', self.geometry.detector_tensor, polarization_tensor) def get_detector_response(self, waveform_polarizations, parameters): signal = {} for mode in waveform_polarizations.keys(): det_response = self.antenna_response( parameters['ra'], parameters['dec'], parameters['geocent_time'], parameters['psi'], mode) signal[mode] = waveform_polarizations[mode] * det_response signal_ifo = sum(signal.values()) signal_ifo *= self.strain_data.frequency_mask time_shift = self.time_delay_from_geocenter( parameters['ra'], parameters['dec'], parameters['geocent_time']) dt_geocent = parameters['geocent_time'] - self.strain_data.start_time dt = dt_geocent + time_shift signal_ifo[self.strain_data.frequency_mask] = signal_ifo[self.strain_data.frequency_mask] * np.exp( -1j * 2 * np.pi * dt * self.strain_data.frequency_array[self.strain_data.frequency_mask]) signal_ifo[self.strain_data.frequency_mask] *= self.calibration_model.get_calibration_factor( self.strain_data.frequency_array[self.strain_data.frequency_mask], prefix='recalib_{}_'.format(self.name), **parameters) return signal_ifo def inject_signal(self, parameters, injection_polarizations=None, waveform_generator=None): if injection_polarizations is None and waveform_generator is None: raise ValueError( "inject_signal needs one of waveform_generator or " "injection_polarizations.") elif injection_polarizations is not None: self.inject_signal_from_waveform_polarizations(parameters=parameters, injection_polarizations=injection_polarizations) elif waveform_generator is not None: injection_polarizations = self.inject_signal_from_waveform_generator(parameters=parameters, waveform_generator=waveform_generator) return injection_polarizations def inject_signal_from_waveform_generator(self, parameters, waveform_generator): injection_polarizations = waveform_generator.frequency_domain_strain(parameters) self.inject_signal_from_waveform_polarizations(parameters=parameters, injection_polarizations=injection_polarizations) return injection_polarizations def 
inject_signal_from_waveform_polarizations(self, parameters, injection_polarizations): if not self.strain_data.time_within_data(parameters['geocent_time']): logger.warning( 'Injecting signal outside segment, start_time={}, merger time={}.' .format(self.strain_data.start_time, parameters['geocent_time'])) signal_ifo = self.get_detector_response(injection_polarizations, parameters) self.strain_data.frequency_domain_strain += signal_ifo self.meta_data['optimal_SNR'] = ( np.sqrt(self.optimal_snr_squared(signal=signal_ifo)).real) self.meta_data['matched_filter_SNR'] = ( self.matched_filter_snr(signal=signal_ifo)) self.meta_data['parameters'] = parameters logger.info("Injected signal in {}:".format(self.name)) logger.info(" optimal SNR = {:.2f}".format(self.meta_data['optimal_SNR'])) logger.info(" matched filter SNR = {:.2f}".format(self.meta_data['matched_filter_SNR'])) for key in parameters: logger.info(' {} = {}'.format(key, parameters[key])) @property def amplitude_spectral_density_array(self): return ( self.power_spectral_density.get_amplitude_spectral_density_array( frequency_array=self.strain_data.frequency_array) * self.strain_data.window_factor**0.5) @property def power_spectral_density_array(self): return ( self.power_spectral_density.get_power_spectral_density_array( frequency_array=self.strain_data.frequency_array) * self.strain_data.window_factor) def unit_vector_along_arm(self, arm): logger.warning("This method has been moved and will be removed in the future." "Use Interferometer.geometry.unit_vector_along_arm instead.") return self.geometry.unit_vector_along_arm(arm) def time_delay_from_geocenter(self, ra, dec, time): return gwutils.time_delay_geocentric(self.geometry.vertex, np.array([0, 0, 0]), ra, dec, time)
MIT License
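The docstring above refers to the geodetic-to-geocentric conversion of LIGO-T980044 (Eqs. B11-B13 of arXiv:gr-qc/0008066 with the radius typo corrected). The sketch below is a standalone version of that standard ellipsoidal conversion, not bilby's own gwutils implementation, whose exact constants are not shown in this record.

import numpy as np

def vertex_from_geodetic(latitude, longitude, elevation):
    # Sketch of the ellipsoidal Earth conversion; semi-axes are WGS84-like values in metres.
    semi_major = 6378137.0
    semi_minor = 6356752.314
    # Local radius of curvature in the prime vertical (the corrected expression).
    radius = semi_major**2 / np.sqrt(semi_major**2 * np.cos(latitude)**2 +
                                     semi_minor**2 * np.sin(latitude)**2)
    x = (radius + elevation) * np.cos(latitude) * np.cos(longitude)
    y = (radius + elevation) * np.cos(latitude) * np.sin(longitude)
    z = ((semi_minor / semi_major)**2 * radius + elevation) * np.sin(latitude)
    return np.array([x, y, z])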
ptvoinfo/neptun2mqtt
neptun2mqtt.py
log_traceback
python
def log_traceback(message, ex, ex_traceback=None):
    if ex_traceback is None:
        ex_traceback = ex.__traceback__
    tb_lines = [line.rstrip('\n') for line in
                traceback.format_exception(ex.__class__, ex, ex_traceback)]
    log(message + ':', tb_lines)
Log detailed call stack for exceptions.
https://github.com/ptvoinfo/neptun2mqtt/blob/b635d33aab6f03a87005ba6ccf0283182bb59f61/neptun2mqtt.py#L55-L63
from neptun import * import os import signal import paho.mqtt.client as mqtt import json import sys, traceback import time import datetime import binascii import logging import traceback import configparser as ConfigParser import _thread as thread devices = None exitSignal = False debug_mode = 0 logger = None mqtt_client = None MQTT_QOS = 0 MQTT_RETAIN = False MQTT_PATH = 'neptun/{friendly_name}' connected_devices_info = {} connected_devices = {} subscribed_devices = {} def signal_handler(signal, frame): log('SIGINT') global exitSignal exitSignal = True def printf(*args): together = ' '.join(map(str, args)) return together def log(*args): if logger is not None: logger.info(printf(*args)) else: d = datetime.datetime.now() print(d.strftime("%Y-%m-%d %H:%M:%S"), *args) return
MIT License
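A typical call site wraps device polling (or any other risky operation) in try/except and hands the exception to log_traceback(); poll_device() below is a placeholder, not a function from this record.

# Minimal usage sketch; poll_device() is a hypothetical operation.
try:
    poll_device()
except Exception as ex:
    _, _, ex_traceback = sys.exc_info()
    log_traceback('Error while polling the Neptun device', ex, ex_traceback)
    # Passing only (message, ex) also works: the traceback is then read
    # from ex.__traceback__ inside log_traceback().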
googleapis/python-aiplatform
google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
JobServiceAsyncClient.list_data_labeling_jobs
python
async def list_data_labeling_jobs(
    self,
    request: job_service.ListDataLabelingJobsRequest = None,
    *,
    parent: str = None,
    retry: retries.Retry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListDataLabelingJobsAsyncPager:
    has_flattened_params = any([parent])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    request = job_service.ListDataLabelingJobsRequest(request)
    if parent is not None:
        request.parent = parent
    rpc = gapic_v1.method_async.wrap_method(
        self._client._transport.list_data_labeling_jobs,
        default_timeout=5.0,
        client_info=DEFAULT_CLIENT_INFO,
    )
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
    )
    response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
    response = pagers.ListDataLabelingJobsAsyncPager(
        method=rpc, request=request, response=response, metadata=metadata,
    )
    return response
Lists DataLabelingJobs in a Location.

Args:
    request (:class:`google.cloud.aiplatform_v1beta1.types.ListDataLabelingJobsRequest`):
        The request object. Request message for
        [JobService.ListDataLabelingJobs][google.cloud.aiplatform.v1beta1.JobService.ListDataLabelingJobs].
    parent (:class:`str`):
        Required. The parent of the DataLabelingJob.
        Format: ``projects/{project}/locations/{location}``
        This corresponds to the ``parent`` field on the ``request`` instance;
        if ``request`` is provided, this should not be set.
    retry (google.api_core.retry.Retry): Designation of what errors, if any,
        should be retried.
    timeout (float): The timeout for this request.
    metadata (Sequence[Tuple[str, str]]): Strings which should be sent along
        with the request as metadata.

Returns:
    google.cloud.aiplatform_v1beta1.services.job_service.pagers.ListDataLabelingJobsAsyncPager:
        Response message for
        [JobService.ListDataLabelingJobs][google.cloud.aiplatform.v1beta1.JobService.ListDataLabelingJobs].

        Iterating over this object will yield results and resolve
        additional pages automatically.
https://github.com/googleapis/python-aiplatform/blob/c1c2326b2342ab1b6f4c4ce3852e63376eae740d/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py#L795-L873
from collections import OrderedDict import functools import re from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.oauth2 import service_account from google.api_core import operation as gac_operation from google.api_core import operation_async from google.cloud.aiplatform_v1beta1.services.job_service import pagers from google.cloud.aiplatform_v1beta1.types import batch_prediction_job from google.cloud.aiplatform_v1beta1.types import ( batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import completion_stats from google.cloud.aiplatform_v1beta1.types import custom_job from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, ) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1beta1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import job_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import manual_batch_tuning_parameters from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) from google.cloud.aiplatform_v1beta1.types import model_monitoring from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import study from google.protobuf import duration_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 from google.type import money_pb2 from .transports.base import JobServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import JobServiceGrpcAsyncIOTransport from .client import JobServiceClient class JobServiceAsyncClient: _client: JobServiceClient DEFAULT_ENDPOINT = JobServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = JobServiceClient.DEFAULT_MTLS_ENDPOINT batch_prediction_job_path = staticmethod(JobServiceClient.batch_prediction_job_path) parse_batch_prediction_job_path = staticmethod( JobServiceClient.parse_batch_prediction_job_path ) custom_job_path = staticmethod(JobServiceClient.custom_job_path) parse_custom_job_path = staticmethod(JobServiceClient.parse_custom_job_path) data_labeling_job_path = staticmethod(JobServiceClient.data_labeling_job_path) parse_data_labeling_job_path = staticmethod( JobServiceClient.parse_data_labeling_job_path ) dataset_path = staticmethod(JobServiceClient.dataset_path) parse_dataset_path = staticmethod(JobServiceClient.parse_dataset_path) endpoint_path = staticmethod(JobServiceClient.endpoint_path) parse_endpoint_path = 
staticmethod(JobServiceClient.parse_endpoint_path) hyperparameter_tuning_job_path = staticmethod( JobServiceClient.hyperparameter_tuning_job_path ) parse_hyperparameter_tuning_job_path = staticmethod( JobServiceClient.parse_hyperparameter_tuning_job_path ) model_path = staticmethod(JobServiceClient.model_path) parse_model_path = staticmethod(JobServiceClient.parse_model_path) model_deployment_monitoring_job_path = staticmethod( JobServiceClient.model_deployment_monitoring_job_path ) parse_model_deployment_monitoring_job_path = staticmethod( JobServiceClient.parse_model_deployment_monitoring_job_path ) network_path = staticmethod(JobServiceClient.network_path) parse_network_path = staticmethod(JobServiceClient.parse_network_path) tensorboard_path = staticmethod(JobServiceClient.tensorboard_path) parse_tensorboard_path = staticmethod(JobServiceClient.parse_tensorboard_path) trial_path = staticmethod(JobServiceClient.trial_path) parse_trial_path = staticmethod(JobServiceClient.parse_trial_path) common_billing_account_path = staticmethod( JobServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( JobServiceClient.parse_common_billing_account_path ) common_folder_path = staticmethod(JobServiceClient.common_folder_path) parse_common_folder_path = staticmethod(JobServiceClient.parse_common_folder_path) common_organization_path = staticmethod(JobServiceClient.common_organization_path) parse_common_organization_path = staticmethod( JobServiceClient.parse_common_organization_path ) common_project_path = staticmethod(JobServiceClient.common_project_path) parse_common_project_path = staticmethod(JobServiceClient.parse_common_project_path) common_location_path = staticmethod(JobServiceClient.common_location_path) parse_common_location_path = staticmethod( JobServiceClient.parse_common_location_path ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): return JobServiceClient.from_service_account_info.__func__(JobServiceAsyncClient, info, *args, **kwargs) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): return JobServiceClient.from_service_account_file.__func__(JobServiceAsyncClient, filename, *args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> JobServiceTransport: return self._client.transport get_transport_class = functools.partial( type(JobServiceClient).get_transport_class, type(JobServiceClient) ) def __init__( self, *, credentials: ga_credentials.Credentials = None, transport: Union[str, JobServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: self._client = JobServiceClient( credentials=credentials, transport=transport, client_options=client_options, client_info=client_info, ) async def create_custom_job( self, request: job_service.CreateCustomJobRequest = None, *, parent: str = None, custom_job: gca_custom_job.CustomJob = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gca_custom_job.CustomJob: has_flattened_params = any([parent, custom_job]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) request = job_service.CreateCustomJobRequest(request) if parent is not None: request.parent = parent if custom_job is not None: request.custom_job = custom_job rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_custom_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) return response async def get_custom_job( self, request: job_service.GetCustomJobRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> custom_job.CustomJob: has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = job_service.GetCustomJobRequest(request) if name is not None: request.name = name rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_custom_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) return response async def list_custom_jobs( self, request: job_service.ListCustomJobsRequest = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListCustomJobsAsyncPager: has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = job_service.ListCustomJobsRequest(request) if parent is not None: request.parent = parent rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_custom_jobs, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) response = pagers.ListCustomJobsAsyncPager( method=rpc, request=request, response=response, metadata=metadata, ) return response async def delete_custom_job( self, request: job_service.DeleteCustomJobRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) request = job_service.DeleteCustomJobRequest(request) if name is not None: request.name = name rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_custom_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) response = operation_async.from_gapic( response, self._client._transport.operations_client, empty_pb2.Empty, metadata_type=gca_operation.DeleteOperationMetadata, ) return response async def cancel_custom_job( self, request: job_service.CancelCustomJobRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = job_service.CancelCustomJobRequest(request) if name is not None: request.name = name rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_custom_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) async def create_data_labeling_job( self, request: job_service.CreateDataLabelingJobRequest = None, *, parent: str = None, data_labeling_job: gca_data_labeling_job.DataLabelingJob = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gca_data_labeling_job.DataLabelingJob: has_flattened_params = any([parent, data_labeling_job]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = job_service.CreateDataLabelingJobRequest(request) if parent is not None: request.parent = parent if data_labeling_job is not None: request.data_labeling_job = data_labeling_job rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_data_labeling_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) return response async def get_data_labeling_job( self, request: job_service.GetDataLabelingJobRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> data_labeling_job.DataLabelingJob: has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = job_service.GetDataLabelingJobRequest(request) if name is not None: request.name = name rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_data_labeling_job, default_timeout=5.0, client_info=DEFAULT_CLIENT_INFO, ) metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) return response
Apache License 2.0
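A short usage sketch of the async pager returned by list_data_labeling_jobs(); the project and location in the parent path are placeholders. Iterating the pager with async for resolves additional pages transparently, as the docstring above states.

import asyncio

async def print_data_labeling_jobs():
    client = JobServiceAsyncClient()
    parent = "projects/my-project/locations/us-central1"   # placeholder resource path
    pager = await client.list_data_labeling_jobs(parent=parent)
    async for job in pager:        # pages are fetched lazily behind the scenes
        print(job.name, job.state)

# asyncio.run(print_data_labeling_jobs())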
simaki/pandas-estat
pandas_estat/appid.py
get_appid
python
def get_appid(appid: Optional[str] = None) -> Optional[str]:
    if appid is not None:
        return appid
    elif _global_appid.value is not None:
        return _global_appid.value
    elif "ESTAT_APPID" in os.environ:
        return os.environ["ESTAT_APPID"]
    else:
        return None
Get Application ID.

The parameter `appid`, the global app ID in `_GlobalAppID`, and the environment
variable `ESTAT_APPID` are referenced in order. If these are all None, return None.

Parameters
----------
appid : str, optional
    If given, just return this.

Returns
-------
appid : str or None
    Application ID.
https://github.com/simaki/pandas-estat/blob/c5a4f1ce38d5a5ba615b0699ebb9b7c3088e18e6/pandas_estat/appid.py#L37-L61
import os from typing import Optional class _GlobalAppID: def __new__(cls, *args, **kwargs): if not hasattr(cls, "_instance"): cls._instance = super().__new__(cls) return cls._instance def __init__(self, value: Optional[str]) -> None: self.value = value _global_appid = _GlobalAppID(None) def set_appid(appid: Optional[str]) -> None: _global_appid.value = appid
BSD 3-Clause New or Revised License
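The precedence described in the docstring (explicit argument first, then the global app ID, then ESTAT_APPID) can be seen in a short sketch; the ID strings are placeholders.

import os

os.environ["ESTAT_APPID"] = "appid-from-env"
assert get_appid() == "appid-from-env"                  # falls back to the environment variable
set_appid("appid-global")
assert get_appid() == "appid-global"                    # the global app ID overrides the env var
assert get_appid("appid-argument") == "appid-argument"  # an explicit argument wins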
dcos/dcos-e2e
src/dcos_e2e_cli/common/commands.py
download_installer
python
def download_installer(
    ctx: click.core.Context,
    dcos_version: str,
    download_path: str,
) -> None:
    path = Path(download_path)
    path.parent.mkdir(exist_ok=True, parents=True)
    path = path.parent.resolve() / path.name
    click.echo('Downloading to {path}.'.format(path=path))
    if dcos_version.startswith('http'):
        url = dcos_version
    else:
        base_url = 'https://downloads.dcos.io/dcos/'
        url = base_url + dcos_version + '/dcos_generate_config.sh'
    head_resp = requests.head(url)
    if not head_resp.ok:
        message = 'Cannot download installer from {url}.'.format(url=url)
        ctx.fail(message=message)
    if path.is_dir():
        path = path / 'dcos_generate_config.sh'
    if not path.exists():
        path.parent.mkdir(parents=True, exist_ok=True)
    stream = requests.get(url, stream=True)
    assert stream.ok
    content_length = int(stream.headers['Content-Length'])
    total_written = 0
    chunk_size = 1024
    content_iter = stream.iter_content(chunk_size=chunk_size)
    progress_bar = tqdm(
        iterable=content_iter,
        total=content_length / chunk_size,
        dynamic_ncols=True,
        bar_format='{l_bar}{bar}',
        unit_scale=None,
    )
    with click.open_file(
        filename=str(path),
        mode='wb',
        atomic=True,
        lazy=True,
    ) as file_descriptor:
        for chunk in progress_bar:
            progress_bar.disable = False
            if chunk:
                total_written += len(chunk)
                file_descriptor.write(chunk)
            progress_bar.disable = True
    message = (
        'Downloaded {total_written} bytes. '
        'Expected {content_length} bytes.'
    ).format(
        total_written=total_written,
        content_length=content_length,
    )
    assert total_written == content_length, message
Download a DC/OS Open Source installer. For DC/OS Enterprise installers, contact your sales representative.
https://github.com/dcos/dcos-e2e/blob/ab7c4bfd58872f458e5766fff01ca74322441065/src/dcos_e2e_cli/common/commands.py#L39-L113
from pathlib import Path import click import requests from tqdm import tqdm @click.command('download-installer') @click.option( '--dcos-version', type=str, default='stable', show_default=True, help=( 'The DC/OS Open Source installer version to download. ' 'This can be in one of the following formats: ' '``stable``, ' '``testing/master``, ' '``testing/<DC/OS MAJOR RELEASE>``, ' '``stable/<DC/OS MINOR RELEASE>``, ' '``testing/pull/<GITHUB-PR-NUMBER>``.\n' 'See https://dcos.io/releases/ for available releases.' '\n' 'If an HTTP or HTTPS URL is given, that is downloaded.' ), ) @click.option( '--download-path', type=str, default='./dcos_generate_config.sh', show_default=True, help='The path to download an installer to.', ) @click.pass_context
Apache License 2.0
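Since download_installer is a click command, one way to exercise it from Python is through click's CliRunner; the sketch below (which would perform a real download) uses the two options declared in the decorators above, with a placeholder download path.

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(
    download_installer,
    ['--dcos-version', 'stable', '--download-path', './dcos_generate_config.sh'],
)
print(result.exit_code)
print(result.output)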
coleifer/walrus
walrus/models.py
Model.get_id
python
def get_id(self):
    try:
        return getattr(self, self._primary_key)
    except KeyError:
        return None
Return the primary key for the model instance. If the model is unsaved, then this value will be ``None``.
https://github.com/coleifer/walrus/blob/a66f3d7d1dbc27a7105284238634168b6762b633/walrus/models.py#L639-L647
from copy import deepcopy import datetime import json import pickle import re import sys import time import uuid from warnings import warn from walrus.containers import Array from walrus.containers import Hash from walrus.containers import HyperLogLog from walrus.containers import List from walrus.containers import Set from walrus.containers import ZSet from walrus.query import ABSOLUTE from walrus.query import CONTINUOUS from walrus.query import Desc from walrus.query import Executor from walrus.query import FTS from walrus.query import Node from walrus.search import Tokenizer from walrus.utils import basestring_type from walrus.utils import decode from walrus.utils import decode_dict_keys from walrus.utils import encode from walrus.utils import PY3 from walrus.utils import unicode_type class Field(Node): _coerce = None def __init__(self, index=False, primary_key=False, default=None): self._index = index or primary_key self._primary_key = primary_key self._default = default def _generate_key(self): raise NotImplementedError def db_value(self, value): if self._coerce: return self._coerce(value) return value def python_value(self, value): if self._coerce: return self._coerce(value) return value def add_to_class(self, model_class, name): self.model_class = model_class self.name = name setattr(model_class, name, self) def __get__(self, instance, instance_type=None): if instance is not None: return instance._data.get(self.name) return self def __set__(self, instance, value): instance._data[self.name] = value def get_index(self, op): indexes = self.get_indexes() for index in indexes: if op in index.operations: return index raise ValueError('Operation %s is not supported by an index.' % op) def get_indexes(self): return [AbsoluteIndex(self)] class _ScalarField(Field): def get_indexes(self): return [AbsoluteIndex(self), ContinuousIndex(self)] class IntegerField(_ScalarField): _coerce = int def db_value(self, value): return 0 if value is None else int(value) class AutoIncrementField(IntegerField): def __init__(self, *args, **kwargs): kwargs['primary_key'] = True return super(AutoIncrementField, self).__init__(*args, **kwargs) def _generate_key(self): query_helper = self.model_class._query key = query_helper.make_key(self.name, '_sequence') return self.model_class.__database__.incr(key) class FloatField(_ScalarField): _coerce = float def db_value(self, value): return 0. 
if value is None else float(value) class ByteField(Field): def db_value(self, value): if isinstance(value, unicode_type): value = value.encode('utf-8') elif value is None: value = b'' return value class TextField(Field): def __init__(self, fts=False, stemmer=True, metaphone=False, stopwords_file=None, min_word_length=None, *args, **kwargs): super(TextField, self).__init__(*args, **kwargs) self._fts = fts self._stemmer = stemmer self._metaphone = metaphone self._stopwords_file = stopwords_file self._min_word_length = min_word_length self._index = self._index or self._fts def db_value(self, value): return b'' if value is None else encode(value) def python_value(self, value): return decode(value) def get_indexes(self): indexes = super(TextField, self).get_indexes() if self._fts: indexes.append(FullTextIndex( self, self._stemmer, self._metaphone, self._stopwords_file, self._min_word_length)) return indexes class BooleanField(Field): def db_value(self, value): return '1' if value else '0' def python_value(self, value): return decode(value) == '1' class UUIDField(Field): def __init__(self, **kwargs): kwargs['index'] = True super(UUIDField, self).__init__(**kwargs) def db_value(self, value): return encode(value.hex if value is not None else '') def python_value(self, value): return uuid.UUID(decode(value)) if value else None def _generate_key(self): return uuid.uuid4() class DateTimeField(_ScalarField): def db_value(self, value): if value is None: return 0. timestamp = time.mktime(value.timetuple()) micro = value.microsecond * (10 ** -6) return timestamp + micro def python_value(self, value): if not value: return None elif isinstance(value, (basestring_type, int, float)): return datetime.datetime.fromtimestamp(float(value)) else: return value class DateField(DateTimeField): def db_value(self, value): if value is None: return 0. 
return time.mktime(value.timetuple()) def python_value(self, value): if not value: return None elif isinstance(value, (basestring_type, int, float)): return datetime.datetime.fromtimestamp(float(value)).date() else: return value class JSONField(Field): def db_value(self, value): return encode(json.dumps(value)) def python_value(self, value): return json.loads(decode(value)) class PickledField(Field): def db_value(self, value): return pickle.dumps(value, pickle.HIGHEST_PROTOCOL) def python_value(self, value): return pickle.loads(value) class _ContainerField(Field): container_class = None def __init__(self, *args, **kwargs): super(_ContainerField, self).__init__(*args, **kwargs) if self._primary_key: raise ValueError('Container fields cannot be primary keys.') if self._index: raise ValueError('Container fields cannot be indexed.') def _get_container(self, instance): return self.container_class( self.model_class.__database__, self.__key__(instance)) def __key__(self, instance): return self.model_class._query.make_key( 'container', self.name, instance.get_hash_id()) def __get__(self, instance, instance_type=None): if instance is not None: if not instance.get_id(): raise ValueError('Model must have a primary key before ' 'container attributes can be accessed.') return self._get_container(instance) return self def __set__(self, instance, instance_type=None): raise ValueError('Cannot set the value of a container field.') def _delete(self, instance): self._get_container(instance).clear() class HashField(_ContainerField): container_class = Hash class ListField(_ContainerField): container_class = List class SetField(_ContainerField): container_class = Set class ZSetField(_ContainerField): container_class = ZSet class Query(object): def __init__(self, model_class): self.model_class = model_class @property def _base_key(self): model_name = self.model_class.__name__.lower() if self.model_class.__namespace__: return '%s|%s:' % (self.model_class.__namespace__, model_name) return '%s:' % model_name def make_key(self, *parts): separator = getattr(self.model_class, 'index_separator', '.') parts = map(decode, parts) return '%s%s' % (self._base_key, separator.join(map(str, parts))) def get_primary_hash_key(self, primary_key): pk_field = self.model_class._fields[self.model_class._primary_key] return self.make_key('id', pk_field.db_value(primary_key)) def all_index(self): return self.model_class.__database__.Set(self.make_key('all')) class BaseIndex(object): operations = None def __init__(self, field): self.field = field self.__database__ = self.field.model_class.__database__ self.query_helper = self.field.model_class._query def field_value(self, instance): return self.field.db_value(getattr(instance, self.field.name)) def get_key(self, instance, value): raise NotImplementedError def store_instance(self, key, instance, value): raise NotImplementedError def delete_instance(self, key, instance, value): raise NotImplementedError def save(self, instance): value = self.field_value(instance) key = self.get_key(value) self.store_instance(key, instance, value) def remove(self, instance): value = self.field_value(instance) key = self.get_key(value) self.delete_instance(key, instance, value) class AbsoluteIndex(BaseIndex): operations = ABSOLUTE def get_key(self, value): key = self.query_helper.make_key( self.field.name, 'absolute', value) return self.__database__.Set(key) def store_instance(self, key, instance, value): key.add(instance.get_hash_id()) def delete_instance(self, key, instance, value): 
key.remove(instance.get_hash_id()) if len(key) == 0: key.clear() class ContinuousIndex(BaseIndex): operations = CONTINUOUS def get_key(self, value): key = self.query_helper.make_key( self.field.name, 'continuous') return self.__database__.ZSet(key) def store_instance(self, key, instance, value): key[instance.get_hash_id()] = value def delete_instance(self, key, instance, value): del key[instance.get_hash_id()] if len(key) == 0: key.clear() class FullTextIndex(BaseIndex): operations = FTS def __init__(self, field, stemmer=True, metaphone=False, stopwords_file=None, min_word_length=None): super(FullTextIndex, self).__init__(field) self.tokenizer = Tokenizer( stemmer=stemmer, metaphone=metaphone, stopwords_file=stopwords_file or 'stopwords.txt', min_word_length=min_word_length) def get_key(self, value): key = self.query_helper.make_key( self.field.name, 'fts', value) return self.__database__.ZSet(key) def store_instance(self, key, instance, value): hash_id = instance.get_hash_id() for word, score in self.tokenizer.tokenize(value).items(): key = self.get_key(word) key[hash_id] = -score def delete_instance(self, key, instance, value): hash_id = instance.get_hash_id() for word in self.tokenizer.tokenize(value): key = self.get_key(word) del key[hash_id] if len(key) == 0: key.clear() class BaseModel(type): def __new__(cls, name, bases, attrs): if not bases: return super(BaseModel, cls).__new__(cls, name, bases, attrs) if 'database' in attrs: warn('"database" has been deprecated in favor of "__database__" ' 'for Walrus models.', DeprecationWarning) attrs['__database__'] = attrs.pop('database') if 'namespace' in attrs: warn('"namespace" has been deprecated in favor of "__namespace__" ' 'for Walrus models.', DeprecationWarning) attrs['__namespace__'] = attrs.pop('namespace') ignore = set() primary_key = None for key, value in attrs.items(): if isinstance(value, Field) and value._primary_key: primary_key = (key, value) for base in bases: for key, value in base.__dict__.items(): if key in attrs: continue if isinstance(value, Field): if value._primary_key and primary_key: ignore.add(key) else: if value._primary_key: primary_key = (key, value) attrs[key] = deepcopy(value) if not primary_key: attrs['_id'] = AutoIncrementField() primary_key = ('_id', attrs['_id']) model_class = super(BaseModel, cls).__new__(cls, name, bases, attrs) model_class._data = None defaults = {} fields = {} indexes = [] for key, value in model_class.__dict__.items(): if isinstance(value, Field) and key not in ignore: value.add_to_class(model_class, key) if value._index: indexes.append(value) fields[key] = value if value._default is not None: defaults[key] = value._default model_class._defaults = defaults model_class._fields = fields model_class._indexes = indexes model_class._primary_key = primary_key[0] model_class._query = Query(model_class) return model_class def _with_metaclass(meta, base=object): return meta("NewBase", (base,), {'__database__': None, '__namespace__': None}) class Model(_with_metaclass(BaseModel)): __database__ = None __namespace__ = None index_separator = '.' 
def __init__(self, *args, **kwargs): self._data = {} self._load_default_dict() for k, v in kwargs.items(): setattr(self, k, v) def __repr__(self): return '<%s: %s>' % (type(self).__name__, self.get_id()) def _load_default_dict(self): for field_name, default in self._defaults.items(): if callable(default): default = default() setattr(self, field_name, default) def incr(self, field, incr_by=1): model_hash = self.to_hash() for index in field.get_indexes(): index.remove(self) if isinstance(incr_by, int): new_val = model_hash.incr(field.name, incr_by) else: new_val = model_hash.incr_float(field.name, incr_by) setattr(self, field.name, new_val) for index in field.get_indexes(): index.save(self) return new_val
MIT License
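A minimal usage sketch (not part of the source above) of the Model machinery shown in this context. It assumes a Redis server is reachable, that Database, Model and the field classes are exported at the walrus package top level, and that the full library provides the save/query methods omitted from this excerpt.

from walrus import Database, Model, JSONField, SetField

class Note(Model):
    __database__ = Database()   # assumption: Redis on localhost:6379
    __namespace__ = 'demo'
    content = JSONField()       # serialized through json.dumps in db_value()
    tags = SetField()           # container field backed by a Redis set

# Because no explicit primary key was declared, BaseModel.__new__ adds an
# AutoIncrementField named ``_id`` as the primary key.
note = Note(content={'title': 'hello', 'body': 'world'})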
django-leonardo/django-leonardo
leonardo/utils/compress_patch.py
input
python
def input(self, **kwargs): with_variables = None context = kwargs.get('context', {}) if context.get('leonardo_page', None): try: context['leonardo_page']['theme'] context['leonardo_page']['color_scheme'] except Exception as e: LOG.exception(str(e)) else: with_variables = """ @import "/themes/{}/{}/_variables"; {} """.format( context['leonardo_page']['theme']['name'], context['leonardo_page']['color_scheme']['name'], self.content) return self.compiler.compile_string( with_variables or self.content, filename=self.filename)
Main override which appends the theme variables import to all SCSS content before compilation
https://github.com/django-leonardo/django-leonardo/blob/7d3f116830075f05a8c9a105ae6f7f80f7a6444c/leonardo/utils/compress_patch.py#L94-L119
from __future__ import unicode_literals, with_statement import logging from compressor.cache import cache_set from compressor.conf import settings from compressor.exceptions import CompressorError, FilterDoesNotExist from compressor.filters import CachedCompilerFilter from compressor.filters.css_default import CssAbsoluteFilter from compressor.utils import get_mod_func from django import template from django.utils import six try: from importlib import import_module except: from django.utils.importlib import import_module try: from urllib.request import url2pathname except ImportError: from urllib import url2pathname LOG = logging.getLogger(__name__) SOURCE_HUNK, SOURCE_FILE = 'inline', 'file' METHOD_INPUT, METHOD_OUTPUT = 'input', 'output' register = template.Library() OUTPUT_FILE = 'file' OUTPUT_INLINE = 'inline' OUTPUT_MODES = (OUTPUT_FILE, OUTPUT_INLINE) def compress_monkey_patch(): from compressor.templatetags import compress as compress_tags from compressor import base as compress_base compress_base.Compressor.filter_input = filter_input compress_base.Compressor.output = output compress_base.Compressor.hunks = hunks compress_base.Compressor.precompile = precompile compress_tags.CompressorMixin.render_compressed = render_compressed from django_pyscss import compressor as pyscss_compressor pyscss_compressor.DjangoScssFilter.input = input def render_compressed(self, context, kind, mode, forced=False): if self.is_offline_compression_enabled(forced) and not forced: return self.render_offline(context) if (not settings.COMPRESS_ENABLED and not settings.COMPRESS_PRECOMPILERS and not forced): return self.get_original_content(context) context['compressed'] = {'name': getattr(self, 'name', None)} compressor = self.get_compressor(context, kind) cache_key = None if settings.COMPRESS_ENABLED and not forced: cache_key, cache_content = self.render_cached(compressor, kind, mode) if cache_content is not None: return cache_content rendered_output = compressor.output(mode, forced=forced, context=context) assert isinstance(rendered_output, six.string_types) if cache_key: cache_set(cache_key, rendered_output) return rendered_output
BSD 3-Clause New or Revised License
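Illustrative sketch only: this shows the string that the input() override above builds when a leonardo_page with a theme and color scheme is present in the render context. The theme and scheme names here are hypothetical.

theme_name, scheme_name = "bootswatch", "flatly"   # hypothetical values
scss_source = ".btn { color: $brand-primary; }"

with_variables = """
            @import "/themes/{}/{}/_variables";
            {}
            """.format(theme_name, scheme_name, scss_source)

print(with_variables)  # the compiler receives the variables import plus the original SCSS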
elementai/haven_old
haven/haven_results/__init__.py
zip_exp_list
python
def zip_exp_list(savedir_base): import zipfile with zipfile.ZipFile(savedir_base) as z: for filename in z.namelist(): if not os.path.isdir(filename): with z.open(filename) as f: for line in f: print(line)
Print the contents of every file stored in a zip archive. Parameters ---------- savedir_base : str Path to the zip archive whose member files should be opened and printed line by line.
https://github.com/elementai/haven_old/blob/3ce987d9779a1e652c4c7b821f899bce7901618c/haven/haven_results/__init__.py#L656-L672
import copy import glob import os import sys import pprint from itertools import groupby from textwrap import wrap import numpy as np import pandas as pd import pylab as plt import tqdm from .. import haven_jobs as hjb from .. import haven_utils as hu from .. import haven_share as hd class ResultManager: def __init__(self, savedir_base, exp_list=None, filterby_list=None, verbose=True, has_score_list=False, exp_groups=None, mode_key=None, exp_ids=None, save_history=False, score_list_name='score_list.pkl', account_id=None): assert os.path.exists(savedir_base), '%s does not exist' % savedir_base self.exp_groups = {} if exp_groups is not None: if isinstance(exp_groups, dict): self.exp_groups = exp_groups elif os.path.exists(exp_groups): self.exp_groups = hu.load_py(exp_groups).EXP_GROUPS else: raise ValueError('%s does not exist...' % exp_groups) self.score_list_name = score_list_name self.mode_key = mode_key self.has_score_list = has_score_list self.save_history = save_history self.account_id = account_id if exp_ids is not None: assert exp_list is None, "settings exp_ids require exp_list=None" assert exp_groups is None, "settings exp_ids require exp_groups=None" exp_list = [] for exp_id in exp_ids: exp_list += [hu.load_json(os.path.join(savedir_base, exp_id, 'exp_dict.json'))] else: if exp_list is None: exp_list = get_exp_list(savedir_base=savedir_base, verbose=verbose) else: exp_list = exp_list if len(exp_list) == 0: raise ValueError('exp_list is empty...') exp_list_with_scores = [e for e in exp_list if os.path.exists(os.path.join(savedir_base, hu.hash_dict(e), score_list_name))] if has_score_list: exp_list = exp_list_with_scores self.exp_list_all = copy.deepcopy(exp_list) self.score_keys = ['None'] if len(exp_list_with_scores): score_fname = os.path.join(savedir_base, hu.hash_dict(exp_list_with_scores[0]), score_list_name) self.score_keys = ['None'] + list(hu.load_pkl(score_fname)[0].keys()) self.savedir_base = savedir_base self.filterby_list = filterby_list self.verbose = verbose self.n_exp_all = len(exp_list) self.exp_list = filter_exp_list(exp_list, filterby_list=filterby_list, savedir_base=savedir_base, verbose=verbose) if len(self.exp_list) != 0: self.exp_params = list(self.exp_list[0].keys()) else: self.exp_params = [] if mode_key: for exp_dict in exp_list: exp_dict[mode_key] = 1 for exp_dict in self.exp_list_all: exp_dict[mode_key] = 1 self.exp_groups['all'] = copy.deepcopy(self.exp_list_all) def get_state_dict(self): pass def load_state_dict(self, state_dict): pass def get_plot(self, groupby_list=None, savedir_plots=None, filterby_list=None, **kwargs): fig_list = [] filterby_list = filterby_list or self.filterby_list exp_groups = group_exp_list(self.exp_list, groupby_list) for i, exp_list in enumerate(exp_groups): fig, ax = get_plot(exp_list=exp_list, savedir_base=self.savedir_base, filterby_list=filterby_list, verbose=self.verbose, score_list_name=self.score_list_name, **kwargs) fig_list += [fig] if savedir_plots != '' and savedir_plots is not None: os.makedirs(savedir_plots, exist_ok=True) save_fname = os.path.join(savedir_plots, "%d.png" % i ) fig.savefig(save_fname, bbox_inches='tight') return fig_list def get_plot_all(self, y_metric_list, order='groups_by_metrics', groupby_list=None, ylim_list=None, xlim_list=None, savedir_plots=None, legend_last_row_only=False, show_legend_all=None, **kwargs): if order not in ['groups_by_metrics', 'metrics_by_groups']: raise ValueError('%s order is not defined, choose between %s' % (order, ['groups_by_metrics', 'metrics_by_groups'])) 
exp_groups = group_exp_list(self.exp_list, groupby_list) figsize = kwargs.get('figsize') or None fig_list = [] if not isinstance(y_metric_list, list): y_metric_list = [y_metric_list] if ylim_list is not None: assert isinstance(ylim_list[0], list), "ylim_list has to be lists of lists" if xlim_list is not None: assert isinstance(xlim_list[0], list), "xlim_list has to be lists of lists" if order == 'groups_by_metrics': for j, exp_list in enumerate(exp_groups): fig, ax_list = plt.subplots(nrows=1, ncols=len(y_metric_list), figsize=figsize) if not hasattr(ax_list, 'size'): ax_list = [ax_list] for i, y_metric in enumerate(y_metric_list): if i == (len(y_metric_list) - 1): show_legend = True else: show_legend = False ylim = None xlim = None if ylim_list is not None: assert len(ylim_list) == len(exp_groups), "ylim_list has to have %d rows" % len(exp_groups) assert len(ylim_list[0]) == len(y_metric_list), "ylim_list has to have %d cols" % len(y_metric_list) ylim = ylim_list[j][i] if xlim_list is not None: assert len(xlim_list) == len(exp_groups), "xlim_list has to have %d rows" % len(exp_groups) assert len(xlim_list[0]) == len(y_metric_list), "xlim_list has to have %d cols" % len(y_metric_list) xlim = xlim_list[j][i] if show_legend_all is not None: show_legend = show_legend_all fig, _ = get_plot(exp_list=exp_list, savedir_base=self.savedir_base, y_metric=y_metric, fig=fig, axis=ax_list[i], verbose=self.verbose, filterby_list=self.filterby_list, show_legend=show_legend, ylim=ylim, xlim=xlim, score_list_name=self.score_list_name, **kwargs) fig_list += [fig] elif order == 'metrics_by_groups': for j, y_metric in enumerate(y_metric_list): fig, ax_list = plt.subplots(nrows=1, ncols=len(exp_groups) , figsize=figsize) if not hasattr(ax_list, 'size'): ax_list = [ax_list] for i, exp_list in enumerate(exp_groups): if i == 0: show_ylabel = True else: show_ylabel = False if i == (len(exp_groups) - 1): show_legend = True else: show_legend = False if legend_last_row_only and j < (len(y_metric_list) - 1): show_legend = False ylim = None xlim = None if ylim_list is not None: assert len(ylim_list) == len(y_metric_list), "ylim_list has to have %d rows" % len(exp_groups) assert len(ylim_list[0]) == len(exp_groups), "ylim_list has to have %d cols" % len(y_metric_list) ylim = ylim_list[j][i] if xlim_list is not None: assert len(xlim_list) == len(y_metric_list), "xlim_list has to have %d rows" % len(exp_groups) assert len(xlim_list[0]) == len(exp_groups), "xlim_list has to have %d cols" % len(y_metric_list) xlim = xlim_list[j][i] if show_legend_all is not None: show_legend = show_legend_all fig, _ = get_plot(exp_list=exp_list, savedir_base=self.savedir_base, y_metric=y_metric, fig=fig, axis=ax_list[i], verbose=self.verbose, filterby_list=self.filterby_list, ylim=ylim, xlim=xlim, show_legend=show_legend, show_ylabel=show_ylabel, score_list_name=self.score_list_name, **kwargs) fig_list += [fig] plt.tight_layout() if savedir_plots: for i in range(len(fig_list)): os.makedirs(savedir_plots, exist_ok=True) fname = os.path.join(savedir_plots + '_%d.pdf' % i) fig_list[i].savefig(fname, dpi=300, bbox_inches='tight') print(fname, 'saved') return fig_list def get_score_df(self, **kwargs): df_list = get_score_df(exp_list=self.exp_list, savedir_base=self.savedir_base, verbose=self.verbose, score_list_name=self.score_list_name, **kwargs) return df_list def to_dropbox(self, outdir_base, access_token): hd.to_dropbox(self.exp_list, savedir_base=self.savedir_base, outdir_base=outdir_base, access_token=access_token) def 
get_exp_list_df(self, **kwargs): df_list = get_exp_list_df(exp_list=self.exp_list, verbose=self.verbose, **kwargs) return df_list def get_exp_table(self, **kwargs): table = get_exp_list_df(exp_list=self.exp_list, verbose=self.verbose, **kwargs) return table def get_score_table(self, **kwargs): table = get_score_df(exp_list=self.exp_list, savedir_base=self.savedir_base, score_list_name=self.score_list_name, filterby_list=self.filterby_list, verbose=self.verbose, **kwargs) return table def get_score_lists(self, **kwargs): score_lists = get_score_lists(exp_list=self.exp_list, savedir_base=self.savedir_base, score_list_name=self.score_list_name, filterby_list=self.filterby_list, verbose=self.verbose, **kwargs) return score_lists def get_images(self, **kwargs): return get_images(exp_list=self.exp_list, savedir_base=self.savedir_base, verbose=self.verbose, **kwargs) def get_job_summary(self, columns=None, add_prefix=False, **kwargs): exp_list = filter_exp_list(self.exp_list, self.filterby_list, savedir_base=self.savedir_base, verbose=self.verbose) jm = hjb.JobManager(exp_list=exp_list, savedir_base=self.savedir_base, account_id=self.account_id, **kwargs) summary_list = jm.get_summary_list(columns=columns, add_prefix=add_prefix) return summary_list def to_zip(self, savedir_base='', fname='tmp.zip', **kwargs): from haven import haven_dropbox as hd if savedir_base == '': savedir_base = self.savedir_base exp_id_list = [hu.hash_dict(exp_dict) for exp_dict in self.exp_list] hd.zipdir(exp_id_list, savedir_base, fname, **kwargs) def to_dropbox(self, fname, dropbox_path=None, access_token=None): from haven import haven_dropbox as hd out_fname = os.path.join(dropbox_path, fname) src_fname = os.path.join(self.savedir_base, fname) self.to_zip(src_fname) hd.upload_file_to_dropbox(src_fname, out_fname, access_token) print('saved: https://www.dropbox.com/home/%s' % out_fname) def group_exp_list(exp_list, groupby_list): if groupby_list is None: return [exp_list] if not isinstance(groupby_list, list): groupby_list = [groupby_list] def split_func(x): x_list = [] for k_list in groupby_list: if not isinstance(k_list, list): k_list = [k_list] val = get_str(x, k_list) x_list += [val] return x_list exp_list.sort(key=split_func) list_of_exp_list = [] group_dict = groupby(exp_list, key=split_func) for k, v in group_dict: v_list = list(v) list_of_exp_list += [v_list] return list_of_exp_list def group_list(python_list, key, return_count=False): group_dict = {} for p in python_list: p_tmp = copy.deepcopy(p) del p_tmp[key] k = p[key] if k not in group_dict: group_dict[k] = [] group_dict[k] += [p_tmp] if return_count: count_dict = {} for k in group_dict: count_dict[k] = len(group_dict[k]) return count_dict return group_dict def get_exp_list_from_config(exp_groups, exp_config_fname): exp_list = [] for e in exp_groups: exp_list += hu.load_py(exp_config_fname).EXP_GROUPS[e] return exp_list def get_str(h_dict, k_list): k = k_list[0] if len(k_list) == 1: return str(h_dict.get(k)) return get_str(h_dict.get(k), k_list[1:]) def get_best_exp_dict(exp_list, savedir_base, metric, metric_agg='min', filterby_list=None, avg_across=None, return_scores=False, verbose=True, score_list_name='score_list.pkl'): scores_dict = [] if metric_agg in ['min', 'min_last']: best_score = np.inf elif metric_agg in ['max', 'max_last']: best_score = 0. 
exp_dict_best = None exp_list = filter_exp_list(exp_list, filterby_list, verbose=verbose) for exp_dict in exp_list: exp_id = hu.hash_dict(exp_dict) savedir = os.path.join(savedir_base, exp_id) score_list_fname = os.path.join(savedir, score_list_name) if not os.path.exists(score_list_fname): if verbose: print('%s: missing %s' % (exp_id, score_list_name)) continue score_list = hu.load_pkl(score_list_fname) if metric_agg in ['min', 'min_last']: if metric_agg == 'min_last': score = [score_dict[metric] for score_dict in score_list][-1] elif metric_agg == 'min': score = np.min([score_dict[metric] for score_dict in score_list]) if best_score >= score: best_score = score exp_dict_best = exp_dict elif metric_agg in ['max', 'max_last']: if metric_agg == 'max_last': score = [score_dict[metric] for score_dict in score_list][-1] elif metric_agg == 'max': score = np.max([score_dict[metric] for score_dict in score_list]) if best_score <= score: best_score = score exp_dict_best = exp_dict scores_dict += [{'score': score, 'epochs': len(score_list), 'exp_id': exp_id}] if exp_dict_best is None: if verbose: print('no experiments with metric "%s"' % metric) return {} return exp_dict_best def get_exp_list_from_exp_configs(exp_group_list, workdir, filterby_list=None, verbose=True): assert workdir is not None from importlib import reload assert(workdir is not None) if workdir not in sys.path: sys.path.append(workdir) import exp_configs as ec reload(ec) exp_list = [] for exp_group in exp_group_list: exp_list += ec.EXP_GROUPS[exp_group] if verbose: print('%d experiments' % len(exp_list)) exp_list = filter_exp_list(exp_list, filterby_list, verbose=verbose) return exp_list def get_exp_list(savedir_base, filterby_list=None, verbose=True): exp_list = [] dir_list = os.listdir(savedir_base) for exp_id in tqdm.tqdm(dir_list): savedir = os.path.join(savedir_base, exp_id) fname = os.path.join(savedir, 'exp_dict.json') if len(exp_id) != 32: if verbose: print('"%s/" is not an exp directory' % exp_id) continue if not os.path.exists(fname): if verbose: print('%s: missing exp_dict.json' % exp_id) continue exp_dict = hu.load_json(fname) expected_id = hu.hash_dict(exp_dict) if expected_id != exp_id: if verbose: print('%s does not match %s' % (expected_id, exp_id)) continue exp_list += [exp_dict] exp_list = filter_exp_list(exp_list, filterby_list) return exp_list
Apache License 2.0
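A hypothetical call to zip_exp_list as defined above; results.zip is an assumed archive of experiment folders, not a path taken from the source.

from haven import haven_results as hr

# Prints every line of every member file inside the archive.
hr.zip_exp_list('results.zip')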
mdrasmus/compbio
rasmus/treelib.py
Tree.merge_branch_data
python
def merge_branch_data(self, data1, data2): return self.branch_data.merge_branch_data(data1, data2)
Merges the branch data from two neighboring branches into one
https://github.com/mdrasmus/compbio/blob/7fe7082904248783d63ebab615056022982096d8/rasmus/treelib.py#L532-L534
import copy import sys import StringIO try: from rasmus import util util except ImportError: import util try: from rasmus import textdraw except ImportError: pass try: from rasmus import treelib_parser treelib_parser except ImportError: try: import treelib_parser treelib_parser except ImportError: treelib_parser = None class TreeNode (object): def __init__(self, name=None): self.name = name self.children = [] self.parent = None self.dist = 0 self.data = {} def __iter__(self): return iter(self.children) def copy(self, parent=None, copyChildren=True, copyData=True): node = TreeNode(self.name) node.name = self.name node.dist = self.dist node.parent = parent if copyData: node.data = copy.copy(self.data) if copyChildren: for child in self.children: node.children.append(child.copy(node, copyData=copyData)) return node def is_leaf(self): return len(self.children) == 0 def recurse(self, func, *args): for child in self.children: func(child, *args) def leaves(self): leaves = [] def walk(node): if node.is_leaf(): leaves.append(node) for child in node.children: walk(child) walk(self) return leaves def leaf_names(self): return [x.name for x in self.leaves()] def ancestors(self): ancestors = [] def walk(node): if node.parent: ancestors.append(node.parent) walk(node.parent) walk(self) return ancestors def ancestor_names(self): return [x.name for x in self.ancestors()] def descendants(self): descendants = [] def walk(node): for child in node.children: descendants.append(child) walk(child) walk(self) return descendants def descendant_names(self): return [x.name for x in self.descendants()] def write_data(self, out): out.write(str(self.dist)) def __repr__(self): return "<node %s>" % self.name class BranchData (object): def __init__(self): pass def get_branch_data(self, node): if "boot" in node.data: return {"boot": node.data["boot"]} else: return {} def set_branch_data(self, node, data): if "boot" in data: node.data["boot"] = data["boot"] def split_branch_data(self, node): if "boot" in node.data: return {"boot": node.data["boot"]}, {"boot": node.data["boot"]} else: return {}, {} def merge_branch_data(self, data1, data2): if "boot" in data1 and "boot" in data2: assert data1["boot"] == data2["boot"], (data1, data2) return {"boot": data1["boot"]} else: return {} class Tree (object): def __init__(self, nextname=1, branch_data=BranchData(), name=None): self.nodes = {} self.root = None self.nextname = nextname self.default_data = {} self.data = {} self.branch_data = branch_data self.name = name def __repr__(self): return "<tree %s>" % (self.name if self.name is not None else hex(id(self))) def copy(self, copyData=True): tree = Tree(nextname=self.nextname, name=self.name) if self.root is not None: tree.root = self.root.copy(copyData=copyData) def walk(node): tree.nodes[node.name] = node for child in node.children: walk(child) walk(tree.root) if copyData: tree.copy_data(self) tree.copy_node_data(self) return tree def __iter__(self): return self.nodes.itervalues() def __len__(self): return len(self.nodes) def __getitem__(self, key): return self.nodes[key] def __setitem__(self, key, node): node.name = key self.add(node) def __contains__(self, name): return name in self.nodes def preorder(self, node=None, is_leaf=lambda x: x.is_leaf()): if node is None: node = self.root queue = [node] while len(queue) > 0: node = queue.pop() yield node if not is_leaf(node): for child in reversed(node.children): queue.append(child) def postorder(self, node=None, is_leaf=lambda x: x.is_leaf()): if node is None: node = self.root stack = 
[[node, 0]] while len(stack) > 0: node, i = stack[-1] if i < len(node.children) and not is_leaf(node): stack.append([node.children[i], 0]) stack[-2][1] += 1 else: yield node stack.pop() def inorder(self, node=None, is_leaf=lambda x: x.is_leaf()): if node is None: node = self.root stack = [[node, 0]] while len(stack) > 0: node, i = stack[-1] if node.is_leaf(): yield node stack.pop() elif i < len(node.children) and not is_leaf(node): assert len(node.children) == 2, node.name if i == 1: yield node stack.append([node.children[i], 0]) stack[-2][1] += 1 else: stack.pop() def make_root(self, name=None): if name is None: name = self.new_name() self.root = TreeNode(name) return self.add(self.root) def add(self, node): self.nodes[node.name] = node return node def add_child(self, parent, child): assert parent != child, (parent.name, child.name) self.nodes[child.name] = child self.nodes[parent.name] = parent child.parent = parent parent.children.append(child) return child def new_node(self, name=None): if name is None: name = self.new_name() return self.add(TreeNode(name)) def remove(self, node): if node.parent: node.parent.children.remove(node) node.parent = None del self.nodes[node.name] def remove_child(self, parent, child): assert parent != child and child.parent == parent, ( parent.name, child.name) parent.children.remove(child) child.parent = None def remove_tree(self, node): def walk(node): if node.name in self.nodes: del self.nodes[node.name] for child in node.children: walk(child) walk(node) if node.parent: node.parent.children.remove(node) node.parent = None def rename(self, oldname, newname): assert newname not in self.nodes, newname node = self.nodes[oldname] del self.nodes[oldname] self.nodes[newname] = node node.name = newname def new_name(self): name = self.nextname self.nextname += 1 return name def unique_name(self, name, names, sep="_"): i = 1 name2 = name while name2 in names: name2 = name + sep + str(i) i += 1 names.add(name2) return name2 def add_tree(self, parent, childTree): self.merge_names(childTree) self.add_child(parent, childTree.root) def replace_tree(self, node, childTree): self.remove_tree(node) self.add_tree(node.parent, childTree) def merge_names(self, tree2): for name in tree2.nodes: if name in self.nodes: name2 = self.new_name() self.nodes[name2] = tree2.nodes[name] self.nodes[name2].name = name2 else: if isinstance(name, int): if name >= self.nextname: self.nextname = name + 1 self.nodes[name] = tree2.nodes[name] def clear(self): self.nodes = {} self.root = None def leaves(self, node=None): if node is None: node = self.root if node is None: return [] return node.leaves() def leaf_names(self, node=None): return map(lambda x: x.name, self.leaves(node)) def ancestors(self, node): return node.ancestors() def ancestor_names(self, node): return node.ancestor_names() def descendants(self, node): return node.descendants() def descendant_names(self, node): return node.descendant_names() def has_data(self, dataname): return dataname in self.default_data def copy_data(self, tree): self.branch_data = tree.branch_data self.default_data = copy.copy(tree.default_data) self.data = copy.copy(tree.data) def copy_node_data(self, tree): for name, node in self.nodes.iteritems(): if name in tree.nodes: node.data = copy.copy(tree.nodes[name].data) self.set_default_data() def set_default_data(self): for node in self.nodes.itervalues(): for key, val in self.default_data.iteritems(): node.data.setdefault(key, val) def clear_data(self, *keys): for node in self.nodes.itervalues(): if len(keys) 
== 0: node.data = {} else: for key in keys: if key in node.data: del node.data[key] def get_branch_data(self, node): return self.branch_data.get_branch_data(node) def set_branch_data(self, node, data): return self.branch_data.set_branch_data(node, data) def split_branch_data(self, node): return self.branch_data.split_branch_data(node)
MIT License
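A small sketch of what Tree.merge_branch_data delegates to: the default BranchData keeps only the bootstrap value, and merging requires the two halves to agree (for example when the two sides of a removed root are joined back into a single branch).

from rasmus.treelib import Tree

tree = Tree()
print(tree.merge_branch_data({'boot': 95}, {'boot': 95}))  # {'boot': 95}
print(tree.merge_branch_data({}, {}))                      # {} when no bootstrap data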
syss-research/outis
syhelpers/dataqueue.py
DataQueue.__init__
python
def __init__(self): self.memorybio = ssl.MemoryBIO()
initialize a new storage queue
https://github.com/syss-research/outis/blob/ef9b720fa12e8e0748a826354f138eabc33747fe/syhelpers/dataqueue.py#L9-L14
import ssl class DataQueue:
MIT License
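A minimal sketch (independent of the rest of the DataQueue class, which is not shown in this excerpt) of the buffer that __init__ creates: ssl.MemoryBIO behaves as an in-memory byte FIFO that data can be written into and drained from.

import ssl

bio = ssl.MemoryBIO()
bio.write(b'hello ')
bio.write(b'world')
print(bio.pending)   # 11 bytes currently queued
print(bio.read(5))   # b'hello'
print(bio.read())    # b' world' (drains the rest)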
nrel/ditto
ditto/network/network.py
Network.provide_graphs
python
def provide_graphs(self, graph, digraph): self.graph = graph self.digraph = digraph self.is_built = True
This function sets the graph and digraph of the Network class directly from user-supplied inputs. This is useful when the user already has the graphs stored and does not want to re-compute them, or when extra work is needed to obtain connected networks: it may be easier to do that work beforehand and create one Network instance per connected component. .. warning:: The method does not perform any safety checks yet...
https://github.com/nrel/ditto/blob/e97fd0823f74d626edeb69e43d741c3e237964f3/ditto/network/network.py#L30-L41
from __future__ import absolute_import, division, print_function from builtins import super, range, zip, round, map import logging import random import traceback import networkx as nx from ditto.models.base import DiTToHasTraits logger = logging.getLogger(__name__) class Network: def __init__(self): self.graph = None self.digraph = None self.class_map = ( {} ) self.is_built = ( False ) self.attributes_set = ( False )
BSD 3-Clause New or Revised License
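Hypothetical usage of provide_graphs: hand a precomputed networkx graph and digraph to a Network instead of letting it rebuild them. The bus names below are made up for illustration.

import networkx as nx
from ditto.network.network import Network

graph = nx.Graph([('sourcebus', 'bus1'), ('bus1', 'bus2')])
digraph = nx.bfs_tree(graph, 'sourcebus')   # directed copy rooted at the source

net = Network()
net.provide_graphs(graph, digraph)
assert net.is_built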
harvardnlp/strux
strux/deptree.py
DepTree._to_parts
python
def _to_parts(sequence, extra, length): N, _ = sequence.shape labels = np.zeros((N + 1, N + 1, C)) Ns = np.arange(1, N + 1) labels = labels.at[sequence[Ns - 1, 0], Ns, sequence[Ns - 1 , 1]].set(1) labels = np.where(np.arange(N+1).reshape(N+1, 1) >= length+1, labels, 0) labels = np.where(np.arange(N+1).reshape(1, N+1) >= length+1, labels, 0) return _unconvert(labels)
Convert a sequence representation to arcs Parameters: sequence : N x 2 long tensor in [0, N] (indexing is +1) Returns: arcs : N x N arc indicators
https://github.com/harvardnlp/strux/blob/364183c3d04c96d67dd91cce547cb4dd3661aa10/strux/deptree.py#L81-L96
import jax.numpy as np from .helpers import _Struct import jax def convert(logits): N = logits.shape[0] new_logits = np.full((N+1, N+1), -1e9, dtype=logits.dtype) new_logits = new_logits.at[1:, 1:].set(logits) Ns = np.arange(N) new_logits = new_logits.at[0, 1:].set(logits[Ns, Ns]) return new_logits.at[Ns+1, Ns+1].set(-1e9) def _unconvert(logits): new_logits[:, :] = logits[ 1:, 1:] Ns = np.arange(new_logits.shape[0]) return new_logits.at[Ns, Ns].set( logits[0, 1:]) A, B, R, C, L, I = 0, 1, 1, 1, 0, 0 class DepTree(_Struct): length_axes = (0, 1) log_scale = False def _dp(self, log_potentials, length): semiring = self.semiring log_potentials = convert(semiring.sum(log_potentials)) N, N2 = log_potentials.shape assert N == N2 chart = np.full((2, 2, 2, N, N), semiring.zero, log_potentials.dtype) for dir in [L, R]: chart = chart.at[A, C, dir, :, 0].set(semiring.one) for dir in [L, R]: chart = chart.at[B, C, dir, :, -1].set(semiring.one) start_idx = 0 for k in range(1, N): f = np.arange(start_idx, N - k), np.arange(k+start_idx, N) ACL = chart[A, C, L, start_idx: N - k, :k] ACR = chart[A, C, R, start_idx: N - k, :k] BCL = chart[B, C, L, k+start_idx:, N - k :] BCR = chart[B, C, R, k+start_idx:, N - k :] x = semiring.dot(ACR, BCL) arcs_l = semiring.times(x, log_potentials[f[1], f[0]]) chart = chart.at[A, I, L, start_idx:N - k, k].set(arcs_l) chart = chart.at[B, I, L, k+start_idx:N, N - k - 1].set(arcs_l) arcs_r = semiring.times(x, log_potentials[f[0], f[1]]) chart = chart.at[A, I, R, start_idx:N - k, k].set(arcs_r) chart = chart.at[B, I, R, k+start_idx:N, N - k - 1].set(arcs_r) AIR = chart[A, I, R, start_idx: N - k, 1 : k + 1] BIL = chart[B, I, L, k+start_idx:, N - k - 1 : N - 1] new = semiring.dot(ACL, BIL) chart = chart.at[A, C, L, start_idx: N - k, k].set(new) chart = chart.at[B, C, L, k+start_idx:N, N - k - 1].set(new) new = semiring.dot(AIR, BCR) chart = chart.at[A, C, R, start_idx: N - k, k].set(new) chart = chart.at[B, C, R, k+start_idx:N, N - k - 1].set(new) return chart[A, C, R, 0, length] @staticmethod
MIT License
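An illustration (not from the source) of the sequence representation the docstring above describes, built with plain numpy: row i of sequence holds (head of word i+1, label id), with head 0 meaning the root, and _to_parts turns this into an indicator grid over (head, modifier) pairs before _unconvert folds the root row back onto the diagonal.

import numpy as np

sequence = np.array([[2, 0],   # word 1 is attached to word 2
                     [0, 0],   # word 2 is attached to the root
                     [2, 0]])  # word 3 is attached to word 2

N = sequence.shape[0]
arcs = np.zeros((N + 1, N + 1), dtype=int)   # arcs[head, modifier]
for m in range(1, N + 1):
    arcs[sequence[m - 1, 0], m] = 1
print(arcs)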
uppsaladatavetare/foobar-api
src/shop/api.py
initiate_stocktaking
python
def initiate_stocktaking(chunk_size=10): stocktake_qs = models.Stocktake.objects if not stocktake_qs.filter(locked=False).count() == 0: raise exceptions.APIException('Stock-taking already in progress.') stocktake_obj = stocktake_qs.create() product_objs = list(models.Product.objects.all().order_by('category')) for i in range(0, len(product_objs), chunk_size): chunk_obj = stocktake_obj.chunks.create() chunk_products = product_objs[i:i + chunk_size] for p in chunk_products: chunk_obj.items.create(product=p) return stocktake_obj
Initiates a stock-taking procedure for all the products.
https://github.com/uppsaladatavetare/foobar-api/blob/c63e824ce246b15163e352f5150ef4ecbee81b96/src/shop/api.py#L184-L200
import logging import numpy as np import math from itertools import accumulate from datetime import date, timedelta from django.db import transaction from django.db.models import Sum from django.db.models.functions import TruncDay from django.contrib.contenttypes.models import ContentType from django.utils import timezone from sklearn.svm import SVR from .suppliers.base import SupplierAPIException from . import models, enums, suppliers, exceptions log = logging.getLogger(__name__) @transaction.atomic def create_product(code, name): product_obj = models.Product( code=code, name=name ) product_obj.save() return product_obj @transaction.atomic def update_product(id, **kwargs): product_obj = models.Product.objects.get(id=id) for k, v in kwargs.items(): setattr(product_obj, k, v) product_obj.save() def get_product(id): try: return models.Product.objects.get(id=id) except models.Product.DoesNotExist: return None def get_product_transactions_by_ref(reference): ct = ContentType.objects.get_for_model(reference) qs = models.ProductTransactionStatus.objects.filter( reference_ct=ct, reference_id=reference.pk, ).values_list('trx', flat=True).distinct() return models.ProductTransaction.objects.filter(pk__in=qs).distinct() @transaction.atomic def create_product_transaction(product_id, trx_type, qty, reference=None): product_obj = models.Product.objects.get(id=product_id) ct = None if reference is not None: ct = ContentType.objects.get_for_model(reference) trx_obj = product_obj.transactions.create(trx_type=trx_type, qty=qty) trx_obj.states.create( status=enums.TrxStatus.PENDING, reference_ct=ct, reference_id=reference.pk if reference is not None else None ) return trx_obj @transaction.atomic def finalize_product_transaction(trx_id, reference=None): trx_obj = models.ProductTransaction.objects.get(pk=trx_id) trx_obj.set_status(enums.TrxStatus.FINALIZED, reference) @transaction.atomic def cancel_product_transaction(trx_id, reference=None): trx_obj = models.ProductTransaction.objects.get(id=trx_id) trx_obj.set_status(enums.TrxStatus.CANCELED, reference) def list_products(start=None, limit=None, **kwargs): return models.Product.objects.filter(**kwargs)[start:limit] def list_categories(): return models.ProductCategory.objects.all() @transaction.atomic def get_supplier_product(supplier_id, sku, refresh=False): if not refresh: try: return models.SupplierProduct.objects.get( supplier_id=supplier_id, sku=sku ) except models.SupplierProduct.DoesNotExist: pass supplier_obj = models.Supplier.objects.get(id=supplier_id) supplier_api = suppliers.get_supplier_api(supplier_obj.internal_name) product_data = supplier_api.retrieve_product(sku) if product_data is None: log.warning('Product not found (sku: %s, supplier: %s', sku, supplier_id) return None product_obj, _ = models.SupplierProduct.objects.update_or_create( supplier_id=supplier_id, sku=sku, defaults={ 'price': product_data.price, 'name': product_data.name, 'units': product_data.units, } ) return product_obj def parse_report(supplier_internal_name, report_path): supplier_api = suppliers.get_supplier_api(supplier_internal_name) return supplier_api.parse_delivery_report(report_path) @transaction.atomic def populate_delivery(delivery_id): delivery_obj = models.Delivery.objects.get(id=delivery_id) supplier_obj = delivery_obj.supplier items = parse_report(supplier_obj.internal_name, delivery_obj.report.path) for item in items: product_obj = get_supplier_product(supplier_obj.id, item.sku) if product_obj is not None: models.DeliveryItem.objects.create( 
delivery=delivery_obj, supplier_product_id=product_obj.id, qty=item.qty * product_obj.qty_multiplier, price=item.price / product_obj.qty_multiplier ) return delivery_obj @transaction.atomic def process_delivery(delivery_id): delivery_obj = models.Delivery.objects.get(id=delivery_id) assert delivery_obj.valid, ('Some of the delivered items are not ' 'associated with a product in the system.') for item in delivery_obj.delivery_items.all(): supplier_product = item.supplier_product create_product_transaction( product_id=supplier_product.product.id, trx_type=enums.TrxType.INVENTORY, qty=item.qty, reference=item ) delivery_obj.locked = True delivery_obj.save() @transaction.atomic
MIT License
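An illustration (plain Python, no Django setup) of the chunking that initiate_stocktaking performs: the ordered product list is split into slices of chunk_size items, and one stock-taking chunk is created per slice.

products = list(range(23))      # stand-in for the category-ordered product queryset
chunk_size = 10
chunks = [products[i:i + chunk_size] for i in range(0, len(products), chunk_size)]
print([len(c) for c in chunks])  # [10, 10, 3] -> three chunks would be created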
vertexproject/synapse
synapse/lib/parser.py
parse_cmd_string
python
def parse_cmd_string(text, off): tree = CmdStringParser.parse(text[off:]) valu, newoff = CmdStringer().transform(tree) return valu, off + newoff
Parse a command line string which may be quoted.
https://github.com/vertexproject/synapse/blob/a9d62ffacd9cc236ac52f92a734deef55c66ecf3/synapse/lib/parser.py#L616-L622
import ast import lark import regex import synapse.exc as s_exc import synapse.lib.ast as s_ast import synapse.lib.coro as s_coro import synapse.lib.cache as s_cache import synapse.lib.datfile as s_datfile terminalEnglishMap = { 'ABSPROP': 'absolute or universal property', 'ABSPROPNOUNIV': 'absolute property', 'ALLTAGS': '#', 'AND': 'and', 'BOOL': 'boolean', 'BREAK': 'break', 'CASEBARE': 'case value', 'CCOMMENT': 'C comment', 'CMDOPT': 'command line option', 'CMDNAME': 'command name', 'CMDRTOKN': 'An unquoted string parsed as a cmdr arg', 'WHITETOKN': 'An unquoted string terminated by whitespace', 'CMPR': 'comparison operator', 'BYNAME': 'named comparison operator', 'COLON': ':', 'COMMA': ',', 'CONTINUE': 'continue', 'FINI': 'fini', 'INIT': 'init', 'CPPCOMMENT': 'c++ comment', 'DEREFMATCHNOSEP': 'key or variable', 'DOLLAR': '$', 'DOT': '.', 'DOUBLEQUOTEDSTRING': 'double-quoted string', 'ELIF': 'elif', 'ELSE': 'else', 'EQUAL': '=', 'EXPRCMPR': 'expression comparison operator', 'EXPRDIVIDE': '/', 'EXPRMINUS': '-', 'EXPRPLUS': '+', 'EXPRTIMES': '*', 'FILTPREFIX': '+ or -', 'FOR': 'for', 'FUNCTION': 'function', 'HEXNUMBER': 'number', 'IF': 'if', 'IN': 'in', 'LBRACE': '[', 'LISTTOKN': 'An unquoted list-compatible string.', 'LPAR': '(', 'LSQB': '{', 'NONQUOTEWORD': 'unquoted value', 'NOT': 'not', 'NUMBER': 'number', 'OR': 'or', 'PROPNAME': 'property name', 'PROPS': 'absolute property name', 'BASEPROP': 'base property name', 'RBRACE': ']', 'RELNAME': 'relative property', 'RPAR': ')', 'RSQB': '}', 'SETOPER': '= or ?=', 'SETTAGOPER': '?', 'SINGLEQUOTEDSTRING': 'single-quoted string', 'SWITCH': 'switch', 'TAG': 'plain tag name', 'TAGMATCH': 'tag name with asterisks', 'UNIVNAME': 'universal property', 'VARTOKN': 'variable', 'VBAR': '|', 'WHILE': 'while', 'WORDTOKN': 'A whitespace tokenized string', 'YIELD': 'yield', '_ARRAYCONDSTART': '*[', '_DEREF': '*', '_EDGEADDN1INIT': '+(', '_EDGEADDN1FINI': ')>', '_EDGEDELN1INIT': '-(', '_EDGEDELN1FINI': ')>', '_EDGEADDN2INIT': '<(', '_EDGEADDN2FINI': ')+', '_EDGEDELN2INIT': '<(', '_EDGEDELN2FINI': ')-', '_EMBEDQUERYSTART': '${', '_LEFTJOIN': '<+-', '_LEFTPIVOT': '<-', '_WALKNPIVON1': '-->', '_WALKNPIVON2': '<--', '_N1WALKINIT': '-(', '_N1WALKFINI': ')>', '_N2WALKINIT': '<(', '_N2WALKFINI': ')-', '_ONLYTAGPROP': '#:', '_RETURN': 'return', '_RIGHTJOIN': '-+>', '_RIGHTPIVOT': '->', '_TRYSET': '?=', '_WS': 'whitespace', '_WSCOMM': 'whitespace or comment' } class AstConverter(lark.Transformer): def __init__(self, text): lark.Transformer.__init__(self) self.text = text @classmethod def _convert_children(cls, children): return [cls._convert_child(k) for k in children] @classmethod def _convert_child(cls, child): if not isinstance(child, lark.lexer.Token): return child tokencls = terminalClassMap.get(child.type, s_ast.Const) newkid = tokencls(child.value) return newkid def __default__(self, treedata, children, treemeta): assert treedata in ruleClassMap, f'Unknown grammar rule: {treedata}' cls = ruleClassMap[treedata] newkids = self._convert_children(children) return cls(newkids) @lark.v_args(meta=True) def subquery(self, kids, meta): assert len(kids) <= 2 hasyield = (len(kids) == 2) kid = self._convert_child(kids[-1]) kid.hasyield = hasyield return kid @lark.v_args(meta=True) def baresubquery(self, kids, meta): assert len(kids) == 1 epos = meta.end_pos spos = meta.start_pos subq = s_ast.SubQuery(kids) subq.text = self.text[spos:epos] return subq @lark.v_args(meta=True) def argvquery(self, kids, meta): assert len(kids) == 1 epos = meta.end_pos spos = meta.start_pos 
argq = s_ast.ArgvQuery(kids) argq.text = self.text[spos:epos] return argq def yieldvalu(self, kids): kid = self._convert_child(kids[-1]) return s_ast.YieldValu(kids=[kid]) @lark.v_args(meta=True) def evalvalu(self, kids, meta): return self._convert_child(kids[0]) @lark.v_args(meta=True) def lookup(self, kids, meta): kids = self._convert_children(kids) look = s_ast.Lookup(kids=kids) return look @lark.v_args(meta=True) def query(self, kids, meta): kids = self._convert_children(kids) if kids: epos = meta.end_pos spos = meta.start_pos else: epos = spos = 0 quer = s_ast.Query(kids=kids) quer.text = self.text[spos:epos] return quer @lark.v_args(meta=True) def embedquery(self, kids, meta): assert len(kids) == 1 text = kids[0].text ast = s_ast.EmbedQuery(text, kids) return ast @lark.v_args(meta=True) def funccall(self, kids, meta): kids = self._convert_children(kids) argkids = [] kwargkids = [] kwnames = set() for kid in kids[1:]: if isinstance(kid, s_ast.CallKwarg): name = kid.kids[0].valu if name in kwnames: mesg = f"Duplicate keyword argument '{name}' in function call" raise s_exc.BadSyntax(mesg=mesg, at=meta.start_pos) kwnames.add(name) kwargkids.append(kid) else: if kwargkids: mesg = 'Positional argument follows keyword argument in function call' raise s_exc.BadSyntax(mesg=mesg, at=meta.start_pos) argkids.append(kid) args = s_ast.CallArgs(kids=argkids) kwargs = s_ast.CallKwargs(kids=kwargkids) return s_ast.FuncCall(kids=[kids[0], args, kwargs]) def varlist(self, kids): kids = self._convert_children(kids) return s_ast.VarList([k.valu for k in kids]) def operrelprop_pivot(self, kids, isjoin=False): kids = self._convert_children(kids) relprop, rest = kids[0], kids[1:] if not rest: return s_ast.PropPivotOut(kids=kids, isjoin=isjoin) pval = s_ast.RelPropValue(kids=(relprop,)) return s_ast.PropPivot(kids=(pval, *kids[1:]), isjoin=isjoin) def operrelprop_join(self, kids): return self.operrelprop_pivot(kids, isjoin=True) def stormcmdargs(self, kids): kids = self._convert_children(kids) return s_ast.List(kids=kids) @lark.v_args(meta=True) def funcargs(self, kids, meta): newkids = [] names = set() kwfound = False for kid in kids: kid = self._convert_child(kid) newkids.append(kid) if isinstance(kid, s_ast.CallKwarg): name = kid.kids[0].valu kwfound = True else: name = kid.valu if kwfound: mesg = f"Positional parameter '{name}' follows keyword parameter in definition" raise s_exc.BadSyntax(mesg=mesg, at=meta.start_pos) if name in names: mesg = f"Duplicate parameter '{name}' in function definition" raise s_exc.BadSyntax(mesg=mesg, at=meta.start_pos) names.add(name) return s_ast.FuncArgs(newkids) def cmdrargs(self, kids): argv = [] for kid in kids: if isinstance(kid, s_ast.SubQuery): argv.append(kid.text) continue if isinstance(kid, s_ast.Const): argv.append(kid.valu) continue if isinstance(kid, lark.lexer.Token): argv.append(str(kid)) continue mesg = f'Unhandled AST node type in cmdrargs: {kid!r}' raise s_exc.BadSyntax(mesg=mesg) return argv @classmethod def _tagsplit(cls, tag): if '$' not in tag: return [s_ast.Const(tag)] segs = tag.split('.') kids = [s_ast.VarValue(kids=[s_ast.Const(seg[1:])]) if seg[0] == '$' else s_ast.Const(seg) for seg in segs] return kids def varderef(self, kids): assert kids and len(kids) == 2 newkid = kids[1] if newkid[0] == '$': tokencls = terminalClassMap.get(newkid.type, s_ast.Const) newkid = s_ast.VarValue(kids=[tokencls(newkid[1:])]) else: newkid = self._convert_child(kids[1]) return s_ast.VarDeref(kids=(kids[0], newkid)) def tagname(self, kids): assert kids and len(kids) == 
1 kid = kids[0] if not isinstance(kid, lark.lexer.Token): return self._convert_child(kid) kids = self._tagsplit(kid.value) return s_ast.TagName(kids=kids) def switchcase(self, kids): newkids = [] it = iter(kids) varvalu = next(it) newkids.append(varvalu) for casekid, sqkid in zip(it, it): subquery = self._convert_child(sqkid) if casekid.type == 'DEFAULTCASE': caseentry = s_ast.CaseEntry(kids=[subquery]) else: casekid = self._convert_child(casekid) caseentry = s_ast.CaseEntry(kids=[casekid, subquery]) newkids.append(caseentry) return s_ast.SwitchCase(newkids) with s_datfile.openDatFile('synapse.lib/storm.lark') as larkf: _grammar = larkf.read().decode() LarkParser = lark.Lark(_grammar, regex=True, start=['query', 'lookup', 'cmdrargs', 'evalvalu'], propagate_positions=True) class Parser: def __init__(self, text, offs=0): self.offs = offs assert text is not None self.text = text.strip() self.size = len(self.text) def _larkToSynExc(self, e): mesg = regex.split('[\n!]', str(e))[0] at = len(self.text) if isinstance(e, lark.exceptions.UnexpectedCharacters): expected = sorted(terminalEnglishMap[t] for t in e.allowed) mesg += f'. Expecting one of: {", ".join(expected)}' at = e.pos_in_stream elif isinstance(e, lark.exceptions.UnexpectedEOF): expected = sorted(terminalEnglishMap[t] for t in set(e.expected)) mesg += ' ' + ', '.join(expected) elif isinstance(e, lark.exceptions.VisitError): origexc = e.orig_exc if not isinstance(origexc, s_exc.SynErr): raise origexc.errinfo['text'] = self.text return s_exc.BadSyntax(**origexc.errinfo) return s_exc.BadSyntax(at=at, text=self.text, mesg=mesg) def eval(self): try: tree = LarkParser.parse(self.text, start='evalvalu') newtree = AstConverter(self.text).transform(tree) except lark.exceptions.LarkError as e: raise self._larkToSynExc(e) from None newtree.text = self.text return newtree def query(self): try: tree = LarkParser.parse(self.text, start='query') newtree = AstConverter(self.text).transform(tree) except lark.exceptions.LarkError as e: raise self._larkToSynExc(e) from None newtree.text = self.text return newtree def lookup(self): try: tree = LarkParser.parse(self.text, start='lookup') except lark.exceptions.LarkError as e: raise self._larkToSynExc(e) from None newtree = AstConverter(self.text).transform(tree) newtree.text = self.text return newtree def cmdrargs(self): try: tree = LarkParser.parse(self.text, start='cmdrargs') except lark.exceptions.LarkError as e: raise self._larkToSynExc(e) from None return AstConverter(self.text).transform(tree) def parseQuery(text, mode='storm'): if mode == 'lookup': return Parser(text).lookup() if mode == 'autoadd': look = Parser(text).lookup() look.autoadd = True return look return Parser(text).query() def parseEval(text): return Parser(text).eval() async def _forkedParseQuery(args): return await s_coro.forked(parseQuery, args[0], mode=args[1]) async def _forkedParseEval(text): return await s_coro.forked(parseEval, text) evalcache = s_cache.FixedCache(_forkedParseEval, size=100) querycache = s_cache.FixedCache(_forkedParseQuery, size=100) def massage_vartokn(x): return s_ast.Const('' if not x else (x[1:-1] if x[0] == "'" else (unescape(x) if x[0] == '"' else x))) terminalClassMap = { 'ABSPROP': s_ast.AbsProp, 'ABSPROPNOUNIV': s_ast.AbsProp, 'ALLTAGS': lambda _: s_ast.TagMatch(''), 'BREAK': lambda _: s_ast.BreakOper(), 'CONTINUE': lambda _: s_ast.ContinueOper(), 'DEREFMATCHNOSEP': massage_vartokn, 'DOUBLEQUOTEDSTRING': lambda x: s_ast.Const(unescape(x)), 'NUMBER': lambda x: s_ast.Const(s_ast.parseNumber(x)), 
'HEXNUMBER': lambda x: s_ast.Const(s_ast.parseNumber(x)), 'BOOL': lambda x: s_ast.Bool(x == 'true'), 'SINGLEQUOTEDSTRING': lambda x: s_ast.Const(x[1:-1]), 'TAGMATCH': lambda x: s_ast.TagMatch(kids=AstConverter._tagsplit(x)), 'VARTOKN': massage_vartokn, } ruleClassMap = { 'abspropcond': s_ast.AbsPropCond, 'arraycond': s_ast.ArrayCond, 'andexpr': s_ast.AndCond, 'condsubq': s_ast.SubqCond, 'dollarexpr': s_ast.DollarExpr, 'reqdollarexpr': s_ast.DollarExpr, 'edgeaddn1': s_ast.EditEdgeAdd, 'edgedeln1': s_ast.EditEdgeDel, 'edgeaddn2': lambda kids: s_ast.EditEdgeAdd(kids, n2=True), 'edgedeln2': lambda kids: s_ast.EditEdgeDel(kids, n2=True), 'editnodeadd': s_ast.EditNodeAdd, 'editparens': s_ast.EditParens, 'initblock': s_ast.InitBlock, 'finiblock': s_ast.FiniBlock, 'formname': s_ast.FormName, 'editpropdel': s_ast.EditPropDel, 'editpropset': s_ast.EditPropSet, 'edittagadd': s_ast.EditTagAdd, 'edittagdel': s_ast.EditTagDel, 'edittagpropset': s_ast.EditTagPropSet, 'edittagpropdel': s_ast.EditTagPropDel, 'editunivdel': s_ast.EditUnivDel, 'editunivset': s_ast.EditPropSet, 'expror': s_ast.ExprOrNode, 'exprand': s_ast.ExprAndNode, 'exprnot': s_ast.UnaryExprNode, 'exprcmp': s_ast.ExprNode, 'exprproduct': s_ast.ExprNode, 'exprsum': s_ast.ExprNode, 'filtoper': s_ast.FiltOper, 'forloop': s_ast.ForLoop, 'whileloop': s_ast.WhileLoop, 'formjoin_formpivot': lambda kids: s_ast.FormPivot(kids, isjoin=True), 'formjoin_pivotout': lambda _: s_ast.PivotOut(isjoin=True), 'formjoinin_pivotin': lambda kids: s_ast.PivotIn(kids, isjoin=True), 'formjoinin_pivotinfrom': lambda kids: s_ast.PivotInFrom(kids, isjoin=True), 'formpivot_': s_ast.FormPivot, 'formpivot_pivotout': s_ast.PivotOut, 'formpivot_pivottotags': s_ast.PivotToTags, 'formpivot_jointags': lambda kids: s_ast.PivotToTags(kids, isjoin=True), 'formpivotin_': s_ast.PivotIn, 'formpivotin_pivotinfrom': s_ast.PivotInFrom, 'formtagprop': s_ast.FormTagProp, 'hasabspropcond': s_ast.HasAbsPropCond, 'hasrelpropcond': s_ast.HasRelPropCond, 'hastagpropcond': s_ast.HasTagPropCond, 'ifstmt': s_ast.IfStmt, 'ifclause': s_ast.IfClause, 'kwarg': lambda kids: s_ast.CallKwarg(kids=tuple(kids)), 'liftbytag': s_ast.LiftTag, 'liftformtag': s_ast.LiftFormTag, 'liftprop': s_ast.LiftProp, 'liftpropby': s_ast.LiftPropBy, 'lifttagtag': s_ast.LiftTagTag, 'liftbyarray': s_ast.LiftByArray, 'liftbytagprop': s_ast.LiftTagProp, 'liftbyformtagprop': s_ast.LiftFormTagProp, 'looklist': s_ast.Lookup, 'n1walk': s_ast.N1Walk, 'n2walk': s_ast.N2Walk, 'n1walknpivo': s_ast.N1WalkNPivo, 'n2walknpivo': s_ast.N2WalkNPivo, 'notcond': s_ast.NotCond, 'opervarlist': s_ast.VarListSetOper, 'orexpr': s_ast.OrCond, 'rawpivot': s_ast.RawPivot, 'return': s_ast.Return, 'relprop': s_ast.RelProp, 'relpropcond': s_ast.RelPropCond, 'relpropvalu': s_ast.RelPropValue, 'relpropvalue': s_ast.RelPropValue, 'setitem': s_ast.SetItemOper, 'setvar': s_ast.SetVarOper, 'stormcmd': lambda kids: s_ast.CmdOper(kids=kids if len(kids) == 2 else (kids[0], s_ast.Const(tuple()))), 'stormfunc': s_ast.Function, 'tagcond': s_ast.TagCond, 'tagprop': s_ast.TagProp, 'tagvalu': s_ast.TagValue, 'tagpropvalu': s_ast.TagPropValue, 'tagvalucond': s_ast.TagValuCond, 'tagpropcond': s_ast.TagPropCond, 'valulist': s_ast.List, 'vareval': s_ast.VarEvalOper, 'varvalue': s_ast.VarValue, 'univprop': s_ast.UnivProp, 'univpropvalu': s_ast.UnivPropValue, } def unescape(valu): ret = ast.literal_eval(valu) assert isinstance(ret, str) return ret CmdStringGrammar = r''' %import common.WS -> _WS %import common.ESCAPED_STRING cmdstring: _WS? 
valu [/.+/] valu: alist | DOUBLEQUOTEDSTRING | SINGLEQUOTEDSTRING | JUSTCHARS DOUBLEQUOTEDSTRING: ESCAPED_STRING SINGLEQUOTEDSTRING: /'[^']*'/ alist: "(" _WS? valu (_WS? "," _WS? valu)* _WS? ")" // Disallow trailing comma JUSTCHARS: /[^()=\[\]{}'"\s]*[^,()=\[\]{}'"\s]/ ''' CmdStringParser = lark.Lark(CmdStringGrammar, start='cmdstring', regex=True, propagate_positions=True)
Apache License 2.0
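A hypothetical usage sketch for parse_cmd_string: pull one (possibly quoted) value out of a cmdr argument string starting at a given offset. The exact Python value returned depends on the CmdStringer transformer, which is defined elsewhere in this module, so the trailing comments are assumptions rather than verified output.

import synapse.lib.parser as s_parser

text = 'somecmd --opt="two words" rest'
valu, newoff = s_parser.parse_cmd_string(text, text.index('=') + 1)
# valu   -> presumably the unquoted string 'two words'
# newoff -> the offset just past the parsed value in ``text``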