Dataset columns:
- repository_name: string, lengths 7 to 107
- function_path: string, lengths 4 to 190
- function_identifier: string, lengths 1 to 236
- language: string, 1 class
- function: string, lengths 9 to 647k
- docstring: string, lengths 5 to 488k
- function_url: string, lengths 71 to 285
- context: string, lengths 0 to 2.51M
- license: string, 5 classes
itsthejoker/filamentcolors.xyz
filamentcolors/colors.py
hsl_to_rgb
python
def hsl_to_rgb(in_col):
    assert is_hsl(in_col), f"Error, {str(in_col)} is not a valid HSL color."
    h, s, l = in_col
    # colorsys uses HLS argument order, so swap s and l when calling it
    r, g, b = colorsys.hls_to_rgb(h, l, s)
    r = int(round(255 * r))
    g = int(round(255 * g))
    b = int(round(255 * b))
    return (r, g, b)
Convert HSL colors to RGB. Input should be a tuple of floats between 0.0 and 1.0. Output is a tuple of integers (R, G, B) where each is between 0 and 255.
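To make those conventions concrete, here is a minimal usage sketch (it assumes colorsys is imported and reproduces the conversion inline rather than calling hsl_to_rgb):

import colorsys

# Pure red in HSL; all three components are floats in [0.0, 1.0].
h, s, l = 0.0, 1.0, 0.5
# colorsys uses HLS argument order, hence the s/l swap.
r, g, b = colorsys.hls_to_rgb(h, l, s)
print(int(round(255 * r)), int(round(255 * g)), int(round(255 * b)))  # -> 255 0 0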
https://github.com/itsthejoker/filamentcolors.xyz/blob/10cdc3b573093f4122ec3300d4fcc1c72771659d/filamentcolors/colors.py#L168-L187
__version__ = "0.13.0" import colorsys import math import re from typing import List, Tuple, Union import numpy def ColorDistance(rgb1: Union[Tuple, List], rgb2: Union[Tuple, List]) -> float: rgb1 = numpy.array(rgb1) rgb2 = numpy.array(rgb2) rm = 0.5 * (rgb1[0] + rgb2[0]) distance = math.sqrt(sum((2 + rm, 4, 3 - rm) * (rgb1 - rgb2) ** 2)) return distance def is_rgb(in_col): if len(in_col) == 3 and type(in_col) == tuple: if ( type(in_col[0]) is int and type(in_col[1]) and type(in_col[2]) and 0 <= in_col[0] <= 255 and 0 <= in_col[1] <= 255 and 0 <= in_col[2] <= 255 ): return True else: return False else: return False def is_hex(in_col): if type(in_col) is not str: return False regular_expression = re.compile( r"""^ # match beginning of string [#]? # exactly one hash, but optional [0-9a-fA-F]{6} # exactly six of the hex symbols 0 to 9, a to f $ # match end of string """, re.VERBOSE | re.MULTILINE, ) if regular_expression.match(in_col) == None: return False else: return True def is_hsl(in_col): if len(in_col) == 3 and type(in_col) == tuple: if 0 <= in_col[0] <= 1 and 0 <= in_col[1] <= 1 and 0 <= in_col[2] <= 1: return True else: return False else: return False def rgb_to_hex(rgb): assert is_rgb(rgb) is True, "Error, %s is not a valid RGB color." % rgb return "#%02x%02x%02x".lower() % rgb def hex_to_rgb(in_col): assert is_hex(in_col) is True, f"Error, {in_col} is not a valid hex color." in_col = in_col.lstrip("#") return tuple([int(in_col[s : s + 2], 16) for s in range(0, len(in_col), 2)]) def rgb_to_hsl(in_col): assert is_rgb(in_col), "Error, %s is not a valid RGB color." % in_col r, g, b = [x / 255.0 for x in in_col] h, l, s = colorsys.rgb_to_hls(r, g, b) return (h, s, l)
MIT License
elcorto/pwtools
pwtools/visualize.py
ViewFactory.__init__
python
def __init__(self, cmd=None, assert_cmd=None, suffix='.axsf', writer=io.write_axsf):
    self.cmd = cmd
    self.assert_cmd = assert_cmd
    self.suffix = suffix
    self.writer = writer
Parameters
----------
cmd : str
    Shell command to call the viewer. Used as ``<cmd> <structfile>``.
    Example: 'jmol', 'xcrysden --axsf'.
assert_cmd : callable
    Function which accepts a single arg. Called as ``assert_cmd(obj)``,
    where `obj` is usually a Structure or Trajectory instance. Will be
    called early. Use it to run additional checks on `obj`.
suffix : str
    File extension for the written structure file.
writer : callable
    Called as ``writer(obj, structfile)``. Writes the structure file
    that the viewer reads.

Examples
--------
>>> viewer = ViewFactory(...)
>>> viewer(struct)
>>> # To start more than one viewer, use bg=True to send the spawned
>>> # process to the background. Will leave temp files on disk.
>>> viewer(struct1, bg=True)
>>> viewer(struct2, bg=True)
https://github.com/elcorto/pwtools/blob/99831540c6eb1fc7e8bd9b1ce61375b330f4f43e/pwtools/visualize.py#L44-L74
from tempfile import mkstemp
import os

from pwtools import io, common


class ViewFactory:
BSD 3-Clause New or Revised License
pystruct/pystruct
pystruct/models/node_type_edge_feature_graph_crf.py
NodeTypeEdgeFeatureGraphCRF._set_size_joint_feature
python
def _set_size_joint_feature(self):
    if self.l_n_features:
        self.size_unaries = sum(
            n_states * n_features
            for n_states, n_features in zip(self.l_n_states, self.l_n_features)
        )
        self.size_pairwise = 0
        for typ1, typ2 in self._iter_type_pairs():
            self.size_pairwise += (
                self.a_n_edge_features[typ1, typ2]
                * self.l_n_states[typ1]
                * self.l_n_states[typ2]
            )
        self.size_joint_feature = self.size_unaries + self.size_pairwise
We have:
- 1 weight per node feature per label per node type
- 1 weight per edge feature per label of the node1 type, per label of the node2 type

NOTE: for now, an edge between types (typ1, typ2) with 0 features is simply
ignored, although it could still be given a state x state matrix of weights.
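As a sanity check on the size computation, a small sketch with hypothetical numbers (two node types; the values are illustrative and assume the type-pair iteration visits each unordered pair once):

# Type 0: 3 states, 4 node features; type 1: 2 states, 5 node features.
l_n_states = [3, 2]
l_n_features = [4, 5]
# Edge feature counts per (type, type) pair; must be symmetric.
a_n_edge_features = [[1, 2],
                     [2, 0]]  # (1, 1) edges carry no features and are ignored

size_unaries = sum(s * f for s, f in zip(l_n_states, l_n_features))  # 3*4 + 2*5 = 22
size_pairwise = 0
for t1 in range(2):
    for t2 in range(t1, 2):
        size_pairwise += a_n_edge_features[t1][t2] * l_n_states[t1] * l_n_states[t2]
print(size_unaries + size_pairwise)  # 22 + (1*3*3 + 2*3*2 + 0*2*2) = 43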
https://github.com/pystruct/pystruct/blob/957193a40f3933ae5709336d46289c8ad4a60b7a/pystruct/models/node_type_edge_feature_graph_crf.py#L141-L163
import numpy as np
import random

from ..inference import inference_dispatch
from .utils import loss_augment_unaries
from .typed_crf import TypedCRF, InconsistentLabel


class NodeTypeEdgeFeatureGraphCRF(TypedCRF):
    def __init__(self, n_types, l_n_states, l_n_features, a_n_edge_features,
                 inference_method="ad3", l_class_weight=None):
        self.a_n_edge_features = np.array(a_n_edge_features)
        if self.a_n_edge_features.shape != (n_types, n_types):
            raise ValueError("Expected a feature number matrix for edges of "
                             "shape (%d, %d), got "
                             "%s." % (n_types, n_types,
                                      self.a_n_edge_features.shape))
        self.a_n_edge_features = self.a_n_edge_features.reshape(n_types, n_types)
        if not (self.a_n_edge_features == self.a_n_edge_features.T).all():
            raise ValueError("Expected a symmetric array of edge feature "
                             "numbers")
        self.l_n_edge_features = self.a_n_edge_features.ravel()
        self._n_edge_features = self.a_n_edge_features.sum(axis=None)
        TypedCRF.__init__(self, n_types, l_n_states, l_n_features,
                          inference_method=inference_method,
                          l_class_weight=l_class_weight)
        self._get_pairwise_potentials_initialize()
BSD 2-Clause Simplified License
dico-api/dico
dico/base/http.py
HTTPRequestBase.request_reactions
python
def request_reactions(self, channel_id, message_id, emoji: str, after=None, limit=None) -> RESPONSE:
    params = {}
    if after is not None:
        params["after"] = after
    if limit is not None:
        params["limit"] = limit
    return self.request(
        f"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}",
        "GET",
        params=params,
    )
Sends a get-reactions request.

:param channel_id: ID of the channel.
:param message_id: ID of the message to request.
:param emoji: Emoji to request.
:param after: User ID; request users after this user.
:param limit: Maximum number of users to request.
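A hypothetical call from a concrete HTTPRequestBase subclass (http, the IDs, and the pagination values below are all placeholders; the emoji must be URL-encoded):

# '%F0%9F%91%8D' is the URL-encoded thumbs-up emoji.
users = http.request_reactions(
    channel_id="123456789012345678",
    message_id="876543210987654321",
    emoji="%F0%9F%91%8D",
    after=None,   # pass the last user ID of the previous page to paginate
    limit=25,     # the Discord API documents a 1-100 range for this
)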
https://github.com/dico-api/dico/blob/d4dc8f022d0838017a3ae1d692dbf4b591b853a9/dico/base/http.py#L337-L352
import io import typing import datetime from abc import ABC, abstractmethod class __EmptyObject: def __bool__(self) -> bool: return False def __len__(self) -> int: return 0 def __repr__(self) -> str: return "Empty" EmptyObject: __EmptyObject = __EmptyObject() _R = typing.Optional[typing.Union[dict, list, str, bytes]] RESPONSE = typing.Union[_R, typing.Awaitable[_R]] class HTTPRequestBase(ABC): BASE_URL: str = "https://discord.com/api/v9" @abstractmethod def request(self, route: str, meth: str, body: typing.Any = None, *, is_json: bool = False, reason_header: str = None, retry: int = 3, **kwargs) -> RESPONSE: pass def request_guild_audit_log(self, guild_id, user_id: str = None, action_type: int = None, before: str = None, limit: int = None) -> RESPONSE: params = {} if user_id is not None: params["user_id"] = user_id if action_type is not None: params["action_type"] = action_type if before is not None: params["before"] = before if limit is not None: params["limit"] = limit return self.request(f"/guilds/{guild_id}/audit-logs", "GET", params=params) def request_channel(self, channel_id) -> RESPONSE: return self.request(f"/channels/{channel_id}", "GET") def modify_guild_channel(self, channel_id, name: str = None, channel_type: int = None, position: int = EmptyObject, topic: str = EmptyObject, nsfw: bool = EmptyObject, rate_limit_per_user: int = EmptyObject, bitrate: int = EmptyObject, user_limit: int = EmptyObject, permission_overwrites: typing.List[dict] = EmptyObject, parent_id: str = EmptyObject, rtc_region: str = EmptyObject, video_quality_mode: int = EmptyObject, reason: str = None) -> RESPONSE: body = {} if name is not None: body["name"] = name if channel_type is not None: body["channel_type"] = channel_type if position is not EmptyObject: body["position"] = position if topic is not EmptyObject: body["topic"] = topic if nsfw is not EmptyObject: body["nsfw"] = nsfw if rate_limit_per_user is not EmptyObject: body["rate_limit_per_user"] = rate_limit_per_user if bitrate is not EmptyObject: body["bitrate"] = bitrate if user_limit is not EmptyObject: body["user_limit"] = user_limit if permission_overwrites is not EmptyObject: body["permission_overwrites"] = permission_overwrites if parent_id is not EmptyObject: body["parent_id"] = parent_id if rtc_region is not EmptyObject: body["rtc_region"] = rtc_region if video_quality_mode is not EmptyObject: body["video_quality_mode"] = video_quality_mode return self.request(f"/channels/{channel_id}", "PATCH", body, is_json=True, reason_header=reason) def modify_group_dm_channel(self, channel_id, name: str = None, icon: bin = None, reason: str = None) -> RESPONSE: body = {} if name is not None: body["name"] = name if icon is not None: body["icon"] = icon return self.request(f"/channels/{channel_id}", "PATCH", body, is_json=True, reason_header=reason) def modify_thread_channel(self, channel_id, name: str = None, archived: bool = None, auto_archive_duration: int = None, locked: bool = None, rate_limit_per_user: int = EmptyObject, reason: str = None) -> RESPONSE: body = {} if name is not None: body["name"] = name if archived is not None: body["archived"] = archived if auto_archive_duration is not None: body["auto_archive_duration"] = auto_archive_duration if locked is not None: body["locked"] = locked if rate_limit_per_user is not EmptyObject: body["rate_limit_per_user"] = rate_limit_per_user return self.request(f"/channels/{channel_id}", "PATCH", body, is_json=True, reason_header=reason) def delete_channel(self, channel_id, reason: str = None) -> RESPONSE: 
return self.request(f"/channels/{channel_id}", "DELETE", reason_header=reason) @property def close_channel(self): return self.delete_channel def request_channel_messages(self, channel_id, around=None, before=None, after=None, limit: int = None) -> RESPONSE: if around and before and after: raise ValueError("Only around or before or after must be passed.") query = {} if around: query["around"] = around if before: query["before"] = before if after: query["after"] = after if limit: query["limit"] = limit return self.request(f"/channels/{channel_id}/messages", "GET", params=query) def request_channel_message(self, channel_id, message_id) -> RESPONSE: return self.request(f"/channels/{channel_id}/messages/{message_id}", "GET") def create_message(self, channel_id, content: str = None, nonce: typing.Union[int, str] = None, tts: bool = False, embeds: typing.List[dict] = None, allowed_mentions: dict = None, message_reference: dict = None, components: typing.List[dict] = None, sticker_ids: typing.List[str] = None) -> RESPONSE: if not (content or embeds or sticker_ids): raise ValueError("either content or embed or sticker_ids must be passed.") body = {} if content is not None: body["content"] = content if embeds is not None: body["embeds"] = embeds if nonce is not None: body["nonce"] = nonce if tts is not None: body["tts"] = tts if allowed_mentions is not None: body["allowed_mentions"] = allowed_mentions if message_reference is not None: body["message_reference"] = message_reference if components is not None: body["components"] = components if sticker_ids is not None: body["sticker_ids"] = sticker_ids return self.request(f"/channels/{channel_id}/messages", "POST", body, is_json=True) @abstractmethod def create_message_with_files(self, channel_id, content: str = None, files: typing.List[io.FileIO] = None, nonce: typing.Union[int, str] = None, tts: bool = None, embeds: typing.List[dict] = None, allowed_mentions: dict = None, message_reference: dict = None, components: typing.List[dict] = None, sticker_ids: typing.List[str] = None) -> RESPONSE: pass def crosspost_message(self, channel_id, message_id) -> RESPONSE: return self.request(f"/channels/{channel_id}/messages/{message_id}/crosspost", "POST") def create_reaction(self, channel_id, message_id, emoji: str) -> RESPONSE: return self.request(f"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me", "PUT") def delete_reaction(self, channel_id, message_id, emoji: str, user_id="@me") -> RESPONSE: return self.request(f"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{user_id}", "DELETE")
MIT License
uwescience/myria-web
appengine/networkx/classes/graph.py
Graph.nodes_iter
python
def nodes_iter(self, data=False):
    if data:
        return iter(self.node.items())
    return iter(self.adj)
Return an iterator over the nodes.

Parameters
----------
data : boolean, optional (default=False)
    If False the iterator returns nodes. If True return a
    two-tuple of node and node data dictionary.

Returns
-------
niter : iterator
    An iterator over nodes. If data=True the iterator gives
    two-tuples containing (node, node data dictionary).

Notes
-----
If the node data is not required it is simpler and equivalent
to use the expression 'for n in G'.

Examples
--------
>>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> [d for n,d in G.nodes_iter(data=True)]
[{}, {}, {}]
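A minimal demonstration against the bundled pre-2.0 networkx API (nodes_iter was removed in networkx 2.0, so this assumes the vendored copy shown here):

import networkx as nx

G = nx.Graph()
G.add_node(1, color="red")
G.add_node(2)
for n in G:  # equivalent to nodes_iter() when data is not needed
    print(n)
for n, d in G.nodes_iter(data=True):  # two-tuples of (node, node data dictionary)
    print(n, d)  # -> 1 {'color': 'red'} then 2 {}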
https://github.com/uwescience/myria-web/blob/b1c175a5cbe76d4f2219b192635085ece1326588/appengine/networkx/classes/graph.py#L527-L560
from copy import deepcopy import networkx as nx from networkx.exception import NetworkXError import networkx.convert as convert __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) class Graph(object): def __init__(self, data=None, **attr): self.graph = {} self.node = {} self.adj = {} if data is not None: convert.to_networkx_graph(data,create_using=self) self.graph.update(attr) self.edge = self.adj @property def name(self): return self.graph.get('name','') @name.setter def name(self, s): self.graph['name']=s def __str__(self): return self.name def __iter__(self): return iter(self.adj) def __contains__(self,n): try: return n in self.adj except TypeError: return False def __len__(self): return len(self.adj) def __getitem__(self, n): return self.adj[n] def add_node(self, n, attr_dict=None, **attr): if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError( "The attr_dict argument must be a dictionary.") if n not in self.adj: self.adj[n] = {} self.node[n] = attr_dict else: self.node[n].update(attr_dict) def add_nodes_from(self, nodes, **attr): for n in nodes: try: newnode=n not in self.adj except TypeError: nn,ndict = n if nn not in self.adj: self.adj[nn] = {} newdict = attr.copy() newdict.update(ndict) self.node[nn] = newdict else: olddict = self.node[nn] olddict.update(attr) olddict.update(ndict) continue if newnode: self.adj[n] = {} self.node[n] = attr.copy() else: self.node[n].update(attr) def remove_node(self,n): adj = self.adj try: nbrs = list(adj[n].keys()) del self.node[n] except KeyError: raise NetworkXError("The node %s is not in the graph."%(n,)) for u in nbrs: del adj[u][n] del adj[n] def remove_nodes_from(self, nodes): adj = self.adj for n in nodes: try: del self.node[n] for u in list(adj[n].keys()): del adj[u][n] del adj[n] except KeyError: pass
BSD 3-Clause New or Revised License
richrd/suplemon
suplemon/viewer.py
BaseViewer.jump_up
python
def jump_up(self):
    self.move_cursors((0, -3))
    self.scroll_up()
Jump up 3 lines.
https://github.com/richrd/suplemon/blob/8bb67d6758e5bc5ca200fdce7a0fb6635abb66f4/suplemon/viewer.py#L807-L810
import os import re import sys import curses import logging try: import importlib except ImportError: importlib = False from . import helpers from .line import Line from .cursor import Cursor from .themes import scope_to_pair import suplemon.linelight try: import pygments.lexers from .lexer import Lexer except ImportError: pygments = False class BaseViewer: def __init__(self, app, window): self.app = app self.window = window self.logger = logging.getLogger(__name__) self.config = {} self.data = "" self.lines = [Line()] self.file_extension = "" self.extension_map = { "scss": "css", "less": "css", "tmtheme": "xml", "ts": "js", } self.show_line_ends = True self.cursor_style = curses.A_UNDERLINE self.y_scroll = 0 self.x_scroll = 0 self.cursors = [Cursor()] self.buffer = [] self.last_find = "" self.operations = { "arrow_right": self.arrow_right, "arrow_left": self.arrow_left, "arrow_up": self.arrow_up, "arrow_down": self.arrow_down, "jump_left": self.jump_left, "jump_right": self.jump_right, "jump_up": self.jump_up, "jump_down": self.jump_down, "page_up": self.page_up, "page_down": self.page_down, "home": self.home, "end": self.end, "find": self.find_query, "find_next": self.find_next, "find_all": self.find_all, } self.pygments_syntax = None self.lexer = None def init(self): pass def get_buffer(self): if self.config["use_global_buffer"]: return self.app.global_buffer else: return self.buffer def get_size(self): y, x = self.window.getmaxyx() return (x, y) @property def scroll_pos(self): return self.y_scroll, self.x_scroll @scroll_pos.setter def scroll_pos(self, pos): self.y_scroll = pos[0] self.x_scroll = pos[1] def get_cursor(self): return self.cursors[0] def get_first_cursor(self): highest = None for cursor in self.cursors: if highest is None or cursor.y < highest.y: highest = cursor return highest def get_last_cursor(self): lowest = None for cursor in self.cursors: if lowest is None: lowest = cursor elif cursor.y > lowest.y: lowest = cursor elif cursor.y == lowest.y and cursor.x > lowest.x: lowest = cursor return lowest def get_cursors_on_line(self, line_no): cursors = [] for cursor in self.cursors: if cursor.y == line_no: cursors.append(cursor) return cursors def get_line(self, n): return self.lines[n] def get_lines_with_cursors(self): line_nums = [] for cursor in self.cursors: if cursor.y not in line_nums: line_nums.append(cursor.y) line_nums.sort() return line_nums def get_data(self): str_lines = [] for line in self.lines: if isinstance(line, str): str_lines.append(line) else: str_lines.append(line.get_data()) data = str(self.config["end_of_line"].join(str_lines)) return data def set_data(self, data): self.data = data self.lines = [] lines = self.data.splitlines() if not len(lines) or self.data.endswith(("\n", "\r\n", "\r")): lines.append(Line()) for line in lines: self.lines.append(Line(line)) def set_config(self, config): self.config = config self.set_cursor_style(self.config["cursor_style"]) def set_cursor_style(self, cursor_style): if cursor_style == "underline": self.cursor_style = curses.A_UNDERLINE elif cursor_style == "reverse": self.cursor_style = curses.A_REVERSE else: return False return True def set_cursor(self, cursor): self.logger.warning("set_cursor is deprecated, use set_cursor_style instead.") return self.set_cursor_style(cursor) def set_cursors(self, cursors): self.cursors = [Cursor(c) for c in cursors] def set_single_cursor(self, cursor): self.cursors = [Cursor(cursor)] def setup_linelight(self): raise NotImplementedError("Needs to be implemented in derived classes") def 
setup_highlight(self): raise NotImplementedError("Needs to be implemented in derived classes") def set_file_extension(self, ext): ext = ext.lower() if ext and ext != self.file_extension: self.file_extension = ext self.setup_linelight() if self.config["show_highlighting"]: self.setup_highlight() def add_cursor(self, cursor): self.cursors.append(Cursor(cursor)) def max_line_length(self): return self.get_size()[0]-self.line_offset()-1 def line_offset(self): if not self.config["show_line_nums"]: return 0 return len(str(len(self.lines)))+1 def toggle_line_nums(self): self.config["show_line_nums"] = not self.config["show_line_nums"] self.render() def toggle_line_ends(self): self.show_line_ends = not self.show_line_ends self.render() def toggle_highlight(self): return False def move_win(self, yx): try: self.window.mvwin(yx[0], yx[1]) except: self.logger.warning("Moving window failed!", exc_info=True) def refresh(self): self.move_cursors() self.render() self.window.refresh() def resize(self, yx=None): if not yx: yx = self.window.getmaxyx() self.window.resize(yx[0], yx[1]) self.move_cursors() self.refresh() def render(self): if self.app.block_rendering: return self.window.erase() max_y = self.get_size()[1] max_len = self.max_line_length() lnum_len = self.line_offset() - 1 lnum_pad = ">" if self.config["line_nums_pad_space"] else "0" for i in range(max_y): x_offset = self.line_offset() lnum = i + self.y_scroll if lnum >= len(self.lines): break line = self.lines[lnum] attribs = None if self.config["highlight_current_line"] and self._is_current_line(i): attribs = self.window.getbkgd() self.window.bkgdset(" ", attribs | curses.A_BOLD) if self.config["show_line_nums"]: curs_color = curses.color_pair(line.number_color) padded_num = "{:{}{}d} ".format(lnum + 1, lnum_pad, lnum_len) self.window.addstr(i, 0, padded_num, curs_color) pos = (x_offset, i) try: self.render_line_contents(line, pos, x_offset, max_len) except: self.logger.error("Failed rendering line #{0} @{1} DATA:'{2}'!".format(lnum+1, pos, line), exc_info=True) if attribs is not None: self.window.bkgdset(" ", attribs) self.render_cursors() def _is_current_line(self, y): for cursor in self.cursors: if cursor.y - self.y_scroll == y: return True return False def render_line_contents(self, line, pos, x_offset, max_len): show_highlighting = self.config["show_highlighting"] if pygments and show_highlighting and self.pygments_syntax and self.app.themes.current_theme: self.render_line_pygments(line, pos, x_offset, max_len) elif self.config["show_line_colors"]: self.render_line_linelight(line, pos, x_offset, max_len) else: self.render_line_normal(line, pos, x_offset, max_len) def render_line_pygments(self, line, pos, x_offset, max_len): x, y = pos line_data = line.get_data() line_data = self._prepare_line_for_rendering(line_data, max_len, no_wspace=True) tokens = self.lexer.lex(line_data, self.pygments_syntax) first_token = True for token in tokens: if token[1] == "\n": break scope = token[0] text = self.replace_whitespace(token[1]) if token[1].isspace() and not self.app.ui.limited_colors: pair = 9 settings = self.app.themes.get_scope("global") if settings and settings.get("invisibles"): fg = int(settings.get("invisibles") or -1) bg = int(settings.get("background") or -1) curses.init_pair(pair, fg, bg) curs_color = curses.color_pair(pair) if first_token and self.config["show_tab_indicators"]: text = self.add_tab_indicators(text) self.window.addstr(y, x_offset, text, curs_color) else: settings = self.app.themes.get_scope(scope) pair = 
scope_to_pair.get(scope) if settings and pair is not None: fg = int(settings.get("foreground") or -1) bg = int(settings.get("background") or -1) curses.init_pair(pair, fg, bg) curs_color = curses.color_pair(pair) self.window.addstr(y, x_offset, text, curs_color) else: self.window.addstr(y, x_offset, text) if first_token: first_token = False x_offset += len(text) def get_line_color(self, line): raise NotImplementedError("Needs to be implemented in derived classes") def render_line_linelight(self, line, pos, x_offset, max_len): y = pos[1] line_data = line.get_data() line_data = self._prepare_line_for_rendering(line_data, max_len) curs_color = curses.color_pair(self.get_line_color(line)) self.window.addstr(y, x_offset, line_data, curs_color) def render_line_normal(self, line, pos, x_offset, max_len): y = pos[1] line_data = line.get_data() line_data = self._prepare_line_for_rendering(line_data, max_len) self.window.addstr(y, x_offset, line_data) def add_tab_indicators(self, data): new_data = "" i = 0 for char in data: if i == 0: new_data += self.config["tab_indicator_character"] else: new_data += char i += 1 if i > self.config["tab_width"]-1: i = 0 return new_data def replace_whitespace(self, data): for key in self.config["white_space_map"]: char = " " if self.config["show_white_space"]: char = self.config["white_space_map"][key] data = data.replace(key, char) data = data.replace("\n", "") return data def _prepare_line_for_rendering(self, line_data, max_len, no_wspace=False): if self.show_line_ends: line_data += self.config["line_end_char"] line_data = self._slice_line_for_rendering(line_data, max_len) if not no_wspace: line_data = self.replace_whitespace(line_data) if sys.version_info[0] == 3 and sys.version_info[1] > 2: line_data = line_data.encode("utf-8") return line_data def _slice_line_for_rendering(self, line, max_len): line = line[min(self.x_scroll, len(line)):] if not line: return "" line = line[:min(len(line), max_len)] return line def render_cursors(self): if self.app.block_rendering: return max_x, max_y = self.get_size() for cursor in self.cursors: x = cursor.x - self.x_scroll + self.line_offset() y = cursor.y - self.y_scroll if y < 0: continue if y >= max_y: break if x < self.line_offset(): continue if x > max_x-1: continue self.window.chgat(y, cursor.x+self.line_offset()-self.x_scroll, 1, self.cursor_style) def scroll_up(self): cursor = self.get_first_cursor() if cursor.y - self.y_scroll < 0: self.y_scroll = cursor.y def scroll_down(self): cursor = self.get_last_cursor() size = self.get_size() if cursor.y - self.y_scroll >= size[1]: self.y_scroll = cursor.y - size[1]+1 def scroll_to_line(self, line_no): if line_no >= len(self.lines): line_no = len(self.lines)-1 new_y = line_no - int(self.get_size()[1] / 2) if new_y < 0: new_y = 0 self.y_scroll = new_y def move_y_scroll(self, delta): self.y_scroll += delta def move_cursors(self, delta=None): for cursor in self.cursors: if delta: if delta[0] != 0 and cursor.x >= 0: cursor.move_right(delta[0]) if delta[1] != 0 and cursor.y >= 0: cursor.move_down(delta[1]) if cursor.x < 0: cursor.x = 0 if cursor.y < 0: cursor.y = 0 if cursor.y >= len(self.lines)-1: cursor.y = len(self.lines)-1 if cursor.x >= len(self.lines[cursor.y]): cursor.x = len(self.lines[cursor.y]) elif cursor.persistent_x != cursor.x: cursor.x = min(cursor.persistent_x, len(self.lines[cursor.y])) cur = self.get_cursor() size = self.get_size() offset = self.line_offset() if cur.x - self.x_scroll+offset > size[0] - 1: self.x_scroll = len(self.lines[cur.y]) - size[0]+offset+1 if 
cur.x - self.x_scroll < 0: self.x_scroll -= abs(cur.x - self.x_scroll) if cur.x - self.x_scroll+offset < offset: self.x_scroll -= 1 self.purge_cursors() def move_x_cursors(self, line, col, delta): for cursor in self.cursors: if cursor.y == line: if cursor.x > col: cursor.move_right(delta) def move_y_cursors(self, line, delta, exclude=None): for cursor in self.cursors: if cursor == exclude: continue if cursor.y > line: cursor.move_down(delta) def cursor_exists(self, cursor): return cursor.tuple() in [cur.tuple() for cur in self.cursors] def remove_cursor(self, cursor): try: index = self.cursors.index(cursor) except: return False self.cursors.pop(index) return True def purge_cursors(self): new = [] ref = [] for cursor in self.cursors: if not cursor.tuple() in ref: ref.append(cursor.tuple()) new.append(cursor) self.cursors = new def purge_line_cursors(self, line_no): line_cursors = [] for cursor in self.cursors: if cursor.y == line_no: line_cursors.append(cursor) if len(line_cursors) < 2: return False line_cursors.pop(0) for line_cursors in cursor: self.remove_cursor(cursor) return True def get_key_bindings(self): return self.app.get_key_bindings() def handle_input(self, event): if event.type == "mouse": return False key = event.key_code name = event.key_name key_bindings = self.get_key_bindings() operation = None if key in key_bindings: operation = key_bindings[key] elif name in key_bindings: operation = key_bindings[name] if operation: self.run_operation(operation) return True return False def run_operation(self, operation): if operation in self.operations: cancel = self.app.trigger_event_before(operation) if cancel: return False result = self.operations[operation]() self.app.trigger_event_after(operation) return result return False def arrow_right(self): for cursor in self.cursors: line = self.lines[cursor.y] if cursor.x >= len(line) or len(line) == 0: if cursor.y != len(self.lines)-1: cursor.move_down() cursor.set_x(0) else: cursor.move_right() self.move_cursors() self.scroll_down() def arrow_left(self): for cursor in self.cursors: if cursor.y != 0 and cursor.x == 0: cursor.move_up() cursor.set_x(len(self.lines[cursor.y])+1) self.move_cursors((-1, 0)) self.scroll_up() def arrow_up(self): self.move_cursors((0, -1)) self.scroll_up() def arrow_down(self): self.move_cursors((0, 1)) self.scroll_down() def home(self): for cursor in self.cursors: wspace = helpers.whitespace(self.lines[cursor.y]) if cursor.x == wspace: cursor.set_x(0) else: cursor.set_x(wspace) self.move_cursors() def end(self): for cursor in self.cursors: cursor.set_x(len(self.lines[cursor.y])) self.move_cursors() def page_up(self): amount = int(self.get_size()[1]/2) * -1 self.move_cursors((0, amount)) self.scroll_up() def page_down(self): amount = int(self.get_size()[1]/2) self.move_cursors((0, amount)) self.scroll_down() def jump_left(self): chars = self.config["punctuation"] for cursor in self.cursors: line = self.lines[cursor.y] if cursor.x == 0: if cursor.y > 0: cursor.set_x(len(self.lines[cursor.y-1])) cursor.move_up() continue if cursor.x <= len(line): cur_chr = line[cursor.x-1] else: cur_chr = line[cursor.x] while cursor.x > 0: next = cursor.x-2 if next < 0: next = 0 if cur_chr == " ": cursor.move_left() if line[next] != " ": break else: cursor.move_left() if line[next] in chars: break self.move_cursors() def jump_right(self): chars = self.config["punctuation"] for cursor in self.cursors: line = self.lines[cursor.y] if cursor.x == len(line): if cursor.y < len(self.lines): cursor.set_x(0) cursor.move_down() continue 
cur_chr = line[cursor.x] while cursor.x < len(line): next = cursor.x+1 if next == len(line): next -= 1 if cur_chr == " ": cursor.move_right() if line[next] != " ": break else: cursor.move_right() if line[next] in chars: break self.move_cursors()
MIT License
bitcraft/pyglet
pyglet/media/__init__.py
SourceGroup.get_audio_data
python
def get_audio_data(self, bytes_):
    data = self._sources[0].get_audio_data(bytes_)
    eos = False
    while not data:
        eos = True
        if self._loop and not self._advance_after_eos:
            self._timestamp_offset += self._sources[0].duration
            self._dequeued_durations.insert(0, self._sources[0].duration)
            self._sources[0].seek(0)
        else:
            self._advance_after_eos = False
            if len(self._sources) > 1:
                self._advance()
            else:
                return None
        data = self._sources[0].get_audio_data(bytes_)
    data.timestamp += self._timestamp_offset
    if eos:
        if _debug:
            print('adding on_eos event to audio data')
        data.events.append(MediaEvent(0, 'on_eos'))
    return data
Get next audio packet.

:Parameters:
    `bytes_` : int
        Hint for preferred size of audio packet; may be ignored.

:rtype: `AudioData`
:return: Audio data, or None if there is no more data.
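A sketch of the intended consumption pattern (group is a hypothetical, already-populated SourceGroup, and play_buffer stands in for whatever sink consumes the bytes):

PACKET_SIZE = 4096  # byte-count hint only; the source may return more or less

while True:
    audio_data = group.get_audio_data(PACKET_SIZE)
    if audio_data is None:
        break  # no queued sources remain and looping is disabled
    play_buffer(audio_data.data, audio_data.timestamp)  # hypothetical sink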
https://github.com/bitcraft/pyglet/blob/144257c365ca85528c6a4c5bed8141e683d7a9b6/pyglet/media/__init__.py#L775-L811
import atexit import ctypes import heapq import sys import threading import time import pyglet from pyglet.compat import bytes_type, BytesIO _debug = pyglet.options['debug_media'] class MediaException(Exception): pass class MediaFormatException(MediaException): pass class CannotSeekException(MediaException): pass class MediaThread: _threads = set() _threads_lock = threading.Lock() def __init__(self, target=None): self._thread = threading.Thread(target=self._thread_run) self._thread.setDaemon(True) if target is not None: self.run = target self.condition = threading.Condition() self.stopped = False @classmethod def _atexit(cls): cls._threads_lock.acquire() threads = list(cls._threads) cls._threads_lock.release() for thread in threads: thread.stop() def run(self): pass def _thread_run(self): if pyglet.options['debug_trace']: pyglet._install_trace() self._threads_lock.acquire() self._threads.add(self) self._threads_lock.release() self.run() self._threads_lock.acquire() self._threads.remove(self) self._threads_lock.release() def start(self): self._thread.start() def stop(self): if _debug: print('MediaThread.stop()') self.condition.acquire() self.stopped = True self.condition.notify() self.condition.release() self._thread.join() def sleep(self, timeout): if _debug: print('MediaThread.sleep(%r)' % timeout) self.condition.acquire() self.condition.wait(timeout) self.condition.release() def notify(self): if _debug: print('MediaThread.notify()') self.condition.acquire() self.condition.notify() self.condition.release() atexit.register(MediaThread._atexit) class WorkerThread(MediaThread): def __init__(self, target=None): super().__init__(target) self._jobs = list() def run(self): while True: job = self.get_job() if not job: break job() def get_job(self): self.condition.acquire() while self._empty() and not self.stopped: self.condition.wait() if self.stopped: result = None else: result = self._get() self.condition.release() return result def put_job(self, job): self.condition.acquire() self._put(job) self.condition.notify() self.condition.release() def clear_jobs(self): self.condition.acquire() self._clear() self.condition.notify() self.condition.release() def _empty(self): return not self._jobs def _get(self): return self._jobs.pop(0) def _put(self, job): self._jobs.append(job) def _clear(self): del self._jobs[:] class AudioFormat: def __init__(self, channels, sample_size, sample_rate): self.channels = channels self.sample_size = sample_size self.sample_rate = sample_rate self.bytes_per_sample = (sample_size >> 3) * channels self.bytes_per_second = self.bytes_per_sample * sample_rate def __eq__(self, other): return (self.channels == other.channels and self.sample_size == other.sample_size and self.sample_rate == other.sample_rate) def __ne__(self, other): return not self.__eq__(other) def __repr__(self): return '%s(channels=%d, sample_size=%d, sample_rate=%d)' % ( self.__class__.__name__, self.channels, self.sample_size, self.sample_rate) class VideoFormat: def __init__(self, width, height, sample_aspect=1.0): self.width = width self.height = height self.sample_aspect = sample_aspect self.frame_rate = None class AudioData: def __init__(self, data, length, timestamp, duration, events): self.data = data self.length = length self.timestamp = timestamp self.duration = duration self.events = events def consume(self, bytes_, audio_format): self.events = () if bytes_ == self.length: self.data = None self.length = 0 self.timestamp += self.duration self.duration = 0. 
return elif bytes_ == 0: return if not isinstance(self.data, str): data = ctypes.create_string_buffer(self.length) ctypes.memmove(data, self.data, self.length) self.data = data self.data = self.data[bytes_:] self.length -= bytes_ self.duration -= bytes_ / float(audio_format.bytes_per_second) self.timestamp += bytes_ / float(audio_format.bytes_per_second) def get_string_data(self): if isinstance(self.data, bytes_type): return self.data buf = ctypes.create_string_buffer(self.length) ctypes.memmove(buf, self.data, self.length) return buf.raw class MediaEvent: def __init__(self, timestamp, event, *args): self.timestamp = timestamp self.event = event self.args = args def _sync_dispatch_to_player(self, player): pyglet.app.platform_event_loop.post_event( player, self.event, *self.args) time.sleep(0) def __repr__(self): return '%s(%r, %r, %r)' % (self.__class__.__name__, self.timestamp, self.event, self.args) def __lt__(self, other): return hash(self) < hash(other) class SourceInfo: title = '' author = '' copyright = '' comment = '' album = '' year = 0 track = 0 genre = '' class Source: _duration = None audio_format = None video_format = None info = None @property def duration(self): return self._duration def play(self): player = Player() player.queue(self) player.play() return player def get_animation(self): from pyglet.image import Animation, AnimationFrame if not self.video_format: return Animation(list()) else: frames = list() last_ts = 0 next_ts = self.get_next_video_timestamp() while next_ts is not None: image = self.get_next_video_frame() if image is not None: delay = next_ts - last_ts frames.append(AnimationFrame(image, delay)) last_ts = next_ts next_ts = self.get_next_video_timestamp() return Animation(frames) def get_next_video_timestamp(self): pass def get_next_video_frame(self): pass def seek(self, timestamp): raise CannotSeekException() def get_queue_source(self): return self def get_audio_data(self, bytes_): pass class StreamingSource(Source): _is_queued = False @property def is_queued(self): return self._is_queued def get_queue_source(self): if self._is_queued: raise MediaException('This source is already queued on a player.') self._is_queued = True return self class StaticSource(Source): def __init__(self, source): source = source.get_queue_source() if source.video_format: raise NotImplementedError( 'Static sources not supported for video yet.') self.audio_format = source.audio_format if not self.audio_format: return buffer_size = 1 << 20 data = BytesIO() while True: audio_data = source.get_audio_data(buffer_size) if not audio_data: break data.write(audio_data.get_string_data()) self._data = data.getvalue() self._duration = len(self._data) / float(self.audio_format.bytes_per_second) def get_queue_source(self): return StaticMemorySource(self._data, self.audio_format) def get_audio_data(self, bytes_): raise RuntimeError('StaticSource cannot be queued.') class StaticMemorySource(StaticSource): def __init__(self, data, audio_format): self._file = BytesIO(data) self._max_offset = len(data) self.audio_format = audio_format self._duration = len(data) / float(audio_format.bytes_per_second) def seek(self, timestamp): offset = int(timestamp * self.audio_format.bytes_per_second) if self.audio_format.bytes_per_sample == 2: offset &= 0xfffffffe elif self.audio_format.bytes_per_sample == 4: offset &= 0xfffffffc self._file.seek(offset) def get_audio_data(self, bytes_): offset = self._file.tell() timestamp = float(offset) / self.audio_format.bytes_per_second if self.audio_format.bytes_per_sample 
== 2: bytes_ &= 0xfffffffe elif self.audio_format.bytes_per_sample == 4: bytes_ &= 0xfffffffc data = self._file.read(bytes_) if not len(data): return None duration = float(len(data)) / self.audio_format.bytes_per_second return AudioData(data, len(data), timestamp, duration, list()) class SourceGroup: _advance_after_eos = False _loop = False def __init__(self, audio_format, video_format): self.audio_format = audio_format self.video_format = video_format self.duration = 0. self._timestamp_offset = 0. self._dequeued_durations = list() self._sources = list() def seek(self, time): if self._sources: self._sources[0].seek(time) def queue(self, source): source = source.get_queue_source() assert(source.audio_format == self.audio_format) self._sources.append(source) self.duration += source.duration def has_next(self): return len(self._sources) > 1 def next_source(self, immediate=True): if immediate: self._advance() else: self._advance_after_eos = True def get_current_source(self): if self._sources: return self._sources[0] def _advance(self): if self._sources: self._timestamp_offset += self._sources[0].duration self._dequeued_durations.insert(0, self._sources[0].duration) old_source = self._sources.pop(0) self.duration -= old_source.duration @property def loop(self): return self._loop @loop.setter def loop(self, loop): self._loop = loop @property def advance_after_eos(self): return self._advance_after_eos @advance_after_eos.setter def advance_after_eos(self, advance): self._advance_after_eos = advance
BSD 3-Clause New or Revised License
hqsquantumsimulations/pyquest-cffi
pyquest_cffi/ops/ops.py
sGate.matrix
python
def matrix(self, **kwargs) -> np.ndarray:
    matrix = np.array([[1, 0], [0, 1j]], dtype=complex)
    return matrix
r"""The definition of the gate as a unitary matrix.

Args:
    **kwargs: Additional keyword arguments

Returns:
    np.ndarray
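The returned matrix is the standard S (phase) gate, diag(1, i); a quick numpy check of its action, independent of pyquest:

import numpy as np

s_matrix = np.array([[1, 0], [0, 1j]], dtype=complex)
plus = np.array([1, 1], dtype=complex) / np.sqrt(2)  # the |+> state
print(s_matrix @ plus)       # -> [0.707+0j, 0+0.707j]
print(s_matrix @ s_matrix)   # S squared is Z: diag(1, -1)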
https://github.com/hqsquantumsimulations/pyquest-cffi/blob/38dafab739364fd42b2e1f94e0c6617e11fe6229/pyquest_cffi/ops/ops.py#L196-L206
from pyquest_cffi.questlib import (
    quest, _PYQUEST, ffi_quest, qreal, tqureg, tquestenv, paulihamil
)
import numpy as np
from typing import Sequence, Optional, Tuple
from pyquest_cffi import cheat


class hadamard(_PYQUEST):
    def call_interactive(self, qureg: tqureg, qubit: int) -> None:
        quest.hadamard(qureg, qubit)

    def matrix(self, **kwargs) -> np.ndarray:
        matrix = 1 / np.sqrt(2) * np.array([[1, 1], [1, -1]], dtype=complex)
        return matrix


class pauliX(_PYQUEST):
    def call_interactive(self, qureg: tqureg, qubit: int) -> None:
        quest.pauliX(qureg, qubit)

    def matrix(self, **kwargs) -> np.ndarray:
        matrix = np.array([[0, 1], [1, 0]], dtype=complex)
        return matrix


class pauliY(_PYQUEST):
    def call_interactive(self, qureg: tqureg, qubit: int) -> None:
        quest.pauliY(qureg, qubit)

    def matrix(self, **kwargs) -> np.ndarray:
        matrix = np.array([[0, -1j], [1j, 0]], dtype=complex)
        return matrix


class pauliZ(_PYQUEST):
    def call_interactive(self, qureg: tqureg, qubit: int) -> None:
        quest.pauliZ(qureg, qubit)

    def matrix(self, **kwargs) -> np.ndarray:
        matrix = np.array([[1, 0], [0, -1]], dtype=complex)
        return matrix


class sGate(_PYQUEST):
    def call_interactive(self, qureg: tqureg, qubit: int) -> None:
        quest.sGate(qureg, qubit)
Apache License 2.0
pymeasure/pymeasure
pymeasure/instruments/keithley/keithley6517b.py
Keithley6517B.apply_voltage
python
def apply_voltage(self, voltage_range=None):
    log.info("%s is sourcing voltage.", self.name)
    if voltage_range is None:
        self.auto_range_source()
    else:
        self.source_voltage_range = voltage_range
    self.check_errors()
Configures the instrument to apply a source voltage, using auto-range
unless a voltage range is specified.

:param voltage_range: A :attr:`~.Keithley6517B.voltage_range` value
    or None (activates auto range)
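A typical call sequence might look like the following sketch (the VISA address is a placeholder; adapt it to the actual connection):

from pymeasure.instruments.keithley import Keithley6517B

meter = Keithley6517B("GPIB::27")       # placeholder address
meter.apply_voltage(voltage_range=100)  # or apply_voltage() for auto-range
meter.source_voltage = 50               # volts
meter.enable_source()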
https://github.com/pymeasure/pymeasure/blob/658d8fb9a02bdb62f64cc3838875c0de12f49ca1/pymeasure/instruments/keithley/keithley6517b.py#L268-L280
import logging import time import re import numpy as np from pymeasure.instruments import Instrument from pymeasure.instruments.validators import truncated_range from .buffer import KeithleyBuffer log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) class Keithley6517B(Instrument, KeithleyBuffer): source_enabled = Instrument.measurement( "OUTPUT?", """ Reads a boolean value that is True if the source is enabled. """, cast=bool ) @staticmethod def extract_value(result): m = re.fullmatch(r'([+\-0-9E.]+)[A-Z]{4}', result[0]) if m: return float(m.group(1)) return None current = Instrument.measurement( ":MEAS?", """ Reads the current in Amps, if configured for this reading. """, get_process=extract_value ) current_range = Instrument.control( ":SENS:CURR:RANG?", ":SENS:CURR:RANG:AUTO 0;:SENS:CURR:RANG %g", """ A floating point property that controls the measurement current range in Amps, which can take values between -20 and +20 mA. Auto-range is disabled when this property is set. """, validator=truncated_range, values=[-20e-3, 20e-3] ) current_nplc = Instrument.control( ":SENS:CURR:NPLC?", ":SENS:CURR:NPLC %g", """ A floating point property that controls the number of power line cycles (NPLC) for the DC current measurements, which sets the integration period and measurement speed. Takes values from 0.01 to 10, where 0.1, 1, and 10 are Fast, Medium, and Slow respectively. """, values=[0.01, 10] ) source_current_resistance_limit = Instrument.control( ":SOUR:CURR:RLIM?", ":SOUR:CURR:RLIM %g", """ Boolean property which enables or disables resistance current limit """, cast=bool ) voltage = Instrument.measurement( ":MEAS:VOLT?", """ Reads the voltage in Volts, if configured for this reading. """, get_process=extract_value ) voltage_range = Instrument.control( ":SENS:VOLT:RANG?", ":SENS:VOLT:RANG:AUTO 0;:SENS:VOLT:RANG %g", """ A floating point property that controls the measurement voltage range in Volts, which can take values from -1000 to 1000 V. Auto-range is disabled when this property is set. """, validator=truncated_range, values=[-1000, 1000] ) voltage_nplc = Instrument.control( ":SENS:VOLT:NPLC?", ":SENS:VOLT:NPLC %g", """ A floating point property that controls the number of power line cycles (NPLC) for the DC voltage measurements, which sets the integration period and measurement speed. Takes values from 0.01 to 10, where 0.1, 1, and 10 are Fast, Medium, and Slow respectively. """ ) source_voltage = Instrument.control( ":SOUR:VOLT?", ":SOUR:VOLT:LEV %g", """ A floating point property that controls the source voltage in Volts. """ ) source_voltage_range = Instrument.control( ":SOUR:VOLT:RANG?", ":SOUR:VOLT:RANG:AUTO 0;:SOUR:VOLT:RANG %g", """ A floating point property that controls the source voltage range in Volts, which can take values from -1000 to 1000 V. Auto-range is disabled when this property is set. """, validator=truncated_range, values=[-1000, 1000] ) resistance = Instrument.measurement( ":READ?", """ Reads the resistance in Ohms, if configured for this reading. """, get_process=extract_value ) resistance_range = Instrument.control( ":SENS:RES:RANG?", ":SENS:RES:RANG:AUTO 0;:SENS:RES:RANG %g", """ A floating point property that controls the resistance range in Ohms, which can take values from 0 to 100e18 Ohms. Auto-range is disabled when this property is set. 
""", validator=truncated_range, values=[0, 100e18] ) resistance_nplc = Instrument.control( ":SENS:RES:NPLC?", ":SENS:RES:NPLC %g", """ A floating point property that controls the number of power line cycles (NPLC) for the 2-wire resistance measurements, which sets the integration period and measurement speed. Takes values from 0.01 to 10, where 0.1, 1, and 10 are Fast, Medium, and Slow respectively. """ ) buffer_points = Instrument.control( ":TRAC:POIN?", ":TRAC:POIN %d", """ An integer property that controls the number of buffer points. This does not represent actual points in the buffer, but the configuration value instead. """, validator=truncated_range, values=[1, 6875000], cast=int ) def __init__(self, adapter, **kwargs): super(Keithley6517B, self).__init__( adapter, "Keithley 6517B Electrometer/High Resistance Meter", **kwargs ) def enable_source(self): self.write("OUTPUT ON") def disable_source(self): self.write("OUTPUT OFF") def measure_resistance(self, nplc=1, resistance=2.1e5, auto_range=True): log.info("%s is measuring resistance.", self.name) self.write(":SENS:FUNC 'RES';" ":SENS:RES:NPLC %f;" % nplc) if auto_range: self.write(":SENS:RES:RANG:AUTO 1;") else: self.resistance_range = resistance self.check_errors() def measure_voltage(self, nplc=1, voltage=21.0, auto_range=True): log.info("%s is measuring voltage.", self.name) self.write(":SENS:FUNC 'VOLT';" ":SENS:VOLT:NPLC %f;" % nplc) if auto_range: self.write(":SENS:VOLT:RANG:AUTO 1;") else: self.voltage_range = voltage self.check_errors() def measure_current(self, nplc=1, current=1.05e-4, auto_range=True): log.info("%s is measuring current.", self.name) self.write(":SENS:FUNC 'CURR';" ":SENS:CURR:NPLC %f;" % nplc) if auto_range: self.write(":SENS:CURR:RANG:AUTO 1;") else: self.current_range = current self.check_errors() def auto_range_source(self): self.write(":SOUR:VOLT:RANG:AUTO 1")
MIT License
christopher-dg/gpymusic
gpymusic/start.py
get_windows
python
def get_windows():
    main = crs.initscr()
    main.resize(crs.LINES - 3, crs.COLS)
    inbar = crs.newwin(1, crs.COLS, crs.LINES - 1, 0)
    infobar = crs.newwin(1, crs.COLS, crs.LINES - 2, 0)
    outbar = crs.newwin(1, crs.COLS, crs.LINES - 3, 0)
    return main, inbar, infobar, outbar
Initialize the curses windows. Returns: Curses windows.
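The resulting layout is the main content area on top with, from the bottom up, a one-row input bar, info bar, and output bar. A minimal consumption sketch (assuming get_windows is in scope; curses needs endwin() on the way out):

import curses

main, inbar, infobar, outbar = get_windows()
try:
    outbar.addstr(0, 0, "status goes here")
    outbar.refresh()
    inbar.getch()      # block until a key is pressed on the input bar
finally:
    curses.endwin()    # always restore the terminal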
https://github.com/christopher-dg/gpymusic/blob/e16ee3122bfd15dd5558824dd9781aee2edf79e1/gpymusic/start.py#L162-L173
from . import common from getpass import getpass from os.path import basename, exists, expanduser, isfile, join from time import sleep import curses as crs import json import sys def validate_config(config): user_fields = ['email', 'password', 'deviceid'] colour_fields = [ 'background', 'foreground', 'highlight', 'content1', 'content2' ] if 'user' not in config: common.w.goodbye('No user info in config file: Exiting.') if not all([k in config['user'] for k in user_fields]): common.w.goodbye('Missing user info in config file: Exiting.') if 'nowplaying' in config and 'enable' not in config['nowplaying']: common.w.goodbye( 'Missing nowplaying enable flag in config file: Exiting.') else: nowplaying = config['nowplaying']['enable'] == 'yes' if nowplaying: if 'filename' in config['nowplaying']: filename = config['nowplaying']['filename'] else: filename = '~/.nowplaying' common.np.initialise(expanduser(filename)) if 'colour' in config and 'enable' not in config['colour']: common.w.goodbye('Missing colour enable flag in config file: Exiting.') else: colour = config['colour']['enable'] == 'yes' if colour and not all([c in config['colour'] for c in colour_fields]): common.w.outbar_msg( 'One or more colours are missing: Not using colour.') colour = False sleep(1.5) elif colour and not all( [validate_colour(c, config['colour'][c]) for c in colour_fields] ): common.w.outbar_msg( 'One or more colours are invalid: Not using colour.' ) colour = False sleep(1.5) return colour def validate_colour(field, hex): if field == 'background' and hex == 'default': return True c = tuple(range(48, 58)) + tuple(range(65, 91)) + tuple(range(97, 123)) return (hex.startswith('#') and len(hex) == 7 and all([ord(ch) in c for ch in hex[1:]])) def check_dirs(): msg = 'At least one required directory does not exist: ' msg += 'did you run gpymusic-setup?' if not exists(common.CONFIG_DIR) or not exists(common.DATA_DIR): common.w.goodbye(msg) if not exists(join(common.DATA_DIR, 'playlists')): common.w.goodbye(msg) if not exists(join(common.DATA_DIR, 'songs')): common.w.goodbye(msg) def password(config): if not config['user']['password']: if not common.w.curses: try: config['user']['password'] = getpass() except KeyboardInterrupt: common.w.goodbye('Exiting.') else: crs.noecho() common.w.addstr(common.w.inbar, 'Enter your password: ') try: config['user']['password'] = ( common.w.inbar.getstr().decode('utf-8') ) except KeyboardInterrupt: common.w.goodbye('Exiting.') crs.echo() return config def read_config(): path = join(common.CONFIG_DIR, 'config.json') if not isfile(path): common.w.goodbye( 'Config file not found at %s: Exiting.' % basename(path) ) with open(path) as f: try: config = json.load(f) except json.decoder.JSONDecodeError: common.w.goodbye( 'Invalid config file, refer to config.example.json: Exiting.' ) return password(config)
MIT License
reliaqualassociates/ramstk
src/ramstk/analyses/fha.py
set_user_defined_floats
python
def set_user_defined_floats(fha: Dict[str, Any], floats: List[float]) -> Dict[str, Any]:
    _key = ""
    for _idx in [0, 1, 2]:
        try:
            _key = list(fha.keys())[_idx]
            fha[_key] = float(floats[_idx])
        except IndexError:
            fha[_key] = 0.0
    return fha
Set the user-defined float values for the user-defined calculations.

:param fha: the functional hazard assessment dict.
:param list floats: the list of float values.
:return: fha; the functional hazard assessment dict with updated float values.
:rtype: dict
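A worked example (illustrative values; the function writes to the first three keys of the dict in iteration order, and missing floats fall back to 0.0):

fha = {"uf1": 0.0, "uf2": 0.0, "uf3": 0.0, "ui1": 0, "res1": 0.0}
fha = set_user_defined_floats(fha, [1.5, 2.5])  # only two floats supplied
print(fha)  # -> {'uf1': 1.5, 'uf2': 2.5, 'uf3': 0.0, 'ui1': 0, 'res1': 0.0}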
https://github.com/reliaqualassociates/ramstk/blob/ffec5a107424914cf0026c6dfe26369c221f79f9/src/ramstk/analyses/fha.py#L162-L179
from typing import Any, Dict, List from sympy import symbols, sympify from ramstk.exceptions import OutOfRangeError PROBABILITY = { "Level E - Extremely Unlikely": 1, "Level D - Remote": 2, "Level C - Occasional": 3, "Level B - Reasonably Probable": 4, "Level A - Frequent": 5, } SEVERITY = { "Insignificant": 1, "Slight": 2, "Low": 3, "Medium": 4, "High": 5, "Major": 6, } def calculate_hri(probability: str, severity: str) -> int: try: return PROBABILITY[probability] * SEVERITY[severity] except KeyError as _error: raise OutOfRangeError( ( "calculate_hri() was passed an unknown hazard " "probability ({0:s}) or severity ({1:s}) " "description." ).format(probability, severity) ) from _error def calculate_user_defined(fha: Dict[str, Any]) -> Dict[str, Any]: (uf1, uf2, uf3, ui1, ui2, ui3, res1, res2, res3, res4, res5) = symbols( "uf1 uf2 uf3 ui1 ui2 ui3 res1 res2 res3 res4 res5" ) fha["res1"] = sympify(fha["equation1"]).evalf( subs={ uf1: fha["uf1"], uf2: fha["uf2"], uf3: fha["uf3"], ui1: fha["ui1"], ui2: fha["ui2"], ui3: fha["ui3"], res1: fha["res1"], res2: fha["res2"], res3: fha["res3"], res4: fha["res4"], res5: fha["res5"], } ) fha["res2"] = sympify(fha["equation2"]).evalf( subs={ uf1: fha["uf1"], uf2: fha["uf2"], uf3: fha["uf3"], ui1: fha["ui1"], ui2: fha["ui2"], ui3: fha["ui3"], res1: fha["res1"], res2: fha["res2"], res3: fha["res3"], res4: fha["res4"], res5: fha["res5"], } ) fha["res3"] = sympify(fha["equation3"]).evalf( subs={ uf1: fha["uf1"], uf2: fha["uf2"], uf3: fha["uf3"], ui1: fha["ui1"], ui2: fha["ui2"], ui3: fha["ui3"], res1: fha["res1"], res2: fha["res2"], res3: fha["res3"], res4: fha["res4"], res5: fha["res5"], } ) fha["res4"] = sympify(fha["equation4"]).evalf( subs={ uf1: fha["uf1"], uf2: fha["uf2"], uf3: fha["uf3"], ui1: fha["ui1"], ui2: fha["ui2"], ui3: fha["ui3"], res1: fha["res1"], res2: fha["res2"], res3: fha["res3"], res4: fha["res4"], res5: fha["res5"], } ) fha["res5"] = sympify(fha["equation5"]).evalf( subs={ uf1: fha["uf1"], uf2: fha["uf2"], uf3: fha["uf3"], ui1: fha["ui1"], ui2: fha["ui2"], ui3: fha["ui3"], res1: fha["res1"], res2: fha["res2"], res3: fha["res3"], res4: fha["res4"], res5: fha["res5"], } ) return fha
BSD 3-Clause New or Revised License
haorand/faceantispoofing_dl
Code/Landmark_5p_caffe.py
list2colmatrix
python
def list2colmatrix(pts_list):
    assert len(pts_list) > 0
    colMat = []
    for i in range(len(pts_list)):
        colMat.append(pts_list[i][0])
        colMat.append(pts_list[i][1])
    colMat = np.matrix(colMat).transpose()
    return colMat
Convert a list of points to a column matrix.

Parameters:
----------
pts_list: input list of (x, y) points

Returns:
-------
colMat: column matrix of the flattened coordinates
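A quick demonstration of the flattening order, x then y per point (assumes list2colmatrix and numpy are in scope):

import numpy as np

pts = [(1.0, 2.0), (3.0, 4.0)]
col = list2colmatrix(pts)
print(col.shape)  # -> (4, 1): the column vector [x1, y1, x2, y2]^T
print(col.T)      # -> [[1. 2. 3. 4.]]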
https://github.com/haorand/faceantispoofing_dl/blob/40e12ee9db6fbaded03c7aff1f933fe8be5e4ff3/Code/Landmark_5p_caffe.py#L23-L41
import numpy as np
import math
import cv2
import os
from os import listdir
import json
import time
import sys

from MtcnnDetector import FaceDetector

TRAINING = 0
TESTING = 1
CPU = 0
GPU = 1
CASIA = 0
REPLAYATTACK = 1
OULU = 2
MIT License
sanand0/xmljson
xmljson/__init__.py
XMLData._tostring
python
def _tostring(value):
    if value is True:
        value = 'true'
    elif value is False:
        value = 'false'
    elif value is None:
        value = ''
    return unicode(value)
Convert value to XML compatible string
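The boolean mapping is deliberately JavaScript-flavoured; since the helper is a static method, its behaviour is easy to check directly:

print(XMLData._tostring(True))   # -> 'true'
print(XMLData._tostring(False))  # -> 'false'
print(XMLData._tostring(None))   # -> ''
print(XMLData._tostring(3.14))   # -> '3.14'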
https://github.com/sanand0/xmljson/blob/20817db7eecd057b23ed7055d32bb339c3c5a496/xmljson/__init__.py#L61-L69
import sys
from collections import Counter, OrderedDict

try:
    from lxml.etree import Element
except ImportError:
    from xml.etree.cElementTree import Element

__author__ = 'S Anand'
__email__ = 'root.node@gmail.com'
__version__ = '0.2.0'

if sys.version_info[0] == 3:
    unicode = str
    basestring = str


class XMLData(object):
    def __init__(self, xml_fromstring=True, xml_tostring=True, element=None,
                 dict_type=None, list_type=None, attr_prefix=None,
                 text_content=None, simple_text=False, invalid_tags=None):
        if callable(xml_fromstring):
            self._fromstring = xml_fromstring
        elif not xml_fromstring:
            self._fromstring = lambda v: v
        if callable(xml_tostring):
            self._tostring = xml_tostring
        self.element = Element if element is None else element
        self.dict = OrderedDict if dict_type is None else dict_type
        self.list = list if list_type is None else list_type
        self.attr_prefix = attr_prefix
        self.text_content = text_content
        self.simple_text = simple_text
        if invalid_tags == 'drop':
            self._element = self.element
            self.element = self._make_valid_element
        elif invalid_tags is not None:
            raise TypeError('invalid_tags can be "drop" or None, not "%s"' % invalid_tags)

    def _make_valid_element(self, key):
        try:
            return self._element(key)
        except (TypeError, ValueError):
            pass

    @staticmethod
MIT License
chriswilson1982/black-and-white
black_white.py
Game.timing
python
def timing(self):
    if self.can_play:
        time_allowed = 61 - (difficulty * 10) - (self.level * 0.5 * difficulty)
        time_allowed = max(time_allowed, 5)
        time_elapsed = self.t - self.timestamp
        angle = 2 * pi * time_elapsed / time_allowed
        if time_elapsed >= time_allowed:
            self.can_play = False
            self.timer_mark.run_action(A.fade_to(0, 0))
            self.timer_mark_2.run_action(A.fade_to(0, 0))
            self.sparkle(color3, self.restart_button.position,
                         image='shp:Circle', spread=50, z_position=0.2, n=20)
            self.commit()
            return
        elif time_elapsed > time_allowed - 8 and not self.low_time and not self.warning_flag:
            self.warning_flag = True
            self.level_label.text = choice(hurry_text)
        elif time_elapsed > time_allowed - 5 and not self.low_time:
            self.low_time = True
            self.green_timer_background.fill_color = color3
            self.timer_sound()
        radius = 28.0
        p = ui.Path()
        p.add_arc(0, 0, radius, 0, angle)
        p.stroke()
        p.close()
        self.timer.path = p
        mid_frame = (self.timer.frame[2] / 2.0, self.timer.frame[3] / 2.0)
        rp = self.restart_button.position
        if angle < pi:
            self.timer.position = (
                rp[0] + mid_frame[0], rp[1] + radius - mid_frame[1])
        else:
            self.timer.position = (
                rp[0] + radius - mid_frame[0], rp[1] + radius - mid_frame[1])
        self.timer_mark_2.rotation = 2 * pi - (time_elapsed / time_allowed * 2 * pi)
Use timestamp for countdown. This is called with each update.
https://github.com/chriswilson1982/black-and-white/blob/e275e6f534aa51f12f4545730b627ce280aae8c3/black_white.py#L870-L923
from scene import * import ui from math import pi from random import choice, randint, randrange, random, uniform from time import sleep, time from datetime import date import dialogs import pickle import sound import os import webbrowser from database import check_final_score, get_all_scores from square import Square from start_finish import StartFinish from powerup import Powerup from configuration import save_settings from common import * class Game (Scene): @property def score(self): return self._score @score.setter def score(self, value): self._score = value self.score_label.text = str(self._score) def setup(self): self.background_color = background_color self.alpha = 0 self._score = 0 self.run_action(A.fade_to(1, 0.2)) bg = ui.Path().rounded_rect(0, 0, square_size, square_size, 4) bg.fill() bg.close() self.bg_list = [] for x in range(randrange(20, 100)): self.random_bg = ShapeNode(bg, color=choice(all_colors), position=( randint(0, screen_w), randint(0, screen_h)), size=(square_size, square_size)) self.random_bg.alpha = 0.05 * randrange(1, 9) self.random_bg.speed = randrange(1, 6) self.random_bg.z_position = 0.2 self.add_child(self.random_bg) self.bg_list.append(self.random_bg) self.title = LabelNode("black white", font=( 'Helvetica Neue', title_font_size), color=text_color, position=title_position) self.title.z_position = 1.0 self.add_child(self.title) self.title2 = LabelNode("&", font=( 'Helvetica-bold', title_font_size), color=text_color, position=title_position) self.title2.z_position = 1.0 self.add_child(self.title2) self.settings = SpriteNode(make_texture('Cog', text_color), position=( top_button_side_gap, screen_h - top_button_top_gap), scale=top_button_scale) self.settings.z_position = 0.9 self.add_child(self.settings) self.highscore_button = SpriteNode(make_texture('Group', text_color), position=( screen_w - top_button_side_gap, screen_h - top_button_top_gap), scale=top_button_scale) self.highscore_button.z_position = 0.9 self.add_child(self.highscore_button) self.level = 0 self.level_bg = SpriteNode(color=background_color, position=( screen_w / 2, label_height), size=(screen_w, 26), alpha=0.7) self.level_bg.z_position = 0.8 self.add_child(self.level_bg) self.level_label = LabelNode("Level 1", font=( 'Helvetica-bold', 20), color=text_color, position=(screen_w / 2, label_height)) self.level_label.z_position = 0.9 self.add_child(self.level_label) self.powerup_1 = Powerup(1, starting_powerups, ( screen_w / 2 - square_size * 2, powerup_vertical)) self.add_child(self.powerup_1) self.powerup_2 = Powerup(2, starting_powerups, (screen_w / 2, powerup_vertical)) self.add_child(self.powerup_2) self.powerup_3 = Powerup(3, starting_powerups, ( screen_w / 2 + square_size * 2, powerup_vertical)) self.add_child(self.powerup_3) self.black_count = LabelNode("0", font=( 'Helvetica', counter_font_size), color=color2, position=(-100, -100), z_position=0.55) self.add_child(self.black_count) self.white_count = LabelNode("0", font=( 'Helvetica', counter_font_size), color=color1, position=(-100, -100), z_position=0.55) self.add_child(self.white_count) self.squares = [] self.start = StartFinish(row=randint(1, rows), type="start") self.add_child(self.start) self.finish = StartFinish(row=randint(1, rows), type="finish") self.add_child(self.finish) self.backdrop3a = SpriteNode(position=self.start.position, size=( 8 * square_size, square_size + 10), color=text_color) self.backdrop3a.anchor_point = (0.965, 0.5) self.backdrop3a.z_position = 0.4 self.add_child(self.backdrop3a) self.backdrop3b = 
SpriteNode(position=self.finish.position, size=( 8 * square_size, square_size + 10), color=text_color) self.backdrop3b.anchor_point = (0.035, 0.5) self.backdrop3b.z_position = 0.4 self.add_child(self.backdrop3b) bd4 = ui.Path().rounded_rect(0, 0, square_size * 2, 50, 8) bd4.fill() bd4.close() self.backdrop4 = ShapeNode(bd4, position=( screen_w / 2, bottom_vertical), color=color1, size=(square_size * 2, 50)) self.backdrop4.z_position = 0.8 self.add_child(self.backdrop4) bd5 = ui.Path().rounded_rect(0, 0, square_size * 2 - 10, 40, 4) bd5.fill() bd5.close() self.backdrop5 = ShapeNode(bd5, stroke_color=color2, position=( screen_w / 2, bottom_vertical), size=(2 * square_size - 10, 40)) self.backdrop5.line_width = 2 self.backdrop5.z_position = 0.85 self.add_child(self.backdrop5) self.commit_button = SpriteNode(texture=make_texture('Check', text_color), color=text_color, position=( screen_w / 2, bottom_vertical), size=(square_size, square_size), z_position=0.9) self.add_child(self.commit_button) circle1 = ui.Path() circle1.add_arc(0, 0, 24, 0, pi * 2) circle1.line_width = 4 circle1.stroke() circle1.close() circle2 = ui.Path() circle2.add_arc(0, 0, 15, 0, pi * 2) circle2.close() circle3 = ui.Path() circle3.add_arc(0, 0, 26, 0, 2 * pi) circle3.line_width = 2 circle3.stroke() circle3.close() tex_res = Texture('iob:ios7_refresh_32') self.restart_button = SpriteNode(texture=tex_res, position=( screen_w / 2 + square_size * 2.5, bottom_vertical), scale=1) self.restart_button.z_position = 0.4 self.add_child(self.restart_button) self.green_timer_background = ShapeNode( circle1, color4, color1, position=self.restart_button.position) self.green_timer_background.rotation = pi / 2 self.green_timer_background.z_position = 0.18 self.add_child(self.green_timer_background) self.white_timer_background = ShapeNode( circle2, fill_color=color4, position=self.restart_button.position) self.white_timer_background.z_position = 0.35 self.add_child(self.white_timer_background) p = ui.Path() p.add_arc(0, 0, 26, 0, 0) p.stroke() p.close() self.timer = ShapeNode(p, background_color, background_color) self.timer.z_position = 0.19 self.timer.rotation = pi / 2 self.timer.position = self.restart_button.position self.timer.alpha = 1 self.add_child(self.timer) line = ui.Path().rect(0, 0, 4, 25) line.fill() line.close() self.timer_mark = ShapeNode( line, color1, color1, position=self.restart_button.position, size=(4, 25)) self.timer_mark.anchor_point = (0.5, 0) self.timer_mark.z_position = 0.3 self.add_child(self.timer_mark) self.timer_mark_2 = ShapeNode( line, color1, color1, position=self.restart_button.position, size=(4, 25)) self.timer_mark_2.anchor_point = (0.5, 0) self.timer_mark_2.z_position = 0.3 self.add_child(self.timer_mark_2) self.timer_ring = ShapeNode(circle3) self.timer_ring.alpha = 0 self.warning_flag = False self.score_label = LabelNode("0", font=('Helvetica-bold', 40), color=text_color, position=( screen_w / 2 - square_size * 2.5, bottom_vertical), size=(square_size, square_size), z_position=0.9) self.add_child(self.score_label) self.can_play = True self.can_settings = True self.can_highscore = True self.unlock = False self.can_restart = False self.win = False self.no_whites = False self.punishment = False self.reward = False self.can_flip = False self.low_time = False self.green_list = [] self.new_game(False) if not username or username == "Player": self.get_name() @ui.in_background def get_name(self): global username sleep(1) try: username = dialogs.input_alert( "Enter your name", "This will be used on the high 
score leaderboard!") if username == '': username = 'Player' except KeyboardInterrupt: username = "Player" first_time = False new_data = (background_color, color3, color4, text_color, difficulty, username, first_time) save_settings(new_data) def commit(self): self.stop_squares_moving() self.destroy_crosses() self.can_play = False self.low_time = False self.can_settings = False self.highscore_button.alpha = 1 sound.play_effect(button_sound) for square in self.squares: if square.row == self.start.row and square.col == 1: if square.state == 2: square.state = 3 square.color = color4 self.go(square) else: self.losing() def go(self, start_square): def cascade(node, progress): if progress == 1 and node.state == 3 and self.win: node.state = 4 self.sparkle(color4, node.position, image='shp:RoundRect') node.color = color4 elif progress == 1 and node.state == 4 and self.win: self.sparkle(color4, node.position, image='shp:RoundRect') node.color = color4 self.finish.color = color4 elif progress == 1 and node.state == 0 and not self.win: self.sparkle(color3, node.position, image='shp:RoundRect') node.color = color3 self.green_list.append(start_square) index = 0.01 while self.green_list: square = self.green_list.pop(randint(0, len(self.green_list) - 1)) square.state = 3 square.run_action(A.call(cascade, index)) index += 0.01 for n in square.white_neighbours(self.squares): if n not in self.green_list: self.green_list.append(n) self.check_win() def check_win(self): self.can_play = False self.can_flip = False self.unlock = False for square in self.squares: square.rotation = 0.0 if square.row == self.finish.row and square.col == cols: if square.state == 3: square.state = 4 self.win = True self.can_play = False self.winning() return elif square.state == 4: return self.losing() def winning(self): square_states = [square.state for square in self.squares] self.black_count.text = str(square_states.count(1)) self.white_count.text = str(square_states.count(2)) add_score = 0 for square in self.squares: if square.state >= 3: add_score += 1 self.commit_button.texture = make_texture('Right', text_color) self.restart_button.texture = Texture('iob:checkmark_circled_32') if self.star_square: if self.star_square.state >= 3: self.sparkle(text_color, self.star_square.position, image='shp:Star', spread=40, z_position=1.1) p_list = [] for item in (self.powerup_1, self.powerup_2, self.powerup_3): if item.count < 9: p_list.append(item) if not p_list: p_list.append(self.score_label) target = choice(p_list) pos = target.position self.powerup_indicator = target if text_color == 1: self.star_square.star_icon.scale = 0.8 self.star_square.star_icon.texture = Texture('typw:Star') self.star_square.star_icon.z_position = 0.9 self.star_square.star_icon.run_action(A.sequence(A.scale_by(0.2, 0.1), A.scale_by(-0.2, 0.1), A.group(A.move_to( pos[0], pos[1], 1.8, TIMING_SINODIAL), A.fade_to(0, 1.8, TIMING_EASE_IN), A.rotate_by(2 * pi, 2)), A.remove())) if target != self.score_label: target.run_action(A.sequence(A.wait(1.8), A.scale_to( 1.2, 0.2, TIMING_BOUNCE_IN_OUT), A.scale_to(1, 0.2))) if target == self.score_label: self.score += self.level self.ten = LabelNode("+" + str(self.level), font=('Helvetica-bold', 30), position=self.star_square.position + (0, 30), color=text_color, z_position=0.81) self.add_child(self.ten) self.ten.run_action(A.sequence(A.wait(0.2), A.group(A.move_to( pos[0], pos[1] + 50, 1.8, TIMING_SINODIAL), A.fade_to(0, 1.9, TIMING_EASE_IN)), A.remove())) for item in (self.powerup_1, self.powerup_2, self.powerup_3): 
self.sparkle(color4, item.position, image='shp:Star') item.run_action(A.sequence( A.scale_to(1.2, 0.4), A.scale_to(1, 0.4))) else: def powerup_increment(pu, progress): if progress == 1 and pu.count < 9: sound.play_effect(powerup_sound) pu.count += 1 target.run_action(A.call(powerup_increment, 1.8)) for bg in self.bg_list: bg.color = color4 self.backdrop5.color = color4 self.move_counters() self.score_change(add_score, self.win) self.end_sound() def losing(self): score_value = self.score self.green_timer_background.fill_color = color3 for square in self.squares: if square.star: square.star_icon.z_position = 1 square.star_icon.run_action( A.sequence(A.scale_to(0, 1), A.remove())) if square.state == 3: square.state = 0 if square.state == 0: square.z_position = 0.25 bg_target = choice(self.bg_list).position square.run_action(A.sequence(A.wait(random() + 1), A.group(A.scale_to(0, 3), A.fade_to( 0, 2), A.move_to(bg_target[0], bg_target[1], 2, TIMING_SINODIAL)), A.remove())) self.start.color = color3 self.score_label.color = color3 self.backdrop5.color = color3 self.commit_button.texture = make_texture('Cross', text_color) if self.level == 1: self.level_label.text = choice(quick_fail_text) elif score_value == 0 and self.level > 1: self.level_label.text = choice(zero_fail_text) elif score_value < 0: self.level_label.text = choice(neg_fail_text) else: self.level_label.text = choice(fail_text) self.move_counters() self.save(score_value) for bg in self.bg_list: bg.color = color3 self.can_settings = True self.end_sound() def score_change(self, num, win): if num > 0: text = "+" + str(num) elif num < 0: text = str(num) else: text = "" red_count = len( [square for square in self.squares if square.state == 0]) if win: self.score_label1 = LabelNode(text, font=('Helvetica', score_label_font_size), color=color4, position=( screen_w / 2, 100 + score_label_gap), size=(square_size, square_size), z_position=0.8, alpha=0) self.add_child(self.score_label1) self.score_label1.run_action(score_action_1) self.score_label2 = LabelNode("-" + str(self.white_count.text), font=('Helvetica', score_label_font_size), color=color2, position=(screen_w / 2, 100), size=(square_size, square_size), z_position=0.8, alpha=0) self.add_child(self.score_label2) self.score_label2.run_action(score_action_1) self.score_label3 = LabelNode("-" + str(red_count), font=('Helvetica', score_label_font_size), color=color3, position=( screen_w / 2, 100 - score_label_gap), size=(square_size, square_size), z_position=0.8, alpha=0) self.add_child(self.score_label3) self.score_label3.run_action(score_action_1) sq = ui.Path().rounded_rect(0, 0, square_size * 4 - 12, square_size * 4 - 12, 4) sq.fill() sq.close() self.score_label_back = ShapeNode( sq, color=color1, position=(screen_w / 2, 100), alpha=0) self.score_label_back.z_position = 0.7 self.score_label_back.size = ( 4 * square_size - 14, 4 * square_size - 14) self.add_child(self.score_label_back) self.score_label_back.run_action(score_action_2) if self.no_whites: self.score_label2.text = "+" + str(rows + cols) num += rows + cols total_score_change = num - int(self.white_count.text) - red_count self.total_score_change_label = LabelNode("+" + str(total_score_change), font=( 'Helvetica-bold', 40), color=text_color, position=self.score_label.position) self.total_score_change_label.z_position = 0.6 if self.no_whites: if total_score_change < rows * cols: self.level_label.text = choice(no_white_text) else: self.reward = True if self.powerup_1.count + self.powerup_2.count + self.powerup_3.count < 18: 
self.level_label.text = choice(reward_text) else: self.level_label.text = choice(no_white_text) for item in (self.powerup_1, self.powerup_2, self.powerup_3): if item.count < 9: item.count += 1 item.run_action(A.sequence(A.scale_to( 1.2, 0.2, TIMING_BOUNCE_IN_OUT), A.scale_to(1, 0.2))) self.sparkle( color4, item.position - (square_size / 4.0, 0), image='shp:RoundRect') else: if total_score_change > 0: self.level_label.text = choice(win_text) self.total_score_change_label.text = "+" + str(total_score_change) elif total_score_change == 0: self.level_label.text = choice(zero_text) self.total_score_change_label.text = "+" + str(total_score_change) self.punishment = True elif total_score_change < 0: self.level_label.text = choice(neg_text) self.total_score_change_label.text = str( total_score_change) self.total_score_change_label.color = color3 self.punishment = True else: total_score_change = -1 * self.score self.total_score_change_label = LabelNode(str(total_score_change), font=( 'Helvetica-bold', 40), color=color3, position=self.score_label.position) self.add_child(self.total_score_change_label) self.score += total_score_change self.total_score_change_label.run_action(A.sequence(A.fade_to(1, 0.1), A.wait( 1), A.move_to(screen_w / 2, bottom_vertical, 2, TIMING_EASE_IN_OUT), A.remove())) self.score_label.run_action(A.sequence( A.fade_to(0, 0), A.wait(1.8), A.fade_to(1, 0.5))) def new_game(self, win): self.can_restart = False for item in (self.timer_mark, self.timer_mark_2): item.alpha = 1 item.run_action(A.sequence( A.scale_y_to(0.6, 0), A.scale_y_to(1, 0.3))) for item in (self.green_timer_background, self.white_timer_background): item.run_action(A.sequence(A.scale_to(0.6, 0), A.scale_to(1, 0.3))) self.restart_button.texture = Texture('iob:ios7_refresh_32') if self.can_play and self.score != 0: for square in self.squares: square.rotation = 0 try: dialogs.alert("Do you want to start again?", "Your score will be reset to zero!", "Restart") self.level = 0 self.level_label.text = 'level ' + str(self.level) self.losing() except KeyboardInterrupt: self.can_restart = True return self.can_play = False self.no_whites = False self.win = False self.level += 1 self.level_label.text = 'Level ' + str(self.level) self.white_timer_background.fill_color = color2 self.green_timer_background.fill_color = color4 p = ui.Path() p.add_arc(0, 0, 28.0, 0, 0) p.stroke() p.close() self.timer.path = p self.timer.position = self.restart_button.position self.timer_mark_2.rotation = 0 self.warning_flag = False for bg in self.bg_list: bg.run_action(A.move_to(randint(0, screen_w), randint(0, screen_h), random())) bg.color = choice(all_colors) self.make_grid() try: for item in (self.score_label1, self.score_label2, self.score_label3, self.score_label_back): item.run_action(A.remove()) except: pass if not win: self.score = 0 self.level = 0 self.level_label.text = 'Level ' + str(self.level) self.can_settings = True for pu in (self.powerup_1, self.powerup_2, self.powerup_3): pu.count = starting_powerups self.move_counters() self.score_label.color = text_color self.backdrop5.color = color4 self.commit_button.texture = make_texture('Check', text_color) self.restart_button.remove_all_actions() sound.play_effect(new_game_sound) def make_grid(self): global rows, cols, top_left if self.punishment: rows, cols = 4, 4 self.level_label.text = choice(punishment_text) else: rows = randrange(4, 11, 2) if rows == 4: cols = 6 else: cols = randrange(4, 7, 2) top_left = (centre[0] - square_size * (cols / 2.0 - 0.5), (centre[1] + square_size * 
(rows / 2.0 - 0.5))) for square in self.squares: bg_target = choice(self.bg_list).position square.alpha = 0.6 square.scale = 0.6 square.run_action(A.sequence(A.group(A.scale_to(0, 2), A.fade_to( 0, 2), A.move_to(bg_target[0], bg_target[1], 2, TIMING_SINODIAL)), A.remove())) self.start.row = randint(1, rows) self.start.color = color4 self.finish.row = randint(1, rows) self.finish.color = color2 border = ui.Path().rounded_rect( 0, 0, cols * square_size + 20, rows * square_size + 20, 4) border.line_width = 6 border.stroke() border.close() fine_border = ui.Path().rect(0, 0, cols * square_size, rows * square_size) fine_border.line_width = 1 fine_border.stroke() fine_border.close() try: self.backdrop.run_action(A.remove()) self.backdrop2.run_action(A.remove()) except: pass self.backdrop = ShapeNode(border, (0, 0, 0, 0), text_color, position=centre, size=( square_size * cols + 20, square_size * rows + 20)) self.backdrop.blend_mode = BLEND_NORMAL self.backdrop.z_position = 0.3 self.add_child(self.backdrop) self.backdrop2 = ShapeNode(fine_border, (0, 0, 0, 0), text_color, position=centre, size=( square_size * cols, square_size * rows)) self.backdrop2.z_position = 0.2 self.add_child(self.backdrop2) self.start.run_action(A.move_to( top_left[0] - square_size, top_left[1] - square_size * (self.start.row - 1), 0.3)) self.finish.run_action(A.move_to( top_left[0] + square_size * cols, top_left[1] - square_size * (self.finish.row - 1), 0.3)) self.backdrop3a.run_action(A.move_to( top_left[0] - square_size, top_left[1] - square_size * (self.start.row - 1), 0.3)) self.backdrop3b.run_action(A.move_to( top_left[0] + square_size * cols, top_left[1] - square_size * (self.finish.row - 1), 0.3)) horizontal = top_left[0] vertical = top_left[1] self.squares = [] for x in range(cols): for y in range(rows): self.square = Square(col=x + 1, row=y + 1, position=(horizontal, vertical), size=( square_size, square_size), state=choice((1, 2)), color=None) self.square.z_position = 0.5 self.add_child(self.square) self.squares.append(self.square) vertical -= square_size vertical = top_left[1] horizontal += square_size - 0.05 longest_square = choice(self.squares) for square in self.squares: bg_target = choice(self.bg_list).position pos = square.position if square != longest_square: square.run_action(A.sequence(A.group(A.scale_to(0, 0), A.fade_to(0, 0), A.rotate_to(randrange(0, 6), 0), A.move_to(bg_target[0], bg_target[1], 0)), A.group(A.scale_to( 1, randrange(5, 10) * 0.1), A.fade_to(0.8, randrange(1, 10) * 0.1), A.rotate_to(0, randrange(1, 10) * 0.1), A.move_to(pos[0], pos[1], randrange(1, 10) * 0.1, TIMING_SINODIAL)))) elif square == longest_square: def now_can_play(): self.can_play = True self.can_restart = True self.timestamp = self.t square.run_action(A.sequence(A.group(A.scale_to(0, 0), A.fade_to(0, 0), A.rotate_to(randrange(0, 6), 0), A.move_to(bg_target[0], bg_target[1], 0)), A.group( A.scale_to(1, 1), A.fade_to(0.8, 1), A.rotate_to(0, 1), A.move_to(pos[0], pos[1], 1, TIMING_SINODIAL)), A.wait(0.2), A.call(now_can_play))) try: self.star_square.star_icon.run_action(A.sequence( A.group(A.scale_to(0.3, 1), A.fade_to(0, 1)), A.remove())) except: pass if randrange(1, 3) == 1: self.star_square = choice(self.squares) self.star_square.go_star() else: self.star_square = None self.punishment = False self.reward = False self.can_flip = False self.unlock = False def move_counters(self): black_list = [square for square in self.squares if square.state == 1] white_list = [square for square in self.squares if square.state == 2] 
self.black_count.text = str(len(black_list)) self.white_count.text = str(len(white_list)) wc = len(white_list) try: if self.star_square: if self.star_square in black_list: black_list.remove(self.star_square) elif self.star_square in white_list: white_list.remove(self.star_square) except: pass try: b = choice(black_list) self.black_count.position = b.position except: self.black_count.position = (-100, -100) try: w = choice(white_list) self.white_count.position = w.position except: self.white_count.position = (-100, -100) if not wc: self.no_whites = True @ui.in_background def save(self, number): result = check_final_score(username, number, difficulty) if result == "g": sleep(1) try: sound.play_effect(star_bonus_sound) dialogs.alert("New High Score!", "Well done " + username + "!\n\nYou are the new champion with a score of " + str(number), "OK", hide_cancel_button=True) except KeyboardInterrupt: pass elif result == "p": sleep(1) try: sound.play_effect(star_bonus_sound) dialogs.alert("New Personal Best!", "Well done " + username + "!\n\nYour top score is " + str(number), "OK", hide_cancel_button=True) except KeyboardInterrupt: pass @ui.in_background def display_scores(self): all_scores = get_all_scores(difficulty) if not all_scores: sound.play_effect(fail_sound) dialogs.hud_alert("Cannot get high scores", icon="error") self.highscore_button.alpha = 1 self.can_highscore = True return if self.can_play: can_play_marker = True self.can_play = False else: can_play_marker = False if not self.win: for square in self.squares: if square.state == 0: square.run_action(A.fade_to(0, 0.2)) paused_time = self.t sheet = ui.load_view("ui/highscores.pyui") score_table = sheet["score_table"] title_label = sheet["title_label"] title_label.text = "High Scores - " + ["Easy", "Regular", "Hard"][difficulty - 1] champion = sheet["champion"] champion_label = sheet["champion_label"] high_score_strings = ["{}{}{}".format(str(index + 1).ljust(4, ' '), str(item[0])[:20].ljust( 22, ' '), str(item[1]).rjust(5, ' ').ljust(8, ' ')) for index, item in enumerate(all_scores)] data_source = ui.ListDataSource(high_score_strings) data_source.font = ("Inconsolata-Regular", 18) score_table.data_source = data_source score_table.reload_data() score_table.separator_color = color4 champion.text = all_scores[0][0][:20] + " : " + str(all_scores[0][1]) score_table.bounces = False sheet.present(hide_title_bar=True) if can_play_marker: self.can_play = True self.can_restart = True self.can_highscore = True self.highscore_button.alpha = 1 self.timestamp += self.t - paused_time
MIT License
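The countdown budget shrinks with both difficulty and level; a standalone sketch of that formula (the difficulty and level values swept here are hypothetical):

from math import pi

def time_allowed(difficulty, level):
    # Mirrors the clamp in timing(): never less than 5 seconds.
    return max(61 - (difficulty * 10) - (level * 0.5 * difficulty), 5)

for d in (1, 2, 3):
    print(d, [time_allowed(d, lv) for lv in (1, 10, 40)])
# The timer's sweep angle then follows as 2 * pi * time_elapsed / time_allowed.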
tianxiaohu/gomokuagent
genetic_algo/gomoku.py
Gomoku.delay
python
def delay(self, n):
    if not self.fastmode:
        time.sleep(n)
Delay n seconds if not in fastmode
https://github.com/tianxiaohu/gomokuagent/blob/8cb05025059945692846cbb0541a834e9f985ce2/genetic_algo/gomoku.py#L184-L187
from __future__ import print_function, division import os, sys, time, collections from functools import update_wrapper import pickle def decorator(d): def _d(fn): return update_wrapper(d(fn), fn) update_wrapper(_d, d) return _d @decorator def memo(f): cache = {} def _f(*args): try: return cache[args] except KeyError: cache[args] = result = f(*args) return result except TypeError: return f(args) _f.cache = cache return _f @memo def colored(s, color=''): if color.lower() == 'green': return '\033[92m' + s + '\033[0m' elif color.lower() == 'yellow': return '\033[93m' + s + '\033[0m' elif color.lower() == 'red': return '\033[91m' + s + '\033[0m' elif color.lower() == 'blue': return '\033[94m' + s + '\033[0m' elif color.lower() == 'bold': return '\033[1m' + s + '\033[0m' else: return s class Gomoku(object): def __init__(self, board_size=15, players=None, fastmode=False, first_center=None, silent_mode=False, winning_num=5): self.reset() self.board_size = board_size self.fastmode = fastmode self.playing = None self.players = [Player(player_name) for player_name in players] self.winning_stones = set() self.last_move = None self.first_center = first_center self.silent_mode = silent_mode self.winning_num = winning_num @property def state(self): return (self.board, self.last_move, self.playing, self.board_size) def load_state(self, state): (self.board, self.last_move, self.playing, self.board_size) = state def reset(self): self.board = (set(), set()) def print_board(self): print(' '*4 + ' '.join([chr(97+i) for i in range(self.board_size)])) print(' '*3 + '='*(2*self.board_size)) for x in range(1, self.board_size+1): row = ['%2s|'%x] for y in range(1, self.board_size+1): if (x,y) in self.board[0]: c = 'x' elif (x,y) in self.board[1]: c = 'o' else: c = '-' if (x,y) in self.winning_stones or (x,y) == self.last_move: c = colored(c, 'green') row.append(c) print(' '.join(row)) def play(self): if self.fastmode < 2: print("Game Start!") i_turn = len(self.board[0]) + len(self.board[1]) new_step = None while True: if self.fastmode < 2: print("----- Turn %d -------" % i_turn) self.playing = i_turn % 2 if self.fastmode < 2 and not self.silent_mode: self.print_board() current_player = self.players[self.playing] other_player = self.players[int(not self.playing)] if self.fastmode < 2: print("--- %s's turn ---" % current_player.name) max_try = 5 for i_try in range(max_try): action = current_player.strategy(self.state) if action == (0, 0): print("Player %s admit defeat!" % current_player.name) winner = other_player.name if self.fastmode < 2: print("Winner is %s"%winner) return winner self.last_move = action if self.place_stone() is True: break if i_try == max_try-1: print("Player %s has made %d illegal moves, he lost."%(current_player.name, max_try)) winner = other_player.name print("Winner is %s"%winner) return winner winner = self.check_winner() if winner: if not self.silent_mode: self.print_board() print("########## %s is the WINNER! 
#########" % current_player.name) return winner elif i_turn == self.board_size ** 2 - 1: print("This game is a tie!") return "Tie" i_turn += 1 def place_stone(self): r, c = self.last_move if r < 1 or r > self.board_size or c < 1 or c > self.board_size: print("This position is outside the board!") return False taken_pos = self.board[0] | self.board[1] if self.first_center is True and len(taken_pos) == 0: center = int((self.board_size+1)/2) if r != center or c != center: print("This is the first move, please put it on the center (%s%s)!"% (str(center),chr(center+96))) return False elif self.last_move in taken_pos: print("This position is already taken!") return False self.board[self.playing].add(self.last_move) return True def check_winner(self): r, c = self.last_move my_stones = self.board[self.playing] nearby_stones = set() for x in range(max(r-1, 1), min(r+2, self.board_size+1)): for y in range(max(c-1, 1), min(c+2, self.board_size+1)): stone = (x,y) if stone in my_stones and (2*r-x, 2*c-y) not in nearby_stones: nearby_stones.add(stone) for nearby_s in nearby_stones: winning_stones = {self.last_move, nearby_s} nr, nc = nearby_s dx, dy = nr-r, nc-c for i in range(1,4): ext_stone = (nr+dx*i, nc+dy*i) if ext_stone in my_stones: winning_stones.add(ext_stone) else: break for i in range(1,5): ext_stone = (r-dx*i, c-dy*i) if ext_stone in my_stones: winning_stones.add(ext_stone) else: break if len(winning_stones) >= self.winning_num: self.winning_stones = winning_stones return self.players[self.playing].name return None
MIT License
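A short usage sketch; it assumes the Player class from the full gomoku module is available (it is referenced but not defined in the context shown), and the player names are hypothetical:

slow_game = Gomoku(players=["Alice", "Bob"], fastmode=False)
slow_game.delay(0.5)  # sleeps half a second

fast_game = Gomoku(players=["Alice", "Bob"], fastmode=True)
fast_game.delay(0.5)  # returns immediately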
cta-observatory/ctapipe
ctapipe/tools/utils.py
get_installed_tools
python
def get_installed_tools():
    from pkg_resources import get_entry_map

    console_tools = get_entry_map("ctapipe")["console_scripts"]
    return console_tools
Get list of installed scripts via ``pkg-resources``.

See http://peak.telecommunity.com/DevCenter/PkgResources#convenience-api

TODO: not sure if this will be useful ... maybe to check if the list of
installed packages matches the available scripts somehow?
https://github.com/cta-observatory/ctapipe/blob/8851e1214409eac4564996cc0f4b76dfe05cf9cf/ctapipe/tools/utils.py#L33-L44
import argparse
import importlib
from collections import OrderedDict

__all__ = [
    "ArgparseFormatter",
    "get_parser",
    "get_installed_tools",
    "get_all_descriptions",
]


class ArgparseFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter
):
    pass


def get_parser(function=None, description="N/A"):
    if function:
        description = function.__doc__
    parser = argparse.ArgumentParser(
        description=description, formatter_class=ArgparseFormatter
    )
    return parser
BSD 3-Clause New or Revised License
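A sketch of inspecting the returned entry points; it requires an installed ctapipe distribution, and the printed attribute is a standard ``pkg_resources.EntryPoint`` field:

tools = get_installed_tools()
for name, entry_point in tools.items():
    print(name, "->", entry_point.module_name)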
wapm-packages/python
Python-3.6.7/Lib/multiprocessing/semaphore_tracker.py
SemaphoreTracker.ensure_running
python
def ensure_running(self):
    with self._lock:
        if self._pid is not None:
            # Check whether the tracker process is still alive.
            pid, status = os.waitpid(self._pid, os.WNOHANG)
            if not pid:
                # Still alive; nothing to do.
                return
            # Dead; close the stale pipe and relaunch below.
            os.close(self._fd)
            self._fd = None
            self._pid = None
            warnings.warn('semaphore_tracker: process died unexpectedly, '
                          'relaunching. Some semaphores might leak.')
        fds_to_pass = []
        try:
            fds_to_pass.append(sys.stderr.fileno())
        except Exception:
            pass
        cmd = 'from multiprocessing.semaphore_tracker import main;main(%d)'
        r, w = os.pipe()
        try:
            fds_to_pass.append(r)
            exe = spawn.get_executable()
            args = [exe] + util._args_from_interpreter_flags()
            args += ['-c', cmd % r]
            pid = util.spawnv_passfds(exe, args, fds_to_pass)
        except:
            os.close(w)
            raise
        else:
            self._fd = w
            self._pid = pid
        finally:
            os.close(r)
Make sure that semaphore tracker process is running. This can be run from any process. Usually a child process will use the semaphore created by its parent.
https://github.com/wapm-packages/python/blob/658c1822f430f6d604ecf2bcc388e469cedb2238/Python-3.6.7/Lib/multiprocessing/semaphore_tracker.py#L38-L79
import os
import signal
import sys
import threading
import warnings
import _multiprocessing

from . import spawn
from . import util

__all__ = ['ensure_running', 'register', 'unregister']


class SemaphoreTracker(object):

    def __init__(self):
        self._lock = threading.Lock()
        self._fd = None
        self._pid = None

    def getfd(self):
        self.ensure_running()
        return self._fd
Apache License 2.0
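The module binds a singleton tracker's methods at module level (note the ``__all__`` in the context), so the typical call pattern looks like the sketch below; POSIX only, and the semaphore name is hypothetical:

from multiprocessing import semaphore_tracker

semaphore_tracker.ensure_running()            # spawn the tracker process if needed
semaphore_tracker.register('/mp-demo-sem')    # track a named semaphore
semaphore_tracker.unregister('/mp-demo-sem')  # stop tracking once it is cleaned up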
informatica-eic/rest-api-samples
python/edcutils.py
updateResourceDefUsingSession
python
def updateResourceDefUsingSession(url, session, resourceName, resJson):
    print("\tupdating resource for catalog:-" + url + " resource=" + resourceName)
    apiURL = url + "/access/1/catalog/resources/" + resourceName
    header = {"Accept": "application/json", "Content-Type": "application/json"}
    tResp = session.put(
        apiURL,
        data=json.dumps(resJson),
        headers=header,
    )
    if tResp.status_code == 200:
        print(f"\tresource successfully updated, rc={tResp.status_code}")
        return tResp.status_code
    else:
        # f-prefix added so the status code is interpolated rather than printed literally
        print(f"\tupdate resource failed... rc={tResp.status_code}")
        return tResp.status_code
Update a setting in an existing resource.

Returns rc=200 (valid) & other rc's from the put.

resourceDef (json)
https://github.com/informatica-eic/rest-api-samples/blob/bad412404a2807e3e5d3a1e9f09a292d2fccf76e/python/edcutils.py#L185-L214
import requests import json from requests.auth import HTTPBasicAuth import os def getFactValue(item, attrName): value = "" for facts in item["facts"]: if facts.get("attributeId") == attrName: value = facts.get("value") break return value def exportLineageLink(fromObject, toObject, linkType, csvFile): row = [linkType, "", "", fromObject, toObject] csvFile.writerow(row) return def getAllResource(url, user, pWd): print("getting resource for catalog:-" + url + " user=" + user) apiURL = url + "/access/1/catalog/resources/" header = {"Accept": "application/json"} tResp = requests.get( apiURL, params={}, headers=header, auth=HTTPBasicAuth(user, pWd), verify=False ) print("\tresponse=" + str(tResp.status_code)) if tResp.status_code == 200: return tResp.status_code, json.loads(tResp.text) else: return tResp.status_code, None def getResourceDefUsingSession(url, session, resourceName, sensitiveOptions=False): print("getting resource for catalog:-" + url + " resource=" + resourceName) apiURL = url + "/access/1/catalog/resources/" + resourceName if sensitiveOptions: apiURL += "?sensitiveOptions=true" header = {"Accept": "application/json"} tResp = session.get( apiURL, params={}, headers=header, ) print("\tresponse=" + str(tResp.status_code)) if tResp.status_code == 200: return tResp.status_code, json.loads(tResp.text) else: return tResp.status_code, None def getResourceDef(url, user, pWd, resourceName, sensitiveOptions=False): print( "getting resource for catalog:-" + url + " resource=" + resourceName + " user=" + user ) apiURL = url + "/access/1/catalog/resources/" + resourceName if sensitiveOptions: apiURL += "?sensitiveOptions=true" header = {"Accept": "application/json"} tResp = requests.get( apiURL, params={}, headers=header, auth=HTTPBasicAuth(user, pWd), verify=False ) print("\tresponse=" + str(tResp.status_code)) if tResp.status_code == 200: return tResp.status_code, json.loads(tResp.text) else: return tResp.status_code, None def updateResourceDef(url, user, pWd, resourceName, resJson): print( "\tupdating resource for catalog:-" + url + " resource=" + resourceName + " user=" + user ) print("\t" + json.dumps(resJson)) apiURL = url + "/access/1/catalog/resources/" + resourceName print("\turl=" + apiURL) header = {"Accept": "application/json", "Content-Type": "application/json"} tResp = requests.put( apiURL, data=json.dumps(resJson), headers=header, auth=HTTPBasicAuth(user, pWd), verify=False, ) print("\tresponse=" + str(tResp.status_code)) if tResp.status_code == 200: print("\tyay - update resource worked...") print(tResp) return tResp.status_code else: print("\tdarn - update resource failed...") print(tResp) return tResp.status_code
MIT License
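A hypothetical round trip using the companion getter from the same module; the URL, credentials, resource name, and the modified field are all made up:

import requests

session = requests.Session()
session.auth = ("Administrator", "password")   # hypothetical credentials
session.verify = False
url = "https://edc.example.com:9085"           # hypothetical catalog URL

rc, res_def = getResourceDefUsingSession(url, session, "myResource")
if rc == 200:
    res_def["description"] = "updated via REST"  # hypothetical setting
    updateResourceDefUsingSession(url, session, "myResource", res_def)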
dmlc/gluon-cv
gluoncv/model_zoo/action_recognition/slowfast.py
SlowFast._make_layer_slow
python
def _make_layer_slow(self,
                     inplanes,
                     planes,
                     num_blocks,
                     num_block_temp_kernel_slow=None,
                     block=Bottleneck,
                     strides=1,
                     head_conv=1,
                     norm_layer=BatchNorm,
                     norm_kwargs=None,
                     layer_name=''):
    downsample = None
    if strides != 1 or inplanes != planes * block.expansion:
        downsample = nn.HybridSequential(prefix=layer_name + 'downsample_')
        with downsample.name_scope():
            downsample.add(nn.Conv3D(in_channels=inplanes,
                                     channels=planes * block.expansion,
                                     kernel_size=1,
                                     strides=(1, strides, strides),
                                     use_bias=False))
            downsample.add(norm_layer(in_channels=planes * block.expansion,
                                      **({} if norm_kwargs is None else norm_kwargs)))

    layers = nn.HybridSequential(prefix=layer_name)
    cnt = 0
    with layers.name_scope():
        layers.add(block(inplanes=inplanes,
                         planes=planes,
                         strides=strides,
                         downsample=downsample,
                         head_conv=head_conv,
                         layer_name='block%d_' % cnt))
        inplanes = planes * block.expansion
        cnt += 1
        for _ in range(1, num_blocks):
            if num_block_temp_kernel_slow is not None:
                if cnt < num_block_temp_kernel_slow:
                    layers.add(block(inplanes=inplanes, planes=planes,
                                     head_conv=head_conv, layer_name='block%d_' % cnt))
                else:
                    layers.add(block(inplanes=inplanes, planes=planes,
                                     head_conv=1, layer_name='block%d_' % cnt))
            else:
                layers.add(block(inplanes=inplanes, planes=planes,
                                 head_conv=head_conv, layer_name='block%d_' % cnt))
            cnt += 1
    return layers
Build each stage within the slow branch.
https://github.com/dmlc/gluon-cv/blob/f22650a5d31c31956d9392530a0e619689cdb3c5/gluoncv/model_zoo/action_recognition/slowfast.py#L481-L533
__all__ = ['SlowFast', 'slowfast_4x16_resnet50_kinetics400', 'slowfast_8x8_resnet50_kinetics400', 'slowfast_4x16_resnet101_kinetics400', 'slowfast_8x8_resnet101_kinetics400', 'slowfast_16x8_resnet101_kinetics400', 'slowfast_16x8_resnet101_50_50_kinetics400', 'slowfast_4x16_resnet50_custom'] from mxnet import init from mxnet.context import cpu from mxnet.gluon.block import HybridBlock from mxnet.gluon import nn from mxnet.gluon.nn import BatchNorm class Bottleneck(HybridBlock): expansion = 4 def __init__(self, inplanes, planes, strides=1, downsample=None, head_conv=1, norm_layer=BatchNorm, norm_kwargs=None, layer_name=''): super(Bottleneck, self).__init__() bottleneck = nn.HybridSequential(prefix=layer_name) with bottleneck.name_scope(): if head_conv == 1: self.conv1 = nn.Conv3D(in_channels=inplanes, channels=planes, kernel_size=1, use_bias=False) self.bn1 = norm_layer(in_channels=planes, **({} if norm_kwargs is None else norm_kwargs)) elif head_conv == 3: self.conv1 = nn.Conv3D(in_channels=inplanes, channels=planes, kernel_size=(3, 1, 1), padding=(1, 0, 0), use_bias=False) self.bn1 = norm_layer(in_channels=planes, **({} if norm_kwargs is None else norm_kwargs)) else: raise ValueError("Unsupported head_conv!") self.conv2 = nn.Conv3D(in_channels=planes, channels=planes, kernel_size=(1, 3, 3), strides=(1, strides, strides), padding=(0, 1, 1), use_bias=False) self.bn2 = norm_layer(in_channels=planes, **({} if norm_kwargs is None else norm_kwargs)) self.conv3 = nn.Conv3D(in_channels=planes, channels=planes * self.expansion, kernel_size=1, strides=1, use_bias=False) self.bn3 = norm_layer(in_channels=planes * self.expansion, gamma_initializer='zeros', **({} if norm_kwargs is None else norm_kwargs)) self.relu = nn.Activation('relu') self.downsample = downsample def hybrid_forward(self, F, x): identity = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) out = self.conv3(out) out = self.bn3(out) if self.downsample is not None: identity = self.downsample(x) out = F.Activation(out + identity, act_type='relu') return out class SlowFast(HybridBlock): def __init__(self, nclass, block=Bottleneck, layers=None, num_block_temp_kernel_fast=None, num_block_temp_kernel_slow=None, pretrained=False, pretrained_base=False, feat_ext=False, num_segments=1, num_crop=1, bn_eval=True, bn_frozen=False, partial_bn=False, frozen_stages=-1, dropout_ratio=0.5, init_std=0.01, alpha=8, beta_inv=8, fusion_conv_channel_ratio=2, fusion_kernel_size=5, width_per_group=64, num_groups=1, slow_temporal_stride=16, fast_temporal_stride=2, slow_frames=4, fast_frames=32, norm_layer=BatchNorm, norm_kwargs=None, ctx=None, **kwargs): super(SlowFast, self).__init__() self.num_segments = num_segments self.num_crop = num_crop self.dropout_ratio = dropout_ratio self.init_std = init_std self.alpha = alpha self.beta_inv = beta_inv self.fusion_conv_channel_ratio = fusion_conv_channel_ratio self.fusion_kernel_size = fusion_kernel_size self.width_per_group = width_per_group self.num_groups = num_groups self.dim_inner = self.num_groups * self.width_per_group self.out_dim_ratio = self.beta_inv // self.fusion_conv_channel_ratio self.slow_temporal_stride = slow_temporal_stride self.fast_temporal_stride = fast_temporal_stride self.slow_frames = slow_frames self.fast_frames = fast_frames self.feat_ext = feat_ext with self.name_scope(): fast = nn.HybridSequential(prefix='fast_') with fast.name_scope(): self.fast_conv1 = nn.Conv3D(in_channels=3, channels=self.width_per_group // 
self.beta_inv, kernel_size=(5, 7, 7), strides=(1, 2, 2), padding=(2, 3, 3), use_bias=False) self.fast_bn1 = norm_layer(in_channels=self.width_per_group // self.beta_inv, **({} if norm_kwargs is None else norm_kwargs)) self.fast_relu = nn.Activation('relu') self.fast_maxpool = nn.MaxPool3D(pool_size=(1, 3, 3), strides=(1, 2, 2), padding=(0, 1, 1)) self.fast_res2 = self._make_layer_fast(inplanes=self.width_per_group // self.beta_inv, planes=self.dim_inner // self.beta_inv, num_blocks=layers[0], head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='fast_res2_') self.fast_res3 = self._make_layer_fast(inplanes=self.width_per_group * 4 // self.beta_inv, planes=self.dim_inner * 2 // self.beta_inv, num_blocks=layers[1], strides=2, head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='fast_res3_') self.fast_res4 = self._make_layer_fast(inplanes=self.width_per_group * 8 // self.beta_inv, planes=self.dim_inner * 4 // self.beta_inv, num_blocks=layers[2], num_block_temp_kernel_fast=num_block_temp_kernel_fast, strides=2, head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='fast_res4_') self.fast_res5 = self._make_layer_fast(inplanes=self.width_per_group * 16 // self.beta_inv, planes=self.dim_inner * 8 // self.beta_inv, num_blocks=layers[3], strides=2, head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='fast_res5_') self.lateral_p1 = nn.HybridSequential(prefix='lateral_p1_') with self.lateral_p1.name_scope(): self.lateral_p1.add(nn.Conv3D(in_channels=self.width_per_group // self.beta_inv, channels=self.width_per_group // self.beta_inv * self.fusion_conv_channel_ratio, kernel_size=(self.fusion_kernel_size, 1, 1), strides=(self.alpha, 1, 1), padding=(self.fusion_kernel_size // 2, 0, 0), use_bias=False)) self.lateral_p1.add(norm_layer(in_channels=self.width_per_group // self.beta_inv * self.fusion_conv_channel_ratio, **({} if norm_kwargs is None else norm_kwargs))) self.lateral_p1.add(nn.Activation('relu')) self.lateral_res2 = nn.HybridSequential(prefix='lateral_res2_') with self.lateral_res2.name_scope(): self.lateral_res2.add(nn.Conv3D(in_channels=self.width_per_group * 4 // self.beta_inv, channels=self.width_per_group * 4 // self.beta_inv * self.fusion_conv_channel_ratio, kernel_size=(self.fusion_kernel_size, 1, 1), strides=(self.alpha, 1, 1), padding=(self.fusion_kernel_size // 2, 0, 0), use_bias=False)) self.lateral_res2.add(norm_layer(in_channels=self.width_per_group * 4 // self.beta_inv * self.fusion_conv_channel_ratio, **({} if norm_kwargs is None else norm_kwargs))) self.lateral_res2.add(nn.Activation('relu')) self.lateral_res3 = nn.HybridSequential(prefix='lateral_res3_') with self.lateral_res3.name_scope(): self.lateral_res3.add(nn.Conv3D(in_channels=self.width_per_group * 8 // self.beta_inv, channels=self.width_per_group * 8 // self.beta_inv * self.fusion_conv_channel_ratio, kernel_size=(self.fusion_kernel_size, 1, 1), strides=(self.alpha, 1, 1), padding=(self.fusion_kernel_size // 2, 0, 0), use_bias=False)) self.lateral_res3.add(norm_layer(in_channels=self.width_per_group * 8 // self.beta_inv * self.fusion_conv_channel_ratio, **({} if norm_kwargs is None else norm_kwargs))) self.lateral_res3.add(nn.Activation('relu')) self.lateral_res4 = nn.HybridSequential(prefix='lateral_res4_') with self.lateral_res4.name_scope(): self.lateral_res4.add(nn.Conv3D(in_channels=self.width_per_group * 16 // self.beta_inv, channels=self.width_per_group * 16 // self.beta_inv * self.fusion_conv_channel_ratio, 
kernel_size=(self.fusion_kernel_size, 1, 1), strides=(self.alpha, 1, 1), padding=(self.fusion_kernel_size // 2, 0, 0), use_bias=False)) self.lateral_res4.add(norm_layer(in_channels=self.width_per_group * 16 // self.beta_inv * self.fusion_conv_channel_ratio, **({} if norm_kwargs is None else norm_kwargs))) self.lateral_res4.add(nn.Activation('relu')) slow = nn.HybridSequential(prefix='slow_') with slow.name_scope(): self.slow_conv1 = nn.Conv3D(in_channels=3, channels=self.width_per_group, kernel_size=(1, 7, 7), strides=(1, 2, 2), padding=(0, 3, 3), use_bias=False) self.slow_bn1 = norm_layer(in_channels=self.width_per_group, **({} if norm_kwargs is None else norm_kwargs)) self.slow_relu = nn.Activation('relu') self.slow_maxpool = nn.MaxPool3D(pool_size=(1, 3, 3), strides=(1, 2, 2), padding=(0, 1, 1)) self.slow_res2 = self._make_layer_slow(inplanes=self.width_per_group + self.width_per_group // self.out_dim_ratio, planes=self.dim_inner, num_blocks=layers[0], head_conv=1, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='slow_res2_') self.slow_res3 = self._make_layer_slow(inplanes=self.width_per_group * 4 + self.width_per_group * 4 // self.out_dim_ratio, planes=self.dim_inner * 2, num_blocks=layers[1], strides=2, head_conv=1, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='slow_res3_') self.slow_res4 = self._make_layer_slow(inplanes=self.width_per_group * 8 + self.width_per_group * 8 // self.out_dim_ratio, planes=self.dim_inner * 4, num_blocks=layers[2], num_block_temp_kernel_slow=num_block_temp_kernel_slow, strides=2, head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='slow_res4_') self.slow_res5 = self._make_layer_slow(inplanes=self.width_per_group * 16 + self.width_per_group * 16 // self.out_dim_ratio, planes=self.dim_inner * 8, num_blocks=layers[3], strides=2, head_conv=3, norm_layer=norm_layer, norm_kwargs=norm_kwargs, layer_name='slow_res5_') self.avg = nn.GlobalAvgPool3D() self.dp = nn.Dropout(rate=self.dropout_ratio) self.feat_dim = self.width_per_group * 32 // self.beta_inv + self.width_per_group * 32 self.fc = nn.Dense(in_units=self.feat_dim, units=nclass, weight_initializer=init.Normal(sigma=self.init_std), use_bias=True) self.initialize(init.MSRAPrelu(), ctx=ctx) def hybrid_forward(self, F, x): fast_input = F.slice(x, begin=(None, None, 0, None, None), end=(None, None, self.fast_frames, None, None)) slow_input = F.slice(x, begin=(None, None, self.fast_frames, None, None), end=(None, None, self.fast_frames + self.slow_frames, None, None)) fast, lateral = self.FastPath(F, fast_input) slow = self.SlowPath(F, slow_input, lateral) x = F.concat(slow, fast, dim=1) x = F.reshape(x, shape=(-1, self.num_segments * self.num_crop, self.feat_dim)) x = F.mean(x, axis=1) if self.feat_ext: return x x = self.dp(x) x = self.fc(x) return x def SlowPath(self, F, x, lateral): x = self.slow_conv1(x) x = self.slow_bn1(x) x = self.slow_relu(x) pool1 = self.slow_maxpool(x) pool1_lat = F.concat(pool1, lateral[0], dim=1) res2 = self.slow_res2(pool1_lat) res2_lat = F.concat(res2, lateral[1], dim=1) res3 = self.slow_res3(res2_lat) res3_lat = F.concat(res3, lateral[2], dim=1) res4 = self.slow_res4(res3_lat) res4_lat = F.concat(res4, lateral[3], dim=1) res5 = self.slow_res5(res4_lat) out = self.avg(res5) out = F.squeeze(out, axis=(2, 3, 4)) return out def FastPath(self, F, x): lateral = [] x = self.fast_conv1(x) x = self.fast_bn1(x) x = self.fast_relu(x) pool1 = self.fast_maxpool(x) lateral_p = self.lateral_p1(pool1) lateral.append(lateral_p) res2 = 
self.fast_res2(pool1) lateral_res2 = self.lateral_res2(res2) lateral.append(lateral_res2) res3 = self.fast_res3(res2) lateral_res3 = self.lateral_res3(res3) lateral.append(lateral_res3) res4 = self.fast_res4(res3) lateral_res4 = self.lateral_res4(res4) lateral.append(lateral_res4) res5 = self.fast_res5(res4) out = self.avg(res5) out = F.squeeze(out, axis=(2, 3, 4)) return out, lateral def _make_layer_fast(self, inplanes, planes, num_blocks, num_block_temp_kernel_fast=None, block=Bottleneck, strides=1, head_conv=1, norm_layer=BatchNorm, norm_kwargs=None, layer_name=''): downsample = None if strides != 1 or inplanes != planes * block.expansion: downsample = nn.HybridSequential(prefix=layer_name+'downsample_') with downsample.name_scope(): downsample.add(nn.Conv3D(in_channels=inplanes, channels=planes * block.expansion, kernel_size=1, strides=(1, strides, strides), use_bias=False)) downsample.add(norm_layer(in_channels=planes * block.expansion, **({} if norm_kwargs is None else norm_kwargs))) layers = nn.HybridSequential(prefix=layer_name) cnt = 0 with layers.name_scope(): layers.add(block(inplanes=inplanes, planes=planes, strides=strides, downsample=downsample, head_conv=head_conv, layer_name='block%d_' % cnt)) inplanes = planes * block.expansion cnt += 1 for _ in range(1, num_blocks): if num_block_temp_kernel_fast is not None: if cnt < num_block_temp_kernel_fast: layers.add(block(inplanes=inplanes, planes=planes, head_conv=head_conv, layer_name='block%d_' % cnt)) else: layers.add(block(inplanes=inplanes, planes=planes, head_conv=1, layer_name='block%d_' % cnt)) else: layers.add(block(inplanes=inplanes, planes=planes, head_conv=head_conv, layer_name='block%d_' % cnt)) cnt += 1 return layers
Apache License 2.0
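A minimal smoke test of the assembled network; the shapes follow ``hybrid_forward``, which expects the fast frames first and the slow frames last along the temporal axis, and assumes mxnet is installed:

import mxnet as mx

net = SlowFast(nclass=400, layers=[3, 4, 6, 3], ctx=mx.cpu())
# Default config packs 32 fast frames + 4 slow frames along the time axis.
clip = mx.nd.random.uniform(
    shape=(1, 3, net.fast_frames + net.slow_frames, 224, 224))
out = net(clip)
print(out.shape)  # (1, 400)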
gavento/gamegym
gamegym/games/goofspiel.py
Goofspiel.initial_state
python
def initial_state(self) -> StateInfo:
    cset = list(range(1, self.cards + 1))
    state = ([tuple(cset)] * 3, (0.0, 0.0))
    return StateInfo.new_chance(state, tuple(cset), None)
Return the initial internal state and active player.
https://github.com/gavento/gamegym/blob/818c89b43b7e49a377698387e4098f86640c13b0/gamegym/games/goofspiel.py#L29-L35
from ..game import ObservationSequenceGame, Action
from ..situation import Situation, StateInfo
from ..utils import uniform

from typing import Any, Tuple
import enum
import numpy as np


class Goofspiel(ObservationSequenceGame):
    EPS = 1e-6

    class Scoring(enum.Enum):
        WINLOSS = 0
        ZEROSUM = 1
        ABSOLUTE = 2

    def __init__(self, cards: int, scoring=None, rewards=None):
        assert cards >= 1
        super().__init__(2, range(1, cards + 1))
        self.cards = cards
        self.custom_rewards = rewards is not None
        if rewards is None:
            rewards = range(1, self.cards + 1)
        self.rewards = np.array(rewards, dtype=float)
        assert len(self.rewards) == self.cards
        self.scoring = self.Scoring.ZEROSUM if scoring is None else scoring
MIT License
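A small sketch of the initial chance node for a 4-card game; it prints the raw ``StateInfo`` rather than assuming its attribute names:

game = Goofspiel(4)
info = game.initial_state()
print(info)
# The underlying state is ([(1, 2, 3, 4)] * 3, (0.0, 0.0)): three full card
# sets (the deck plus both hands) and a (0.0, 0.0) score pair.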
eleurent/rl-agents
rl_agents/agents/tree_search/graph_based_stochastic.py
GraphDecisionNode.__init__
python
def __init__(self, planner, state, observation):
    super().__init__(planner, state, observation)
    self.count = 0
    self.cumulative_reward = 0
    self.mu_ucb = 1
    self.mu_lcb = 0
Visit count N(s) (when in planner.nodes) or N(s,a,s') (when child of a chance node)
https://github.com/eleurent/rl-agents/blob/d55126007fcc5d2a882843f6c3a63000b4fc7c92/rl_agents/agents/tree_search/graph_based_stochastic.py#L31-L40
import operator
from collections import OrderedDict, defaultdict
import numpy as np
import logging

from rl_agents.agents.common.factory import safe_deepcopy_env
from rl_agents.agents.dynamic_programming.value_iteration import ValueIterationAgent
from rl_agents.agents.tree_search.graph_based import GraphBasedPlannerAgent, GraphNode, GraphBasedPlanner
from rl_agents.agents.tree_search.olop import OLOP
from rl_agents.utils import kl_upper_bound, max_expectation_under_constraint

logger = logging.getLogger(__name__)


class GraphDecisionNode(GraphNode):
MIT License
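An illustration of how the statistics the node tracks feed mean confidence bounds; this uses a plain Hoeffding-style interval, which is only a stand-in for whatever bound the planner actually applies, and the counts and rewards are hypothetical:

import numpy as np

count, cumulative_reward, horizon = 10, 6.0, 100
mean = cumulative_reward / count
radius = np.sqrt(np.log(horizon) / (2 * count))
mu_lcb = max(mean - radius, 0.0)  # lower confidence bound on the mean reward
mu_ucb = min(mean + radius, 1.0)  # upper confidence bound on the mean reward
print(mu_lcb, mu_ucb)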
aws/aws-parallelcluster-node
src/common/schedulers/slurm_commands.py
is_static_node
python
def is_static_node(nodename):
    _, node_type, _ = parse_nodename(nodename)
    return "st" == node_type
Check if the node is static or dynamic. Valid NodeName format: {queue_name}-{st/dy}-{instancetype}-{number}
https://github.com/aws/aws-parallelcluster-node/blob/00cee467f983b18075752ed3fbc3bfe52420147c/src/common/schedulers/slurm_commands.py#L62-L69
import logging
import re

from common.utils import check_command_output, grouper, run_command
from retrying import retry
from slurm_plugin.slurm_resources import (
    DynamicNode,
    InvalidNodenameError,
    PartitionStatus,
    SlurmPartition,
    StaticNode,
    parse_nodename,
)

log = logging.getLogger(__name__)

PENDING_RESOURCES_REASONS = [
    "Resources",
    "Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions",
    "BeginTime",
    "NodeDown",
    "Priority",
    "ReqNodeNotAvail, May be reserved for other job",
]

SQUEUE_FIELD_SIZE = 200
_SQUEUE_FIELDS = [
    "jobid",
    "statecompact",
    "numnodes",
    "numcpus",
    "numtasks",
    "cpus-per-task",
    "mincpus",
    "reason",
    "tres-per-job",
    "tres-per-task",
    "tres-per-node",
    "cpus-per-tres",
]
SQUEUE_FIELD_STRING = ",".join([field + ":{size}" for field in _SQUEUE_FIELDS]).format(size=SQUEUE_FIELD_SIZE)
SCONTROL = "sudo /opt/slurm/bin/scontrol"
SINFO = "/opt/slurm/bin/sinfo"

DEFAULT_GET_INFO_COMMAND_TIMEOUT = 30
DEFAULT_UPDATE_COMMAND_TIMEOUT = 60
Apache License 2.0
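Hypothetical node names following the documented ``{queue_name}-{st/dy}-{instancetype}-{number}`` scheme:

print(is_static_node("queue1-st-c5xlarge-1"))  # True  (static node)
print(is_static_node("queue1-dy-c5xlarge-4"))  # False (dynamic node)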
econ-ark/hark
HARK/interpolation.py
HARKinterpolator1D._evaluate
python
def _evaluate(self, x):
    raise NotImplementedError()
Interpolated function evaluator, to be defined in subclasses.
https://github.com/econ-ark/hark/blob/d59269d4ca41afd5e2c3bf8c50ebf99b2073b998/HARK/interpolation.py#L127-L131
import warnings
from copy import deepcopy

import numpy as np
from scipy.interpolate import CubicHermiteSpline

from HARK.utilities import CRRAutility, CRRAutilityP, CRRAutilityPP

from .core import MetricObject


def _isscalar(x):
    return np.isscalar(x) or hasattr(x, "shape") and x.shape == ()


def _check_grid_dimensions(dimension, *args):
    if dimension == 1:
        if len(args[0]) != len(args[1]):
            raise ValueError("Grid dimensions of x and f(x) do not match")
    elif dimension == 2:
        if args[0].shape != (args[1].size, args[2].size):
            raise ValueError("Grid dimensions of x, y and f(x, y) do not match")
    elif dimension == 3:
        if args[0].shape != (args[1].size, args[2].size, args[3].size):
            raise ValueError("Grid dimensions of x, y, z and f(x, y, z) do not match")
    elif dimension == 4:
        if args[0].shape != (args[1].size, args[2].size, args[3].size, args[4].size):
            raise ValueError("Grid dimensions of x, y, z and f(x, y, z) do not match")
    else:
        raise ValueError("Dimension should be between 1 and 4 inclusive.")


def _check_flatten(dimension, *args):
    if dimension == 1:
        if isinstance(args[0], np.ndarray) and args[0].shape != args[0].flatten().shape:
            warnings.warn("input not of the size (n, ), attempting to flatten")
            return False
        else:
            return True


class HARKinterpolator1D(MetricObject):
    distance_criteria = []

    def __call__(self, x):
        z = np.asarray(x)
        return (self._evaluate(z.flatten())).reshape(z.shape)

    def derivative(self, x):
        z = np.asarray(x)
        return (self._der(z.flatten())).reshape(z.shape)

    def eval_with_derivative(self, x):
        z = np.asarray(x)
        y, dydx = self._evalAndDer(z.flatten())
        return y.reshape(z.shape), dydx.reshape(z.shape)
Apache License 2.0
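A toy subclass supplying ``_evaluate`` shows how the base class's ``__call__`` flattens and reshapes inputs; it assumes ``MetricObject`` needs no constructor arguments:

import numpy as np

class ToyLinear(HARKinterpolator1D):
    def _evaluate(self, x):
        # Hypothetical interpolant: an exact linear function.
        return 2.0 * x + 1.0

f = ToyLinear()
print(f(np.array([[0.0, 1.0], [2.0, 3.0]])))
# [[1. 3.]
#  [5. 7.]]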
sofia-netsurv/python-netsurv
env/lib/python3.5/site-packages/_pytest/nodes.py
Collector.repr_failure
python
def repr_failure(self, excinfo):
    if excinfo.errisinstance(self.CollectError):
        exc = excinfo.value
        return str(exc.args[0])
    # Collapse the default "auto" style to the condensed "short" traceback
    # for collection failures.
    tbstyle = self.config.getoption("tbstyle", "auto")
    if tbstyle == "auto":
        tbstyle = "short"
    return self._repr_failure_py(excinfo, style=tbstyle)
represent a collection failure.
https://github.com/sofia-netsurv/python-netsurv/blob/429fb07a2b06cc505fdd9350148266a6b4e23e64/env/lib/python3.5/site-packages/_pytest/nodes.py#L318-L330
import os import warnings import py import _pytest._code from _pytest.compat import getfslineno from _pytest.mark.structures import NodeKeywords from _pytest.outcomes import fail SEP = "/" tracebackcutdir = py.path.local(_pytest.__file__).dirpath() def _splitnode(nodeid): if nodeid == "": return [] parts = nodeid.split(SEP) parts[-1:] = parts[-1].split("::") return parts def ischildnode(baseid, nodeid): base_parts = _splitnode(baseid) node_parts = _splitnode(nodeid) if len(node_parts) < len(base_parts): return False return node_parts[: len(base_parts)] == base_parts class Node: def __init__( self, name, parent=None, config=None, session=None, fspath=None, nodeid=None ): self.name = name self.parent = parent self.config = config or parent.config self.session = session or parent.session self.fspath = fspath or getattr(parent, "fspath", None) self.keywords = NodeKeywords(self) self.own_markers = [] self.extra_keyword_matches = set() self._name2pseudofixturedef = {} if nodeid is not None: assert "::()" not in nodeid self._nodeid = nodeid else: self._nodeid = self.parent.nodeid if self.name != "()": self._nodeid += "::" + self.name @property def ihook(self): return self.session.gethookproxy(self.fspath) def __repr__(self): return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None)) def warn(self, warning): from _pytest.warning_types import PytestWarning if not isinstance(warning, PytestWarning): raise ValueError( "warning must be an instance of PytestWarning or subclass, got {!r}".format( warning ) ) path, lineno = get_fslocation_from_item(self) warnings.warn_explicit( warning, category=None, filename=str(path), lineno=lineno + 1 if lineno is not None else None, ) @property def nodeid(self): return self._nodeid def __hash__(self): return hash(self.nodeid) def setup(self): pass def teardown(self): pass def listchain(self): chain = [] item = self while item is not None: chain.append(item) item = item.parent chain.reverse() return chain def add_marker(self, marker, append=True): from _pytest.mark import MarkDecorator, MARK_GEN if isinstance(marker, str): marker = getattr(MARK_GEN, marker) elif not isinstance(marker, MarkDecorator): raise ValueError("is not a string or pytest.mark.* Marker") self.keywords[marker.name] = marker if append: self.own_markers.append(marker.mark) else: self.own_markers.insert(0, marker.mark) def iter_markers(self, name=None): return (x[1] for x in self.iter_markers_with_node(name=name)) def iter_markers_with_node(self, name=None): for node in reversed(self.listchain()): for mark in node.own_markers: if name is None or getattr(mark, "name", None) == name: yield node, mark def get_closest_marker(self, name, default=None): return next(self.iter_markers(name=name), default) def listextrakeywords(self): extra_keywords = set() for item in self.listchain(): extra_keywords.update(item.extra_keyword_matches) return extra_keywords def listnames(self): return [x.name for x in self.listchain()] def addfinalizer(self, fin): self.session._setupstate.addfinalizer(fin, self) def getparent(self, cls): current = self while current and not isinstance(current, cls): current = current.parent return current def _prunetraceback(self, excinfo): pass def _repr_failure_py(self, excinfo, style=None): if excinfo.errisinstance(fail.Exception): if not excinfo.value.pytrace: return str(excinfo.value) fm = self.session._fixturemanager if excinfo.errisinstance(fm.FixtureLookupError): return excinfo.value.formatrepr() tbfilter = True if self.config.getoption("fulltrace", False): style = 
"long" else: tb = _pytest._code.Traceback([excinfo.traceback[-1]]) self._prunetraceback(excinfo) if len(excinfo.traceback) == 0: excinfo.traceback = tb tbfilter = False if style == "auto": style = "long" if style is None: if self.config.getoption("tbstyle", "auto") == "short": style = "short" else: style = "long" if self.config.getoption("verbose", 0) > 1: truncate_locals = False else: truncate_locals = True try: os.getcwd() abspath = False except OSError: abspath = True return excinfo.getrepr( funcargs=True, abspath=abspath, showlocals=self.config.getoption("showlocals", False), style=style, tbfilter=tbfilter, truncate_locals=truncate_locals, ) repr_failure = _repr_failure_py def get_fslocation_from_item(item): result = getattr(item, "location", None) if result is not None: return result[:2] obj = getattr(item, "obj", None) if obj is not None: return getfslineno(obj) return getattr(item, "fspath", "unknown location"), -1 class Collector(Node): class CollectError(Exception): def collect(self): raise NotImplementedError("abstract")
MIT License
sunspec/pysunspec
sunspec/core/device.py
Device.to_pics
python
def to_pics(self, parent, single_repeating=True):
    attr = {pics.PICS_ATTR_VERSION: str(pics.PICS_VERSION)}
    e = ET.SubElement(parent, pics.PICS_DEVICE, attrib=attr)

    for model in self.models_list:
        model.to_pics(e, single_repeating=single_repeating)
Adds the device and all elements within the device to the parent element.
If *single_repeating* is True, only the first repeating block for each
model is added to the document.

Parameters:
    parent : Element Tree element on which to place the device element.
    single_repeating : Flag to indicate whether to include a single or
        all repeating blocks within each model in the PICS document.
https://github.com/sunspec/pysunspec/blob/119b17600c8f20243dfdb694aacdaffefec2521b/sunspec/core/device.py#L216-L236
import contextlib import os import math try: import xml.etree.ElementTree as ET except: import elementtree.ElementTree as ET import sunspec.core.pics as pics import sunspec.core.smdx as smdx import sunspec.core.suns as suns import sunspec.core.util as util from sunspec.core.util import SunSpecError fspath = getattr(os, 'fspath', str) file_pathlist = None @contextlib.contextmanager def fresh_file_pathlist(*paths): global file_pathlist original_pathlist = file_pathlist file_pathlist = util.PathList() for path in paths: file_pathlist.add(fspath(path)) try: yield file_pathlist finally: file_pathlist = original_pathlist MAX_READ_COUNT = 125 class Device(object): def __init__(self, addr=suns.SUNS_BASE_ADDR_DEFAULT): self.base_addr = addr self.models_list = [] self.models = {} def add_model(self, model): models = self.models.get(model.id) if models is None: self.models[model.id] = [] models = self.models.get(model.id) models.append(model) self.models_list.append(model) def from_pics(self, element=None, filename=None, pathlist=None): global file_pathlist pics_data = '' try: if element is None: if pathlist is not None: try: pics_data = pathlist.read(filename) except NameError: pass if not pics_data and file_pathlist is not None: try: pics_data = file_pathlist.read(filename) except NameError: pass if not pics_data: f = open(filename, 'r') pics_data = f.read() f.close() root = ET.fromstring(pics_data) if root.tag != pics.PICS_ROOT: raise SunSpecError("Unexpected root element: %s" % (root.tag)) d = root.find(pics.PICS_DEVICE) if d is None: raise SunSpecError("No '{}' elements found in '{}' element".format(pics.PICS_DEVICE, root.tag)) else: d = element if d.tag != pics.PICS_DEVICE: raise SunSpecError("Unexpected device tag: '%s'" % (d.tag)) self.base_addr = d.attrib.get(pics.PICS_ATTR_BASE_ADDR, pics.PICS_BASE_ADDR_DEFAULT) addr = self.base_addr + 2 for m in d.findall('*'): if m is None: raise SunSpecError("No '{}' elements found in '{}' element".format(pics.PICS_MODEL, d.tag)) if m.tag != pics.PICS_MODEL: raise SunSpecError("Unexpected '{}' element in '{}' element".format(m.tag, d.tag)) model_id = m.attrib.get(pics.PICS_ATTR_ID) if model_id is None: raise SunSpecError('Module id error') model_len = m.attrib.get(pics.PICS_ATTR_LEN) if model_len is not None: model_len = int(model_len) model = Model(self, model_id, addr + 2, model_len) try: model.load() except Exception as e: model.load_error = str(e) model.from_pics(m) self.add_model(model) addr += model.len + 2 except Exception as e: raise SunSpecError('Error loading PICS: %s' % str(e)) """ def to_pics(self, pretty_print=False, single_repeating=True): attr = {pics.PICS_ATTR_VERSION: str(pics.PICS_VERSION)} root = ET.Element(pics.PICS_ROOT) e = ET.SubElement(root, pics.PICS_DEVICE, attrib=attr) for model in self.models_list: model.to_pics(e, single_repeating=single_repeating) if pretty_print: util.indent(root) return ET.tostring(root) """
MIT License
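A minimal usage sketch for to_pics (not from the source; `device` stands in for a Device already populated via add_model()/from_pics(), and the root tag name is an illustrative assumption):

>>> import xml.etree.ElementTree as ET
>>> root = ET.Element('sunSpecPICS')            # root tag is an assumption
>>> device.to_pics(root, single_repeating=True)
>>> ET.tostring(root)                           # serialized PICS document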
deepanshs/mrsimulator
src/mrsimulator/utils/spectral_fitting.py
_post_sim_LMFIT_params
python
def _post_sim_LMFIT_params(params, process, index):
    _ = [
        params.add(
            name=f"SP_{index}_operation_{i}_{operation.__class__.__name__}_{attr}",
            value=getattr(operation, attr),
        )
        for i, operation in enumerate(process.operations)
        if operation.__class__.__name__ in POST_SIM_DICT
        for attr in POST_SIM_DICT[operation.__class__.__name__]
    ]
Creates a LMFIT Parameters object for SignalProcessor operations involved
in spectrum fitting.

Args:
    params: LMFIT parameters object.
    process: SignalProcessor object at index *index*.
    int index: List index of the SignalProcessor object.

Returns:
    Parameters object.
https://github.com/deepanshs/mrsimulator/blob/9ab8a5edfa66434301b9d79da0c01a294e173704/src/mrsimulator/utils/spectral_fitting.py#L145-L165
import mrsimulator.signal_processing as sp import numpy as np from lmfit import Parameters from mrsimulator import Simulator __author__ = ["Maxwell C Venetos", "Deepansh Srivastava"] __email__ = ["maxvenetos@gmail.com", "srivastava.89@osu.edu"] START = "sys_" ENCODING_PAIRS = [ ["spin_systems[", START], ["].abundance", "_abundance"], ["].sites[", "_site_"], ["].isotropic_chemical_shift", "_isotropic_chemical_shift"], ["].shielding_symmetric.", "_shielding_symmetric_"], ["].quadrupolar.", "_quadrupolar_"], ["].couplings[", "_coupling_"], ["].isotropic_j", "_isotropic_j"], ["].j_symmetric.", "_j_symmetric_"], ["].dipolar.", "_dipolar_"], ] DECODING_PAIRS = [ ["spin_systems.", "sys_"], [".abundance", "_abundance"], [".sites.", "_site_"], [".isotropic_chemical_shift", "_isotropic_chemical_shift"], [".shielding_symmetric.", "_shielding_symmetric_"], [".quadrupolar.", "_quadrupolar_"], [".couplings.", "_coupling_"], [".isotropic_j", "_isotropic_j"], [".j_symmetric.", "_j_symmetric_"], [".dipolar.", "_dipolar_"], ] EXCLUDE = [ "property_units", "isotope", "name", "label", "description", "transition_pathways", ] POST_SIM_DICT = { "Gaussian": {"FWHM": "FWHM"}, "Exponential": {"FWHM": "FWHM"}, "Scale": {"factor": "factor"}, "ConstantOffset": {"offset": "offset"}, "Linear": {"amplitude": "amplitude", "offset": "offset"}, } def _str_encode(my_string): for item in ENCODING_PAIRS: my_string = my_string.replace(*item) return my_string def _str_decode(my_string): for item in DECODING_PAIRS: my_string = my_string.replace(*item[::-1]) my_string = my_string.split(".") return my_string def _list_of_dictionaries(my_list): return [item.dict() for item in my_list] def _traverse_dictionaries(instance, parent="spin_systems"): if isinstance(instance, list): return [ value for i, obj in enumerate(instance) for value in _traverse_dictionaries(obj, _str_encode(f"{parent}[{i}]")) ] if isinstance(instance, dict): return [ item for key, value in instance.items() if key not in EXCLUDE and value is not None for item in ( _traverse_dictionaries(value, _str_encode(f"{parent}.{key}")) if isinstance(value, (dict, list)) else [_str_encode(f"{parent}.{key}")] ) ] return []
BSD 3-Clause New or Revised License
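A hedged sketch of how this helper fills an lmfit Parameters object, assuming the SignalProcessor/Scale API visible in the context above:

>>> from lmfit import Parameters
>>> import mrsimulator.signal_processing as sp
>>> process = sp.SignalProcessor(operations=[sp.Scale(factor=2.0)])
>>> params = Parameters()
>>> _post_sim_LMFIT_params(params, process, index=0)
>>> list(params)   # names follow SP_{index}_operation_{i}_{class}_{attr}
['SP_0_operation_0_Scale_factor']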
powervm/pypowervm
pypowervm/utils/validation.py
ProcValidator._validate_deploy
python
def _validate_deploy(self):
    self._validate_host_has_available_res(
        self.des_procs, self.procs_avail, self.res_name)
Enforce validation rules specific to LPAR deployment.
https://github.com/powervm/pypowervm/blob/68f2b586b4f17489f379534ab52fc56a524b6da5/pypowervm/utils/validation.py#L362-L365
import abc import six from oslo_log import log as logging from pypowervm.i18n import _ from pypowervm.wrappers import base_partition as bp LOG = logging.getLogger(__name__) class ValidatorException(Exception): pass class LPARWrapperValidator(object): def __init__(self, lpar_w, host_w, cur_lpar_w=None): self.lpar_w = lpar_w self.host_w = host_w self.cur_lpar_w = cur_lpar_w def validate_all(self, check_dlpar=True): ProcValidator(self.lpar_w, self.host_w, cur_lpar_w=self.cur_lpar_w).validate( check_dlpar=check_dlpar) MemValidator(self.lpar_w, self.host_w, cur_lpar_w=self.cur_lpar_w).validate( check_dlpar=check_dlpar) CapabilitiesValidator(self.lpar_w, self.host_w, cur_lpar_w=self.cur_lpar_w).validate( check_dlpar=check_dlpar) @six.add_metaclass(abc.ABCMeta) class BaseValidator(object): def __init__(self, lpar_w, host_w, cur_lpar_w=None): self.lpar_w = lpar_w self.host_w = host_w self.cur_lpar_w = cur_lpar_w def validate(self, check_dlpar=True): self._populate_new_values() if self.cur_lpar_w is None: self._validate_deploy() else: if check_dlpar: self._can_modify() self._populate_resize_diffs() if self.cur_lpar_w.state == bp.LPARState.NOT_ACTIVATED: self._validate_inactive_resize() else: self._validate_active_resize() self._validate_common() @abc.abstractmethod def _populate_new_values(self): @abc.abstractmethod def _populate_resize_diffs(self): @abc.abstractmethod def _validate_deploy(self): @abc.abstractmethod def _validate_active_resize(self): @abc.abstractmethod def _validate_inactive_resize(self): @abc.abstractmethod def _validate_common(self): @abc.abstractmethod def _can_modify(self): def _validate_host_has_available_res(self, des, avail, res_name): if round(des, 2) > round(avail, 2): ex_args = {'requested': '%.2f' % des, 'avail': '%.2f' % avail, 'instance_name': self.lpar_w.name, 'res_name': res_name} msg = _("Insufficient available %(res_name)s on host for virtual " "machine '%(instance_name)s' (%(requested)s " "requested, %(avail)s available)") % ex_args LOG.error(msg) raise ValidatorException(msg) class MemValidator(BaseValidator): def __init__(self, lpar_w, host_w, cur_lpar_w=None): super(MemValidator, self).__init__(lpar_w, host_w, cur_lpar_w=cur_lpar_w) self.ppt_ratio = None def _populate_new_values(self): mem_cfg = self.lpar_w.mem_config self.des_mem = mem_cfg.desired self.max_mem = mem_cfg.max self.min_mem = mem_cfg.min self.exp_fact = mem_cfg.exp_factor self.avail_mem = self.host_w.memory_free self.ppt_ratio = mem_cfg.ppt_ratio self.res_name = _('memory') def _populate_resize_diffs(self): deltas = self._calculate_resize_deltas() self.delta_des_mem = deltas['delta_mem'] self.delta_max_mem = deltas['delta_max_mem'] self.delta_exp_fact = deltas['delta_exp_factor'] def _validate_deploy(self): self._validate_host_has_available_res( self.des_mem, self.avail_mem, self.res_name) def _validate_active_resize(self): curr_mem_cfg = self.cur_lpar_w.mem_config curr_min_mem = curr_mem_cfg.min curr_max_mem = curr_mem_cfg.max if self.max_mem != curr_max_mem or self.min_mem != curr_min_mem: msg = (_("The virtual machine must be powered off before changing " "the minimum or maximum memory. Power off virtual " "machine %s and try again.") % self.cur_lpar_w.name) raise ValidatorException(msg) if self.delta_exp_fact != 0: msg = (_("The virtual machine must be powered off before changing " "the expansion factor. 
Power off virtual machine %s and " "try again.") % self.cur_lpar_w.name) raise ValidatorException(msg) if (self.ppt_ratio is not None and self.ppt_ratio != curr_mem_cfg.ppt_ratio): msg = ("The virtual machine must be powered off before changing " "the physical page table ratio. Power off virtual " "machine %s and try again.") % self.cur_lpar_w.name raise ValidatorException(msg) self._validate_resize_common() def _validate_inactive_resize(self): self._validate_resize_common() def _validate_common(self): pass def _can_modify(self): modifiable, reason = self.cur_lpar_w.can_modify_mem() if not modifiable: LOG.error(reason) raise ValidatorException(reason) def _validate_resize_common(self): self._validate_host_has_available_res(self.delta_des_mem, self.avail_mem, self.res_name) def _calculate_resize_deltas(self): deltas = {} curr_mem_cfg = self.cur_lpar_w.mem_config curr_des_mem = curr_mem_cfg.desired curr_max_mem = curr_mem_cfg.max curr_exp_fact = curr_mem_cfg.exp_factor deltas['delta_mem'] = self.des_mem - curr_des_mem deltas['delta_max_mem'] = self.max_mem - curr_max_mem deltas['delta_exp_factor'] = self.exp_fact - curr_exp_fact return deltas class ProcValidator(BaseValidator): def _populate_new_values(self): self.has_dedicated = self.lpar_w.proc_config.has_dedicated self.procs_avail = self.host_w.proc_units_avail self.proc_compat_mode = self.lpar_w.proc_compat_mode if self.has_dedicated: self._populate_dedicated_proc_values() else: self._populate_shared_proc_values() def _populate_dedicated_proc_values(self): ded_proc_cfg = self.lpar_w.proc_config.dedicated_proc_cfg self.des_procs = ded_proc_cfg.desired self.res_name = _('CPUs') self.max_procs_per_aix_linux_lpar = ( self.host_w.max_procs_per_aix_linux_lpar) self.max_sys_procs_limit = self.host_w.max_sys_procs_limit self.des_vcpus = self.des_procs self.max_vcpus = ded_proc_cfg.max self.min_vcpus = ded_proc_cfg.min def _populate_shared_proc_values(self): shr_proc_cfg = self.lpar_w.proc_config.shared_proc_cfg self.des_procs = shr_proc_cfg.desired_units self.res_name = _('processing units') self.max_procs_per_aix_linux_lpar = ( self.host_w.max_vcpus_per_aix_linux_lpar) self.max_sys_procs_limit = self.host_w.max_sys_vcpus_limit self.des_vcpus = shr_proc_cfg.desired_virtual self.max_vcpus = shr_proc_cfg.max_virtual self.min_vcpus = shr_proc_cfg.min_virtual self.max_proc_units = shr_proc_cfg.max_units self.min_proc_units = shr_proc_cfg.min_units self.pool_id = shr_proc_cfg.pool_id def _populate_resize_diffs(self): deltas = self._calculate_resize_deltas() self.delta_des_vcpus = deltas['delta_vcpu']
Apache License 2.0
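Illustrative call only: `lpar_w` and `host_w` stand in for LPAR and managed-system wrappers obtained from a live pypowervm session, which is assumed here:

>>> validator = ProcValidator(lpar_w, host_w)   # no cur_lpar_w, so deploy path
>>> validator.validate()   # raises ValidatorException if processors run short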
a5kin/xentica
xentica/core/topology/lattice.py
OrthogonalLattice.is_off_board_code
python
def is_off_board_code(self, coord_prefix):
    self._define_constants_once()

    conditions = []
    for i in range(self.dimensions):
        condition = "{x}{i} < 0 || {x}{i} >= {w}{i}".format(
            x=coord_prefix, i=i, w=self.width_prefix
        )
        conditions.append(condition)
    return " || ".join(conditions)
Generate C code to test if the cell's coordinates are off board.

See :meth:`Lattice.is_off_board_code` for details.
https://github.com/a5kin/xentica/blob/ca08fac9f85af71c9d6d98545a33d50323f851b3/xentica/core/topology/lattice.py#L206-L221
import abc from xentica.core.mixins import DimensionsMixin, BscaDetectorMixin from xentica.core.variables import Constant from xentica.core.exceptions import XenticaException __all__ = ['Lattice', 'OrthogonalLattice', ] class Lattice(DimensionsMixin, BscaDetectorMixin, metaclass=abc.ABCMeta): width_prefix = "_w" def _define_constants_once(self): num_dimensions = self.bsca.topology.dimensions for i in range(num_dimensions): if not hasattr(self.bsca, "size") or i >= len(self.bsca.size): msg = "Wrong field's dimensionality ({} instead of {})." msg = msg.format(len(self.bsca.size), num_dimensions) raise XenticaException(msg) size = self.bsca.size[i] constant = Constant("%s%d" % (self.width_prefix, i), size) self.bsca.define_constant(constant) @abc.abstractmethod def index_to_coord_code(self, index_name, coord_prefix): @abc.abstractmethod def index_to_coord(self, idx, bsca): @abc.abstractmethod def coord_to_index_code(self, coord_prefix): @abc.abstractmethod def is_off_board_code(self, coord_prefix): class OrthogonalLattice(Lattice): supported_dimensions = list(range(1, 100)) def index_to_coord_code(self, index_name, coord_prefix): self._define_constants_once() i = 0 def wrap_format(text): return text.format(x=coord_prefix, i=i, index=index_name, w=self.width_prefix) for i in range(self.dimensions): if i == 0: code = wrap_format("int _{index} = {index};\n") index_name = "_" + index_name if i < self.dimensions - 1: code += wrap_format("int {x}{i} = {index} % {w}{i};\n") code += wrap_format("{index} /= {w}{i};\n") else: code += wrap_format("int {x}{i} = {index};\n") return code def index_to_coord(self, idx, bsca): coord = [] for i in range(bsca.topology.dimensions): if i < self.dimensions - 1: x_i = idx % bsca.size[i] idx //= bsca.size[i] else: x_i = idx coord.append(x_i) return coord def coord_to_index_code(self, coord_prefix): self._define_constants_once() summands = [] for i in range(self.dimensions): summand = coord_prefix + str(i) for j in range(i): summand = self.width_prefix + str(j) + " * " + summand summands.append(summand) return " + ".join(summands)
MIT License
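For a 2D lattice the generated C condition looks like the string below; the sketch assumes the lattice is already attached to a BSCA (which _define_constants_once requires), so it is illustrative rather than directly runnable:

>>> lattice = OrthogonalLattice()
>>> lattice.dimensions = 2
>>> lattice.is_off_board_code("x")
'x0 < 0 || x0 >= _w0 || x1 < 0 || x1 >= _w1'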
google-research/dice_rl
scripts/create_dataset.py
get_onpolicy_dataset
python
def get_onpolicy_dataset(load_dir, env_name, tabular_obs,
                         max_trajectory_length, alpha, seed):
    tf_env, tf_policy = env_policies.get_env_and_policy(
        load_dir, env_name, alpha, env_seed=seed,
        tabular_obs=tabular_obs)
    dataset = tf_agents_onpolicy_dataset.TFAgentsOnpolicyDataset(
        tf_env, tf_policy,
        episode_step_limit=max_trajectory_length)
    return dataset
Get on-policy dataset.
https://github.com/google-research/dice_rl/blob/4855e16a475ff0685068190c0504b28b5ed64233/scripts/create_dataset.py#L57-L66
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl import app
from absl import flags
import numpy as np
import os
import tensorflow.compat.v2 as tf
tf.compat.v1.enable_v2_behavior()
import pickle

from tf_agents.environments import gym_wrapper
from tf_agents.environments import tf_py_environment

import dice_rl.environments.env_policies as env_policies
import dice_rl.data.tf_agents_onpolicy_dataset as tf_agents_onpolicy_dataset
import dice_rl.estimators.estimator as estimator_lib
import dice_rl.utils.common as common_utils
from dice_rl.data.dataset import Dataset, EnvStep, StepType
from dice_rl.data.tf_offpolicy_dataset import TFOffpolicyDataset

FLAGS = flags.FLAGS
flags.DEFINE_string('env_name', 'taxi', 'Environment name.')
flags.DEFINE_integer('seed', 0, 'Initial random seed.')
flags.DEFINE_integer('num_trajectory', 100,
                     'Number of trajectories to collect.')
flags.DEFINE_integer('max_trajectory_length', 500,
                     'Cutoff trajectory at this step.')
flags.DEFINE_float('alpha', 1.0, 'How close to target policy.')
flags.DEFINE_bool('tabular_obs', True, 'Whether to use tabular observations.')
flags.DEFINE_string('save_dir', None, 'Directory to save dataset to.')
flags.DEFINE_string('load_dir', None, 'Directory to load policies from.')
flags.DEFINE_bool('force', False,
                  'Whether to force overwriting any existing dataset.')
Apache License 2.0
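A hedged call sketch; the policy directory is a placeholder, not a path taken from the source:

>>> dataset = get_onpolicy_dataset(
...     load_dir='/tmp/policies', env_name='taxi', tabular_obs=True,
...     max_trajectory_length=500, alpha=1.0, seed=0)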
suomichain/suomi-core
sdk/python/suomi_sdk/processor/config.py
_get_dir
python
def _get_dir(toml_config_setting, suomi_home_dir, windows_dir, default_dir):
    conf_file = os.path.join(get_config_dir(), 'path.toml')
    if os.path.exists(conf_file):
        with open(conf_file) as fd:
            raw_config = fd.read()
        toml_config = toml.loads(raw_config)
        if toml_config_setting in toml_config:
            return toml_config[toml_config_setting]

    if 'SUOMI_HOME' in os.environ:
        return os.path.join(os.environ['SUOMI_HOME'], suomi_home_dir)

    if os.name == 'nt':
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
        return os.path.join(base_dir, windows_dir)

    return default_dir
Determines the directory path based on configuration.

Arguments:
    toml_config_setting (str): The name of the config setting related
        to the directory which will appear in path.toml.
    suomi_home_dir (str): The directory under the SUOMI_HOME environment
        variable. For example, for 'data' if the data directory is
        $SUOMI_HOME/data.
    windows_dir (str): The windows path relative to the computed base
        directory.
    default_dir (str): The default path on Linux.

Returns:
    directory (str): The path.
https://github.com/suomichain/suomi-core/blob/dada0499ddc2b4b4a5d9a975de5af63b87ded9d2/sdk/python/suomi_sdk/processor/config.py#L36-L71
import os
import sys

import toml
import yaml


def get_config_dir():
    if 'SUOMI_HOME' in os.environ:
        return os.path.join(os.environ['SUOMI_HOME'], 'etc')

    if os.name == 'nt':
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
        return os.path.join(base_dir, 'conf')

    return '/etc/suomi'
Apache License 2.0
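A call sketch showing the fallback order (path.toml setting, then SUOMI_HOME, then the Windows layout, then the default); the 'data_dir' setting name and directory arguments are hypothetical, and the result shown assumes a bare Linux environment:

>>> _get_dir('data_dir', 'data', 'data', '/var/lib/suomi')
'/var/lib/suomi'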
batzner/tensorlm
tensorlm/common/util.py
restore_possible
python
def restore_possible(out_dir):
    ckpt = tf.train.get_checkpoint_state(out_dir)
    return ckpt and ckpt.model_checkpoint_path
Check if a directory contains the necessary files for loading a TF model from it.
https://github.com/batzner/tensorlm/blob/4c47c0ff9e31df5960a1bf83dcbdd370eaf7ce5e/tensorlm/common/util.py#L34-L37
import tensorflow as tf


def get_chunks(items, size):
    chunks = []
    for i in range(0, len(items), size):
        chunks.append(items[i:i + size])
    return chunks
MIT License
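Usage sketch (the checkpoint directory is illustrative):

>>> if restore_possible('out/model'):
...     print('Found a TensorFlow checkpoint to restore from.')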
binaryanalysisplatform/bap-ida-python
plugins/bap/utils/trace.py
machine_id
python
def machine_id(state, fromto):
    state['machine-id'] = int(fromto[1])
tracks machine identifier

Maintains the 'machine-id' field in the state.
https://github.com/binaryanalysisplatform/bap-ida-python/blob/d8d4679de2f50bb75f556419565821d95404034e/plugins/bap/utils/trace.py#L187-L192
from .sexp import Parser handlers = {} filters = {} class Loader(object): def __init__(self, *args): self.parser = Parser(*args) self.state = {} self._handlers = [] self._filters = [] self._filter_reqs = set() def __iter__(self): return self def __next__(self): return self.next() def enable_handlers(self, names): self._handlers = satisfy_requirements(self._handlers + names) for name in self._handlers: handlers[name].init(self.state) def enable_filter(self, filter_name, *args, **kwargs): filter = filters[filter_name] requires = satisfy_requirements(filter.requires) self.enable_handlers(requires) for req in requires: self._filter_reqs.add(req) self._filters.append(filter(*args, **kwargs)) def next(self): event = self.parser.next() if len(event) != 2: raise ValueError('Malformed Observation {}'.format(event)) event, payload = event completed = set() self.state['event'] = event for h in self._handlers: if h in self._filter_reqs and event in handlers[h].events: completed.add(h) handlers[h](self.state, payload) for accept in self._filters: if not accept(self.state): break else: for h in self._handlers: if h not in completed and event in handlers[h].events: handlers[h](self.state, payload) return self.state def run(self): for state in self: pass def attach_meta_attributes(h, **kwargs): if 'name' not in kwargs: name = h.__name__.replace('_', '-') h.__dict__['name'] = name req = kwargs.get('requires', []) if 'requires' in kwargs: del kwargs['requires'] h.__dict__['requires'] = req if isinstance(req, list) else [req] h.__dict__.update(kwargs) def handler(*args, **kwargs): def make_handler(f): f.__dict__['events'] = args if 'init' in kwargs: default = kwargs['init'] f.__dict__['init'] = lambda s: s.update(default) del kwargs['init'] else: f.__dict__['init'] = lambda x: None attach_meta_attributes(f, **kwargs) handlers[f.name] = f return make_handler def filter(**kwargs): def make_filter(f): def init(**kwargs): return lambda state: f(state, **kwargs) attach_meta_attributes(f, **kwargs) attach_meta_attributes(init, name=f.name, **kwargs) filters[init.name] = init return make_filter @handler('machine-switch', 'machine-fork', init={'machine-id': 0})
MIT License
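The handler is pure, so it can be exercised directly; the (from, to) payload shape is inferred from the parameter name and is an assumption:

>>> state = {'machine-id': 0}
>>> machine_id(state, ('0', '3'))
>>> state['machine-id']
3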
francocruces/mioconnect
src/myodriver.py
MyoDriver.run
python
def run(self):
    self.disconnect_all()
    while len(self.myos) < self.config.MYO_AMOUNT:
        print(
            "*** Connecting myo " + str(len(self.myos) + 1) + " out of " +
            str(self.config.MYO_AMOUNT) + " ***")
        print()
        self.add_myo_connection()
    self.receive()
Main. Disconnects possible connections and starts as many connections as needed.
https://github.com/francocruces/mioconnect/blob/c58c9646fe6bb005e8f4d5e85551d1a0d31aeb91/src/myodriver.py#L30-L40
import sys
import time

from src.public.myohw import *
from src.myo import Myo
from src.bluetooth import Bluetooth
from src.data_handler import DataHandler


class MyoDriver:
    def __init__(self, config):
        self.config = config
        print("OSC Address: " + str(self.config.OSC_ADDRESS))
        print("OSC Port: " + str(self.config.OSC_PORT))
        print()

        self.data_handler = DataHandler(self.config)
        self.bluetooth = Bluetooth(self.config.MESSAGE_DELAY)
        self.myos = []
        self.myo_to_connect = None
        self.scanning = False
        self.set_handlers()
MIT License
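A usage sketch; the Config object with MYO_AMOUNT/OSC settings is hypothetical, and a paired Myo Bluetooth dongle is assumed:

>>> config = Config()            # hypothetical settings object
>>> driver = MyoDriver(config)
>>> driver.run()                 # connects MYO_AMOUNT Myos, then receives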
parquery/icontract
icontract/_decorators.py
ensure.__call__
python
def __call__(self, func: CallableT) -> CallableT:
    if not self.enabled:
        return func

    contract_checker = icontract._checkers.find_checker(func=func)

    if contract_checker is None:
        contract_checker = icontract._checkers.decorate_with_checker(func=func)

    result = contract_checker

    assert self._contract is not None
    icontract._checkers.add_postcondition_to_checker(checker=contract_checker,
                                                     contract=self._contract)

    return result
Add the postcondition to the list of postconditions of the function ``func``.

The function ``func`` is decorated with a contract checker if there is no
contract checker in the decorator stack.

:param func: function to be wrapped
:return: contract checker around ``func`` if no contract checker on the
    decorator stack, or ``func`` otherwise
https://github.com/parquery/icontract/blob/8d3193144a0b92e96f548b2ee828eb36a405ed40/icontract/_decorators.py#L242-L267
import inspect import reprlib import traceback from typing import Callable, Optional, Union, Any, List, Type, TypeVar import icontract._checkers from icontract._globals import CallableT, ExceptionT, ClassT from icontract._types import Contract, Snapshot class require: def __init__(self, condition: Callable[..., Any], description: Optional[str] = None, a_repr: reprlib.Repr = icontract._globals.aRepr, enabled: bool = __debug__, error: Optional[Union[Callable[..., ExceptionT], Type[ExceptionT], BaseException]] = None) -> None: self.enabled = enabled self._contract = None if not enabled: return if error is None: pass elif isinstance(error, type): if not issubclass(error, BaseException): raise ValueError(("The error of the contract is given as a type, " "but the type does not inherit from BaseException: {}").format(error)) else: if not inspect.isfunction(error) and not inspect.ismethod(error) and not isinstance(error, BaseException): raise ValueError( ("The error of the contract must be either a callable (a function or a method), " "a class (subclass of BaseException) or an instance of BaseException, but got: {}").format(error)) location = None tb_stack = traceback.extract_stack(limit=2)[:1] if len(tb_stack) > 0: frame = tb_stack[0] location = 'File {}, line {} in {}'.format(frame.filename, frame.lineno, frame.name) self._contract = Contract( condition=condition, description=description, a_repr=a_repr, error=error, location=location) def __call__(self, func: CallableT) -> CallableT: if not self.enabled: return func contract_checker = icontract._checkers.find_checker(func=func) if contract_checker is None: contract_checker = icontract._checkers.decorate_with_checker(func=func) result = contract_checker assert self._contract is not None icontract._checkers.add_precondition_to_checker(checker=contract_checker, contract=self._contract) return result class snapshot: def __init__(self, capture: Callable[..., Any], name: Optional[str] = None, enabled: bool = __debug__) -> None: self._snapshot = None self.enabled = enabled if enabled: location = None tb_stack = traceback.extract_stack(limit=2)[:1] if len(tb_stack) > 0: frame = tb_stack[0] location = 'File {}, line {} in {}'.format(frame.filename, frame.lineno, frame.name) self._snapshot = Snapshot(capture=capture, name=name, location=location) def __call__(self, func: CallableT) -> CallableT: if not self.enabled: return func contract_checker = icontract._checkers.find_checker(func=func) if contract_checker is None: raise ValueError("You are decorating a function with a snapshot, but no postcondition was defined " "on the function before.") assert self._snapshot is not None, "Expected the enabled snapshot to have the property ``snapshot`` set." 
icontract._checkers.add_snapshot_to_checker(checker=contract_checker, snapshot=self._snapshot) return func class ensure: def __init__(self, condition: Callable[..., Any], description: Optional[str] = None, a_repr: reprlib.Repr = icontract._globals.aRepr, enabled: bool = __debug__, error: Optional[Union[Callable[..., ExceptionT], Type[ExceptionT], BaseException]] = None) -> None: self.enabled = enabled self._contract = None if not enabled: return if error is None: pass elif isinstance(error, type): if not issubclass(error, BaseException): raise ValueError(("The error of the contract is given as a type, " "but the type does not inherit from BaseException: {}").format(error)) else: if not inspect.isfunction(error) and not inspect.ismethod(error) and not isinstance(error, BaseException): raise ValueError( ("The error of the contract must be either a callable (a function or a method), " "a class (subclass of BaseException) or an instance of BaseException, but got: {}").format(error)) location = None tb_stack = traceback.extract_stack(limit=2)[:1] if len(tb_stack) > 0: frame = tb_stack[0] location = 'File {}, line {} in {}'.format(frame.filename, frame.lineno, frame.name) self._contract = Contract( condition=condition, description=description, a_repr=a_repr, error=error, location=location)
MIT License
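This __call__ backs the public decorator form, following icontract's documented usage (the postcondition receives the special `result` argument):

>>> import icontract
>>> @icontract.ensure(lambda result: result >= 0)
... def absolute(x: int) -> int:
...     return abs(x)
>>> absolute(-3)
3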
pennylaneai/pennylane-sf
pennylane_sf/tf.py
StrawberryFieldsTF.apply
python
def apply(self, operation, wires, par):
    device_wires = self.map_wires(wires)

    if operation not in self.matrix_gates:
        param_labels = [str(uuid.uuid4()) for _ in range(len(par))]
        for l, v in zip(param_labels, par):
            self.params[l] = v
        par = self.prog.params(*param_labels)

    if not isinstance(par, Sequence):
        par = (par,)

    op = self._operation_map[operation](*par)
    op | [self.q[i] for i in device_wires.labels]
Apply a quantum operation.

Args:
    operation (str): name of the operation
    wires (Wires): subsystems the operation is applied on
    par (tuple): parameters for the operation
https://github.com/pennylaneai/pennylane-sf/blob/d5188553bdb19ee758a0464cfdc08e19505bcd06/pennylane_sf/tf.py#L195-L222
from collections import OrderedDict from collections.abc import Sequence import uuid import numpy as np import tensorflow as tf import strawberryfields as sf from strawberryfields.backends.tfbackend.states import FockStateTF from strawberryfields.ops import ( Coherent, DensityMatrix, DisplacedSqueezed, Fock, Ket, Squeezed, Thermal, Gaussian, ) from strawberryfields.ops import ( BSgate, CKgate, CXgate, CZgate, Dgate, Kgate, Pgate, Rgate, S2gate, Sgate, Vgate, Interferometer, ) from pennylane.wires import Wires from .expectations import mean_photon, number_expectation, homodyne, poly_xp from .simulator import StrawberryFieldsSimulator def identity(state, device_wires, params): N = state.num_modes D = state.cutoff_dim if N == len(device_wires): tr = state.trace() return tr, tr - tr ** 2 N = len(device_wires) dm = state.reduced_dm(modes=device_wires.tolist()) new_ax = np.arange(2 * N).reshape([N, 2]).T.flatten() tr = tf.math.real(tf.linalg.trace(tf.reshape(tf.transpose(dm, new_ax), [D ** N, D ** N]))) return tr, tr - tr ** 2 def fock_state(state, device_wires, params): n = params[0] N = state.num_modes if N == len(device_wires): ex = state.fock_prob(n) return ex, ex - ex ** 2 dm = state.reduced_dm(modes=device_wires.tolist()) ex = tf.math.real(dm[tuple(n[i // 2] for i in range(len(n) * 2))]) var = ex - ex ** 2 return ex, var class StrawberryFieldsTF(StrawberryFieldsSimulator): name = "Strawberry Fields TensorFlow PennyLane plugin" short_name = "strawberryfields.tf" _capabilities = {"model": "cv", "passthru_interface": "tf"} _operation_map = { "CoherentState": Coherent, "FockDensityMatrix": DensityMatrix, "DisplacedSqueezedState": DisplacedSqueezed, "FockState": Fock, "FockStateVector": Ket, "SqueezedState": Squeezed, "ThermalState": Thermal, "GaussianState": Gaussian, "Beamsplitter": BSgate, "CrossKerr": CKgate, "ControlledAddition": CXgate, "ControlledPhase": CZgate, "Displacement": Dgate, "Kerr": Kgate, "QuadraticPhase": Pgate, "Rotation": Rgate, "TwoModeSqueezing": S2gate, "Squeezing": Sgate, "CubicPhase": Vgate, "InterferometerUnitary": Interferometer, } _observable_map = { "NumberOperator": mean_photon, "TensorN": number_expectation, "X": homodyne(0), "P": homodyne(np.pi / 2), "QuadOperator": homodyne(), "PolyXP": poly_xp, "FockStateProjector": fock_state, "Identity": identity, } matrix_gates = { "FockDensityMatrix", "GaussianState", "InterferometerUnitary", "FockStateVector", } _circuits = {} _asarray = staticmethod(tf.convert_to_tensor) def __init__(self, wires, *, cutoff_dim, shots=None, hbar=2): super().__init__(wires, shots=shots, hbar=hbar) self.cutoff = cutoff_dim self.params = {}
Apache License 2.0
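apply() is normally driven indirectly through PennyLane's QNode machinery; a minimal sketch of that path, using operations from the _operation_map above:

>>> import pennylane as qml
>>> dev = qml.device('strawberryfields.tf', wires=1, cutoff_dim=5)
>>> @qml.qnode(dev, interface='tf')
... def circuit(a):
...     qml.Displacement(a, 0.0, wires=0)
...     return qml.expval(qml.NumberOperator(0))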
cisco/mindmeld
mindmeld/auto_annotator.py
SpacyAnnotator._is_plural_entity
python
def _is_plural_entity(self, entity):
    return (
        self.language == ENGLISH_LANGUAGE_CODE
        and len(entity["body"]) >= 2
        and entity["body"][-2:] == "'s"
    )
Check if an entity is plural.

Args:
    entity (dict): A dictionary representing an entity.

Returns:
    is_plural (bool): Whether the entity is plural.
https://github.com/cisco/mindmeld/blob/d3a0606b5eaa92733dd12674438d45de4b124c63/mindmeld/auto_annotator.py#L779-L792
import logging import os import re from abc import ABC, abstractmethod from enum import Enum from typing import List from tqdm import tqdm from .resource_loader import ResourceLoader from .components._config import ( ENGLISH_LANGUAGE_CODE, ENGLISH_US_LOCALE, ) from .components.translators import NoOpTranslator, TranslatorFactory from .text_preparation.spacy_model_factory import SpacyModelFactory from .system_entity_recognizer import ( DucklingRecognizer, duckling_item_to_query_entity, ) from .markup import load_query, dump_queries from .core import Entity, Span, QueryEntity, _get_overlap, NestedEntity from .exceptions import MarkupError from .models.helpers import register_annotator from .constants import ( DUCKLING_TO_SYS_ENTITY_MAPPINGS, ANNOTATOR_TO_SYS_ENTITY_MAPPINGS, SPACY_SYS_ENTITIES_NOT_IN_DUCKLING, CURRENCY_SYMBOLS, SYSTEM_ENTITY_PREFIX, ) from .components import NaturalLanguageProcessor from .path import get_entity_types from .query_factory import QueryFactory logger = logging.getLogger(__name__) class AnnotatorAction(Enum): ANNOTATE = "annotate" UNANNOTATE = "unannotate" class Annotator(ABC): def __init__( self, app_path, annotation_rules=None, language=ENGLISH_LANGUAGE_CODE, locale=ENGLISH_US_LOCALE, overwrite=False, unannotate_supported_entities_only=True, unannotation_rules=None, **kwargs, ): self.app_path = app_path self.language = language self.locale = locale self.overwrite = overwrite self.annotation_rules = annotation_rules or [] self.unannotate_supported_entities_only = unannotate_supported_entities_only self.unannotation_rules = unannotation_rules or [] self._resource_loader = ResourceLoader.create_resource_loader(app_path) self.duckling = DucklingRecognizer.get_instance() def _get_file_entities_map(self, action: AnnotatorAction): all_file_paths = self._resource_loader.get_all_file_paths() file_entities_map = {path: [] for path in all_file_paths} if action == AnnotatorAction.ANNOTATE: rules = self.annotation_rules elif action == AnnotatorAction.UNANNOTATE: rules = self.unannotation_rules else: raise AssertionError(f"{action} is an invalid Annotator action.") for rule in rules: pattern = Annotator._get_pattern(rule) compiled_pattern = re.compile(pattern) filtered_paths = self._resource_loader.filter_file_paths( compiled_pattern=compiled_pattern, file_paths=all_file_paths ) for path in filtered_paths: entities = self._get_entities(rule) file_entities_map[path] = entities return file_entities_map @staticmethod def _get_pattern(rule): pattern = [rule[x] for x in ["domains", "intents", "files"]] return ".*/" + "/".join(pattern) def _get_entities(self, rule): if rule["entities"].strip() in ["*", ".*", ".+"]: return ["*"] entities = re.sub("[()]", "", rule["entities"]).split("|") valid_entities = [] for entity in entities: entity = entity.strip() if self.valid_entity_check(entity): valid_entities.append(entity) else: logger.warning("%s is not a valid entity. Skipping entity.", entity) return valid_entities @property @abstractmethod def supported_entity_types(self): raise NotImplementedError("Subclasses must implement this method") def valid_entity_check(self, entity): entity = entity.lower().strip() return entity in self.supported_entity_types def annotate(self): if not self.annotation_rules: logger.warning( """'annotate' field is not configured or misconfigured in the `config.py`. 
We can't find any file to annotate.""" ) return self._modify_queries(action=AnnotatorAction.ANNOTATE) def unannotate(self): if not self.unannotate: logger.warning( """'unannotate' field is not configured or misconfigured in the `config.py`. We can't find any file to unannotate.""" ) return self._modify_queries(action=AnnotatorAction.UNANNOTATE) def _modify_queries(self, action: AnnotatorAction): file_entities_map = self._get_file_entities_map(action=action) query_factory = QueryFactory.create_query_factory(self.app_path) path_list = [p for p in file_entities_map if file_entities_map[p]] for path in path_list: processed_queries = Annotator._get_processed_queries( file_path=path, query_factory=query_factory ) tqdm_desc = "Processing " + path + ": " for processed_query in tqdm(processed_queries, ascii=True, desc=tqdm_desc): entity_types = file_entities_map[path] if action == AnnotatorAction.ANNOTATE: self._annotate_query( processed_query=processed_query, entity_types=entity_types, ) elif action == AnnotatorAction.UNANNOTATE: self._unannotate_query( processed_query=processed_query, remove_entities=entity_types, ) with open(path, "w") as outfile: outfile.write("".join(list(dump_queries(processed_queries)))) outfile.close() @staticmethod def _get_processed_queries(file_path, query_factory): with open(file_path) as infile: queries = infile.readlines() processed_queries = [] domain, intent = file_path.split(os.sep)[-3:-1] for query in queries: try: processed_query = load_query( markup=query, domain=domain, intent=intent, query_factory=query_factory, ) processed_queries.append(processed_query) except (AssertionError, MarkupError): logger.warning("Skipping query. Error in processing: %s", query) return processed_queries def _annotate_query(self, processed_query, entity_types): current_entities = list(processed_query.entities) annotated_entities = self._get_annotated_entities( processed_query=processed_query, entity_types=entity_types ) final_entities = Annotator._resolve_conflicts( target_entities=annotated_entities if self.overwrite else current_entities, other_entities=current_entities if self.overwrite else annotated_entities, ) processed_query.entities = tuple(final_entities) def _get_annotated_entities(self, processed_query, entity_types=None): if len(entity_types) == 0: return [] entity_types = None if entity_types == ["*"] else entity_types return self.parse( sentence=processed_query.query.text, entity_types=entity_types, domain=processed_query.domain, intent=processed_query.intent, ) @staticmethod def _item_to_query_entity(item, processed_query): span = Span(start=item["start"], end=item["end"] - 1) role = item.get("role") entity = Entity( text=item["body"], entity_type=item["dim"], role=role, value=item["value"] ) query_entity = QueryEntity.from_query( query=processed_query.query, span=span, entity=entity ) return query_entity @staticmethod def _resolve_conflicts(target_entities, other_entities): additional_entities = [] for o_entity in other_entities: no_overlaps = [ not _get_overlap(o_entity.span, t_entity.span) for t_entity in target_entities ] if all(no_overlaps): additional_entities.append(o_entity) target_entities.extend(additional_entities) return target_entities def _unannotate_query(self, processed_query, remove_entities): keep_entities = [] for query_entity in processed_query.entities: if remove_entities == ["*"]: is_supported_entity = self.valid_entity_check(query_entity.entity.type) if self.unannotate_supported_entities_only and not is_supported_entity: 
keep_entities.append(query_entity) elif query_entity.entity.type not in remove_entities: keep_entities.append(query_entity) processed_query.entities = tuple(keep_entities) @abstractmethod def parse(self, sentence, **kwargs): raise NotImplementedError("Subclasses must implement this method") class SpacyAnnotator(Annotator): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.spacy_model_size = kwargs.get("spacy_model_size", "lg") self.nlp = SpacyModelFactory.get_spacy_language_model( self.language, self.spacy_model_size ) @property def supported_entity_types(self): spacy_supported_entities = [e.lower() for e in self.nlp.get_pipe("ner").labels] supported_entities = set() for entity in spacy_supported_entities: if entity == "misc": continue if entity in ["time", "date", "datetime"]: supported_entities.update(["sys_time", "sys_duration", "sys_interval"]) elif entity in ANNOTATOR_TO_SYS_ENTITY_MAPPINGS: supported_entities.add(ANNOTATOR_TO_SYS_ENTITY_MAPPINGS[entity]) else: supported_entities.add(f"sys_{entity}") if "sys_weight" in supported_entities: supported_entities.update(["sys_distance", "sys_other-quantity"]) supported_entities = self._remove_unresolvable_entities(supported_entities) return supported_entities def _remove_unresolvable_entities(self, entities): filtered_entities = [] for entity in entities: if entity not in SPACY_SYS_ENTITIES_NOT_IN_DUCKLING: if ( self.language in DUCKLING_TO_SYS_ENTITY_MAPPINGS and entity in DUCKLING_TO_SYS_ENTITY_MAPPINGS[self.language] ): filtered_entities.append(entity) else: filtered_entities.append(entity) return filtered_entities def parse(self, sentence, entity_types=None, **kwargs): doc = self.nlp(sentence) spacy_entities = [ { "body": ent.text, "start": ent.start_char, "end": ent.end_char, "value": {"value": ent.text}, "dim": ent.label_.lower(), } for ent in doc.ents ] entity_resolution_func_map = { "time": self._resolve_time_date, "date": self._resolve_time_date, "datetime": self._resolve_time_date, "cardinal": self._resolve_cardinal, "money": self._resolve_money, "ordinal": self._resolve_ordinal, "quantity": self._resolve_quantity, "percent": self._resolve_percent, "person": self._resolve_person, } entities = [] for entity in spacy_entities: if entity["dim"] in ["per", "persName"]: entity["dim"] = "person" elif entity["dim"] == "misc": continue if entity["dim"] in entity_resolution_func_map: params = {"entity": entity} if entity["dim"] in ["time", "date", "datetime"]: params["entity_types"] = entity_types elif entity["dim"] in ["money"]: params["sentence"] = sentence entity = entity_resolution_func_map[entity["dim"]](**params) else: entity["dim"] = SYSTEM_ENTITY_PREFIX + entity["dim"].replace("_", "-") if entity: entities.append(entity) if entity_types: entities = [e for e in entities if e["dim"] in entity_types] processed_query = load_query( sentence, query_factory=self._resource_loader.query_factory, domain=kwargs.get("domain"), intent=kwargs.get("intent"), ) return [ Annotator._item_to_query_entity(entity, processed_query) for entity in entities ] def _resolve_time_date(self, entity, entity_types=None): candidates = self.duckling.get_candidates_for_text( entity["body"], language=self.language, locale=self.locale ) if len(candidates) == 0: return time_entities = ["sys_duration", "sys_interval", "sys_time"] if entity_types: time_entities = [e for e in time_entities if e in entity_types] if SpacyAnnotator._resolve_time_exact_match(entity, candidates, time_entities): return entity elif 
SpacyAnnotator._resolve_largest_substring( entity, candidates, entity_types=time_entities, is_time_related=True ): return entity @staticmethod def _get_time_entity_type(candidate): if candidate["dim"] == "duration": return "sys_duration" if candidate["dim"] == "time": if candidate["value"]["type"] == "interval": return "sys_interval" else: return "sys_time" @staticmethod def _resolve_time_exact_match(entity, candidates, time_entities): for candidate in candidates: candidate_entity = SpacyAnnotator._get_time_entity_type(candidate) if ( candidate_entity in time_entities and candidate["body"] == entity["body"] ): entity["dim"] = candidate_entity entity["value"] = candidate["value"] return entity @staticmethod def _resolve_largest_substring(entity, candidates, entity_types, is_time_related): largest_candidate = None resolved_entity_type = None for entity_type in entity_types: for candidate in candidates: if is_time_related: candidate_entity = SpacyAnnotator._get_time_entity_type(candidate) else: candidate_entity = candidate["entity_type"] if ( candidate_entity == entity_type and candidate["body"] in entity["body"] and ( largest_candidate is None or len(candidate["body"]) > len(largest_candidate["body"]) ) ): largest_candidate = candidate resolved_entity_type = entity_type if largest_candidate: entity["body"] = largest_candidate["body"] offset = entity["start"] entity["start"] = offset + largest_candidate["start"] entity["end"] = offset + largest_candidate["end"] entity["value"] = largest_candidate["value"] entity["dim"] = resolved_entity_type return entity def _resolve_cardinal(self, entity): if self._resolve_exact_match(entity): return entity candidates = self.duckling.get_candidates_for_text( entity["body"], language=self.language, locale=self.locale ) if self._resolve_largest_substring( entity, candidates, entity_types=["sys_number"], is_time_related=False ): return entity def _resolve_money(self, entity, sentence): for symbol in CURRENCY_SYMBOLS: if symbol in sentence: start = entity["start"] if (start == 1 and sentence[0] == symbol) or ( start >= 2 and sentence[start - 2 : start] == " " + symbol ): entity["start"] -= 1 entity["body"] = sentence[entity["start"] : entity["end"]] return self._resolve_exact_match(entity) def _resolve_ordinal(self, entity): return self._resolve_exact_match(entity) def _resolve_exact_match(self, entity): entity["dim"] = ANNOTATOR_TO_SYS_ENTITY_MAPPINGS[entity["dim"]] candidates = self.duckling.get_candidates_for_text( entity["body"], language=self.language, locale=self.locale ) if len(candidates) == 0: return for candidate in candidates: if ( candidate["entity_type"] == entity["dim"] and entity["body"] == candidate["body"] ): entity["value"] = candidate["value"] return entity def _resolve_quantity(self, entity): candidates = self.duckling.get_candidates_for_text(entity["body"]) if len(candidates) == 0: entity["dim"] = "sys_other-quantity" return entity entity_types = ["distance", "quantity"] for entity_type in entity_types: for candidate in candidates: if ( candidate["dim"] == entity_type and candidate["body"] == entity["body"] ): entity["value"] = candidate["value"] entity["dim"] = ANNOTATOR_TO_SYS_ENTITY_MAPPINGS[entity_type] return entity if SpacyAnnotator._resolve_largest_substring( entity, candidates, entity_types=entity_types, is_time_related=False ): return entity else: entity["dim"] = "sys_other-quantity" return entity def _resolve_percent(self, entity): entity["dim"] = ANNOTATOR_TO_SYS_ENTITY_MAPPINGS[entity["dim"]] candidates = 
self.duckling.get_candidates_for_text( entity["body"], language=self.language, locale=self.locale ) if len(candidates) == 0: return possible_values = [] for candidate in candidates: if candidate["entity_type"] == "sys_number": value = candidate["value"]["value"] if isinstance(value, float): entity["value"]["value"] = value / 100 return entity else: possible_values.append(value) entity["value"]["value"] = max(possible_values) / 100 return entity def _resolve_person(self, entity): entity["dim"] = ANNOTATOR_TO_SYS_ENTITY_MAPPINGS[entity["dim"]] if self._is_plural_entity(entity): entity["value"] = {"value": entity["body"][:-2]} entity["body"] = entity["body"][:-2] entity["end"] -= 2 return entity
Apache License 2.0
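A behavior sketch; constructing SpacyAnnotator loads a spaCy model, and the app path is a placeholder:

>>> annotator = SpacyAnnotator(app_path='my_app')
>>> annotator._is_plural_entity({'body': "Joey's"})
True
>>> annotator._is_plural_entity({'body': 'Joey'})
False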
daxm/fmcapi
fmcapi/api_objects/deployment_services/deploymentrequests.py
DeploymentRequests.__init__
python
def __init__(self, fmc):
    logging.debug("In __init__ for DeploymentRequests() class.")

    self.fmc = fmc
    self.URL = f"{self.fmc.configuration_url}{self.URL_SUFFIX}"
    self.uuids = None
Initialize DeploymentRequests object.

:param fmc (object): FMC object
:return: None
https://github.com/daxm/fmcapi/blob/fc4bad7ff733a6283e83970d7844c73e7e88a50c/fmcapi/api_objects/deployment_services/deploymentrequests.py#L19-L30
import logging
from .deployabledevices import DeployableDevices
import datetime


class DeploymentRequests(object):
    URL_SUFFIX = "/deployment/deploymentrequests"
BSD 3-Clause New or Revised License
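A usage sketch following fmcapi's context-manager style; the host and credentials are placeholders, and the package-level export of DeploymentRequests is assumed:

>>> import fmcapi
>>> with fmcapi.FMC(host='10.0.0.1', username='admin', password='***') as fmc:
...     deployments = fmcapi.DeploymentRequests(fmc)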
hfaran/ubc-timetabler
timetabler/scheduler.py
Scheduler.add_constraint
python
def add_constraint(self, constraint):
    self._constraints.append(constraint)
Add constraint ``constraint`` to list of constraints

:type constraint: callable
:param constraint: A callable that takes a Schedule and returns
    True or False depending on whether a constraint is met
https://github.com/hfaran/ubc-timetabler/blob/ce8fd668fd60c847e9524288ace58a9ec5677cbc/timetabler/scheduler.py#L97-L105
import logging from itertools import combinations, ifilter, product from timetabler.ssc import SSCConnection from timetabler.util import check_equal, all_unique from timetabler.schedule import Schedule class NoActivitiesError(Exception): def __init__(self, course_name): self.course_name = course_name def __str__(self): return self.course_name class Scheduler(object): def __init__(self, courses, session="2014W", terms=(1, 2), refresh=False, duplicates=True, ssc_conn=None): self.ssc_conn = SSCConnection() if ssc_conn is None else ssc_conn self.courses = {c: self.ssc_conn.get_course(c, session, refresh=refresh, duplicates=duplicates) for c in courses} self.terms = terms self.session = session self._constraints = [] def generate_schedules(self, bad_statuses=("Full", "Blocked")): schedules_by_course = {} for name, course in self.courses.items(): logging.info("Generating schedules for {} ...".format(name)) if not course.activities: raise NoActivitiesError(name) acts = course.activities r = sum(c[1] for c in course.num_section_constraints) combs = combinations(acts, r) filter_func = lambda combo: all([ all( sum(int(isinstance(act, constraint[0])) for act in combo) == constraint[1] for constraint in course.num_section_constraints ), all(act.term in self.terms for act in combo), (check_equal([act.term for act in combo]) or any(act.is_multi_term for act in combo)), all(a.status not in bad_statuses for a in combo), all(c(combo) for c in course.constraints) ]) filtered_combs = filter(filter_func, combs) schedules_by_course[name] = filtered_combs logging.info("Schedules for {} generated.".format(name)) all_scheds = self._generate_combinations(schedules_by_course) filter_func = lambda s: all([ not self._check_schedule_conflicts(s) ]) filtered_all_scheds = ifilter(filter_func, all_scheds) logging.info("Generating all valid schedules ...") schedules = [Schedule(sched) for sched in filtered_all_scheds] filter_func = lambda s: all(c(s) for c in self._constraints) schedules = filter(filter_func, schedules) logging.info("Found {} valid schedules.".format(len(schedules))) return schedules
MIT License
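Any callable taking a Schedule and returning a bool qualifies as a constraint; the course name below is illustrative, and constructing Scheduler queries the SSC:

>>> scheduler = Scheduler(['CPSC 310'], session='2014W')
>>> scheduler.add_constraint(lambda schedule: True)   # trivially accepts all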
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/sensor/gitter.py
GitterSensor.update
python
def update(self):
    data = self._data.user.unread_items(self._room)

    if 'error' not in data.keys():
        self._mention = len(data['mention'])
        self._state = len(data['chat'])
    else:
        _LOGGER.error("Not joined: %s", self._room)
Get the latest data and update the state.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/sensor/gitter.py#L97-L104
import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME, CONF_API_KEY, CONF_ROOM from homeassistant.helpers.entity import Entity REQUIREMENTS = ['gitterpy==0.1.6'] _LOGGER = logging.getLogger(__name__) ATTR_MENTION = 'mention' ATTR_ROOM = 'room' ATTR_USERNAME = 'username' DEFAULT_NAME = 'Gitter messages' DEFAULT_ROOM = 'home-assistant/home-assistant' ICON = 'mdi:message-settings-variant' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_ROOM, default=DEFAULT_ROOM): cv.string, }) def setup_platform(hass, config, add_devices, discovery_info=None): from gitterpy.client import GitterClient from gitterpy.errors import GitterTokenError name = config.get(CONF_NAME) api_key = config.get(CONF_API_KEY) room = config.get(CONF_ROOM) gitter = GitterClient(api_key) try: username = gitter.auth.get_my_id['name'] except GitterTokenError: _LOGGER.error("Token is not valid") return False add_devices([GitterSensor(gitter, room, name, username)], True) class GitterSensor(Entity): def __init__(self, data, room, name, username): self._name = name self._data = data self._room = room self._username = username self._state = None self._mention = 0 self._unit_of_measurement = 'Msg' @property def name(self): return self._name @property def state(self): return self._state @property def unit_of_measurement(self): return self._unit_of_measurement @property def device_state_attributes(self): return { ATTR_USERNAME: self._username, ATTR_ROOM: self._room, ATTR_MENTION: self._mention, } @property def icon(self): return ICON
MIT License
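A sketch of exercising the sensor outside Home Assistant, using the gitterpy client shown in the context; the token and room are placeholders:

>>> from gitterpy.client import GitterClient
>>> gitter = GitterClient('API_TOKEN')
>>> sensor = GitterSensor(gitter, 'home-assistant/home-assistant',
...                       'Gitter messages', 'my-username')
>>> sensor.update()
>>> sensor.state   # unread chat count, or unchanged if the room is not joined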
charanpald/apgl
apgl/graph/DictGraph.py
DictGraph.getNumEdges
python
def getNumEdges(self):
    numEdges = 0
    for vertex1 in self.adjacencies.keys():
        numEdges += len(self.adjacencies[vertex1])

    if not self.undirected:
        return numEdges
    else:
        for vertex1 in self.adjacencies.keys():
            if vertex1 in self.adjacencies[vertex1]:
                numEdges += 1
        return numEdges/2
Returns the total number of edges in the graph.
https://github.com/charanpald/apgl/blob/f8b71be2c49c62ace7ce5691216d2ff041ff978b/apgl/graph/DictGraph.py#L108-L123
import numpy import heapq import scipy.sparse from apgl.graph.AbstractSingleGraph import AbstractSingleGraph class DictGraph(AbstractSingleGraph): def __init__(self, undirected=True): self.undirected = undirected self.adjacencies = {} self.vertices = {} def addEdge(self, vertex1, vertex2, value=1.0): if value == None: raise ValueError("Cannot have None as edge value") self.__touchVertex(vertex1) self.__touchVertex(vertex2) self.adjacencies[vertex1][vertex2] = value if self.undirected: self.adjacencies[vertex2][vertex1] = value def addEdges(self, edgeList, edgeValues=None): i = 0 for edge in edgeList: (vertex1, vertex2) = edge if edgeValues == None: value = 1 else: value = edgeValues[i] self.__touchVertex(vertex1) self.__touchVertex(vertex2) self.adjacencies[vertex1][vertex2] = value if self.undirected: self.adjacencies[vertex2][vertex1] = value i += 1 def __touchVertex(self, vertexId): if vertexId not in self.vertices: self.vertices[vertexId] = None if vertexId not in self.adjacencies: self.adjacencies[vertexId] = {} def removeEdge(self, vertex1, vertex2): self.__removeDirectedEdge(vertex1, vertex2) if self.undirected: self.__removeDirectedEdge(vertex2, vertex1) def __removeDirectedEdge(self, vertex1, vertex2): if vertex1 not in self.adjacencies: raise ValueError("Vertex is not present in graph: " + str(vertex1)) if vertex2 not in self.adjacencies[vertex1]: raise ValueError("Vertex is not a neighbour of " + str(vertex1) + " in graph: " + str(vertex2)) del self.adjacencies[vertex1][vertex2] def isUndirected(self): return self.undirected
BSD 3-Clause New or Revised License
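A self-contained check of the double-counting correction (each undirected edge is stored in both adjacency dicts, hence the final division by two):

>>> g = DictGraph()
>>> g.addEdges([('a', 'b'), ('b', 'c')])
>>> int(g.getNumEdges())
2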
vmware/vsphere-automation-sdk-python
samples/vsphere/vcenter/vm/hardware/serial.py
cleanup_backends
python
def cleanup_backends():
    datacenter_name = testbed.config['SERIAL_PORT_DATACENTER_NAME']
    datastore_path = testbed.config['SERIAL_PORT_DATASTORE_PATH']
    delete_file(client, service_instance, 'Serial Port',
                datacenter_name, datastore_path)
Cleanup after the serial port samples.

The file backing the serial port's FILE backing needs to be removed, or
else the next time the VM is powered on and connected to the serial port,
the VM will post a question asking if the file should be Replaced or
Appended. This is only an issue for backings that are write-only.
https://github.com/vmware/vsphere-automation-sdk-python/blob/73624d9e20083002af770cf8763683f3c4681a16/samples/vsphere/vcenter/vm/hardware/serial.py#L232-L248
__author__ = 'VMware, Inc.' __vcenter_version__ = '6.5+' import atexit from vmware.vapi.vsphere.client import create_vsphere_client from com.vmware.vcenter.vm.hardware_client import Serial from pyVim.connect import SmartConnect, Disconnect from samples.vsphere.common.sample_util import parse_cli_args_vm from samples.vsphere.common.sample_util import pp from samples.vsphere.common.ssl_helper import get_unverified_context from samples.vsphere.common.vim.file import delete_file from samples.vsphere.vcenter.helper.vm_helper import get_vm from samples.vsphere.vcenter.setup import testbed from samples.vsphere.common.ssl_helper import get_unverified_session vm = None vm_name = None client = None service_instance = None cleardata = False serials_to_delete = [] orig_serial_summaries = None def setup(context=None): global vm_name, client, service_instance, cleardata if context: client = context.client vm_name = testbed.config['VM_NAME_DEFAULT'] service_instance = context.service_instance else: server, username, password, cleardata, skip_verification, vm_name = parse_cli_args_vm(testbed.config['VM_NAME_DEFAULT']) session = get_unverified_session() if skip_verification else None client = create_vsphere_client(server=server, username=username, password=password, session=session) context = None if skip_verification: context = get_unverified_context() service_instance = SmartConnect(host=server, user=username, pwd=password, sslContext=context) atexit.register(Disconnect, service_instance) def run(): global vm vm = get_vm(client, vm_name) if not vm: raise Exception('Sample requires an existing vm with name ({}). ' 'Please create the vm first.'.format(vm_name)) print("Using VM '{}' ({}) for Serial Sample".format(vm_name, vm)) print('\n# Example: List all Serial ports for a VM') serial_summaries = client.vcenter.vm.hardware.Serial.list(vm=vm) print('vm.hardware.Serial.list({}) -> {}'.format(vm, serial_summaries)) global orig_serial_summaries orig_serial_summaries = serial_summaries for serial_summary in serial_summaries: serial = serial_summary.port serial_info = client.vcenter.vm.hardware.Serial.get(vm=vm, port=serial) print('vm.hardware.Serial.get({}, {}) -> {}'.format(vm, serial, pp(serial_info))) global serials_to_delete print('\n# Example: Create Serial port with defaults') serial_create_spec = Serial.CreateSpec() serial = client.vcenter.vm.hardware.Serial.create(vm, serial_create_spec) print('vm.hardware.Serial.create({}, {}) -> {}'. format(vm, serial_create_spec, serial)) serials_to_delete.append(serial) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) cleanup_backends() print('\n# Example: Create Serial port with FILE backing') serial_port_datastore_path = testbed.config['SERIAL_PORT_DATASTORE_PATH'] serial_create_spec = Serial.CreateSpec( start_connected=True, allow_guest_control=True, backing=Serial.BackingSpec(type=Serial.BackingType.FILE, file=serial_port_datastore_path)) serial = client.vcenter.vm.hardware.Serial.create(vm, serial_create_spec) print('vm.hardware.Serial.create({}, {}) -> {}'. format(vm, serial_create_spec, serial)) serials_to_delete.append(serial) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. 
format(vm, serial, pp(serial_info))) print('\n# Example: Create Serial port to use NETWORK_SERVER') serial_port_network_server_location = testbed.config['SERIAL_PORT_NETWORK_SERVER_LOCATION'] serial_create_spec = Serial.CreateSpec( start_connected=True, allow_guest_control=True, backing=Serial.BackingSpec(type=Serial.BackingType.NETWORK_SERVER, network_location=serial_port_network_server_location)) serial = client.vcenter.vm.hardware.Serial.create(vm, serial_create_spec) print('vm.hardware.Serial.create({}, {}) -> {}'. format(vm, serial_create_spec, serial)) serials_to_delete.append(serial) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) print('\n# Example: Update Serial port to use NETWORK_CLIENT') serial_port_network_client_location = testbed.config['SERIAL_PORT_NETWORK_CLIENT_LOCATION'] serial_port_network_proxy = testbed.config['SERIAL_PORT_NETWORK_PROXY'] serial_update_spec = Serial.UpdateSpec( start_connected=False, allow_guest_control=False, backing=Serial.BackingSpec(type=Serial.BackingType.NETWORK_CLIENT, network_location=serial_port_network_client_location, proxy=serial_port_network_proxy)) client.vcenter.vm.hardware.Serial.update(vm, serial, serial_update_spec) print('vm.hardware.Serial.update({}, {}) -> {}'. format(vm, serial_update_spec, serial)) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) print('\n# Starting VM to run connect/disconnect sample') print('vm.Power.start({})'.format(vm)) client.vcenter.vm.Power.start(vm) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) print('\n# Example: Connect Serial port after powering on VM') client.vcenter.vm.hardware.Serial.connect(vm, serial) print('vm.hardware.Serial.connect({}, {})'.format(vm, serial)) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) print('\n# Example: Disconnect Serial port while VM is powered on') client.vcenter.vm.hardware.Serial.disconnect(vm, serial) print('vm.hardware.Serial.disconnect({}, {})'.format(vm, serial)) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) print('\n# Stopping VM after connect/disconnect sample') print('vm.Power.stop({})'.format(vm)) client.vcenter.vm.Power.stop(vm) serial_info = client.vcenter.vm.hardware.Serial.get(vm, serial) print('vm.hardware.Serial.get({}, {}) -> {}'. format(vm, serial, pp(serial_info))) serial_summaries = client.vcenter.vm.hardware.Serial.list(vm=vm) print('vm.hardware.Serial.list({}) -> {}'.format(vm, serial_summaries)) cleanup_backends() def cleanup(): print('\n# Delete VM Serial ports that were added') for serial in serials_to_delete: client.vcenter.vm.hardware.Serial.delete(vm, serial) print('vm.hardware.Serial.delete({}, {})'.format(vm, serial)) serial_summaries = client.vcenter.vm.hardware.Serial.list(vm) print('vm.hardware.Serial.list({}) -> {}'.format(vm, serial_summaries)) if set(orig_serial_summaries) != set(serial_summaries): print('vm.hardware.Serial WARNING: ' 'Final Serial ports info does not match original')
MIT License
diofant/diofant
diofant/tensor/array/dense_ndim_array.py
DenseNDimArray.reshape
python
def reshape(self, *newshape): new_total_size = functools.reduce(lambda x, y: x*y, newshape) if new_total_size != self._loop_size: raise ValueError('Invalid reshape parameters ' + str(newshape)) return type(self)(self._array, newshape)
Returns a MutableDenseNDimArray instance with the new shape. The number of elements must be compatible with the new shape. The positional arguments of the method set the new shape. Examples ======== >>> a = MutableDenseNDimArray([1, 2, 3, 4, 5, 6], (2, 3)) >>> a.shape (2, 3) >>> a [[1, 2, 3], [4, 5, 6]] >>> b = a.reshape(3, 2) >>> b.shape (3, 2) >>> b [[1, 2], [3, 4], [5, 6]]
https://github.com/diofant/diofant/blob/05c50552b0e0533f1dbf2ec05e65b6c45b7e2c11/diofant/tensor/array/dense_ndim_array.py#L89-L115
import functools import itertools from ...core import Basic, Tuple from ...core.sympify import sympify from ...matrices import Matrix from ...utilities import flatten from .mutable_ndim_array import MutableNDimArray from .ndim_array import ImmutableNDimArray, NDimArray class DenseNDimArray(NDimArray): def __getitem__(self, index): syindex = self._check_symbolic_index(index) if syindex is not None: return syindex if isinstance(index, tuple) and any(isinstance(i, slice) for i in index): def slice_expand(s, dim): if not isinstance(s, slice): return s, start, stop, step = s.indices(dim) return [start + i*step for i in range((stop-start)//step)] sl_factors = [slice_expand(i, dim) for (i, dim) in zip(index, self.shape)] eindices = itertools.product(*sl_factors) array = [self._array[self._parse_index(i)] for i in eindices] nshape = [len(el) for i, el in enumerate(sl_factors) if isinstance(index[i], slice)] return type(self)(array, nshape) else: if isinstance(index, slice): return self._array[index] else: index = self._parse_index(index) return self._array[index] @classmethod def zeros(cls, *shape): list_length = functools.reduce(lambda x, y: x*y, shape) return cls._new(([0]*list_length,), shape) def tomatrix(self): if self.rank() != 2: raise ValueError('Dimensions must be of size of 2') return Matrix(self.shape[0], self.shape[1], self._array) def __iter__(self): return self._array.__iter__()
BSD 3-Clause New or Revised License
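One more doctest-style check, grounded in the ValueError branch of the reshape body above (a is the array from the docstring examples; the requested shape 4x2 has 8 elements, which cannot hold the 6 existing ones):

>>> a.reshape(4, 2)
Traceback (most recent call last):
...
ValueError: Invalid reshape parameters (4, 2)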
quantumlib/openfermion-cirq
openfermioncirq/gates/fermionic_simulation.py
InteractionOperatorFermionicGate.interaction_operator_generator
python
def interaction_operator_generator( self, *, operator: Optional[openfermion.InteractionOperator] = None, modes: Optional[Sequence[int]] = None ) -> openfermion.InteractionOperator: if modes is None: modes = tuple(range(self.num_qubits())) if operator is None: n_modes = max(modes) + 1 operator = openfermion.InteractionOperator.zero(n_modes) else: n_modes = operator.n_qubits fermion_operator = self.fermion_generator return openfermion.get_interaction_operator(fermion_operator, n_qubits=n_modes)
Constructs the Hamiltonian corresponding to the gate's generator.
https://github.com/quantumlib/openfermion-cirq/blob/655b00fee21c94cc96c343c63f7c52ea1aa329dc/openfermioncirq/gates/fermionic_simulation.py#L372-L388
import abc import itertools from typing import (cast, Dict, Optional, Sequence, Tuple, TYPE_CHECKING, Union) import cirq import numpy as np import openfermion import scipy.linalg as la import sympy if TYPE_CHECKING: import openfermioncirq as ofc def _arg(x): if x == 0: return 0 if cirq.is_parameterized(x): return sympy.arg(x) return np.angle(x) def _canonicalize_weight(w): if w == 0: return (0, 0) if cirq.is_parameterized(w): return (cirq.PeriodicValue(abs(w), 2 * sympy.pi), sympy.arg(w)) period = 2 * np.pi return (np.round((w.real % period) if (w == np.real(w)) else (abs(w) % period) * w / abs(w), 8), 0) def state_swap_eigen_component(x: str, y: str, sign: int = 1, angle: float = 0): if not (isinstance(x, str) and isinstance(y, str)): raise TypeError('not (isinstance(x, str) and isinstance(y, str))') if len(x) != len(y): raise ValueError('len(x) != len(y)') if set(x).union(y).difference('01'): raise ValueError('Arguments must be 0-1 strings.') if x == y: raise ValueError('x == y') if sign not in (-1, 1): raise ValueError('sign not in (-1, 1)') dim = 2**len(x) i, j = int(x, 2), int(y, 2) component = np.zeros((dim, dim), dtype=np.complex128) component[i, i] = component[j, j] = 0.5 component[j, i] = sign * 0.5 * 1j**(angle * 2 / np.pi) component[i, j] = sign * 0.5 * 1j**(-angle * 2 / np.pi) return component def fermionic_simulation_gates_from_interaction_operator( operator: openfermion.InteractionOperator): n_qubits = operator.n_qubits gates: Dict[Tuple[int, ...], cirq.Gate] = {} if operator.constant: gates[()] = operator.constant for p in range(n_qubits): coeff = operator.one_body_tensor[p, p] if coeff: gates[(p,)] = cirq.Z**(coeff / np.pi) for modes in itertools.combinations(range(n_qubits), 2): gate: Optional[InteractionOperatorFermionicGate] = ( QuadraticFermionicSimulationGate.from_interaction_operator( operator=operator, modes=modes)) if gate: gates[modes] = gate for modes in itertools.combinations(range(n_qubits), 3): gate = CubicFermionicSimulationGate.from_interaction_operator( operator=operator, modes=modes) if gate: gates[modes] = gate for modes in itertools.combinations(range(n_qubits), 4): gate = QuarticFermionicSimulationGate.from_interaction_operator( operator=operator, modes=modes) if gate: gates[modes] = gate return gates def sum_of_interaction_operator_gate_generators( n_modes: int, gates: Dict[Tuple[int, ...], Union[float, cirq.Gate]], ) -> openfermion.InteractionOperator: operator = openfermion.InteractionOperator.zero(n_modes) for indices, gate in gates.items(): if not indices: operator.constant += gate elif isinstance(gate, cirq.ZPowGate): coeff = gate._exponent * np.pi operator.constant += gate._exponent * gate._global_shift * np.pi operator.one_body_tensor[indices * 2] += coeff elif isinstance(gate, InteractionOperatorFermionicGate): gate.interaction_operator_generator(operator=operator, modes=indices) else: raise TypeError(f'Gate type {gate} not supported.') return operator @cirq.value_equality(approximate=True) class ParityPreservingFermionicGate(cirq.Gate, metaclass=abc.ABCMeta): def __init__( self, weights: Optional[Tuple[complex, ...]] = None, absorb_exponent: bool = False, exponent: cirq.TParamVal = 1.0, global_shift: float = 0.0, ) -> None: if weights is None: weights = (1.,) * self.num_weights() self.weights = weights self._exponent = exponent self._global_shift = global_shift self._canonical_exponent_cached = None if absorb_exponent: self.absorb_exponent_into_weights() @staticmethod @abc.abstractmethod def fermion_generator_components() -> 
Tuple[openfermion.FermionOperator]: @abc.abstractmethod def fswap(self, i: int): @classmethod def num_weights(cls) -> int: return len(cls.fermion_generator_components()) @property def qubit_generator_matrix(self) -> np.ndarray: return openfermion.jordan_wigner_sparse(self.fermion_generator, self.num_qubits()).toarray() @property def fermion_generator(self) -> openfermion.FermionOperator: half_generator = sum(( w * G for w, G in zip(self.weights, self.fermion_generator_components())), openfermion.FermionOperator()) return half_generator + openfermion.hermitian_conjugated(half_generator) def _diagram_exponent(self, args: cirq.CircuitDiagramInfoArgs, *, ignore_global_phase: bool = True): if not isinstance(self._exponent, (int, float)): return self._exponent result = float(self._exponent) if args.precision is not None: result = np.around(result, args.precision) return result @classmethod def wire_symbol(cls, use_unicode: bool): return cls.__name__ def _resolve_parameters_(self, resolver): resolved_weights = cirq.resolve_parameters(self.weights, resolver) resolved_exponent = cirq.resolve_parameters(self._exponent, resolver) resolved_global_shift = cirq.resolve_parameters(self._global_shift, resolver) return type(self)(resolved_weights, exponent=resolved_exponent, global_shift=resolved_global_shift) def _value_equality_values_(self): return tuple( _canonicalize_weight(w * self.exponent) for w in list(self.weights) + [self._global_shift]) def _is_parameterized_(self) -> bool: return any( cirq.is_parameterized(v) for V in self._value_equality_values_() for v in V) def absorb_exponent_into_weights(self): period = (2 * sympy.pi) if self._is_parameterized_() else 2 * (np.pi) new_weights = [] for weight in self.weights: if not weight: new_weights.append(weight) continue old_abs = abs(weight) new_abs = (old_abs * self._exponent) % period new_weights.append(weight * new_abs / old_abs) self.weights = tuple(new_weights) self._global_shift *= self._exponent self._exponent = 1 def permute(self, init_pos: Sequence[int]): I = range(self.num_qubits()) if sorted(init_pos) != list(I): raise ValueError(f'{init_pos} is not a permutation of {I}.') curr_pos = list(init_pos) for i in I: for j in I[i % 2:-1:2]: if curr_pos[j] > curr_pos[j + 1]: self.fswap(j) curr_pos[j:j + 2] = reversed(curr_pos[j:j + 2]) assert curr_pos == list(I) def permuted(self, init_pos: Sequence[int]): gate = self.__copy__() gate.permute(init_pos) return gate def __copy__(self): return type(self)(self.weights, exponent=self.exponent, global_shift=self._global_shift) def _circuit_diagram_info_(self, args: cirq.CircuitDiagramInfoArgs ) -> cirq.CircuitDiagramInfo: wire_symbols = [self.wire_symbol(args.use_unicode_characters) ] * self.num_qubits() wire_symbols[0] += f'{tuple(self.weights)}' exponent = self._diagram_exponent(args) return cirq.CircuitDiagramInfo(wire_symbols=wire_symbols, exponent=exponent) class InteractionOperatorFermionicGate(ParityPreservingFermionicGate): @classmethod @abc.abstractmethod def from_interaction_operator( cls, *, operator: openfermion.InteractionOperator, modes: Optional[Sequence[int]] = None, ) -> Optional['ParityPreservingFermionicGate']:
Apache License 2.0
openforcefield/openff-interchange
openff/interchange/interop/internal/gromacs.py
to_top
python
def to_top(openff_sys: "Interchange", file_path: Union[Path, str]): if isinstance(file_path, str): path = Path(file_path) if isinstance(file_path, Path): path = file_path with open(path, "w") as top_file: top_file.write("; Generated by OpenFF Interchange\n") _write_top_defaults(openff_sys, top_file) typemap = _build_typemap(openff_sys) virtual_site_map = _build_virtual_site_map(openff_sys) _write_atomtypes(openff_sys, top_file, typemap, virtual_site_map) _write_moleculetype(top_file) _write_atoms(top_file, openff_sys, typemap, virtual_site_map) _write_valence(top_file, openff_sys) _write_virtual_sites( top_file, openff_sys, virtual_site_map, ) _write_system(top_file, openff_sys)
Write a GROMACS topology (.top) file. See https://manual.gromacs.org/documentation/current/reference-manual/file-formats.html#top for more details. This code is partially copied from InterMol, see https://github.com/shirtsgroup/InterMol/tree/v0.1/intermol/gromacs
https://github.com/openforcefield/openff-interchange/blob/a080e348b62c36c3c6a6b04e8afde64556f3186e/openff/interchange/interop/internal/gromacs.py#L209-L243
import math from pathlib import Path from typing import IO, TYPE_CHECKING, Callable, Dict, Set, Tuple, Union import mdtraj as md import numpy as np from openff.units import unit from openff.interchange.components.base import ( BaseAngleHandler, BaseBondHandler, BaseElectrostaticsHandler, BaseImproperTorsionHandler, BaseProperTorsionHandler, BasevdWHandler, ) from openff.interchange.components.mdtraj import ( _iterate_angles, _iterate_impropers, _iterate_pairs, _iterate_propers, _OFFBioTop, _store_bond_partners, ) from openff.interchange.components.potentials import Potential from openff.interchange.exceptions import UnsupportedExportError from openff.interchange.models import PotentialKey, TopologyKey, VirtualSiteKey if TYPE_CHECKING: from openff.interchange.components.interchange import Interchange def to_gro(openff_sys: "Interchange", file_path: Union[Path, str], decimal=8): if isinstance(file_path, str): path = Path(file_path) if isinstance(file_path, Path): path = file_path rounded_positions = np.round(openff_sys.positions, decimal) rounded_positions = rounded_positions.to(unit.nanometer).magnitude n = decimal n_particles = openff_sys.positions.shape[0] typemap = _build_typemap(openff_sys) virtual_site_map = _build_virtual_site_map(openff_sys) n_particles += len(virtual_site_map) with open(path, "w") as gro: gro.write("Generated by OpenFF\n") gro.write(f"{n_particles}\n") for atom in openff_sys.topology.mdtop.atoms: res = atom.residue residue_idx = (res.index + 1) % 100000 residue_name = res.name[:5] atom_name = typemap[atom.index] atom_index = (atom.index + 1) % 100000 gro.write( f"%5d%-5s%5s%5d%{n+5}.{n}f%{n+5}.{n}f%{n+5}.{n}f\n" % ( residue_idx, residue_name, atom_name, atom_index, rounded_positions[atom.index, 0], rounded_positions[atom.index, 1], rounded_positions[atom.index, 2], ) ) for virtual_site_key in virtual_site_map: atom_name = "VS" residue_idx = 1 residue_name = "" atom_index = virtual_site_map[virtual_site_key] gro.write( f"%5d%-5s%5s%5d%{n+5}.{n}f%{n+5}.{n}f%{n+5}.{n}f\n" % ( residue_idx, residue_name, atom_name, atom_index, 0.0, 0.0, 0.0, ) ) if openff_sys.box is None: box = 11 * np.eye(3) else: box = openff_sys.box.to(unit.nanometer).magnitude if (box == np.diag(np.diagonal(box))).all(): for i in range(3): gro.write(f"{box[i, i]:11.7f}") else: for i in range(3): gro.write(f"{box[i, i]:11.7f}") for i in range(3): for j in range(3): if i != j: gro.write(f"{box[i, j]:11.7f}") gro.write("\n") def _read_coordinates(file_path: Union[Path, str]) -> np.ndarray: def _infer_coord_precision(file_path: Union[Path, str]) -> int: with open(file_path) as file_in: file_in.readline() file_in.readline() atom_line = file_in.readline() period_indices = [i for i, x in enumerate(atom_line) if x == "."] spacing_between_periods = period_indices[-1] - period_indices[-2] precision = spacing_between_periods - 5 return precision precision = _infer_coord_precision(file_path) coordinate_width = precision + 5 coordinate_columns = [ 20, 20 + coordinate_width, 20 + 2 * coordinate_width, 20 + 3 * coordinate_width, ] with open(file_path) as gro_file: gro_file.readline() n_atoms = int(gro_file.readline()) unitless_coordinates = np.zeros((n_atoms, 3)) for coordinate_index in range(n_atoms): line = gro_file.readline() _ = int(line[:5]) _ = line[5:10] _ = line[10:15] _ = int(line[15:20]) x = float(line[coordinate_columns[0] : coordinate_columns[1]]) y = float(line[coordinate_columns[1] : coordinate_columns[2]]) z = float(line[coordinate_columns[2] : coordinate_columns[3]]) 
unitless_coordinates[coordinate_index] = np.array([x, y, z]) coordinates = unitless_coordinates * unit.nanometer return coordinates def _read_box(file_path: Union[Path, str]) -> np.ndarray: with open(file_path) as gro_file: gro_file.readline() n_atoms = int(gro_file.readline()) box_line = gro_file.readlines()[n_atoms] parsed_box = [float(val) for val in box_line.split()] if len(parsed_box) == 3: box = parsed_box * np.eye(3) * unit.nanometer return box def from_gro(file_path: Union[Path, str]) -> "Interchange": if isinstance(file_path, str): path = Path(file_path) if isinstance(file_path, Path): path = file_path coordinates = _read_coordinates(path) box = _read_box(path) from openff.interchange.components.interchange import Interchange interchange = Interchange() interchange.box = box interchange.positions = coordinates return interchange
MIT License
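Since the record above parses fixed-width .gro files, a tiny hand-written round trip makes the column logic concrete. This is a sketch under assumptions: the file name tiny.gro is made up, and Interchange() with default fields is used exactly as from_gro does above.

# Hand-write a one-atom .gro file: 20 header columns, then three
# 12-character coordinates (precision 7, inferred from the period spacing).
gro_text = (
    "Handwritten example\n"
    "1\n"
    "    1RES      C    1   0.1000000   0.2000000   0.3000000\n"
    "  2.0000000  2.0000000  2.0000000\n"
)
with open("tiny.gro", "w") as f:
    f.write(gro_text)

interchange = from_gro("tiny.gro")   # positions in nm, cubic 2 nm box
print(interchange.positions)         # [[0.1 0.2 0.3]] nanometer
print(interchange.box)               # 2 * identity matrix, in nanometer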
asyml/texar-pytorch
texar/torch/data/vocabulary.py
Vocab.unk_token
python
def unk_token(self) -> str: return self._unk_token
r"""A string of the special token indicating unknown token.
https://github.com/asyml/texar-pytorch/blob/5d67ae957763a3683ce8dd5e4d1208cc9fdb4d33/texar/torch/data/vocabulary.py#L225-L228
import warnings from collections import defaultdict from typing import DefaultDict, Dict, List, Optional, Sequence, Tuple, Union import numpy as np from texar.torch.utils.utils import ( _recur_split, dict_lookup, str_join, strip_special_tokens) __all__ = [ "SpecialTokens", "Vocab", "map_ids_to_strs", ] class SpecialTokens: PAD = "<PAD>" BOS = "<BOS>" EOS = "<EOS>" UNK = "<UNK>" def _make_defaultdict(keys: Sequence[Union[int, str]], values: Sequence[Union[int, str]], default_value: Union[int, str]) -> DefaultDict[Union[int, str], Union[int, str]]: dict_: DefaultDict[Union[int, str], Union[int, str]] dict_ = defaultdict(lambda: default_value) for k, v in zip(keys, values): dict_[k] = v return dict_ class Vocab: def __init__(self, filename: str, pad_token: str = SpecialTokens.PAD, bos_token: str = SpecialTokens.BOS, eos_token: str = SpecialTokens.EOS, unk_token: str = SpecialTokens.UNK): self._filename = filename self._pad_token = pad_token self._bos_token = bos_token self._eos_token = eos_token self._unk_token = unk_token self._id_to_token_map_py, self._token_to_id_map_py = self.load(self._filename) def load(self, filename: str) -> Tuple[Dict[int, str], Dict[str, int]]: with open(filename, "r") as vocab_file: vocab = list(line.strip() for line in vocab_file) warnings.simplefilter("ignore", UnicodeWarning) if self._bos_token in vocab: raise ValueError("Special begin-of-seq token already exists in the " "vocabulary: '%s'" % self._bos_token) if self._eos_token in vocab: raise ValueError("Special end-of-seq token already exists in the " "vocabulary: '%s'" % self._eos_token) if self._unk_token in vocab: raise ValueError("Special UNK token already exists in the " "vocabulary: '%s'" % self._unk_token) if self._pad_token in vocab: raise ValueError("Special padding token already exists in the " "vocabulary: '%s'" % self._pad_token) warnings.simplefilter("default", UnicodeWarning) vocab = [self._pad_token, self._bos_token, self._eos_token, self._unk_token] + vocab vocab_size = len(vocab) id_to_token_map_py = dict(zip(range(vocab_size), vocab)) token_to_id_map_py = dict(zip(vocab, range(vocab_size))) return id_to_token_map_py, token_to_id_map_py def map_ids_to_tokens_py(self, ids: Union[List[int], np.ndarray]) -> np.ndarray: return dict_lookup(self.id_to_token_map_py, ids, self.unk_token) def map_tokens_to_ids_py(self, tokens: List[str]) -> np.ndarray: return dict_lookup(self.token_to_id_map_py, tokens, self.unk_token_id) @property def id_to_token_map_py(self) -> Dict[int, str]: return self._id_to_token_map_py @property def token_to_id_map_py(self) -> Dict[str, int]: return self._token_to_id_map_py @property def size(self) -> int: return len(self.token_to_id_map_py) @property def bos_token(self) -> str: return self._bos_token @property def bos_token_id(self) -> int: return self.token_to_id_map_py[self._bos_token] @property def eos_token(self) -> str: return self._eos_token @property def eos_token_id(self) -> int: return self.token_to_id_map_py[self._eos_token] @property
Apache License 2.0
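A minimal usage sketch for the Vocab class around this property; the vocab.txt file (one token per line) is an assumption made for illustration:

# Build a tiny vocabulary file, then load it. Vocab.load() prepends the
# four special tokens, so "hello" gets id 4 and "world" gets id 5.
with open("vocab.txt", "w") as f:
    f.write("hello\nworld\n")

vocab = Vocab("vocab.txt")
print(vocab.unk_token)                               # "<UNK>" by default
print(vocab.map_tokens_to_ids_py(["hello", "oov"]))  # [4 3]: "oov" falls back to unk_token_id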
protothis/python-synology
src/synology_dsm/helpers.py
SynoFormatHelper.bytes_to_readable
python
def bytes_to_readable(num): if num < 512: return "0 Kb" elif num < 1024: return "1 Kb" for unit in ["", "Kb", "Mb", "Gb", "Tb", "Pb", "Eb", "Zb"]: if abs(num) < 1024.0: return "%3.1f%s" % (num, unit) num /= 1024.0 return "%.1f%s" % (num, "Yb")
Converts bytes to a human-readable format.
https://github.com/protothis/python-synology/blob/645b818be2013231ac126c6962d2f9092a5c3aae/src/synology_dsm/helpers.py#L8-L19
class SynoFormatHelper: @staticmethod
MIT License
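A few spot checks of the conversion above (outputs verified against the branch logic and the 1024 divisor):

print(SynoFormatHelper.bytes_to_readable(100))          # "0 Kb"  (< 512 bytes)
print(SynoFormatHelper.bytes_to_readable(1000))         # "1 Kb"  (512..1023 bytes)
print(SynoFormatHelper.bytes_to_readable(1536))         # "1.5Kb"
print(SynoFormatHelper.bytes_to_readable(5 * 1024**3))  # "5.0Gb"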
monaen/lightfieldreconstruction
evaluation_ViewSynthesis.py
ApertureWisePSNR
python
def ApertureWisePSNR(Groundtruth, Reconstruction): h, w, s, t = Groundtruth.shape[:4] PSNRs = np.zeros([s, t]) for i in range(s): for j in range(t): gtimg = Groundtruth[:, :, i, j, ...] gtimg = np.squeeze(gtimg) recons = Reconstruction[:, :, i, j, ...] recons = np.squeeze(recons) PSNRs[i, j] = psnr(gtimg, recons) return PSNRs
Calculate the PSNR value for each sub-aperture image of the input reconstructed light field. :param Groundtruth: input groundtruth light field :param Reconstruction: input reconstructed light field :return: aperture-wise PSNR values
https://github.com/monaen/lightfieldreconstruction/blob/5bc51c93d320a1a74e2050257ed16d95cb6edb5e/evaluation_ViewSynthesis.py#L113-L132
import tensorflow as tf import numpy as np import argparse import sys import cv2 import scipy.io as sio from tqdm import tqdm from utils.utils import psnr, ssim_exact, downsampling, LF_split_patches, shaveLF, shave_batch_LFs, shaveLF_by_factor, shaved_LF_reconstruct from tool.log_config import * log_config() tf.logging.set_verbosity(tf.logging.ERROR) parser = argparse.ArgumentParser(description="HDDRNet Tensorflow Implementation") parser.add_argument("--datapath", type=str, default="./data/testset/occlusions20/occlusions_48.mat", help="The evaluation data path") parser.add_argument("--batchSize", type=int, default=1, help="The batchsize of the input data") parser.add_argument("--imageSize", type=int, default=96, help="Spatial size of the input light fields") parser.add_argument("--viewSize", type=int, default=5, help="Angular size of the input light fields") parser.add_argument("--channels", type=int, default=1, help="Channels=1 means only the luma channel; Channels=3 means RGB channels (not supported)") parser.add_argument("--verbose", default=True, action="store_true", help="Whether print the network structure or not") parser.add_argument("--gamma_S", type=int, default=1, choices=[1, 2, 3, 4], help="Spatial downscaling factor") parser.add_argument("--gamma_A", type=int, default=4, choices=[0, 1, 2, 3, 4], help="Angular downscaling factor, '0' represents 3x3->7x7") parser.add_argument("--num_GRL_HRB", type=int, default=5, help="The number of HRB in GRLNet (only for AAAI model)") parser.add_argument("--num_SRe_HRB", type=int, default=3, help="The number of HRB in SReNet (only for AAAI model)") parser.add_argument("--pretrained_model", type=str, default="pretrained_models/M-HDDRNet/Ax4/M-HDDRNet", help="Path to store the pretrained model.") parser.add_argument("--select_gpu", type=str, default="3", help="Select the gpu for training or evaluation") args = parser.parse_args() def import_model(scale_S, scale_A): if scale_A == 1: if scale_S == 4: from networks.HDDRNet_Sx4 import HDDRNet if scale_S == 3: from networks.HDDRNet_Sx3 import HDDRNet if scale_S == 2: from networks.HDDRNet_Sx2 import HDDRNet elif scale_S == 1: if scale_A == 0: from networks.HDDRNet_A3x3_7x7 import HDDRNet if scale_A == 2: from networks.HDDRNet_Ax2 import HDDRNet if scale_A == 3: from networks.HDDRNet_Ax3 import HDDRNet if scale_A == 4: from networks.HDDRNet_Ax4 import HDDRNet else: if scale_A == 2 and scale_S == 2: from networks.HDDRNet_Sx2Ax2 import HDDRNet return HDDRNet def get_state(spatial_scale, angular_scale): statetype = "" if spatial_scale != 1: statetype += "Sx{:d}".format(spatial_scale) if angular_scale != 1: statetype += "Ax{:d}".format(angular_scale) return statetype
MIT License
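A minimal sketch of calling the function above on synthetic data. The shapes are assumptions (spatial 64x64, angular 5x5), and psnr is the helper imported from utils.utils in the script:

import numpy as np

gt = np.random.rand(64, 64, 5, 5)                       # ground-truth light field
rec = np.clip(gt + 0.01 * np.random.randn(64, 64, 5, 5), 0, 1)  # noisy "reconstruction"
psnrs = ApertureWisePSNR(gt, rec)                       # shape (5, 5), one PSNR per view
print(psnrs.mean())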
mplewis/csvtomd
csvtomd/csvtomd.py
pad_to
python
def pad_to(unpadded, target_len): under = target_len - len(unpadded) if under <= 0: return unpadded return unpadded + (' ' * under)
Pad a string to the target length in characters, or return the original string if it's longer than the target length.
https://github.com/mplewis/csvtomd/blob/1a23a5b37a973a1dc69ad4c69e81edea5d096ac9/csvtomd/csvtomd.py#L31-L39
import argparse import csv import sys DEFAULT_PADDING = 2 def check_negative(value): try: ivalue = int(value) except ValueError: raise argparse.ArgumentTypeError( '"%s" must be an integer' % value) if ivalue < 0: raise argparse.ArgumentTypeError( '"%s" must not be a negative value' % value) return ivalue
MIT License
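Two quick checks of pad_to's behaviour:

assert pad_to("ab", 5) == "ab   "       # padded with spaces to the target length
assert pad_to("abcdef", 3) == "abcdef"  # longer than target: returned unchanged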
hobson/aima
aima/search.py
Problem.actions
python
def actions(self, state): raise NotImplementedError
Return the actions that can be executed in the given state. The result would typically be a list, but if there are many actions, consider yielding them one at a time in an iterator, rather than building them all at once.
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/search.py#L30-L35
from utils import PriorityQueue, FIFOQueue, Stack, Dict from utils import name, print_table, ignore from utils import if_, unimplemented, memoize, infinity, update from utils import probability, argmax, argmax_random_tie, random_tests from utils import distance, weighted_sample_with_replacement import math, random, sys, bisect import string import time class Problem(object): def __init__(self, initial, goal=None): self.initial = initial; self.goal = goal
MIT License
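A toy subclass sketch showing the intended override; the two-location vacuum world and its action names are illustrative, not part of the excerpt:

class ToyVacuumProblem(Problem):
    """Two-location vacuum world where every action is always legal."""
    def actions(self, state):
        return ['Left', 'Right', 'Suck']

p = ToyVacuumProblem(initial=('A', 'Dirty', 'Dirty'))
print(p.actions(p.initial))  # ['Left', 'Right', 'Suck']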
algrx/algorithmx-python
algorithmx/api/EdgeSelection.py
EdgeSelection.length
python
def length(self: S, length: ElementArg[NumAttr]) -> S: return self.attrs(length=length)
Sets the length of the edge. This will only take effect when :meth:`~api.Canvas.edgelayout` is set to "individual". :param length: The length of the edge. :type length: :data:`~api.types.ElementArg`\\[:data:`~api.types.NumAttr`]
https://github.com/algrx/algorithmx-python/blob/1028a69d54c0d4776c4ed47d9f5609c863a9102d/algorithmx/api/EdgeSelection.py#L94-L101
from typing import Union, Mapping, Tuple, List, Iterable, Optional, TypeVar, Any from dataclasses import dataclass, replace from .ElementSelection import ElementSelection from .LabelSelection import LabelSelection from .types import ElementArg, NumAttr, AnyId from .utils import ElementContext, apply_attrs, eval_element_value, eval_element_dict EdgeId = Union[ Tuple[AnyId, AnyId], Tuple[AnyId, AnyId, AnyId], ] @dataclass class EdgeContext(ElementContext): edges: Optional[List[EdgeId]] = None S = TypeVar("S", bound="EdgeSelection") class EdgeSelection(ElementSelection): def __init__(self: S, context: EdgeContext): self._selection: EdgeContext = context def add( self: S, attrs: ElementArg[Mapping[str, ElementArg[Any]]] = {}, **kwargs: ElementArg[Any], ) -> S: def attr_fn(data, data_index: int, element_index: int): attr_obj = { **( eval_element_dict(attrs, data, data_index) if attrs is not None else {} ), **eval_element_dict(kwargs, data, data_index), } return ( { "source": str(self._selection.edges[element_index][0]), "target": str(self._selection.edges[element_index][1]), **attr_obj, } if self._selection.edges is not None else attr_obj ) apply_attrs(self._selection, attr_fn) return self.duration(0) def label(self, id: AnyId = 0) -> LabelSelection: return self.labels([id]) def labels(self, ids: Iterable[AnyId]) -> LabelSelection: return LabelSelection( replace( self._selection, ids=[str(l) for l in ids], data=None, parentkey="labels", parent=self._selection, ) ) def directed(self: S, directed: ElementArg[bool]) -> S: return self.attrs(directed=directed)
MIT License
allegro/django-powerdns-dnssec
powerdns/models/requests.py
DomainRequest.as_history_dump
python
def as_history_dump(self): return {}
We don't care about domain history for now
https://github.com/allegro/django-powerdns-dnssec/blob/333bdc668b6cda1a2ff240efd814a1896a1a8e07/powerdns/models/requests.py#L346-L348
import logging from django.db import models, transaction from django.conf import settings from django_extensions.db.fields.json import JSONField from dj.choices import Choices from dj.choices.fields import ChoiceField from django.contrib.contenttypes.fields import ContentType, GenericForeignKey from django.utils.translation import ugettext_lazy as _ from threadlocals.threadlocals import get_current_user from .powerdns import ( Domain, Owned, Record, validate_domain_name, ) from .ownership import Service from ..utils import AutoPtrOptions, RecordLike, TimeTrackable, flat_dict_diff log = logging.getLogger(__name__) def can_auto_accept_record_request(user_request, user, action): def _validate_domain(domain): if not domain: raise Exception( "Can't check auto acceptance without domain set" ) can_auto_accept = False domain = ( user_request.domain if action != 'delete' else user_request.target.domain ) _validate_domain(domain) if action == 'create': can_auto_accept = ( user_request.domain.can_auto_accept(user) and not user_request.is_sec_acceptance_required() ) elif action == 'update': can_auto_accept = ( user_request.domain.can_auto_accept(user) and user_request.record.can_auto_accept(user) and not user_request.is_sec_acceptance_required() ) elif action == 'delete': can_auto_accept = ( user_request.target.domain.can_auto_accept(user) and user_request.target.can_auto_accept(user) and not user_request.is_seo_acceptance_required() ) return can_auto_accept class RequestStates(Choices): _ = Choices.Choice OPEN = _('Open') ACCEPTED = _('Accepted') REJECTED = _('Rejected') class Request(Owned, TimeTrackable): class Meta: abstract = True state = ChoiceField( choices=RequestStates, default=RequestStates.OPEN, ) key = models.CharField( max_length=255, null=True, blank=True ) last_change_json = JSONField(null=True, blank=True) def save(self, *args, **kwargs): if self.owner is None: self.owner = get_current_user() super().save(*args, **kwargs) def _log_processed_request_message(self): log.warning('{} (id:{}) already {}'.format( self._meta.object_name, self.id, RequestStates.DescFromID(self.state).lower(), )) class DeleteRequest(Request): content_type = models.ForeignKey(ContentType) target_id = models.PositiveIntegerField() target = GenericForeignKey('content_type', 'target_id') @transaction.atomic def accept(self): if self.state != RequestStates.OPEN: self._log_processed_request_message() return old_dict = self.target.as_history_dump() new_dict = self.target.as_empty_history() result = flat_dict_diff(old_dict, new_dict) result['_request_type'] = 'delete' self.last_change_json = result self.target.delete() self.state = RequestStates.ACCEPTED self.save() @transaction.atomic def reject(self): if self.state != RequestStates.OPEN: self._log_processed_request_message() return self.state = RequestStates.REJECTED self.save() def __str__(self): return 'Delete {}'.format(self.target) def is_seo_acceptance_required(self): if self.owner and self.owner.is_superuser: return False return ( isinstance(self.target, Record) and self.target.type in settings.SEO_ACCEPTANCE_FOR_RECORD_TYPE and ( not self.target.domain.template or self.target.domain.template.is_public_domain ) and self.target.domain.require_seo_acceptance ) class ChangeCreateRequest(Request): ignore_fields = {'created', 'modified'} prefix = 'target_' class Meta: abstract = True def _get_json_history(self, object_): if object_.id: old_dict = object_.as_history_dump() else: old_dict = object_.as_empty_history() new_dict = self.as_history_dump() result = 
flat_dict_diff(old_dict, new_dict) result['_request_type'] = 'update' if object_.id else 'create' return result def _set_json_history(self, object_): self.last_change_json = self._get_json_history(object_) @transaction.atomic def accept(self): object_ = self.get_object() if self.state != RequestStates.OPEN: self._log_processed_request_message() return object_ self._set_json_history(object_) for field_name in type(self).copy_fields: if field_name in self.ignore_fields: continue if field_name == 'target_owner' and not getattr(self, field_name): continue setattr( object_, field_name[len(self.prefix):], getattr(self, field_name) ) object_.save() self.assign_object(object_) self.state = RequestStates.ACCEPTED self.save() return object_ @transaction.atomic def reject(self): if self.state != RequestStates.OPEN: self._log_processed_request_message() return object_ = self.get_object() self._set_json_history(object_) self.state = RequestStates.REJECTED self.save() def copy_records_data(self, fields_to_copy): all_fields = self._meta.get_all_field_names() for field_name, value in fields_to_copy: if field_name in all_fields: setattr(self, field_name, value) elif 'target_' + field_name in all_fields: setattr(self, 'target_' + field_name, value) else: log.warning("Unknown field {}:{}".format(field_name, value)) class DomainRequest(ChangeCreateRequest): copy_fields = [ 'target_name', 'target_master', 'target_type', 'target_account', 'target_remarks', 'target_template', 'target_reverse_template', 'target_auto_ptr', 'target_owner', 'target_service', ] domain = models.ForeignKey( Domain, related_name='requests', null=True, blank=True, help_text=_( 'The domain for which a change is requested' ), ) parent_domain = models.ForeignKey( Domain, related_name='child_requests', null=True, blank=True, help_text=_( 'The parent domain for which a new subdomain is to be created' ), ) target_service = models.ForeignKey(Service, blank=True, null=True) target_name = models.CharField( _("name"), max_length=255, validators=[validate_domain_name], blank=False, null=False, ) target_master = models.CharField( _("master"), max_length=128, blank=True, null=True, ) target_type = models.CharField( _("type"), max_length=6, blank=True, null=True, choices=Domain.DOMAIN_TYPE, ) target_account = models.CharField( _("account"), max_length=40, blank=True, null=True, ) target_remarks = models.TextField(_('Additional remarks'), blank=True) target_template = models.ForeignKey( 'powerdns.DomainTemplate', verbose_name=_('Template'), blank=True, null=True, related_name='template_for_requests' ) target_reverse_template = models.ForeignKey( 'powerdns.DomainTemplate', verbose_name=_('Reverse template'), blank=True, null=True, related_name='reverse_template_for_requests', help_text=_( 'A template that should be used for reverse domains when ' 'PTR templates are automatically created for A records in this ' 'template.' ) ) target_auto_ptr = ChoiceField( choices=AutoPtrOptions, default=AutoPtrOptions.ALWAYS, help_text=_( 'Should A records have auto PTR by default' ) ) target_unrestricted = models.BooleanField( _('Unrestricted'), null=False, default=False, help_text=_( "Can users that are not owners of this domain add records" "to it without owner's permission?" 
) ) target_owner = models.ForeignKey( settings.AUTH_USER_MODEL, verbose_name=_('Owner'), null=True, blank=False, related_name='+', ) def __str__(self): return self.target_name def get_object(self): if self.domain is not None: return self.domain else: return Domain() def assign_object(self, obj): self.domain = obj
BSD 2-Clause Simplified License
googlecloudplatform/gsutil
gslib/metrics.py
MetricsCollector._CollectPerformanceSummaryMetric
python
def _CollectPerformanceSummaryMetric(self): if self.perf_sum_params is None: return custom_params = {} for attr_name, label in ( ('num_processes', 'Num Processes'), ('num_threads', 'Num Threads'), ('num_retryable_service_errors', 'Num Retryable Service Errors'), ('num_retryable_network_errors', 'Num Retryable Network Errors'), ('avg_throughput', 'Average Overall Throughput'), ('num_objects_transferred', 'Number of Files/Objects Transferred'), ('total_bytes_transferred', 'Size of Files/Objects Transferred'), ): custom_params[_GA_LABEL_MAP[label]] = getattr(self.perf_sum_params, attr_name) if system_util.IS_LINUX: disk_start = self.perf_sum_params.disk_counters_start disk_end = system_util.GetDiskCounters() custom_params[_GA_LABEL_MAP['Disk I/O Time']] = ( sum([stat[4] + stat[5] for stat in disk_end.values()]) - sum([stat[4] + stat[5] for stat in disk_start.values()])) if self.perf_sum_params.has_cloud_src: src_url_type = 'both' if self.perf_sum_params.has_file_src else 'cloud' else: src_url_type = 'file' custom_params[_GA_LABEL_MAP['Source URL Type']] = src_url_type if self.perf_sum_params.uses_fan: strategy = 'both' if self.perf_sum_params.uses_slice else 'fan' else: strategy = 'slice' if self.perf_sum_params.uses_slice else 'none' custom_params[_GA_LABEL_MAP['Parallelism Strategy']] = strategy total_time = (self.perf_sum_params.thread_idle_time + self.perf_sum_params.thread_execution_time) if total_time: custom_params[_GA_LABEL_MAP['Thread Idle Time Percent']] = ( float(self.perf_sum_params.thread_idle_time) / float(total_time)) if self.perf_sum_params.thread_throughputs: throughputs = [ thread.GetThroughput() for thread in self.perf_sum_params.thread_throughputs.values() ] custom_params[_GA_LABEL_MAP['Slowest Thread Throughput']] = min( throughputs) custom_params[_GA_LABEL_MAP['Fastest Thread Throughput']] = max( throughputs) custom_params[_GA_LABEL_MAP['Provider Types']] = ','.join( sorted(self.perf_sum_params.provider_types)) transfer_types = { 'CloudToCloud': self.perf_sum_params.has_cloud_src and self.perf_sum_params.has_cloud_dst, 'CloudToFile': self.perf_sum_params.has_cloud_src and self.perf_sum_params.has_file_dst, 'DaisyChain': self.perf_sum_params.is_daisy_chain, 'FileToCloud': self.perf_sum_params.has_file_src and self.perf_sum_params.has_cloud_dst, 'FileToFile': self.perf_sum_params.has_file_src and self.perf_sum_params.has_file_dst, } action = ','.join( sorted([ transfer_type for transfer_type, cond in six.iteritems(transfer_types) if cond ])) apply_execution_time = _GetTimeInMillis( self.perf_sum_params.total_elapsed_time) self.CollectGAMetric(category=_GA_PERFSUM_CATEGORY, action=action, execution_time=apply_execution_time, **custom_params)
Aggregates PerformanceSummary info and adds the metric to the list.
https://github.com/googlecloudplatform/gsutil/blob/b1361dd5e9c2a246b328e871603f3a2b0d5fd5fa/gslib/metrics.py#L638-L740
from __future__ import absolute_import from __future__ import print_function from __future__ import division from __future__ import unicode_literals import atexit from collections import defaultdict from functools import wraps import logging import os import pickle import platform import re import socket import subprocess import sys import tempfile import textwrap import time import uuid import six from six.moves import input from six.moves import urllib import boto from gslib import VERSION from gslib.metrics_tuple import Metric from gslib.utils import system_util from gslib.utils.unit_util import CalculateThroughput from gslib.utils.unit_util import HumanReadableToBytes _GA_ENDPOINT = 'https://ssl.google-analytics.com/collect' _GA_TID = 'UA-36037335-16' _GA_TID_TESTING = 'UA-36037335-17' _GA_COMMANDS_CATEGORY = 'Command' _GA_ERRORRETRY_CATEGORY = 'RetryableError' _GA_ERRORFATAL_CATEGORY = 'FatalError' _GA_PERFSUM_CATEGORY = 'PerformanceSummary' _GOOGLE_CORP_HOST_RE = re.compile(r'.*google\.com$') _UUID_FILE_PATH = os.path.expanduser(os.path.join('~', '.gsutil/analytics-uuid')) _DISABLED_TEXT = 'DISABLED' _GA_LABEL_MAP = { 'Event Category': 'ec', 'Event Action': 'ea', 'Event Label': 'el', 'Event Value': 'ev', 'Command Name': 'cd1', 'Global Options': 'cd2', 'Command-Level Options': 'cd3', 'Config': 'cd4', 'Command Alias': 'cd5', 'Fatal Error': 'cd6', 'Parallelism Strategy': 'cd7', 'Source URL Type': 'cd8', 'Provider Types': 'cd9', 'Timestamp': 'cd10', 'Execution Time': 'cm1', 'Retryable Errors': 'cm2', 'Is Google Corp User': 'cm3', 'Num Processes': 'cm4', 'Num Threads': 'cm5', 'Number of Files/Objects Transferred': 'cm6', 'Size of Files/Objects Transferred': 'cm7', 'Average Overall Throughput': 'cm8', 'Num Retryable Service Errors': 'cm9', 'Num Retryable Network Errors': 'cm10', 'Thread Idle Time Percent': 'cm11', 'Slowest Thread Throughput': 'cm12', 'Fastest Thread Throughput': 'cm13', 'Disk I/O Time': 'cm14', } class MetricsCollector(object): def __init__(self, ga_tid=_GA_TID, endpoint=_GA_ENDPOINT): self.start_time = _GetTimeInMillis() self.endpoint = endpoint self.logger = logging.getLogger() cid = MetricsCollector._GetCID() use_test_property = boto.config.getbool('GSUtil', 'use_test_GA_property') if use_test_property: ga_tid = _GA_TID_TESTING config_values = self._ValidateAndGetConfigValues() is_corp_user = 0 if _GOOGLE_CORP_HOST_RE.match(socket.gethostname()): is_corp_user = 1 self.ga_params = { 'v': '1', 'tid': ga_tid, 'cid': cid, 't': 'event', _GA_LABEL_MAP['Config']: config_values, _GA_LABEL_MAP['Is Google Corp User']: is_corp_user, } self.user_agent = '{system}/{release}'.format(system=platform.system(), release=platform.release()) self._metrics = [] self.retryable_errors = defaultdict(int) self.perf_sum_params = None _instance = None _disabled_cache = None def _ValidateAndGetConfigValues(self): config_values = [] invalid_value_string = 'INVALID' def GetAndValidateConfigValue(section, category, validation_fn): try: config_value = boto.config.get_value(section, category) if config_value and validation_fn(config_value): config_values.append((category, config_value)) elif config_value: config_values.append((category, invalid_value_string)) except: config_values.append((category, invalid_value_string)) for section, bool_category in (('Boto', 'https_validate_certificates'), ('GSUtil', 'disable_analytics_prompt'), ('GSUtil', 'use_magicfile'), ('GSUtil', 'tab_completion_time_logs')): GetAndValidateConfigValue(section=section, category=bool_category, validation_fn=lambda val: 
str(val).lower() in ('true', 'false')) small_int_threshold = 2000 for section, small_int_category in ( ('Boto', 'debug'), ('Boto', 'http_socket_timeout'), ('Boto', 'num_retries'), ('Boto', 'max_retry_delay'), ('GSUtil', 'default_api_version'), ('GSUtil', 'sliced_object_download_max_components'), ('GSUtil', 'parallel_process_count'), ('GSUtil', 'parallel_thread_count'), ('GSUtil', 'software_update_check_period'), ('GSUtil', 'tab_completion_timeout'), ('OAuth2', 'oauth2_refresh_retries'), ): GetAndValidateConfigValue(section=section, category=small_int_category, validation_fn=lambda val: str(val).isdigit() and int(val) < small_int_threshold) for section, large_int_category in (('GSUtil', 'resumable_threshold'), ('GSUtil', 'rsync_buffer_lines'), ('GSUtil', 'task_estimation_threshold')): GetAndValidateConfigValue(section=section, category=large_int_category, validation_fn=lambda val: str(val).isdigit()) for section, data_size_category in ( ('GSUtil', 'parallel_composite_upload_component_size'), ('GSUtil', 'parallel_composite_upload_threshold'), ('GSUtil', 'sliced_object_download_component_size'), ('GSUtil', 'sliced_object_download_threshold'), ): config_value = boto.config.get_value(section, data_size_category) if config_value: try: size_in_bytes = HumanReadableToBytes(config_value) config_values.append((data_size_category, size_in_bytes)) except ValueError: config_values.append((data_size_category, invalid_value_string)) GetAndValidateConfigValue( section='GSUtil', category='check_hashes', validation_fn=lambda val: val in ('if_fast_else_fail', 'if_fast_else_skip', 'always', 'never')) GetAndValidateConfigValue( section='GSUtil', category='content_language', validation_fn=lambda val: val.isalpha() and len(val) <= 3) GetAndValidateConfigValue( section='GSUtil', category='json_api_version', validation_fn=lambda val: val[0].lower() == 'v' and val[1:].isdigit()) GetAndValidateConfigValue(section='GSUtil', category='prefer_api', validation_fn=lambda val: val in ('json', 'xml')) GetAndValidateConfigValue(section='OAuth2', category='token_cache', validation_fn=lambda val: val in ('file_system', 'in_memory')) return ','.join( sorted([ '{0}:{1}'.format(config[0], config[1]) for config in config_values ])) @staticmethod def GetCollector(ga_tid=_GA_TID): if MetricsCollector.IsDisabled(): return None if not MetricsCollector._instance: MetricsCollector._instance = MetricsCollector(ga_tid) return MetricsCollector._instance @staticmethod def IsDisabled(): if MetricsCollector._disabled_cache is None: MetricsCollector._CheckAndSetDisabledCache() return MetricsCollector._disabled_cache @classmethod def _CheckAndSetDisabledCache(cls): if os.environ.get('GSUTIL_TEST_ANALYTICS') == '1': cls._disabled_cache = True elif os.environ.get('GSUTIL_TEST_ANALYTICS') == '2': cls._disabled_cache = False cls.StartTestCollector() elif system_util.InvokedViaCloudSdk(): cls._disabled_cache = not os.environ.get('GA_CID') elif os.path.exists(_UUID_FILE_PATH): with open(_UUID_FILE_PATH) as f: cls._disabled_cache = (f.read() == _DISABLED_TEXT) else: cls._disabled_cache = True @classmethod def StartTestCollector(cls, endpoint='https://example.com', user_agent='user-agent-007', ga_params=None): if cls.IsDisabled(): os.environ['GSUTIL_TEST_ANALYTICS'] = '0' cls._disabled_cache = False cls._instance = cls(_GA_TID_TESTING, endpoint) if ga_params is None: ga_params = {'a': 'b', 'c': 'd'} cls._instance.ga_params = ga_params cls._instance.user_agent = user_agent if os.environ['GSUTIL_TEST_ANALYTICS'] != '2': cls._instance.start_time = 0 
@classmethod def StopTestCollector(cls, original_instance=None): os.environ['GSUTIL_TEST_ANALYTICS'] = '1' cls._disabled_cache = None cls._instance = original_instance @staticmethod def _GetCID(): if os.path.exists(_UUID_FILE_PATH): with open(_UUID_FILE_PATH) as f: cid = f.read() if cid: return cid return os.environ.get('GA_CID') def ExtendGAParams(self, new_params): self.ga_params.update(new_params) def GetGAParam(self, param_name): return self.ga_params.get(_GA_LABEL_MAP[param_name]) def CollectGAMetric(self, category, action, label=VERSION, value=0, execution_time=None, **custom_params): params = [('ec', category), ('ea', action), ('el', label), ('ev', value), (_GA_LABEL_MAP['Timestamp'], _GetTimeInMillis())] params.extend([ (k, v) for k, v in six.iteritems(custom_params) if v is not None ]) params.extend([ (k, v) for k, v in six.iteritems(self.ga_params) if v is not None ]) if execution_time is None: execution_time = _GetTimeInMillis() - self.start_time params.append((_GA_LABEL_MAP['Execution Time'], execution_time)) data = urllib.parse.urlencode(sorted(params)) self._metrics.append( Metric(endpoint=self.endpoint, method='POST', body=data, user_agent=self.user_agent)) class _PeformanceSummaryParams(object): def __init__(self): self.num_processes = 0 self.num_threads = 0 self.num_retryable_service_errors = 0 self.num_retryable_network_errors = 0 self.provider_types = set() if system_util.IS_LINUX: self.disk_counters_start = system_util.GetDiskCounters() self.uses_fan = False self.uses_slice = False self.thread_idle_time = 0 self.thread_execution_time = 0 self.thread_throughputs = defaultdict(self._ThreadThroughputInformation) self.avg_throughput = None self.total_elapsed_time = None self.total_bytes_transferred = None self.num_objects_transferred = 0 self.is_daisy_chain = False self.has_file_dst = False self.has_cloud_dst = False self.has_file_src = False self.has_cloud_src = False class _ThreadThroughputInformation(object): def __init__(self): self.total_bytes_transferred = 0 self.total_elapsed_time = 0 self.task_start_time = None self.task_size = None def LogTaskStart(self, start_time, bytes_to_transfer): self.task_start_time = start_time self.task_size = bytes_to_transfer def LogTaskEnd(self, end_time): self.total_elapsed_time += end_time - self.task_start_time self.total_bytes_transferred += self.task_size self.task_start_time = None self.task_size = None def GetThroughput(self): return CalculateThroughput(self.total_bytes_transferred, self.total_elapsed_time) def UpdatePerformanceSummaryParams(self, params): if self.GetGAParam('Command Name') not in ('cp', 'rsync'): return if self.perf_sum_params is None: self.perf_sum_params = self._PeformanceSummaryParams() if 'file_message' in params: self._ProcessFileMessage(file_message=params['file_message']) return for param_name, param in six.iteritems(params): if param_name in ('uses_fan', 'uses_slice', 'avg_throughput', 'is_daisy_chain', 'has_file_dst', 'has_cloud_dst', 'has_file_src', 'has_cloud_src', 'total_elapsed_time', 'total_bytes_transferred', 'num_objects_transferred'): cur_value = getattr(self.perf_sum_params, param_name) if not cur_value: setattr(self.perf_sum_params, param_name, param) if param_name in ('thread_idle_time', 'thread_execution_time', 'num_retryable_service_errors', 'num_retryable_network_errors'): cur_value = getattr(self.perf_sum_params, param_name) setattr(self.perf_sum_params, param_name, cur_value + param) if param_name in ('num_processes', 'num_threads'): cur_value = getattr(self.perf_sum_params, param_name) 
if cur_value < param: setattr(self.perf_sum_params, param_name, param) if param_name == 'provider_types': self.perf_sum_params.provider_types.update(param) def _ProcessFileMessage(self, file_message): thread_info = (self.perf_sum_params.thread_throughputs[( file_message.process_id, file_message.thread_id)]) if file_message.finished: if not (self.perf_sum_params.uses_slice or self.perf_sum_params.uses_fan): self.perf_sum_params.num_objects_transferred += 1 thread_info.LogTaskEnd(file_message.time) else: thread_info.LogTaskStart(file_message.time, file_message.size) def _CollectCommandAndErrorMetrics(self): command_name = self.GetGAParam('Command Name') if command_name: self.CollectGAMetric(category=_GA_COMMANDS_CATEGORY, action=command_name, **{ _GA_LABEL_MAP['Retryable Errors']: sum(self.retryable_errors.values()) }) for error_type, num_errors in six.iteritems(self.retryable_errors): self.CollectGAMetric(category=_GA_ERRORRETRY_CATEGORY, action=error_type, **{_GA_LABEL_MAP['Retryable Errors']: num_errors}) fatal_error_type = self.GetGAParam('Fatal Error') if fatal_error_type: self.CollectGAMetric(category=_GA_ERRORFATAL_CATEGORY, action=fatal_error_type)
Apache License 2.0
vincent-lg/tsunami
src/primaires/combat/types/armure.py
Armure.objets_contenus
python
def objets_contenus(self, conteneur): objets = [] if conteneur.au_fourreau: objet = conteneur.au_fourreau objets.append(objet) objets.extend(objet.prototype.objets_contenus(objet)) return objets
Returns the contained objects.
https://github.com/vincent-lg/tsunami/blob/36b3b974f6eefbf15cd5d5f099fc14630e66570b/src/primaires/combat/types/armure.py#L152-L160
from random import randint from bases.objet.attribut import Attribut from primaires.combat.types.editeurs.fourreau import EdtFourreau from primaires.format.fonctions import oui_ou_non from primaires.interpreteur.editeur.entier import Entier from primaires.objet.types.base import BaseType class Armure(BaseType): nom_type = "armure" _nom = "prototype_armure" _version = 1 empilable_sur = ["vêtement"] def __init__(self, cle=""): BaseType.__init__(self, cle) self.encaissement_fixe = 5 self.encaissement_variable = 0 self.fourreau = False self.fourreau_visible = True self.poids_max_fourreau = 1 self.types_fourreau = [] self._attributs = { "au_fourreau": Attribut(None), } self.etendre_editeur("f", "encaissement fixe", Entier, self, "encaissement_fixe") self.etendre_editeur("v", "encaissement variable", Entier, self, "encaissement_variable") self.etendre_editeur("fo", "fourreau", EdtFourreau, self, "") @property def str_types_fourreau(self): if len(self.types_fourreau) == 0: return "aucun" else: return ", ".join(self.types_fourreau) @property def str_fourreau(self): return oui_ou_non(self.fourreau) def travailler_enveloppes(self, enveloppes): fixe = enveloppes["f"] fixe.apercu = "{objet.encaissement_fixe}" fixe.prompt = "Encaissement fixe de l'armure : " fixe.aide_courte = "Entrez l'|ent|encaissement fixe|ff| de l'armure. Il " "représente\nla quantité de dégâts fixes que l'armure peut " "encaisser.\nÀ cet encaissement s'ajoute l'encaissement " "variable. Si les\ndégâts dépassent l'encaissement de l'armure, " "l'armure n'encaisse\nque ce qu'elle a été configurée pour " "et le personnage derrière\nreçoit les dégâts compensés. Si " "les dégâts sont inférieurs à\nl'enciassement de l'armure, " "le personnage ne reçoit rien.\n\n" "Encaissement fixe actuel : {objet.encaissement_fixe}" variable = enveloppes["v"] variable.apercu = "{objet.encaissement_variable}" variable.prompt = "Encaissement variable de l'armure : " variable.aide_courte = "Entrez l'|ent|encaissement variable|ff| de l'armure. Il " "représente\nla partie variable de l'encaissement global, " "celui-ci étant\nl'encaissement fixe plus l'encaissement " "variable déterminé aléatoirement,\nentre |ent|0|ff| et " "l'encaissement variable configuré. Une armure\navec un " "encaissement fixe de |ent|5|ff| et des dégâts variables de " "|ent|2|ff|\naura un encaissement entre |ent|5|ff| et " "|ent|7|ff|.\n\nEncaissement variable actuel : " "{objet.encaissement_variable}" fourreau = enveloppes["fo"] fourreau.apercu = "{objet.str_fourreau}" def encaisser(self, personnage, arme, degats): if degats <= 1: return 0 taux = 0.5 + personnage.stats.robustesse / 200 encaissement = self.encaissement_fixe if self.encaissement_variable > 0: encaissement = randint(self.encaissement_fixe, self.encaissement_fixe + self.encaissement_variable) encaissement = int(taux * encaissement) if encaissement > degats - 1: encaissement = degats - 1 return encaissement def calculer_poids(self): poids = self.poids_unitaire if self.au_fourreau: poids += self.au_fourreau.poids return round(poids, 3)
BSD 3-Clause New or Revised License
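A standalone sketch of the absorption arithmetic inside encaisser above; robustesse=100, degats=4, and the encaissement values 5/2 are made-up inputs for illustration:

from random import randint

robustesse = 100
degats = 4
taux = 0.5 + robustesse / 200            # 1.0 at robustesse == 100
encaissement = randint(5, 5 + 2)         # encaissement_fixe=5, variable up to 2
encaissement = int(taux * encaissement)  # somewhere in {5, 6, 7}
if encaissement > degats - 1:
    encaissement = degats - 1            # the armour never absorbs the whole hit
print(encaissement)                      # 3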
rasahq/rasa
rasa/utils/tensorflow/layers_utils.py
reduce_mean_equal
python
def reduce_mean_equal( x: tf.Tensor, y: tf.Tensor, mask: Optional[tf.Tensor] = None ) -> tf.Tensor: if mask is None: mask = tf.ones_like(x, dtype=tf.float32) equal_indices = tf.cast(tf.math.equal(x, y), tf.float32) * mask return tf.reduce_mean( tf.math.divide_no_nan( tf.reduce_sum(equal_indices, axis=-1), tf.reduce_sum(mask, axis=-1) ) )
Computes the mean fraction of matches between x and y. If `x` and `y` have `n` dimensions, the fraction of equal entries is computed along the last dimension, counting only the valid indices (those set in the mask), and the result is then averaged over the other `n-1` dimensions. For example, if: x = [[1,2,3,4], [5,6,7,8]] y = [[1,2,3,4], [5,6,0,0]] mask = [[1,1,1,1], [1,1,1,0]] then the output is calculated as `((4/4) + (2/3)) / 2`. Args: x: Any numeric tensor. y: Another tensor with the same shape and type as x. mask: Tensor with a mask to distinguish actual indices from padding indices. Shape should be the same as `x` and `y`. Returns: The mean of "x == y"
https://github.com/rasahq/rasa/blob/83eb56d257c2cd744183a2c5613a489f0a5b15d4/rasa/utils/tensorflow/layers_utils.py#L74-L113
import tensorflow as tf from tensorflow import Tensor from typing import Union, Optional def random_indices( batch_size: Union[Tensor, int], n: Union[Tensor, int], n_max: Union[Tensor, int] ) -> Tensor: return tf.random.uniform(shape=(batch_size, n), maxval=n_max, dtype=tf.int32) def batch_flatten(x: Tensor) -> Tensor: return tf.reshape(x, (-1, x.shape[-1])) def get_candidate_values( x: tf.Tensor, candidate_ids: tf.Tensor, ) -> tf.Tensor: tiled_x = tf.tile( tf.expand_dims(batch_flatten(x), 0), (tf.shape(candidate_ids)[0], 1, 1), ) candidate_values = tf.gather(tiled_x, candidate_ids, batch_dims=1) return candidate_values
Apache License 2.0
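The worked example in the `reduce_mean_equal` docstring is easy to verify directly. A minimal sketch reproducing the function body on those exact tensors (assumes TensorFlow 2.x):

import tensorflow as tf

x = tf.constant([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=tf.float32)
y = tf.constant([[1, 2, 3, 4], [5, 6, 0, 0]], dtype=tf.float32)
mask = tf.constant([[1, 1, 1, 1], [1, 1, 1, 0]], dtype=tf.float32)

# Matches counted only at valid (unmasked) positions.
equal_indices = tf.cast(tf.math.equal(x, y), tf.float32) * mask
# Per-row fraction of matches, then the mean over rows.
result = tf.reduce_mean(
    tf.math.divide_no_nan(
        tf.reduce_sum(equal_indices, axis=-1), tf.reduce_sum(mask, axis=-1)
    )
)
print(result.numpy())  # ~0.8333, i.e. ((4/4) + (2/3)) / 2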
jdasoftwaregroup/kartothek
kartothek/io/eager.py
store_dataframes_as_dataset
python
def store_dataframes_as_dataset( store: KeyValueStore, dataset_uuid: str, dfs: List[Union[pd.DataFrame, Dict[str, pd.DataFrame]]], metadata: Optional[Dict[str, Dict[str, Any]]] = None, partition_on: Optional[List[str]] = None, df_serializer: Optional[ParquetSerializer] = None, overwrite: bool = False, secondary_indices=None, metadata_storage_format: str = DEFAULT_METADATA_STORAGE_FORMAT, metadata_version: int = DEFAULT_METADATA_VERSION, ): if isinstance(dfs, (pd.DataFrame, dict)): dfs = [dfs] warnings.warn( "Passing a single dataframe instead of an iterable is deprecated and may " "be removed in the next major release.", DeprecationWarning, ) return store_dataframes_as_dataset__iter( dfs, store=store, dataset_uuid=dataset_uuid, metadata=metadata, partition_on=partition_on, df_serializer=df_serializer, overwrite=overwrite, secondary_indices=secondary_indices, metadata_storage_format=metadata_storage_format, metadata_version=metadata_version, )
Utility function to store a list of dataframes as a partitioned dataset with multiple tables (files). Useful for very small datasets where all data fits into memory. Parameters ---------- dfs: The dataframe(s) to be stored.
https://github.com/jdasoftwaregroup/kartothek/blob/6bc7e868435e98cbda0b695900f29d1ff7d49110/kartothek/io/eager.py#L521-L563
import warnings from functools import partial from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union, cast import pandas as pd from simplekv import KeyValueStore from kartothek.core.common_metadata import ( empty_dataframe_from_schema, make_meta, store_schema_metadata, ) from kartothek.core.dataset import DatasetMetadata, DatasetMetadataBuilder from kartothek.core.docs import default_docs from kartothek.core.factory import DatasetFactory, _ensure_factory from kartothek.core.naming import ( DEFAULT_METADATA_STORAGE_FORMAT, DEFAULT_METADATA_VERSION, METADATA_BASE_SUFFIX, METADATA_FORMAT_JSON, PARQUET_FILE_SUFFIX, get_partition_file_prefix, ) from kartothek.core.typing import StoreInput from kartothek.core.utils import lazy_store from kartothek.io.iter import store_dataframes_as_dataset__iter from kartothek.io_components.delete import ( delete_common_metadata, delete_indices, delete_top_level_metadata, ) from kartothek.io_components.gc import delete_files, dispatch_files_to_gc from kartothek.io_components.index import update_indices_from_partitions from kartothek.io_components.metapartition import ( SINGLE_TABLE, MetaPartition, parse_input_to_metapartition, ) from kartothek.io_components.read import dispatch_metapartitions_from_factory from kartothek.io_components.update import update_dataset_from_partitions from kartothek.io_components.utils import ( _ensure_compatible_indices, align_categories, normalize_args, sort_values_categorical, validate_partition_keys, ) from kartothek.io_components.write import raise_if_dataset_exists from kartothek.serialization import DataFrameSerializer from kartothek.serialization._parquet import ParquetSerializer from kartothek.utils.ktk_adapters import get_dataset_keys from kartothek.utils.store import copy_rename_keys __all__ = ( "delete_dataset", "read_dataset_as_dataframes", "read_table", "commit_dataset", "store_dataframes_as_dataset", "create_empty_dataset_header", "write_single_partition", "update_dataset_from_dataframes", "build_dataset_indices", "garbage_collect_dataset", "copy_dataset", ) @default_docs @normalize_args def delete_dataset(dataset_uuid=None, store=None, factory=None): ds_factory = _ensure_factory( dataset_uuid=dataset_uuid, load_schema=False, store=store, factory=factory, load_dataset_metadata=False, ) garbage_collect_dataset(factory=ds_factory) delete_indices(dataset_factory=ds_factory) for metapartition in dispatch_metapartitions_from_factory(ds_factory): metapartition = cast(MetaPartition, metapartition) metapartition.delete_from_store(dataset_uuid=dataset_uuid, store=store) delete_common_metadata(dataset_factory=ds_factory) delete_top_level_metadata(dataset_factory=ds_factory) @default_docs def read_dataset_as_dataframes( dataset_uuid: Optional[str] = None, store=None, tables: Optional[List[str]] = None, columns: Dict[str, List[str]] = None, concat_partitions_on_primary_index: bool = False, predicate_pushdown_to_io: bool = True, categoricals: Dict[str, List[str]] = None, label_filter: Callable = None, dates_as_object: bool = False, predicates: Optional[List[List[Tuple[str, str, Any]]]] = None, factory: Optional[DatasetFactory] = None, dispatch_by: Optional[List[str]] = None, ) -> List[pd.DataFrame]: ds_factory = _ensure_factory( dataset_uuid=dataset_uuid, store=store, factory=factory, load_dataset_metadata=True, ) mps = read_dataset_as_metapartitions( tables=tables, columns=columns, concat_partitions_on_primary_index=concat_partitions_on_primary_index, predicate_pushdown_to_io=predicate_pushdown_to_io, 
categoricals=categoricals, label_filter=label_filter, dates_as_object=dates_as_object, predicates=predicates, factory=ds_factory, dispatch_by=dispatch_by, dispatch_metadata=False, ) return [mp.data for mp in mps] @default_docs def read_dataset_as_metapartitions( dataset_uuid=None, store=None, tables=None, columns=None, concat_partitions_on_primary_index=False, predicate_pushdown_to_io=True, categoricals=None, label_filter=None, dates_as_object=False, predicates=None, factory=None, dispatch_by=None, dispatch_metadata=True, ): ds_factory = _ensure_factory( dataset_uuid=dataset_uuid, store=store, factory=factory, load_dataset_metadata=False, ) if len(ds_factory.tables) > 1: warnings.warn( "Trying to read a dataset with multiple internal tables. This functionality will be removed in the next " "major release. If you require a multi tabled data format, we recommend to switch to the kartothek Cube " "functionality. " "https://kartothek.readthedocs.io/en/stable/guide/cube/kartothek_cubes.html", DeprecationWarning, ) from .iter import read_dataset_as_metapartitions__iterator ds_iter = read_dataset_as_metapartitions__iterator( tables=tables, columns=columns, concat_partitions_on_primary_index=concat_partitions_on_primary_index, predicate_pushdown_to_io=predicate_pushdown_to_io, categoricals=categoricals, label_filter=label_filter, dates_as_object=dates_as_object, predicates=predicates, factory=ds_factory, dispatch_by=dispatch_by, dispatch_metadata=dispatch_metadata, ) return list(ds_iter) def _check_compatible_list(table, obj, argument_name=""): if obj is None: return obj elif isinstance(obj, dict): if table not in obj: raise ValueError( "Provided table {} is not compatible with input from argument {}.".format( table, argument_name ) ) return obj elif isinstance(obj, list): return {table: obj} else: raise TypeError( "Unknown type encountered for argument {}. 
Expected `list`, got `{}` instead".format( argument_name, type(obj) ) ) @default_docs def read_table( dataset_uuid: Optional[str] = None, store=None, table: Optional[str] = SINGLE_TABLE, columns: Dict[str, List[str]] = None, concat_partitions_on_primary_index: bool = False, predicate_pushdown_to_io: bool = True, categoricals: Dict[str, List[str]] = None, label_filter: Callable = None, dates_as_object: bool = False, predicates: Optional[List[List[Tuple[str, str, Any]]]] = None, factory: Optional[DatasetFactory] = None, ) -> pd.DataFrame: if concat_partitions_on_primary_index is not False: warnings.warn( "The keyword `concat_partitions_on_primary_index` is deprecated and will be removed in the next major release.", DeprecationWarning, ) if not isinstance(table, str): raise TypeError("Argument `table` needs to be a string") columns = _check_compatible_list(table, columns, "columns") categoricals = _check_compatible_list(table, categoricals, "categoricals") ds_factory = _ensure_factory( dataset_uuid=dataset_uuid, store=store, factory=factory, load_dataset_metadata=False, ) partitions = read_dataset_as_dataframes( tables=[table], columns=columns, concat_partitions_on_primary_index=concat_partitions_on_primary_index, predicate_pushdown_to_io=predicate_pushdown_to_io, categoricals=categoricals, label_filter=label_filter, dates_as_object=dates_as_object, predicates=predicates, factory=ds_factory, ) empty_df = empty_dataframe_from_schema( schema=ds_factory.table_meta[table], columns=columns[table] if columns is not None else None, ) if categoricals: empty_df = empty_df.astype({col: "category" for col in categoricals[table]}) dfs = [partition_data[table] for partition_data in partitions] + [empty_df] if categoricals: dfs = align_categories(dfs, categoricals[table]) df = pd.concat(dfs, ignore_index=True, sort=False) if len(empty_df.columns) > 0 and list(empty_df.columns) != list(df.columns): df = df.reindex(empty_df.columns, copy=False, axis=1) return df @default_docs @normalize_args def commit_dataset( store: Optional[StoreInput] = None, dataset_uuid: Optional[str] = None, new_partitions: Optional[Iterable[MetaPartition]] = None, output_dataset_uuid: Optional[str] = None, delete_scope: Optional[Iterable[Dict[str, Any]]] = None, metadata: Dict = None, df_serializer: DataFrameSerializer = None, metadata_merger: Callable[[List[Dict]], Dict] = None, default_metadata_version: int = DEFAULT_METADATA_VERSION, partition_on: Optional[Iterable[str]] = None, factory: Optional[DatasetFactory] = None, secondary_indices: Optional[Iterable[str]] = None, ): if output_dataset_uuid is not None: warnings.warn( "The keyword `output_dataset_uuid` has no use and will be removed in the next major release ", DeprecationWarning, ) if df_serializer is not None: warnings.warn( "The keyword `df_serializer` is deprecated and will be removed in the next major release.", DeprecationWarning, ) if not new_partitions and not metadata and not delete_scope: raise ValueError( "Need to provide either new data, new metadata or a delete scope. None of it was provided." 
) store = lazy_store(store) ds_factory, metadata_version, partition_on = validate_partition_keys( dataset_uuid=dataset_uuid, store=store, ds_factory=factory, default_metadata_version=default_metadata_version, partition_on=partition_on, ) mps = parse_input_to_metapartition( new_partitions, metadata_version=metadata_version ) if secondary_indices: mps = mps.build_indices(columns=secondary_indices) mps_list = [_maybe_infer_files_attribute(mp, dataset_uuid) for mp in mps] dmd = update_dataset_from_partitions( mps_list, store_factory=store, dataset_uuid=dataset_uuid, ds_factory=ds_factory, delete_scope=delete_scope, metadata=metadata, metadata_merger=metadata_merger, ) return dmd def _maybe_infer_files_attribute(metapartition, dataset_uuid): new_mp = metapartition.as_sentinel() for mp in metapartition: if len(mp.files) == 0: if mp.data is None or len(mp.data) == 0: raise ValueError( "Trying to commit partitions without `data` or `files` information." "Either one is necessary to infer the dataset tables" ) new_files = {} for table in mp.data: new_files[table] = ( get_partition_file_prefix( dataset_uuid=dataset_uuid, partition_label=mp.label, table=table, metadata_version=mp.metadata_version, ) + PARQUET_FILE_SUFFIX ) mp = mp.copy(files=new_files) new_mp = new_mp.add_metapartition(mp) return new_mp @default_docs @normalize_args
MIT License
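A minimal sketch of calling `store_dataframes_as_dataset` against an in-memory store. The signature above types `store` as a simplekv `KeyValueStore` (other kartothek versions accept store factories instead); the UUID and column names are arbitrary:

import pandas as pd
from simplekv.memory import DictStore
from kartothek.io.eager import store_dataframes_as_dataset

store = DictStore()  # any simplekv KeyValueStore works for small demos
df = pd.DataFrame({"A": [1, 2], "B": ["x", "y"]})

# Each element of `dfs` becomes one partition of the dataset.
dataset = store_dataframes_as_dataset(
    store=store,
    dataset_uuid="demo_uuid",
    dfs=[df],
)
print(dataset.partitions)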
lemmonation/jm-nat
fairseq/sequence_generator.py
SequenceGenerator.generate
python
def generate( self, models, sample, prefix_tokens=None, bos_token=None, **kwargs ): model = EnsembleModel(models) if not self.retain_dropout: model.eval() encoder_input = { k: v for k, v in sample['net_input'].items() if k != 'prev_output_tokens' } src_tokens = encoder_input['src_tokens'] src_lengths = (src_tokens.ne(self.eos) & src_tokens.ne(self.pad)).long().sum(dim=1) input_size = src_tokens.size() bsz = input_size[0] src_len = input_size[1] beam_size = self.beam_size use_golden_length = self.use_golden_length finalized = [[] for i in range(bsz)] def get_hypo_nat(decoded_id): return { 'tokens': decoded_id, 'score': 0.0, 'attention': None, 'alignment': None, 'positional_scores': torch.Tensor([0.0]), } def copy_batches(tensor, num_copies): if tensor is None: return None x_size = tensor.size() tensor = tensor.contiguous().view(x_size[0], 1, -1) tensor = tensor.repeat(1, num_copies, 1) if len(x_size)==2: return tensor.view(-1, x_size[1]) elif len(x_size)==3: return tensor.view(-1, x_size[1], x_size[2]) else: raise NotImplementedError def select_worst(token_probs, num_mask): bsz, seq_len = token_probs.size() masks = [token_probs[batch, :].topk(max(1, num_mask[batch]), largest=False, sorted=False)[1] for batch in range(bsz)] masks = [torch.cat([mask, mask.new(seq_len - mask.size(0)).fill_(mask[0])], dim=0) for mask in masks] return torch.stack(masks, dim=0) encoder_outs = model.forward_encoder(encoder_input) if use_golden_length: gold_target_len = sample['target'].ne(self.pad).sum(-1) beam_starts = gold_target_len - (beam_size - 1) // 2 beam_ends = gold_target_len + beam_size // 2 + 1 beam = torch.stack([torch.arange(beam_starts[batch], beam_ends[batch], device=beam_starts.device) for batch in range(gold_target_len.size(0))], dim=0) else: predicted_lengths = encoder_outs[0]['predicted_lengths'] beam = predicted_lengths.topk(beam_size, dim=1)[1] beam[beam<2] = 2 max_len = beam.max().item() length_mask = torch.triu(src_tokens.new(max_len, max_len).fill_(1).long(), 1) length_mask = torch.stack([length_mask[beam[batch] - 1] for batch in range(bsz)], dim=0) tgt_tokens = src_tokens.new(bsz, beam_size, max_len).fill_(self.mask) tgt_tokens = (1 - length_mask) * tgt_tokens + length_mask * self.pad tgt_tokens = tgt_tokens.view(bsz * beam_size, max_len) pad_mask = tgt_tokens.eq(self.pad) seq_lens = tgt_tokens.size(1) - pad_mask.sum(dim=1) encoder_outs_value = copy_batches(encoder_outs[0]['encoder_out'].transpose(0,1), beam_size) encoder_outs_value = encoder_outs_value.transpose(0,1) encoder_padding = copy_batches(encoder_outs[0]['encoder_padding_mask'], beam_size) encoder_outs = [{'encoder_out': encoder_outs_value, 'encoder_padding_mask': encoder_padding, 'predicted_lengths': encoder_outs[0]['predicted_lengths']}] tgt_tokens, token_probs, _ = model.forward_decoder( tgt_tokens, encoder_outs, temperature=self.temperature, ) assign_single_value_byte(tgt_tokens, pad_mask, self.pad) assign_single_value_byte(token_probs, pad_mask, 1.0) for i in range(1, self.mask_pred_iter+1): num_mask = (seq_lens.float()*(1.0-i/self.mask_pred_iter)).long() assign_single_value_byte(token_probs, pad_mask, 1.0) mask_ind = select_worst(token_probs, num_mask) assign_single_value_long(tgt_tokens, mask_ind, self.mask) assign_single_value_byte(tgt_tokens, pad_mask, self.pad) new_tgt_tokens, new_token_probs, all_token_probs = model.forward_decoder( tgt_tokens, encoder_outs, temperature=self.temperature, ) assign_multi_value_long(token_probs, mask_ind, new_token_probs) assign_single_value_byte(token_probs, pad_mask, 1.0) 
assign_multi_value_long(tgt_tokens, mask_ind, new_tgt_tokens) assign_single_value_byte(tgt_tokens, pad_mask, self.pad) lprobs = token_probs.log().sum(-1) hypotheses = tgt_tokens.view(bsz, beam_size, max_len) lprobs = lprobs.view(bsz, beam_size) tgt_lengths = (1 - length_mask).sum(-1) length_penalty = ((5.0 + tgt_lengths.float()) ** self.len_penalty / (6.0 ** self.len_penalty)) length_penalty = length_penalty.view((bsz, beam_size)) avg_log_prob = lprobs / length_penalty best_lengths = avg_log_prob.max(-1)[1] hypotheses = torch.stack([hypotheses[b, l, :] for b, l in enumerate(best_lengths)], dim=0) for i in range(bsz): finalized[i].append(get_hypo_nat(hypotheses[i])) return finalized
Generate a batch of translations. Args: models (List[~fairseq.models.FairseqModel]): ensemble of models sample (dict): batch prefix_tokens (torch.LongTensor, optional): force decoder to begin with these tokens
https://github.com/lemmonation/jm-nat/blob/12d00a7dbbf956e0882348d4ce2bc65f5801acd8/fairseq/sequence_generator.py#L130-L265
import math import torch import torch.nn.functional as F from fairseq import search, utils, checkpoint_utils from fairseq.models import FairseqIncrementalDecoder def assign_single_value_byte(x, i, y): x.view(-1)[i.view(-1).nonzero()] = y def assign_multi_value_byte(x, i, y): x.view(-1)[i.view(-1).nonzero()] = y.view(-1)[i.view(-1).nonzero()] def assign_single_value_long(x, i, y): b, l = x.size() i = i + torch.arange(0, b*l, l, device=i.device).unsqueeze(1) x.view(-1)[i.view(-1)] = y def assign_multi_value_long(x, i, y): b, l = x.size() i = i + torch.arange(0, b*l, l, device=i.device).unsqueeze(1) x.view(-1)[i.view(-1)] = y.view(-1)[i.view(-1)] class SequenceGenerator(object): def __init__( self, tgt_dict, beam_size=1, max_len_a=0, max_len_b=200, min_len=1, stop_early=True, normalize_scores=True, len_penalty=1., unk_penalty=0., retain_dropout=False, sampling=False, sampling_topk=-1, temperature=1., diverse_beam_groups=-1, diverse_beam_strength=0.5, match_source_len=False, no_repeat_ngram_size=0, mask_pred_iter=10, use_golden_length=False, args=None, ): self.pad = tgt_dict.pad() self.unk = tgt_dict.unk() self.eos = tgt_dict.eos() self.mask = tgt_dict.mask() self.vocab_size = len(tgt_dict) self.beam_size = beam_size self.use_golden_length = use_golden_length self.beam_size = min(beam_size, self.vocab_size - 1) self.max_len_a = max_len_a self.max_len_b = max_len_b self.min_len = min_len self.stop_early = stop_early self.normalize_scores = normalize_scores self.len_penalty = len_penalty self.unk_penalty = unk_penalty self.retain_dropout = retain_dropout self.temperature = temperature self.match_source_len = match_source_len self.no_repeat_ngram_size = no_repeat_ngram_size self.mask_pred_iter = mask_pred_iter assert sampling_topk < 0 or sampling, '--sampling-topk requires --sampling' assert temperature > 0, '--temperature must be greater than 0' if sampling: self.search = search.Sampling(tgt_dict, sampling_topk) elif diverse_beam_groups > 0: self.search = search.DiverseBeamSearch(tgt_dict, diverse_beam_groups, diverse_beam_strength) elif match_source_len: self.search = search.LengthConstrainedBeamSearch( tgt_dict, min_len_a=1, min_len_b=0, max_len_a=1, max_len_b=0, ) else: self.search = search.BeamSearch(tgt_dict) @torch.no_grad()
MIT License
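The decoding loop in `SequenceGenerator.generate` above is mask-predict refinement: at iteration i it re-masks the `seq_len * (1 - i/T)` lowest-confidence tokens and asks the model to re-predict them. A minimal sketch of just that schedule, with random numbers standing in for the real `model.forward_decoder` outputs (all values are dummies):

import torch

T = 4                      # number of refinement iterations
seq_len, mask_id = 6, 0    # toy values; the real code uses tgt_dict.mask()

tokens = torch.randint(1, 10, (1, seq_len))   # initial full prediction
probs = torch.rand(1, seq_len)                # model confidences per token

for i in range(1, T + 1):
    num_mask = int(seq_len * (1 - i / T))     # linear masking schedule
    if num_mask == 0:
        break
    worst = probs.topk(num_mask, largest=False)[1]  # least confident slots
    tokens.scatter_(1, worst, mask_id)              # re-mask them
    # ... here the real code calls model.forward_decoder(tokens, ...) ...
    probs.scatter_(1, worst, torch.rand(1, num_mask))  # pretend re-prediction
print(tokens)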
nestauk/old_nesta_daps
nesta/packages/arxiv/collect_arxiv.py
update_existing_articles
python
def update_existing_articles(article_batch, engine): Session = sessionmaker(engine) session = Session() logging.info(f"Updating a batch of {len(article_batch)} existing articles") for article in article_batch: if article.get('institutes'): raise ValueError("Institute links cannot be written using this method. Use add_article_institutes instead") article_categories = [dict(article_id=article['id'], category_id=cat_id) for article in article_batch for cat_id in article.pop('categories', [])] article_fields_of_study = [dict(article_id=article['id'], fos_id=fos_id) for article in article_batch for fos_id in article.pop('fields_of_study', [])] for row in article_batch: if 'institute_match_attempted' not in row.keys(): row.update({'institute_match_attempted': False}) logging.debug("bulk update mapping on articles") session.bulk_update_mappings(Article, article_batch) if article_categories: article_cats_table = Base.metadata.tables['arxiv_article_categories'] art_ids = {a['id'] for a in article_batch} logging.debug("core orm delete on categories") session.execute(article_cats_table.delete() .where(article_cats_table.columns['article_id'].in_(art_ids))) logging.debug("core orm insert on categories") session.execute(article_cats_table.insert(), article_categories) if article_fields_of_study: article_fos_table = Base.metadata.tables['arxiv_article_fields_of_study'] art_ids = {a['id'] for a in article_batch} logging.debug("core orm delete on fields of study") session.execute(article_fos_table.delete() .where(article_fos_table.columns['article_id'].in_(art_ids))) logging.debug("core orm insert on fields of study") session.execute(Base.metadata.tables['arxiv_article_fields_of_study'].insert(), article_fields_of_study) session.commit() session.close()
Updates existing articles from a list of dictionaries. A bulk method is used for non-relationship fields, while relationship fields are updated using the core ORM method. Args: article_batch (:obj:`list` of `dict`): articles to update in the database engine (:obj:`sqlalchemy.engine`): connection engine to use
https://github.com/nestauk/old_nesta_daps/blob/4b3ae79922cebde0ad33e08ac4c40b9a10e8e7c3/nesta/packages/arxiv/collect_arxiv.py#L365-L426
from collections import defaultdict import datetime import json import logging import pandas as pd import re import requests from retrying import retry import s3fs from sqlalchemy.orm import sessionmaker from sqlalchemy.orm.exc import NoResultFound import time import xml.etree.ElementTree as ET from nesta.packages.mag.query_mag_api import prepare_title from nesta.packages.misc_utils.batches import split_batches from nesta.core.orms.orm_utils import get_mysql_engine, try_until_allowed from nesta.core.orms.orm_utils import db_session, _filter_out_duplicates from nesta.core.orms.arxiv_orm import Base, Article, Category OAI = "{http://www.openarchives.org/OAI/2.0/}" ARXIV = "{http://arxiv.org/OAI/arXiv/}" DELAY = 10 API_URL = 'http://export.arxiv.org/oai2' def _category_exists(session, cat_id): try: session.query(Category).filter(Category.id == cat_id).one() except NoResultFound: return False return True def _add_category(session, cat_id, description): logging.info(f"adding {cat_id} to database") session.add(Category(id=cat_id, description=description)) session.commit() def load_arxiv_categories(db_config, db, bucket, cat_file): target = f's3://{bucket}/{cat_file}' categories = pd.read_csv(target) engine = get_mysql_engine(db_config, "mysqldb", db) try_until_allowed(Base.metadata.create_all, engine) Session = try_until_allowed(sessionmaker, engine) session = try_until_allowed(Session) logging.info(f'found {session.query(Category).count()} existing categories') for idx, data in categories.iterrows(): if not _category_exists(session, data['id']): _add_category(session, cat_id=data['id'], description=data['description']) session.close() @retry(stop_max_attempt_number=10) def _arxiv_request(url, delay=DELAY, **kwargs): params = dict(verb='ListRecords', **kwargs) r = requests.get(url, params=params) r.raise_for_status() time.sleep(delay) try: root = ET.fromstring(r.text) except ET.ParseError as e: logging.error(r.text) raise e return root def total_articles(): root = _arxiv_request(API_URL, metadataPrefix='arXiv') token = root.find(OAI+'ListRecords').find(OAI+"resumptionToken") list_size = token.attrib['completeListSize'] return int(list_size) def request_token(): root = _arxiv_request(API_URL, metadataPrefix='arXiv') token = root.find(OAI+'ListRecords').find(OAI+"resumptionToken") resumption_token = token.text.split("|")[0] logging.info(f"resumptionToken: {resumption_token}") return resumption_token def xml_to_json(element, tag, prefix=''): tag = ''.join([prefix, tag]) all_data = [{field.tag[len(prefix):]: field.text for field in fields.getiterator() if field.tag != tag} for fields in element.getiterator(tag)] return json.dumps(all_data) def arxiv_batch(resumption_token=None, **kwargs): if resumption_token is not None: root = _arxiv_request(API_URL, resumptionToken=resumption_token) else: root = _arxiv_request(API_URL, metadataPrefix='arXiv', **kwargs) records = root.find(OAI+'ListRecords') output = [] if records is None: raise ValueError('No new records to collect from arXiv') for record in records.findall(OAI+"record"): header = record.find(OAI+'header') header_id = header.find(OAI+'identifier').text row = dict(datestamp=header.find(OAI+'datestamp').text) logging.debug(f"article {header_id} datestamp: {row['datestamp']}") meta = record.find(OAI+'metadata') if meta is None: logging.warning(f"No metadata for article {header_id}") else: info = meta.find(ARXIV+'arXiv') fields = ['id', 'created', 'updated', 'title', 'categories', 'journal-ref', 'doi', 'msc-class', 'abstract'] for field in fields: 
try: row[field.replace('-', '_')] = info.find(ARXIV+field).text except AttributeError: logging.debug(f"{field} not found in article {header_id}") for date_field in ['datestamp', 'created', 'updated']: try: date = row[date_field] row[date_field] = datetime.datetime.strptime(date, '%Y-%m-%d').date() except ValueError: del row[date_field] except KeyError: pass try: row['categories'] = row['categories'].split(' ') row['title'] = row['title'].strip() row['authors'] = xml_to_json(info, 'author', prefix=ARXIV) except KeyError: pass row['article_source'] = 'arxiv' output.append(row) new_token = root.find(OAI+'ListRecords').find(OAI+"resumptionToken") if new_token is None or new_token.text is None: resumption_token = None logging.info(f"Hit end of arXiv data. {len(output)} records in this final batch.") else: if resumption_token is None: total_articles = new_token.attrib.get('completeListSize', 0) logging.info(f"Total records to retrieve in batches: {total_articles}") resumption_token = new_token.text logging.info(f"next resumptionCursor: {resumption_token.split('|')[1]}") return output, resumption_token def retrieve_arxiv_batch_rows(start_cursor, end_cursor, token): resumption_token = '|'.join([token, str(start_cursor)]) while resumption_token is not None and start_cursor < end_cursor: batch, resumption_token = arxiv_batch(resumption_token) if resumption_token is not None: start_cursor = int(resumption_token.split("|")[1]) for row in batch: yield row def retrieve_all_arxiv_rows(**kwargs): resumption_token = None while True: try: batch, resumption_token = arxiv_batch(resumption_token, **kwargs) except ValueError as e: logging.info(e) break for row in batch: yield row if resumption_token is None: break def extract_last_update_date(prefix, updates): date_pattern = r'(\d{4}-\d{2}-\d{2})' pattern = re.compile(f'^{prefix}_{date_pattern}$') matches = [re.search(pattern, update) for update in updates] dates = [] for match in matches: try: dates.append(datetime.datetime.strptime(match.group(1), '%Y-%m-%d')) except (AttributeError, ValueError): pass try: return sorted(dates)[-1] except IndexError: raise ValueError("Latest date could not be identified") class BatchedTitles(): def __init__(self, ids, batch_size, session): self.ids = ids self.batch_size = batch_size self.session = session self.title_articles_lookup = defaultdict(list) def __getitem__(self, key): matching_articles = self.title_articles_lookup.get(key) if matching_articles is None: raise KeyError(f"Title not found in lookup {key}") else: return matching_articles def __iter__(self): for batch_of_ids in split_batches(self.ids, self.batch_size): self.title_articles_lookup.clear() for article in (self.session.query(Article) .filter(Article.id.in_(batch_of_ids)) .all()): self.title_articles_lookup[prepare_title(article.title)].append(article.id) for title in self.title_articles_lookup: yield title def add_new_articles(article_batch, session): logging.info(f"Inserting a batch of {len(article_batch)} new Articles") objs, existing_objs, failed_objs = _filter_out_duplicates(session, Base, Article, article_batch, low_memory=True) articles = [Article(**obj) for obj in objs] session.add_all(articles) session.commit()
MIT License
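A minimal sketch of the call shape `update_existing_articles` expects, assuming an existing MySQL engine and articles already present in the table. The config/env names and article ids here are placeholders, not values from the record:

from nesta.core.orms.orm_utils import get_mysql_engine
from nesta.packages.arxiv.collect_arxiv import update_existing_articles

# Same call pattern as load_arxiv_categories in the surrounding module.
engine = get_mysql_engine("MYSQLDB", "mysqldb", "dev")

batch = [
    {"id": "1805.00001", "title": "Updated title", "categories": ["cs.LG"]},
    {"id": "1805.00002", "title": "Another title", "fields_of_study": [42]},
]
# Relationship rows (categories / fields of study) are deleted and
# re-inserted; everything else goes through bulk_update_mappings.
update_existing_articles(batch, engine)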
tyrylu/pyfmodex
pyfmodex/system.py
System.attach_file_system
python
def attach_file_system(self, user_open, user_close, user_read, user_seek): if user_open: user_open = FILE_OPEN_CALLBACK(user_open) if user_close: user_close = FILE_CLOSE_CALLBACK(user_close) if user_read: user_read = FILE_READ_CALLBACK(user_read) if user_seek: user_seek = FILE_SEEK_CALLBACK(user_seek) self._call_fmod( "FMOD_System_AttachFileSystem", user_open, user_close, user_read, user_seek ) self._user_open = user_open self._user_close = user_close self._user_read = user_read self._user_seek = user_seek
'Piggyback' on FMOD file reading routines to capture data as it's read. This allows users to capture data as FMOD reads it, which may be useful for extracting the raw data that FMOD reads for hard-to-support sources (for example Internet streams). To detach, pass None as the callback parameters. Note: this function is not meant to replace FMOD's file system; for that functionality, see :py:meth:`set_file_system`. :param callable user_open: Callback for after a file is opened. :param callable user_close: Callback for after a file is closed. :param callable user_read: Callback for after a read operation. :param callable user_seek: Callback for after a seek operation.
https://github.com/tyrylu/pyfmodex/blob/b3f89fdb40ecebe528b229a3c5310ec7d7f66f55/pyfmodex/system.py#L280-L311
from ctypes import * from .callback_prototypes import ROLLOFF_CALLBACK, SYSTEM_CALLBACK from .enums import OUTPUTTYPE, PLUGINTYPE, SPEAKERMODE, TIMEUNIT, RESULT from .flags import INIT_FLAGS, MODE from .fmodobject import FmodObject from .globalvars import DLL as _dll from .globalvars import get_class from .structobject import Structobject as so from .structures import * from .utils import * class Listener: def __init__(self, sptr, aaidee): pos_vec = VECTOR() vel_vec = VECTOR() fwd_vec = VECTOR() up_vec = VECTOR() self._sysptr = sptr self._id = aaidee ckresult( _dll.FMOD_System_Get3DListenerAttributes( self._sysptr, aaidee, byref(pos_vec), byref(vel_vec), byref(fwd_vec), byref(up_vec), ) ) self._pos = pos_vec self._vel = vel_vec self._fwd = fwd_vec self._up = up_vec self._rolloff_callback = None @property def position(self): return self._pos.to_list() @position.setter def position(self, poslist): self._pos = VECTOR.from_list(poslist) self._commit() @property def velocity(self): return self._vel.to_list() @velocity.setter def velocity(self, vellist): self._vel = VECTOR.from_list(vellist) self._commit() @property def forward(self): return self._fwd.to_list() @forward.setter def forward(self, fwdlist): self._fwd = VECTOR.from_list(fwdlist) self._commit() @property def up(self): return self._up.to_list() @up.setter def up(self, uplist): self._up = VECTOR.from_list(uplist) self._commit() def _commit(self): ckresult( _dll.FMOD_System_Set3DListenerAttributes( self._sysptr, self._id, byref(self._pos), byref(self._vel), byref(self._fwd), byref(self._up), ) ) class DSPBufferSizeInfo: def __init__(self, sptr, size, count): self._sysptr = sptr self._size = size self._count = count @property def size(self): return self._size @size.setter def size(self, size): ckresult(_dll.FMOD_System_SetDSPBufferSize(self._sysptr, size, self._count)) self._size = size @property def count(self): return self._count @count.setter def count(self, count): ckresult(_dll.FMOD_System_SetDSPBufferSize(self._sysptr, self._size, count)) self._count = count class ThreedSettings: def __init__(self, sptr, dopplerscale, distancefactor, rolloffscale): self._sysptr = sptr self._distancefactor = distancefactor self._dopplerscale = dopplerscale self._rolloffscale = rolloffscale @property def distance_factor(self): return self._distancefactor @distance_factor.setter def distance_factor(self, factor): ckresult( _dll.FMOD_System_Set3DSettings( self._sysptr, c_float(self._dopplerscale), c_float(factor), c_float(self._rolloffscale), ) ) self._distancefactor = factor @property def doppler_scale(self): return self._dopplerscale @doppler_scale.setter def doppler_scale(self, scale): ckresult( _dll.FMOD_System_Set3DSettings( self._sysptr, c_float(scale), c_float(self._distancefactor), c_float(self._rolloffscale), ) ) self._dopplerscale = scale @property def rolloff_scale(self): return self._rolloffscale @rolloff_scale.setter def rolloff_scale(self, rscale): ckresult( _dll.FMOD_System_Set3DSettings( self._sysptr, c_float(self._distancefactor), c_float(self._dopplerscale), c_float(rscale), ) ) self._rolloffscale = rscale class System(FmodObject): def __init__(self, ptr=None, header_version=0x20200): self._system_callbacks = {} if ptr is None: self._ptr = c_void_p() try: ckresult(_dll.FMOD_System_Create(byref(self._ptr))) except FmodError as exc: if exc.result is not RESULT.HEADER_MISMATCH: raise ckresult(_dll.FMOD_System_Create(byref(self._ptr), header_version)) else: self._ptr = ptr self._user_open = None self._user_close = None self._user_read = 
None self._user_seek = None def attach_channel_group_to_port( self, port_type, port_index, group, passthru=False ): ckresult( _dll.FMOD_System_AttachChannelGroupToPort( self._ptr, port_type, port_index, group._ptr, passthru ) )
MIT License
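A minimal sketch of attaching pass-through observers with `attach_file_system`. The exact arguments FMOD passes to each callback come from the ctypes prototypes in `pyfmodex.callback_prototypes`, so `*args` keeps this sketch safe; returning 0 (FMOD_OK) is an assumption about the expected result code:

import pyfmodex

system = pyfmodex.System()
system.init()

def on_open(*args):  print("open", args);  return 0
def on_close(*args): print("close", args); return 0
def on_read(*args):  print("read", args);  return 0
def on_seek(*args):  print("seek", args);  return 0

system.attach_file_system(on_open, on_close, on_read, on_seek)
# ... load sounds as usual; FMOD now reports its file I/O ...
system.attach_file_system(None, None, None, None)  # detach again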
audunarn/anystructure
ANYstructure_local/calc_structure.py
Structure.get_moment_of_intertia
python
def get_moment_of_intertia(self, efficent_se=None): tf1 = self.plate_th b1 = self.spacing if efficent_se==None else efficent_se h = self.flange_th+self.web_height+self.plate_th tw = self.web_th hw = self.web_height tf2 = self.flange_th b2 = self.flange_width Ax = tf1 * b1 + tf2 * b2 + (h-tf1-tf2) * tw Iyc = (1 / 12) * (b1 * math.pow(tf1, 3) + b2 * math.pow(tf2, 3) + tw * math.pow(hw, 3)) ez = (tf1 * b1 * (h - tf1 / 2) + hw * tw * (tf2 + hw / 2) + tf2 * b2 * (tf2 / 2)) / Ax Iy = Iyc + (tf1 * b1 * math.pow(tf2 + hw + tf1 / 2, 2) + tw * hw * math.pow(tf2 + hw / 2, 2) + tf2 * b2 * math.pow(tf2 / 2, 2)) - Ax * math.pow(ez, 2) return Iy
Returns the moment of inertia. :return:
https://github.com/audunarn/anystructure/blob/6ea9914bc763ab4131c5d8ecedfcfd6cb193d211/ANYstructure_local/calc_structure.py#L335-L353
from scipy.special import gammaln from scipy.stats import gamma as gammadist import numpy as np import ANYstructure_local.helper as hlp import os, time, datetime, json, random, math import ANYstructure_local.SN_curve_parameters as snc class Structure(): def __init__(self, main_dict, *args, **kwargs): super(Structure,self).__init__() self.main_dict = main_dict self.plate_th = main_dict['plate_thk'][0] self.web_height = main_dict['stf_web_height'][0] self.web_th = main_dict['stf_web_thk'][0] self.flange_width = main_dict['stf_flange_width'][0] self.flange_th = main_dict['stf_flange_thk'][0] self.mat_yield = main_dict['mat_yield'][0] self.mat_factor = main_dict['mat_factor'][0] self.span = main_dict['span'][0] self.spacing = main_dict['spacing'][0] self.structure_type = main_dict['structure_type'][0] self.sigma_y1=main_dict['sigma_y1'][0] self.sigma_y2=main_dict['sigma_y2'][0] self.sigma_x=main_dict['sigma_x'][0] self.tauxy=main_dict['tau_xy'][0] self.plate_kpp = main_dict['plate_kpp'][0] self.stf_kps = main_dict['stf_kps'][0] self.km1 = main_dict['stf_km1'][0] self.km2 = main_dict['stf_km2'][0] self.km3 = main_dict['stf_km3'][0] self.stiffener_type=main_dict['stf_type'][0] self.structure_types = main_dict['structure_types'][0] self.dynamic_variable_orientation = None if self.structure_type in self.structure_types['vertical']: self.dynamic_variable_orientation = 'z - vertical' elif self.structure_type in self.structure_types['horizontal']: self.dynamic_variable_orientation = 'x - horizontal' self._puls_method = main_dict['puls buckling method'][0] self._puls_boundary = main_dict['puls boundary'][0] self._puls_stf_end = main_dict['puls stiffener end'][0] self._puls_sp_or_up = main_dict['puls sp or up'][0] self._puls_up_boundary = main_dict['puls up boundary'][0] self._zstar_optimization = main_dict['zstar_optimization'][0] try: self.girder_lg=main_dict['girder_lg'][0] except KeyError: self.girder_lg = 10 try: self.pressure_side = main_dict['press_side'][0] except KeyError: self.pressure_side = 'p' def __str__(self): return str( '\n Plate field span: ' + str(round(self.span,3)) + ' meters' + '\n Stiffener spacing: ' + str(self.spacing*1000)+' mm'+ '\n Plate thickness: ' + str(self.plate_th*1000)+' mm'+ '\n Stiffener web height: ' + str(self.web_height*1000)+' mm'+ '\n Stiffener web thickness: ' + str(self.web_th*1000)+' mm'+ '\n Stiffener flange width: ' + str(self.flange_width*1000)+' mm'+ '\n Stiffener flange thickness: ' + str(self.flange_th*1000)+' mm'+ '\n Material yield: ' + str(self.mat_yield/1e6)+' MPa'+ '\n Structure/stiffener type: ' + str(self.structure_type)+'/'+(self.stiffener_type)+ '\n Dynamic load varible_ ' + str(self.dynamic_variable_orientation)+ '\n Plate fixation paramter,kpp: ' + str(self.plate_kpp) + ' ' + '\n Stf. 
fixation paramter,kps: ' + str(self.stf_kps) + ' ' + '\n Global stress, sig_y1/sig_y2: ' + str(round(self.sigma_y1,3))+'/'+str(round(self.sigma_y2,3))+ ' MPa' + '\n Global stress, sig_x: ' + str(round(self.sigma_x,3)) + ' MPa' + '\n Global shear, tau_xy: ' + str(round(self.tauxy,3)) + ' MPa' + '\n km1,km2,km3: ' + str(self.km1)+'/'+str(self.km2)+'/'+str(self.km3)+ '\n Pressure side (p-plate/s-stf): ' + str(self.pressure_side) + ' ') def get_beam_string(self): base_name = self.stiffener_type+ '_' + str(round(self.web_height*1000, 0)) + 'x' + str(round(self.web_th*1000, 0)) if self.stiffener_type == 'FB': ret_str = base_name else: ret_str = base_name + '__' + str(round(self.flange_width*1000, 0)) + 'x' + str(round(self.flange_th*1000, 0)) ret_str = ret_str.replace('.', '_') return ret_str def get_structure_types(self): return self.structure_types def get_z_opt(self): return self._zstar_optimization def get_puls_method(self): return self._puls_method def get_puls_boundary(self): return self._puls_boundary def get_puls_stf_end(self): return self._puls_stf_end def get_puls_sp_or_up(self): return self._puls_sp_or_up def get_puls_up_boundary(self): return self._puls_up_boundary def get_one_line_string(self): return 'pl_'+str(round(self.spacing*1000, 1))+'x'+str(round(self.plate_th*1000,1))+' stf_'+self.stiffener_type+ str(round(self.web_height*1000,1))+'x'+str(round(self.web_th*1000,1))+'+' +str(round(self.flange_width*1000,1))+'x'+ str(round(self.flange_th*1000,1)) def get_report_stresses(self): return 'sigma_y1: '+str(round(self.sigma_y1,1))+' sigma_y2: '+str(round(self.sigma_y2,1))+ ' sigma_x: ' + str(round(self.sigma_x,1))+' tauxy: '+ str(round(self.tauxy,1)) def get_extended_string(self): return 'span: '+str(round(self.span,4))+' structure type: '+ self.structure_type + ' stf. 
type: ' + self.stiffener_type + ' pressure side: ' + self.pressure_side def get_sigma_y1(self): return self.sigma_y1 def get_sigma_y2(self): return self.sigma_y2 def get_sigma_x(self): return self.sigma_x def get_tau_xy(self): return self.tauxy def get_s(self): return self.spacing def get_pl_thk(self): return self.plate_th def get_web_h(self): return self.web_height def get_web_thk(self): return self.web_th def get_fl_w(self): return self.flange_width def get_fl_thk(self): return self.flange_th def get_fy(self): return self.mat_yield def get_mat_factor(self): return self.mat_factor def get_span(self): return self.span def get_lg(self): return self.girder_lg def get_kpp(self): return self.plate_kpp def get_kps(self): return self.stf_kps def get_km1(self): return self.km1 def get_km2(self): return self.km2 def get_km3(self): return self.km3 def get_side(self): return self.pressure_side def get_tuple(self): return (self.spacing, self.plate_th, self.web_height, self.web_th, self.flange_width, self.flange_th, self.span, self.girder_lg, self.stiffener_type) def get_section_modulus(self, efficient_se = None, dnv_table = False): b1 = self.spacing if efficient_se==None else efficient_se tf1 = self.plate_th tf2 = self.flange_th b2 = self.flange_width h = self.flange_th+self.web_height+self.plate_th tw = self.web_th hw = self.web_height Ax = tf1 * b1 + tf2 * b2 + hw * tw assert Ax != 0, 'Ax cannot be 0' ez = (tf1 * b1 * tf1 / 2 + hw * tw * (tf1 + hw / 2) + tf2 * b2 * (tf1 + hw + tf2 / 2)) / Ax Iyc = (1 / 12) * (b1 * math.pow(tf1, 3) + b2 * math.pow(tf2, 3) + tw * math.pow(hw, 3)) Iy = Iyc + (tf1 * b1 * math.pow(tf1 / 2, 2) + tw * hw * math.pow(tf1+hw / 2, 2) + tf2 * b2 * math.pow(tf1+hw+tf2 / 2, 2)) - Ax * math.pow(ez, 2) Wey1 = Iy / (h - ez) Wey2 = Iy / ez return Wey1, Wey2 def get_plasic_section_modulus(self): tf1 = self.plate_th tf2 = self.flange_th b1 = self.spacing b2 = self.flange_width h = self.flange_th+self.web_height+self.plate_th tw = self.web_th hw = self.web_height Ax = tf1 * b1 + tf2 * b2 + (h-tf1-tf2) * tw ezpl = (Ax/2-b1*tf1)/tw+tf1 az1 = h-ezpl-tf1 az2 = ezpl-tf2 Wy1 = b1*tf1*(az1+tf1/2) + (tw/2)*math.pow(az1,2) Wy2 = b2*tf2*(az2+tf2/2)+(tw/2)*math.pow(az2,2) return Wy1+Wy2 def get_shear_center(self): tf1 = self.plate_th tf2 = self.flange_th b1 = self.spacing b2 = self.flange_width h = self.flange_th+self.web_height+self.plate_th tw = self.web_th hw = self.web_height Ax = tf1 * b1 + tf2 * b2 + (h-tf1-tf2) * tw ez = (b2*tf2*tf2/2 + tw*hw*(tf2+hw/2)+tf1*b1*(tf2+hw+tf1/2)) / Ax Iz1 = tf1 * math.pow(b1, 3) Iz2 = tf2 * math.pow(b2, 3) ht = h - tf1 / 2 - tf2 / 2 return (Iz1 * ht) / (Iz1 + Iz2) + tf2 / 2 - ez
MIT License
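`get_moment_of_intertia` is the standard parallel-axis composition for a plate-web-flange stiffener section. A standalone check using the same formulas as the method above (the dimensions, in metres, are arbitrary example values):

import math

tf1, b1 = 0.012, 0.680   # plate thickness / effective width
hw, tw = 0.300, 0.010    # web height / thickness
tf2, b2 = 0.020, 0.120   # flange thickness / width

h = tf1 + hw + tf2
Ax = tf1 * b1 + tf2 * b2 + hw * tw                       # total area
# Own inertia of the three rectangles about their centroids.
Iyc = (b1 * tf1**3 + b2 * tf2**3 + tw * hw**3) / 12
# Neutral axis measured from the flange side, as in the method.
ez = (tf1 * b1 * (h - tf1 / 2) + hw * tw * (tf2 + hw / 2)
      + tf2 * b2 * (tf2 / 2)) / Ax
# Parallel-axis (Steiner) terms, minus the shift to the neutral axis.
Iy = (Iyc + tf1 * b1 * (tf2 + hw + tf1 / 2) ** 2
      + tw * hw * (tf2 + hw / 2) ** 2
      + tf2 * b2 * (tf2 / 2) ** 2 - Ax * ez**2)
print(f"Iy = {Iy:.6e} m^4")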
prooffreader/pytabby
src/pytabby/formatting.py
format_menu
python
def format_menu(config, current_tab_number, line_length, message=None): tabs = config["tabs"] menu = [""] if len(tabs) > 1: menu += _format_headers(tabs, current_tab_number, line_length) current_tab = tabs[current_tab_number] items = current_tab["items"] max_choice_len = 0 for item in items: max_choice_len = max(max_choice_len, len(item["item_choice_displayed"])) for item in items: choice = item["item_choice_displayed"] description = item["item_description"] spacer = " " * (max_choice_len - len(choice)) menu.append("[{0}{1}] {2}".format(choice, spacer, description)) if message is not None: menu.append(message) return "\n".join(menu)
Creates the menu to be displayed to the user; called from menu.Menu only, not by the user. Args: config (dict): the config dict passed to the Menu instantiator, after normalization current_tab_number (int): number of the currently selected tab (always 0 for single-tabbed menus) line_length (int): value from config message (str or None): a message to print from Menu.message Returns: (str) menu to send to stdout
https://github.com/prooffreader/pytabby/blob/9bb7e5dcb2ef6d4b2a7e2a983402cbf4542d9d41/src/pytabby/formatting.py#L7-L44
MIT License
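A minimal sketch of the config shape `format_menu` reads: a "tabs" list whose items carry "item_choice_displayed" and "item_description". Anything else a real normalized pytabby config contains is omitted here:

from pytabby.formatting import format_menu

config = {
    "tabs": [{
        "items": [
            {"item_choice_displayed": "a",  "item_description": "add a thing"},
            {"item_choice_displayed": "rm", "item_description": "remove a thing"},
        ],
    }],
}
# Single tab, so no tab headers are printed; choices are padded to align.
print(format_menu(config, current_tab_number=0, line_length=60))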
fabiocaccamo/python-fsutil
fsutil/__init__.py
create_dir
python
def create_dir(path, overwrite=False): if not overwrite: assert_not_exists(path) make_dirs(path)
Create a directory at the given path. If overwrite is not allowed and the path already exists, an OSError is raised.
https://github.com/fabiocaccamo/python-fsutil/blob/261c11990253901129d7df23f1dc32d33dc96be4/fsutil/__init__.py#L187-L194
from fsutil.metadata import ( __author__, __copyright__, __description__, __email__, __license__, __title__, __version__, ) import datetime as dt import errno import glob import hashlib import os import requests import shutil import sys import zipfile try: from urlparse import urlsplit except ImportError: from urllib.parse import urlsplit PY2 = bool(sys.version_info.major == 2) SIZE_UNITS = ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'] def assert_dir(path): if not is_dir(path): raise OSError('Invalid directory path: {}'.format(path)) def assert_exists(path): if not exists(path): raise OSError('Invalid item path: {}'.format(path)) def assert_file(path): if not is_file(path): raise OSError('Invalid file path: {}'.format(path)) def assert_not_dir(path): if is_dir(path): raise OSError('Invalid path, directory already exists: {}'.format(path)) def assert_not_exists(path): if exists(path): raise OSError('Invalid path, item already exists: {}'.format(path)) def assert_not_file(path): if is_file(path): raise OSError('Invalid path, file already exists: {}'.format(path)) def _clean_dir_empty_dirs(path): for basepath, dirnames, _ in os.walk(path, topdown=False): for dirname in dirnames: dirpath = os.path.join(basepath, dirname) if is_empty_dir(dirpath): remove_dir(dirpath) def _clean_dir_empty_files(path): for basepath, _, filenames in os.walk(path, topdown=False): for filename in filenames: filepath = os.path.join(basepath, filename) if is_empty_file(filepath): remove_file(filepath) def clean_dir(path, dirs=True, files=True): if files: _clean_dir_empty_files(path) if dirs: _clean_dir_empty_dirs(path) def convert_size_bytes_to_string(size): size = float(size) units = SIZE_UNITS factor = 0 factor_limit = len(units) - 1 while (size >= 1024) and (factor <= factor_limit): size /= 1024 factor += 1 s_format = '{:.2f} {}' if (factor > 1) else '{:.0f} {}' s = s_format.format(size, units[factor]) return s def convert_size_string_to_bytes(size): units = [item.lower() for item in SIZE_UNITS] parts = size.strip().replace(' ', ' ').split(' ') amount = float(parts[0]) unit = parts[1] factor = units.index(unit.lower()) if not factor: return amount return int((1024 ** factor) * amount) def copy_dir(path, dest, overwrite=False, **kwargs): assert_not_file(path) dirname = os.path.basename(os.path.normpath(path)) dest = os.path.join(dest, dirname) if not overwrite: assert_not_exists(dest) else: if not (sys.version_info.major >= 3 and sys.version_info.minor >= 8): if is_dir(dest): remove_dir(dest) copy_dir_content(path, dest, **kwargs) def copy_dir_content(path, dest, **kwargs): assert_dir(path) if sys.version_info.major >= 3 and sys.version_info.minor >= 8: make_dirs(dest) kwargs.setdefault('dirs_exist_ok', True) shutil.copytree(path, dest, **kwargs) def copy_file(path, dest, overwrite=False, **kwargs): assert_file(path) if not overwrite: assert_not_exists(dest) make_dirs_for_file(dest) shutil.copy2(path, dest, **kwargs)
MIT License
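A minimal usage sketch of `create_dir`, showing the overwrite behaviour described in the docstring (this assumes the module's `make_dirs` tolerates an existing directory, which the overwrite flag implies):

import fsutil

fsutil.create_dir("demo/nested/dir")              # parents created as needed
try:
    fsutil.create_dir("demo/nested/dir")          # already exists -> OSError
except OSError as e:
    print(e)
fsutil.create_dir("demo/nested/dir", overwrite=True)  # no error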
hunch/hunch-gift-app
django/contrib/gis/db/backends/base.py
SpatialRefSysMixin.projected
python
def projected(self): if gdal.HAS_GDAL: return self.srs.projected else: return self.wkt.startswith('PROJCS')
Is this Spatial Reference projected?
https://github.com/hunch/hunch-gift-app/blob/8c7cad24cc0d9900deb4175e6b768c64a3d7adcf/django/contrib/gis/db/backends/base.py#L210-L215
import re from django.conf import settings from django.contrib.gis import gdal class BaseSpatialOperations(object): distance_functions = {} geometry_functions = {} geometry_operators = {} geography_operators = {} geography_functions = {} gis_terms = {} truncate_params = {} postgis = False spatialite = False mysql = False oracle = False spatial_version = None select = None geography = False area = False centroid = False difference = False distance = False distance_sphere = False distance_spheroid = False envelope = False force_rhr = False mem_size = False bounding_circle = False num_geom = False num_points = False perimeter = False perimeter3d = False point_on_surface = False polygonize = False reverse = False scale = False snap_to_grid = False sym_difference = False transform = False translate = False union = False collect = False extent = False extent3d = False make_line = False unionagg = False geohash = False geojson = False gml = False kml = False svg = False from_text = False from_wkb = False def convert_extent(self, box): raise NotImplementedError('Aggregate extent not implemented for this spatial backend.') def convert_extent3d(self, box): raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.') def convert_geom(self, geom_val, geom_field): raise NotImplementedError('Aggregate method not implemented for this spatial backend.') def geo_quote_name(self, name): if isinstance(name, unicode): name = name.encode('ascii') return "'%s'" % name def geo_db_type(self, f): raise NotImplementedError def get_distance(self, f, value, lookup_type): raise NotImplementedError('Distance operations not available on this spatial backend.') def get_geom_placeholder(self, f, value): raise NotImplementedError def spatial_aggregate_sql(self, agg): raise NotImplementedError('Aggregate support not implemented for this spatial backend.') def spatial_lookup_sql(self, lvalue, lookup_type, value, field): raise NotImplmentedError def geometry_columns(self): raise NotImplementedError def spatial_ref_sys(self): raise NotImplementedError class SpatialRefSysMixin(object): spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),') units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$') @property def srs(self): if gdal.HAS_GDAL: if hasattr(self, '_srs'): return self._srs.clone() else: try: self._srs = gdal.SpatialReference(self.wkt) return self.srs except Exception, msg: pass try: self._srs = gdal.SpatialReference(self.proj4text) return self.srs except Exception, msg: pass raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg)) else: raise Exception('GDAL is not installed.') @property def ellipsoid(self): if gdal.HAS_GDAL: return self.srs.ellipsoid else: m = self.spheroid_regex.match(self.wkt) if m: return (float(m.group('major')), float(m.group('flattening'))) else: return None @property def name(self): return self.srs.name @property def spheroid(self): return self.srs['spheroid'] @property def datum(self): return self.srs['datum'] @property
MIT License
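Without GDAL, the `projected` property falls back to sniffing the WKT prefix: projected coordinate systems start with PROJCS, geographic ones with GEOGCS. A minimal standalone illustration of that fallback (the WKT strings are truncated placeholders):

wgs84 = 'GEOGCS["WGS 84",DATUM["WGS_1984", ...]]'    # geographic SRS
utm33 = 'PROJCS["WGS 84 / UTM zone 33N", ...]'       # projected SRS

def is_projected(wkt):
    # Same heuristic the mixin uses when GDAL is unavailable.
    return wkt.startswith('PROJCS')

print(is_projected(wgs84), is_projected(utm33))  # False True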
bukun/torcms
torcms/model/reply_model.py
MReply.count_of_certain
python
def count_of_certain(): return TabReply.select().count(None)
Get the count of a certain kind.
https://github.com/bukun/torcms/blob/5d7480865fd46e706b84f5f65a5c24cd03bb2142/torcms/model/reply_model.py#L71-L76
import datetime import tornado.escape from config import CMS_CFG from torcms.core import tools from torcms.model.core_tab import TabReply, TabUser2Reply from torcms.model.replyid_model import TabReplyid class MReply(): @staticmethod def get_by_uid(uid): recs = TabReply.select().where(TabReply.uid == uid) if recs.count(): return recs.get() return None @staticmethod def update_vote(reply_id, count): entry = TabReply.update(vote=count).where(TabReply.uid == reply_id) entry.execute() @staticmethod def create_reply(post_data): uid = tools.get_uuid() TabReply.create( uid=uid, post_id=post_data['post_id'], user_name=post_data['user_name'], user_id=post_data['user_id'], category=post_data['category'] if 'category' in post_data else '0', timestamp=tools.timestamp(), date=datetime.datetime.now(), cnt_md=tornado.escape.xhtml_escape(post_data['cnt_reply']), cnt_html=tools.markdown2html(post_data['cnt_reply']), vote=0) return uid @staticmethod def query_by_post(postid): return TabReply.select().where((TabReply.post_id == postid) & (TabReply.category != '1')).order_by( TabReply.timestamp.desc()) @staticmethod def get_by_zan(reply_id): return TabUser2Reply.select().where( TabUser2Reply.reply_id == reply_id).count() @staticmethod def query_all(): return TabReply.select().order_by(TabReply.timestamp.desc()) @staticmethod def delete(del_id): return TabReply.delete().where(TabReply.post_id == del_id) @staticmethod
MIT License
epsy/sigtools
sigtools/support.py
bind_callsig
python
def bind_callsig(sig, args, kwargs): assigned = {} varkwargs = next( (param for param in sig.parameters.values() if param.kind == param.VAR_KEYWORD), None) if varkwargs: assigned[varkwargs.name] = {} params = iter(sig.parameters.values()) args_ = iter(args) i = 0 for (i, posarg), param in zip(enumerate(args_, 1), params): if param.kind in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD): assigned[param.name] = posarg elif param.kind == param.VAR_POSITIONAL: assigned[param.name] = (posarg,) + tuple(args_) break else: raise TypeError('too many positional arguments') else: if args[:i] != args: raise TypeError('too many positional arguments') for key, value in kwargs.items(): if key in sig.parameters: param = sig.parameters[key] if param.kind == param.POSITIONAL_ONLY: raise TypeError('{0!r} is positional-only'.format(key)) elif param.kind in (param.POSITIONAL_OR_KEYWORD, param.KEYWORD_ONLY): if key in assigned: raise TypeError('{0!r} was specified twice'.format(key)) assigned[key] = value continue if varkwargs: assigned[varkwargs.name][key] = value else: raise TypeError('unknown parameter {0!r}'.format(key)) for param in sig.parameters.values(): if param.name not in assigned: if param.kind == param.VAR_POSITIONAL: assigned[param.name] = () elif param.default != param.empty: assigned[param.name] = param.default else: raise TypeError('omitted required parameter {0!r}'.format( param.name)) return assigned
Returns a dict with each parameter name from ``sig`` mapped to values from ``args``, ``kwargs`` as if a function with ``sig`` was called with ``(*args, **kwargs)``. Similar to `inspect.Signature.bind`.
https://github.com/epsy/sigtools/blob/fa205804a089726a1d360a92bfc6c6f61d73f226/sigtools/support.py#L245-L300
import re import sys import itertools from warnings import warn from sigtools import _util, modifiers, signatures, specifiers __all__ = [ 's', 'f', 'read_sig', 'func_code', 'make_func', 'func_from_sig', 'make_up_callsigs', 'bind_callsig', 'sort_callsigs', 'test_func_sig_coherent', ] try: zip = itertools.izip except AttributeError: pass re_paramname = re.compile( r'^' r'\s*([^:=]+)' r'\s*(?::(.+?))?' r'\s*(?:=(.+))?' r'$') re_posoarg = re.compile(r'^<(.*)>$') def read_sig(sig_str, ret=None): names = [] return_annotation = ret annotations = {} posoarg_n = [] kwoarg_n = [] params = [] found_star = False varargs = None varkwargs = None default_index = None for i, param in enumerate(sig_str.split(',')): if not param: continue arg, annotation, default = re_paramname.match(param).groups() is_posoarg = re_posoarg.match(arg) if is_posoarg: name = arg = is_posoarg.group(1) posoarg_n.append(name) else: name = arg.lstrip('*') if annotation: annotations[name.lstrip('*')] = annotation if default: if default_index is None: if found_star: default_index = i - 1 else: default_index = i insert = arg + '=' + default else: insert = arg if arg == '/': posoarg_n.extend(names) elif arg.startswith('*'): found_star = True if name: params.append(insert) if arg.startswith('**'): varkwargs = name else: varargs = name elif found_star: kwoarg_n.append(arg) if not default and default_index is not None: params.insert(default_index, insert) default_index += 1 else: if params and params[-1].startswith('*'): params.insert(-1, insert) else: params.append(insert) names.append(name) else: params.append(insert) names.append(name) if varargs: names.append(varargs) if varkwargs: names.append(varkwargs) return ( names, return_annotation, annotations, posoarg_n, kwoarg_n, ', '.join(params)) def func_code(names, return_annotation, annotations, posoarg_n, kwoarg_n, params, pre='', name='func'): code = [pre] if return_annotation and annotations: code.append('@modifiers.annotate({0}, {1})'.format( return_annotation, ', '.join( '{0}={1}'.format(key, value) for key, value in annotations.items()))) elif return_annotation: code.append('@modifiers.annotate({0})'.format(return_annotation)) elif annotations: code.append('@modifiers.annotate({0})'.format( ', '.join('{0}={1}'.format(key, value) for key, value in annotations.items()))) if posoarg_n: code.append('@modifiers.posoargs({0})'.format( ', '.join("'{0}'".format(name) for name in posoarg_n))) if kwoarg_n: code.append('@modifiers.kwoargs({0})'.format( ', '.join("'{0}'".format(name) for name in kwoarg_n))) code.append('def {0}({1}):'.format(name, params)) code.append(' return {{{0}}}'.format( ', '.join('{0!r}: {0}'.format(name) for name in names))) return '\n'.join(code) def make_func(code, locals=None, name='func'): if locals is None: locals = {} exec(code, globals(), locals) return locals[name] @modifiers.autokwoargs def f(pre='', locals=None, name='func', *args, **kwargs): return make_func( func_code(*read_sig(*args, **kwargs), pre=pre, name=name), locals=locals, name=name) def s(*args, **kwargs): return specifiers.signature(f(*args, **kwargs)) def func_from_sig(sig): ret, sep, sig_str = str(sig).rpartition(' -> ') ret = ret if sep else None return f(sig_str[1:-1], ret) def make_up_callsigs(sig, extra=2): pospars, pokpars, varargs, kwopars, varkwargs = signatures.sort_params(sig) names = [ arg.name for arg in itertools.chain( pospars, pokpars, kwopars.values() )] for i in range(extra): names.append('__make_up_callsigs__extra_{0}'.format(i)) args = [ tuple(names[:i]) for i in 
range(len(names) + 1) ] if varargs: names.append(varargs.name) if varkwargs: names.append(varkwargs.name) kwargs = [ dict((name, name) for name in names_) for names_ in itertools.chain.from_iterable( itertools.combinations(names, i) for i in range(len(names) + 1) ) ] ret = list(itertools.product(args, kwargs)) return ret
MIT License
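`bind_callsig` mirrors `inspect.Signature.bind` but also fills in defaults and empty `*args`/`**kwargs` entries. A quick check using `sigtools.support.s` from the same module to build the signature (dict key order may differ from the comment):

from sigtools.support import s, bind_callsig

sig = s('a, b=2, *args, c, **kwargs')
print(bind_callsig(sig, (1, 10, 20), {'c': 3, 'extra': 4}))
# -> a=1, b=10, args=(20,), c=3, kwargs={'extra': 4}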
ucbdrive/3d-vehicle-tracking
faster-rcnn.pytorch/demo.py
parse_args
python
def parse_args(): parser = argparse.ArgumentParser(description='Train a Fast R-CNN network') parser.add_argument('--dataset', dest='dataset', help='training dataset', default='pascal_voc', type=str) parser.add_argument('--cfg', dest='cfg_file', help='optional config file', default='cfgs/vgg16.yml', type=str) parser.add_argument('--net', dest='net', help='vgg16, res50, res101, res152', default='res101', type=str) parser.add_argument('--set', dest='set_cfgs', help='set config keys', default=None, nargs=argparse.REMAINDER) parser.add_argument('--load_dir', dest='load_dir', help='directory to load models', default="/srv/share/jyang375/models") parser.add_argument('--image_dir', dest='image_dir', help='directory to load images for demo', default="images") parser.add_argument('--cuda', dest='cuda', help='whether use CUDA', action='store_true') parser.add_argument('--mGPUs', dest='mGPUs', help='whether use multiple GPUs', action='store_true') parser.add_argument('--cag', dest='class_agnostic', help='whether perform class_agnostic bbox regression', action='store_true') parser.add_argument('--parallel_type', dest='parallel_type', help='which part of model to parallel, 0: all, ' '1: model before roi pooling', default=0, type=int) parser.add_argument('--checksession', dest='checksession', help='checksession to load model', default=1, type=int) parser.add_argument('--checkepoch', dest='checkepoch', help='checkepoch to load network', default=1, type=int) parser.add_argument('--checkpoint', dest='checkpoint', help='checkpoint to load network', default=10021, type=int) parser.add_argument('--bs', dest='batch_size', help='batch_size', default=1, type=int) parser.add_argument('--vis', dest='vis', help='visualization mode', action='store_true') parser.add_argument('--webcam_num', dest='webcam_num', help='webcam ID number', default=-1, type=int) args = parser.parse_args() return args
Parse input arguments
https://github.com/ucbdrive/3d-vehicle-tracking/blob/8ee189f6792897651bb56bb2950ce07c9629a89d/faster-rcnn.pytorch/demo.py#L37-L93
from __future__ import absolute_import from __future__ import division from __future__ import print_function import _init_paths import os import sys import numpy as np import argparse import pprint import time import cv2 import torch from torch.autograd import Variable from matplotlib.pyplot import imread from model.utils.config import cfg, cfg_from_file, cfg_from_list from model.rpn.bbox_transform import clip_boxes from model.roi_layers.nms import nms from model.rpn.bbox_transform import bbox_transform_inv from model.utils.net_utils import vis_detections from model.utils.blob import im_list_to_blob from model.faster_rcnn.vgg16 import vgg16 from model.faster_rcnn.resnet import resnet try: xrange except NameError: xrange = range
BSD 3-Clause New or Revised License
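Usage sketch for the record above — a pared-down version of the same argparse pattern, parsing an explicit argv list instead of sys.argv so it runs standalone; the flag values are illustrative, not taken from any real run.

    import argparse

    parser = argparse.ArgumentParser(description='Train a Fast R-CNN network')
    parser.add_argument('--net', dest='net', default='res101', type=str)
    parser.add_argument('--cuda', dest='cuda', action='store_true')
    parser.add_argument('--checkepoch', dest='checkepoch', default=1, type=int)

    # Parse an explicit list rather than sys.argv so the sketch is self-contained.
    args = parser.parse_args(['--net', 'res50', '--cuda', '--checkepoch', '7'])
    print(args.net, args.cuda, args.checkepoch)  # res50 True 7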
juliaregistries/tagbot
tagbot/action/repo.py
Repo.create_dispatch_event
python
def create_dispatch_event(self, payload: Mapping[str, object]) -> None: self._repo.create_repository_dispatch("TagBot", payload)
Create a repository dispatch event.
https://github.com/juliaregistries/tagbot/blob/a42e7df2d9b63e8017c19a96fd26da319fdfec40/tagbot/action/repo.py#L434-L437
import json import os import re import subprocess import sys import traceback import docker import pexpect import requests import toml from base64 import b64decode from datetime import datetime, timedelta from stat import S_IREAD, S_IWRITE, S_IEXEC from subprocess import DEVNULL from tempfile import mkdtemp, mkstemp from typing import Dict, List, Mapping, MutableMapping, Optional, TypeVar, Union, cast from urllib.parse import urlparse from github import Github, GithubException, InputGitAuthor, UnknownObjectException from github.PullRequest import PullRequest from gnupg import GPG from semver import VersionInfo from .. import logger from . import TAGBOT_WEB, Abort, InvalidProject from .changelog import Changelog from .git import Git RequestException = requests.RequestException T = TypeVar("T") class Repo: def __init__( self, *, repo: str, registry: str, github: str, github_api: str, token: str, changelog: str, changelog_ignore: List[str], ssh: bool, gpg: bool, draft: bool, registry_ssh: str, user: str, email: str, lookback: int, branch: Optional[str], github_kwargs: Optional[Dict[str, object]] = None, ) -> None: if github_kwargs is None: github_kwargs = {} if not urlparse(github).scheme: github = f"https://{github}" if not urlparse(github_api).scheme: github_api = f"https://{github_api}" self._gh_url = github self._gh_api = github_api self._gh = Github( token, base_url=self._gh_api, per_page=100, **github_kwargs ) self._repo = self._gh.get_repo(repo, lazy=True) self._registry_name = registry try: self._registry = self._gh.get_repo(registry) except UnknownObjectException: if not registry_ssh: raise Abort(f"Registry {registry} is not accessible") self._registry_ssh_key = registry_ssh logger.debug("Will access registry via Git clone") self._clone_registry = True except Exception: if "pytest" in sys.modules: self._registry = self._gh.get_repo(registry, lazy=True) self._clone_registry = False else: raise else: self._clone_registry = False self._token = token self._changelog = Changelog(self, changelog, changelog_ignore) self._ssh = ssh self._gpg = gpg self._draft = draft self._user = user self._email = email self._git = Git(self._gh_url, repo, token, user, email) self._lookback = timedelta(days=lookback, hours=1) self.__registry_clone_dir: Optional[str] = None self.__release_branch = branch self.__project: Optional[MutableMapping[str, object]] = None self.__registry_path: Optional[str] = None def _project(self, k: str) -> str: if self.__project is not None: return str(self.__project[k]) for name in ["Project.toml", "JuliaProject.toml"]: try: contents = self._only(self._repo.get_contents(name)) break except UnknownObjectException: pass else: raise InvalidProject("Project file was not found") self.__project = toml.loads(contents.decoded_content.decode()) return str(self.__project[k]) @property def _registry_clone_dir(self) -> str: if self.__registry_clone_dir is not None: return self.__registry_clone_dir repo = mkdtemp(prefix="tagbot_registry_") self._git.command("init", repo, repo=None) self.configure_ssh(self._registry_ssh_key, None, repo=repo) url = f"git@{urlparse(self._gh_url).hostname}:{self._registry_name}.git" self._git.command("remote", "add", "origin", url, repo=repo) self._git.command("fetch", "origin", repo=repo) self._git.command("checkout", self._git.default_branch(repo=repo), repo=repo) self.__registry_clone_dir = repo return repo @property def _registry_path(self) -> Optional[str]: if self.__registry_path is not None: return self.__registry_path try: uuid = self._project("uuid") 
except KeyError: raise InvalidProject("Project file has no UUID") if self._clone_registry: with open(os.path.join(self._registry_clone_dir, "Registry.toml")) as f: registry = toml.load(f) else: contents = self._only(self._registry.get_contents("Registry.toml")) registry = toml.loads(contents.decoded_content.decode()) if uuid in registry["packages"]: self.__registry_path = registry["packages"][uuid]["path"] return self.__registry_path return None @property def _release_branch(self) -> str: return self.__release_branch or self._repo.default_branch def _only(self, val: Union[T, List[T]]) -> T: return val[0] if isinstance(val, list) else val def _maybe_decode_private_key(self, key: str) -> str: return key if "PRIVATE KEY" in key else b64decode(key).decode() def _create_release_branch_pr(self, version: str, branch: str) -> None: self._repo.create_pull( title=f"Merge release branch for {version}", body="", head=branch, base=self._repo.default_branch, ) def _registry_pr(self, version: str) -> Optional[PullRequest]: if self._clone_registry: return None name = self._project("name") uuid = self._project("uuid") head = f"registrator/{name.lower()}/{uuid[:8]}/{version}" logger.debug(f"Looking for PR from branch {head}") now = datetime.now() registry = self._registry owner = registry.owner.login logger.debug(f"Trying to find PR by registry owner first ({owner})") prs = registry.get_pulls(head=f"{owner}:{head}", state="closed") for pr in prs: if pr.merged and now - pr.merged_at < self._lookback: return pr logger.debug("Did not find registry PR by registry owner") prs = registry.get_pulls(state="closed") for pr in prs: if now - cast(datetime, pr.closed_at) > self._lookback: break if pr.merged and pr.head.ref == head: return pr return None def _commit_sha_from_registry_pr(self, version: str, tree: str) -> Optional[str]: pr = self._registry_pr(version) if not pr: logger.info("Did not find registry PR") return None m = re.search("- Commit: ([a-f0-9]{32})", pr.body) if not m: logger.info("Registry PR body did not match") return None commit = self._repo.get_commit(m[1]) if commit.commit.tree.sha == tree: return commit.sha else: logger.warning("Tree SHA of commit from registry PR does not match") return None def _commit_sha_of_tree_from_branch( self, branch: str, tree: str, since: datetime ) -> Optional[str]: for commit in self._repo.get_commits(sha=branch, since=since): if commit.commit.tree.sha == tree: return commit.sha return None def _commit_sha_of_tree(self, tree: str) -> Optional[str]: since = datetime.now() - self._lookback sha = self._commit_sha_of_tree_from_branch(self._release_branch, tree, since) if sha: return sha for branch in self._repo.get_branches(): if branch.name == self._release_branch: continue sha = self._commit_sha_of_tree_from_branch(branch.name, tree, since) if sha: return sha return self._git.commit_sha_of_tree(tree) def _commit_sha_of_tag(self, version: str) -> Optional[str]: try: ref = self._repo.get_git_ref(f"tags/{version}") except UnknownObjectException: return None ref_type = getattr(ref.object, "type", None) if ref_type == "commit": return ref.object.sha elif ref_type == "tag": tag = self._repo.get_git_tag(ref.object.sha) return tag.object.sha else: return None def _commit_sha_of_release_branch(self) -> str: branch = self._repo.get_branch(self._release_branch) return branch.commit.sha def _filter_map_versions(self, versions: Dict[str, str]) -> Dict[str, str]: valid = {} for version, tree in versions.items(): version = f"v{version}" expected = 
self._commit_sha_from_registry_pr(version, tree) if not expected: expected = self._commit_sha_of_tree(tree) if not expected: logger.warning( f"No matching commit was found for version {version} ({tree})" ) continue sha = self._commit_sha_of_tag(version) if sha: if sha != expected: msg = f"Existing tag {version} points at the wrong commit (expected {expected})" logger.error(msg) else: logger.info(f"Tag {version} already exists") continue valid[version] = expected return valid def _versions(self, min_age: Optional[timedelta] = None) -> Dict[str, str]: if self._clone_registry: return self._versions_clone(min_age=min_age) root = self._registry_path if not root: logger.debug("Package is not registered") return {} kwargs = {} if min_age: until = datetime.now() - min_age commits = self._registry.get_commits(until=until) for commit in commits: kwargs["ref"] = commit.commit.sha break else: logger.debug("No registry commits were found") return {} try: contents = self._only( self._registry.get_contents(f"{root}/Versions.toml", **kwargs) ) except UnknownObjectException: logger.debug(f"Versions.toml was not found ({kwargs})") return {} versions = toml.loads(contents.decoded_content.decode()) return {v: versions[v]["git-tree-sha1"] for v in versions} def _versions_clone(self, min_age: Optional[timedelta] = None) -> Dict[str, str]: registry = self._registry_clone_dir if min_age: default_sha = self._git.command("rev-parse", "HEAD", repo=registry) earliest = datetime.now() - min_age shas = self._git.command("log", "--format=%H", repo=registry).split("\n") for sha in shas: dt = self._git.time_of_commit(sha, repo=registry) if dt < earliest: self._git.command("checkout", sha, repo=registry) break else: logger.debug("No registry commits were found") return {} try: root = self._registry_path if not root: logger.debug("Package is not registered") return {} path = os.path.join(registry, root, "Versions.toml") if not os.path.isfile(path): logger.debug("Versions.toml was not found") return {} with open(path) as f: versions = toml.load(f) return {v: versions[v]["git-tree-sha1"] for v in versions} finally: if min_age: self._git.command("checkout", default_sha, repo=registry) def _pr_exists(self, branch: str) -> bool: owner = self._repo.owner.login for pr in self._repo.get_pulls(head=f"{owner}:{branch}"): return True return False def _run_url(self) -> str: url = f"{self._repo.html_url}/actions" run = os.getenv("GITHUB_RUN_ID") if run: url += f"/runs/{run}" return url def _image_id(self) -> str: host = os.getenv("HOSTNAME", "") if not host: logger.warning("HOSTNAME is not set") return "Unknown" client = docker.from_env() container = client.containers.get(host) return container.image.id def _report_error(self, trace: str) -> None: if self._repo.private or os.getenv("GITHUB_ACTIONS") != "true": logger.debug("Not reporting") return logger.debug("Reporting error") data = { "image": self._image_id(), "repo": self._repo.full_name, "run": self._run_url(), "stacktrace": trace, } resp = requests.post(f"{TAGBOT_WEB}/report", json=data) output = json.dumps(resp.json(), indent=2) logger.info(f"Response ({resp.status_code}): {output}") def is_registered(self) -> bool: try: root = self._registry_path except InvalidProject as e: logger.debug(e.message) return False if not root: return False if self._clone_registry: with open( os.path.join(self._registry_clone_dir, root, "Package.toml") ) as f: package = toml.load(f) else: contents = self._only(self._registry.get_contents(f"{root}/Package.toml")) package = 
toml.loads(contents.decoded_content.decode()) gh = cast(str, urlparse(self._gh_url).hostname).replace(".", r"\.") if "@" in package["repo"]: pattern = rf"{gh}:(.*?)(?:\.git)?$" else: pattern = rf"{gh}/(.*?)(?:\.git)?$" m = re.search(pattern, package["repo"]) if not m: return False return cast(bool, m[1].casefold() == self._repo.full_name.casefold()) def new_versions(self) -> Dict[str, str]: current = self._versions() logger.debug(f"There are {len(current)} total versions") old = self._versions(min_age=self._lookback) logger.debug(f"There are {len(current) - len(old)} new versions") versions = {} for v in sorted(current.keys(), key=VersionInfo.parse): if v not in old: versions[v] = current[v] return self._filter_map_versions(versions)
MIT License
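Usage sketch for the record above. PyGithub's Repository.create_repository_dispatch takes an event type and a client payload; the token and repository name here are placeholders, and a real call needs a token with repo scope.

    from github import Github

    gh = Github("ghp_example_token")            # placeholder credential
    repo = gh.get_repo("SomeOrg/SomePkg.jl")    # placeholder repository
    # Mirrors Repo.create_dispatch_event above: event type "TagBot" plus a payload.
    repo.create_repository_dispatch("TagBot", {"trigger": "manual"})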
kneron/onnx_convertor
keras-onnx/onnx_keras/helper.py
constructConstantNode
python
def constructConstantNode(name, data, output=None): if data is None: raise ValueError("data param cannot be None.") tensor = O.helper.make_tensor( name + 'tensor', convertKerasType(data.dtype), data.shape, data.ravel()) if output is None: output = name node = O.helper.make_node( "Constant", [], [output], name=name, value=tensor) node_list = [node] info = O.helper.make_tensor_value_info( name, convertKerasType(data.dtype), data.shape ) value_infos = [info] return node_list, value_infos
Construct a constant node for weights and other uses. # Arguments: name: Name of the node. Usually a node name with usage, e.g. "conv0_weight" data: The data in numpy format output: The output name of the current node. By default, it is the current node name.
https://github.com/kneron/onnx_convertor/blob/ecf24114d64be274a2df0d120f370358717ee0a0/keras-onnx/onnx_keras/helper.py#L153-L185
import logging import numpy as np import onnx as O from onnx import TensorProto from .exceptions import OnnxNotSupport logger = logging.getLogger("onnx-keras") batchReplace = False custom_name2type = dict() custom_type2opid = dict() data_format = None compatibility = False dtype = int(TensorProto.FLOAT) warning_msgs = [] known_tensors = dict() opid_counter = 0 final_output_change = [] is_sequential = False duplicate_weights = False RNN_start = False RNN_start_node = None def set_compatibility(enable_compatibility): global compatibility compatibility = enable_compatibility def set_duplicate_weights(enable_duplicate): global duplicate_weights duplicate_weights = enable_duplicate def set_custom_layer(custom_list): global custom_name2type global custom_type2opid global opid_counter for custom_layer in custom_list: opid = opid_counter opid_counter += 1 custom_type2opid[custom_layer["layer_type"]] = opid for custom_name in custom_layer["layer_names"]: custom_name2type[custom_name] = custom_layer["layer_type"] def warning_once(msg): global warning_msgs if msg not in warning_msgs: warning_msgs.append(msg) logger.warning(msg) def getKerasLayerType(layer): return str(type(layer)).split('.')[-1][:-2] def hasActivation(activation): return activation.__name__ != 'linear' def convertKerasType(dtype): if dtype not in O.mapping.NP_TYPE_TO_TENSOR_TYPE: logger.warning("Unknown data type %s is treated as float", str(dtype)) dtype = np.dtype('float32') return O.mapping.NP_TYPE_TO_TENSOR_TYPE[dtype] def formatShape(shape_in): global batchReplace shape = np.array(shape_in).tolist() if shape[0] is None: if not batchReplace: logger.info('Replace None batch size with 1.') batchReplace = True shape[0] = 1 for i in range(len(shape)): if shape[i] is None: raise OnnxNotSupport("Dimension none in shape " + str(shape)) else: shape[i] = int(shape[i]) return shape def convertShape(shape_in): global RNN_start shape_in = formatShape(shape_in) shape_size = len(shape_in) if shape_size < 3: return shape_in if data_format == 'channels_first': return shape_in shape_out = [] shape_out.append(shape_in[0]) index_so_far = 1 if RNN_start: shape_out.append(shape_in[index_so_far]) index_so_far += 1 shape_out.append(shape_in[shape_size - 1]) start = index_so_far end = shape_size -1 for i in range(start, end): shape_out.append(shape_in[i]) return shape_out def getPadding(size, kernel_size, strides): if size[0] % strides[0] == 0: pad_h = max(kernel_size[0] - strides[0], 0) else: pad_h = max(kernel_size[0] - (size[0] % strides[0]), 0) if size[1] % strides[1] == 0: pad_w = max(kernel_size[1] - strides[1], 0) else: pad_w = max(kernel_size[1] - (size[1] % strides[1]), 0) return [pad_h//2, pad_w//2, pad_h-pad_h//2, pad_w-pad_w//2] def getConstantNodeByName(tensor_name, weight=None): global known_tensors if tensor_name in known_tensors: logger.debug(tensor_name + " value_info is reused.") return [], [] else: if weight is None: raise ValueError("Unexpected None value") nodes, values = constructConstantNode(tensor_name, weight) known_tensors[tensor_name] = weight.shape return nodes, values
MIT License
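Usage sketch for the record above, assuming the helper module is importable (the import path is a guess from the file path keras-onnx/onnx_keras/helper.py):

    import numpy as np
    from onnx_keras import helper  # assumed import path

    weights = np.ones((2, 3), dtype='float32')
    nodes, infos = helper.constructConstantNode('conv0_weight', weights)
    print(nodes[0].op_type)  # Constant
    print(infos[0].name)     # conv0_weight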
akusok/scikit-elm
skelm/utils.py
PairwiseRandomProjection.__init__
python
def __init__(self, n_components=100, pairwise_metric='l2', n_jobs=None, random_state=None): self.n_components = n_components self.pairwise_metric = pairwise_metric self.n_jobs = n_jobs self.random_state = random_state
Pairwise distances projection with random centroids. Parameters ---------- n_components : int Number of components (centroids) in the projection. Creates the same number of output features. pairwise_metric : str A valid pairwise distance metric, see pairwise-distances_. .. _pairwise-distances: https://scikit-learn.org/stable/modules/generated/sklearn.metrics.pairwise_distances.html#sklearn.metrics.pairwise_distances n_jobs : int or None, optional, default=None Number of jobs to use in distance computations, or `None` for no parallelism. Passed to the pairwise_distances function. random_state Used for random generation of centroids.
https://github.com/akusok/scikit-elm/blob/799ecbddc3a4feffac52be0f71b441705cd445a1/skelm/utils.py#L41-L63
import scipy as sp from enum import Enum from sklearn.metrics import pairwise_distances from sklearn.base import BaseEstimator, TransformerMixin from sklearn.utils.validation import check_array, check_is_fitted, check_random_state class HiddenLayerType(Enum): RANDOM = 1 SPARSE = 2 PAIRWISE = 3 def dummy(x): return x def flatten(items): for x in items: if hasattr(x, '__iter__') and not isinstance(x, (str, bytes)): yield from flatten(x) else: yield x def _is_list_of_strings(obj): return obj is not None and all(isinstance(elem, str) for elem in obj) def _dense(X): if sp.sparse.issparse(X): return X.todense() else: return X class PairwiseRandomProjection(BaseEstimator, TransformerMixin):
MIT License
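Usage sketch for the record above. The constructor only stores parameters; assuming the class honors the usual scikit-learn fit/transform contract (those methods are not shown in the snippet), projecting 100 samples onto 10 random centroids would look like:

    import numpy as np
    from skelm.utils import PairwiseRandomProjection  # assumed import path

    X = np.random.rand(100, 5)
    proj = PairwiseRandomProjection(n_components=10,
                                    pairwise_metric='cosine',
                                    random_state=0)
    H = proj.fit(X).transform(X)  # distances from each sample to 10 centroids
    print(H.shape)                # (100, 10)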
chryswoods/acquire
Acquire/Identity/_authorisation.py
Authorisation.identifiers
python
def identifiers(self): return {"user_guid": self.user_guid()}
Return a dictionary of the full set of identifiers attached to this authorisation (e.g. user_guid, group_guid(s) etc.)
https://github.com/chryswoods/acquire/blob/bf8a0465a531f3b485cb2a14c69dc9aea79451fd/Acquire/Identity/_authorisation.py#L195-L199
__all__ = ["Authorisation"] class Authorisation: def __init__(self, resource=None, user=None, testing_key=None, testing_user_guid=None): if resource is not None: resource = str(resource) self._signature = None self._last_validated_datetime = None self._scope = None self._permissions = None self._pubcert = None if resource is not None: if user is None and testing_key is None: raise ValueError( "You must pass in an authenticated user who will " "provide authorisation for resource '%s'" % resource) from Acquire.ObjectStore import get_datetime_now as _get_datetime_now from Acquire.ObjectStore import create_uuid as _create_uuid if user is not None: from Acquire.Client import User as _User if not isinstance(user, _User): raise TypeError("The passed user must be of type User") elif not user.is_logged_in(): raise PermissionError( "The passed user '%s' must be authenticated to enable " "you to generate an authorisation for the account") self._user_uid = user.uid() self._session_uid = user.session_uid() self._identity_url = user.identity_service().canonical_url() self._identity_uid = user.identity_service_uid() self._auth_datetime = _get_datetime_now() self._uid = _create_uuid(short_uid=True, include_date=self._auth_datetime) self._siguid = user.signing_key().sign(self._uid) message = self._get_message(resource) self._signature = user.signing_key().sign(message) self._last_validated_datetime = _get_datetime_now() self._last_verified_resource = resource self._last_verified_key = None if user.guid() != self.user_guid(): raise PermissionError( "We do not yet support a single user being identified " "by multiple identity services: %s versus %s" % (user.guid(), self.user_guid())) elif testing_key is not None: self._user_uid = "some user uid" self._session_uid = "some session uid" self._identity_url = "some identity_url" self._identity_uid = "some identity uid" self._auth_datetime = _get_datetime_now() self._uid = _create_uuid(short_uid=True, include_date=self._auth_datetime) self._is_testing = True self._testing_key = testing_key if testing_user_guid is not None: parts = testing_user_guid.split("@") self._user_uid = parts[0] self._identity_uid = parts[1] message = self._get_message(resource) self._signature = testing_key.sign(message) self._siguid = testing_key.sign(self._uid) self._last_validated_datetime = _get_datetime_now() self._last_verified_resource = resource self._last_verified_key = testing_key.public_key() def is_null(self): return self._signature is None def _get_message(self, resource=None, matched_resource=False): from Acquire.ObjectStore import datetime_to_string as _datetime_to_string if matched_resource: resource = self._last_verified_resource if resource is None: return "%s|%s|%s|%s" % ( self._user_uid, self._session_uid, self._identity_uid, _datetime_to_string(self._auth_datetime)) else: return "%s|%s|%s|%s|%s" % ( self._user_uid, self._session_uid, self._identity_uid, str(resource), _datetime_to_string(self._auth_datetime)) def __str__(self): try: return "Authorisation(signature=%s)" % self._signature except: return "Authorisation()" def __repr__(self): return self.__str__() def __eq__(self, other): if isinstance(other, self.__class__): return self._signature == other._signature else: return False def __ne__(self, other): return not self.__eq__(other) def _fix_integer(self, value, max_value): max_value = int(max_value) if value is None: return max_value else: try: value = int(value) except: return max_value if value <= 0 or value > max_value: return max_value else: return value def 
from_user(self, user_uid, service_uid): return (user_uid == self._user_uid) and (service_uid == self._identity_uid) def uid(self): if self.is_null(): return None else: return self._uid def user_uid(self): if self.is_null(): return None else: return self._user_uid def user_guid(self): return "%s@%s" % (self.user_uid(), self.identity_uid())
Apache License 2.0
city-bureau/city-scrapers
city_scrapers/spiders/chi_ssa_52.py
ChiSsa52Spider._parse_links
python
def _parse_links(self, item): return []
Parse or generate links.
https://github.com/city-bureau/city-scrapers/blob/b295d0aa612e3979a9fccab7c5f55ecea9ed074c/city_scrapers/spiders/chi_ssa_52.py#L134-L136
import re from datetime import datetime from difflib import SequenceMatcher from city_scrapers_core.constants import COMMISSION from city_scrapers_core.items import Meeting from city_scrapers_core.spiders import CityScrapersSpider class ChiSsa52Spider(CityScrapersSpider): name = "chi_ssa_52" agency = "Chicago Special Service Area #52 51st Street" timezone = "America/Chicago" start_urls = ["https://www.51ststreetchicago.com/about.html"] def parse(self, response): items = response.css("div.paragraph")[3:4] title = items.css("strong::text").get() meeting = items.css("ul")[0] item = (title, meeting) for meet in meeting.css("li"): meet = self._clean_meet(meet) meeting = Meeting( title=self._parse_title(title), description=self._parse_description(item), classification=self._parse_classification(item), start=self._parse_start(meet), end=self._parse_end(item), all_day=self._parse_all_day(item), time_notes=self._parse_time_notes(item), location=self._parse_location(item), links=self._parse_links(item), source=self._parse_source(response), ) meeting["status"] = self._get_status(meeting) meeting["id"] = self._get_id(meeting) yield meeting def _clean_meet(self, meet): meet = meet.css("::text").get() meet = meet.replace("\xa0", "") clean_str = re.sub(r"[^\w:]+", " ", meet) meet_info = clean_str.split() return meet_info def _check_am_pm(self, time): time = time.split(":") hour = time[0] minutes = time[1] if int(hour) >= 8 and int(hour) <= 12: return f"{hour}:{minutes} AM" return f"{hour}:{minutes} PM" def _parse_title(self, item): return "Commission" def _parse_description(self, item): return "" def _parse_classification(self, item): return COMMISSION def _parse_start(self, item): months = [ "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE", "JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", ] time = item[4] time = self._check_am_pm(time) try: date = datetime.strptime( f"{item[2]} {item[1]} {item[3]} {time}", "%d %B %Y %I:%M %p", ) except ValueError: for month in months: ratio = SequenceMatcher(None, month, item[1]).ratio() if ratio > 0.5: date = datetime.strptime( f"{item[2]} {month} {item[3]} {time}", "%d %B %Y %I:%M %p", ) return date def _parse_end(self, item): return None def _parse_time_notes(self, item): return "" def _parse_all_day(self, item): return False def _parse_location(self, item): return { "address": "220 E 51st St Chicago, IL 60615", "name": "51st Street Business Association", }
MIT License
jonathanfeng/new_horizons
venv/lib/python3.7/site-packages/jinja2/compiler.py
has_safe_repr
python
def has_safe_repr(value): if value is None or value is NotImplemented or value is Ellipsis: return True if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: return True if type(value) in (tuple, list, set, frozenset): for item in value: if not has_safe_repr(item): return False return True elif type(value) is dict: for key, value in iteritems(value): if not has_safe_repr(key): return False if not has_safe_repr(value): return False return True return False
Does the node have a safe representation?
https://github.com/jonathanfeng/new_horizons/blob/0ec21c8f8423932611e1e0bf24548dcef912bc54/venv/lib/python3.7/site-packages/jinja2/compiler.py#L92-L110
from collections import namedtuple from functools import update_wrapper from itertools import chain from keyword import iskeyword as is_python_keyword from markupsafe import escape from markupsafe import Markup from . import nodes from ._compat import imap from ._compat import iteritems from ._compat import izip from ._compat import NativeStringIO from ._compat import range_type from ._compat import string_types from ._compat import text_type from .exceptions import TemplateAssertionError from .idtracking import Symbols from .idtracking import VAR_LOAD_ALIAS from .idtracking import VAR_LOAD_PARAMETER from .idtracking import VAR_LOAD_RESOLVE from .idtracking import VAR_LOAD_UNDEFINED from .nodes import EvalContext from .optimizer import Optimizer from .utils import concat from .visitor import NodeVisitor operators = { "eq": "==", "ne": "!=", "gt": ">", "gteq": ">=", "lt": "<", "lteq": "<=", "in": "in", "notin": "not in", } if hasattr(dict, "iteritems"): dict_item_iter = "iteritems" else: dict_item_iter = "items" code_features = ["division"] try: exec("from __future__ import generator_stop") code_features.append("generator_stop") except SyntaxError: pass try: exec("def f(): yield from x()") except SyntaxError: supports_yield_from = False else: supports_yield_from = True def optimizeconst(f): def new_func(self, node, frame, **kwargs): if self.optimized and not frame.eval_ctx.volatile: new_node = self.optimizer.visit(node, frame.eval_ctx) if new_node != node: return self.visit(new_node, frame) return f(self, node, frame, **kwargs) return update_wrapper(new_func, f) def generate( node, environment, name, filename, stream=None, defer_init=False, optimized=True ): if not isinstance(node, nodes.Template): raise TypeError("Can't compile non template nodes") generator = environment.code_generator_class( environment, name, filename, stream, defer_init, optimized ) generator.visit(node) if stream is None: return generator.stream.getvalue()
MIT License
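A few worked cases for the record above (assuming has_safe_repr is in scope), tracing the branches of the function:

    assert has_safe_repr(None) is True             # explicit singleton branch
    assert has_safe_repr((1, 2.5, 'x')) is True    # container of safe scalars
    assert has_safe_repr({'k': [True]}) is True    # dict: both keys and values checked
    assert has_safe_repr(object()) is False        # unknown type falls through
    assert has_safe_repr([object()]) is False      # one unsafe item poisons the list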
pranjaldatta/pyvision
pyvision/misc/mtcnn/stage_two.py
pad
python
def pad(bboxes, width, height): x, y, ex, ey = [bboxes[:, i] for i in range(4)] w, h = ex - x + 1.0, ey - y + 1.0 num_boxes = bboxes.shape[0] dx, dy = np.zeros((num_boxes,)), np.zeros((num_boxes,)) edx, edy = w.copy() - 1.0, h.copy() - 1.0 ind = np.where(x < 0.0)[0] dx[ind] = 0.0 - x[ind] x[ind] = 0.0 ind = np.where(y < 0.0)[0] dy[ind] = 0.0 - y[ind] y[ind] = 0.0 ind = np.where(ex > width - 1.0 )[0] edx[ind] = w[ind] + width - 2.0 - ex[ind] ex[ind] = width - 1.0 ind = np.where(ey > height - 1.0)[0] edy[ind] = h[ind] + height - 2.0 - ey[ind] ey[ind] = height - 1.0 return_list = [dy, edy, dx, edx, y, ey, x, ex, w, h] return_list = [r.astype('int32') for r in return_list] return return_list
Output: dy, edy, dx, edx: Coordinates of the visible region inside each cut-out box y, ey, x, ex: Coordinates of each box clipped to the image w, h: Widths and heights of the boxes. (Returned in the order dy, edy, dx, edx, y, ey, x, ex, w, h.)
https://github.com/pranjaldatta/pyvision/blob/ad57b27cf790c267772402e47bd9e140ba6f549e/pyvision/misc/mtcnn/stage_two.py#L40-L78
from .utils.utils import preprocess import numpy as np from PIL import Image from .utils.visualize import show_boxes def get_image_boxes(bounding_boxes, img, size=24): num_boxes = len(bounding_boxes) w, h = img.size [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = pad(bounding_boxes, w, h) img_boxes = np.zeros((num_boxes, 3, size, size), 'float32') for i in range(num_boxes): img_box = np.zeros((tmph[i], tmpw[i], 3), 'uint8') img_array = np.asarray(img, 'uint8') try: img_box[dy[i]:(edy[i] + 1), dx[i]:(edx[i] + 1), :] = img_array[y[i]:(ey[i] + 1), x[i]:(ex[i] + 1), :] except ValueError as ve: print("Value error at index {}".format(i)) img_box = Image.fromarray(img_box) img_box = img_box.resize((size, size), Image.BILINEAR) img_box = np.asarray(img_box, 'float32') img_boxes[i, :, :, :] = preprocess(img_box) return img_boxes
BSD 3-Clause New or Revised License
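A worked example for the record above (assuming pad is imported from the module): one box that overhangs a 10x20 image on every side.

    import numpy as np

    bboxes = np.array([[-5.0, -5.0, 15.0, 25.0]])  # x, y, ex, ey
    dy, edy, dx, edx, y, ey, x, ex, w, h = pad(bboxes, width=10, height=20)
    # Box size: w = 15 - (-5) + 1 = 21, h = 25 - (-5) + 1 = 31
    print(x, ex)    # [0] [9]   box clipped to the image
    print(dx, edx)  # [5] [14]  where the visible part sits inside the cut-out box
    print(dy, edy)  # [5] [24]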
botfront/rasa-for-botfront
scripts/release.py
write_version_to_pyproject
python
def write_version_to_pyproject(version: Version) -> None: pyproject_file = pyproject_file_path() try: data = toml.load(pyproject_file) data["tool"]["poetry"]["version"] = str(version) with pyproject_file.open("w", encoding="utf8") as f: toml.dump(data, f) except (FileNotFoundError, TypeError): print(f"Unable to update {pyproject_file}: file not found.") sys.exit(1) except toml.TomlDecodeError: print(f"Unable to parse {pyproject_file}: incorrect TOML file.") sys.exit(1) check_call(["git", "add", str(pyproject_file.absolute())])
Dump a new version into the pyproject.toml.
https://github.com/botfront/rasa-for-botfront/blob/6e0e48d0059e197b5f686df1e27935769c3641b7/scripts/release.py#L74-L90
import argparse import os import re import sys from pathlib import Path from subprocess import CalledProcessError, check_call, check_output from typing import Text, Set import questionary import toml from pep440_version_utils import Version, is_valid_version VERSION_FILE_PATH = "rasa/version.py" PYPROJECT_FILE_PATH = "pyproject.toml" REPO_BASE_URL = "https://github.com/RasaHQ/rasa" RELEASE_BRANCH_PREFIX = "prepare-release-" PRERELEASE_FLAVORS = ("alpha", "rc") RELEASE_BRANCH_PATTERN = re.compile(r"^\d+\.\d+\.x$") def create_argument_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser(description="prepare the next library release") parser.add_argument( "--next_version", type=str, help="Either next version number or 'major', 'minor', 'micro', 'alpha', 'rc'", ) return parser def project_root() -> Path: return Path(os.path.dirname(__file__)).parent def version_file_path() -> Path: return project_root() / VERSION_FILE_PATH def pyproject_file_path() -> Path: return project_root() / PYPROJECT_FILE_PATH def write_version_file(version: Version) -> None: with version_file_path().open("w") as f: f.write( f"# this file will automatically be changed,\n" f"# do not add anything but the version number here!\n" f'__version__ = "{version}"\n' ) check_call(["git", "add", str(version_file_path().absolute())])
Apache License 2.0
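Usage sketch for the record above (assuming the function is in scope from scripts/release.py). Note the helper writes pyproject.toml and stages it with git add, so it is only meaningful inside the repository checkout:

    from pep440_version_utils import Version

    # Bumps tool.poetry.version and runs `git add`; exits with status 1
    # if the file is missing or is not valid TOML.
    write_version_to_pyproject(Version("2.1.0"))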
youngbin-ro/multi2oie
carb/oieReader.py
OieReader.split_to_corpus
python
def split_to_corpus(self, corpus_fn, out_fn): raw_sents = [line.strip() for line in open(corpus_fn)] with open(out_fn, 'w') as fout: for line in self.get_tabbed().split('\n'): data = line.split('\t') sent = data[0] if sent in raw_sents: fout.write(line + '\n')
Given a corpus file name containing a list of sentences, print only the extractions pertaining to it to out_fn in a tab-separated format: sent, prob, pred, arg1, arg2, ...
https://github.com/youngbin-ro/multi2oie/blob/3964e9c68aa9721800791de02d1ec7bdcb5fe3a7/carb/oieReader.py#L12-L24
class OieReader: def read(self, fn, includeNominal): raise Exception("Don't run me") def count(self): return sum([len(extractions) for _, extractions in self.oie.items()])
MIT License
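Usage sketch for the record above with a hypothetical concrete reader: OieReader.read raises by design, and get_tabbed is not shown in the snippet, so a subclass must supply both.

    reader = SomeConcreteOieReader()   # hypothetical subclass implementing read/get_tabbed
    reader.read('extractions.txt', includeNominal=False)
    # Keep only extractions whose sentence appears in the raw corpus file:
    reader.split_to_corpus('corpus_sentences.txt', 'filtered_extractions.tsv')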
hyperion-rt/hyperion
hyperion/model/image.py
Image.lon_max
python
def lon_max(self): return self._lon_max
Upper extent of the image in the longitude direction (in degrees).
https://github.com/hyperion-rt/hyperion/blob/2159b39f5cafaeb653f2541502eef194a78102d6/hyperion/model/image.py#L222-L226
import numpy as np import six from ..util.functions import FreezableClass, is_numpy_array from ..util.constants import c class Image(FreezableClass): def __init__(self, nu, val=None, unc=None, units=None): self.nu = nu self.val = val self.unc = unc self.units = units self.x_min = None self.x_max = None self.y_min = None self.y_max = None self.lon_min = None self.lon_max = None self.lat_min = None self.lat_max = None self.d_min = None self.d_max = None self.distance = None self.pix_area_sr = None self.inside_observer = False self._freeze() @property def nu(self): return self._nu @nu.setter def nu(self, value): if type(value) in [list, tuple]: value = np.array(value) if value is None: self._nu = value elif isinstance(value, np.ndarray) and value.ndim == 1: self._nu = value else: raise TypeError("nu should be a 1-d sequence") @property def val(self): return self._val @val.setter def val(self, value): if type(value) in [list, tuple]: value = np.array(value) if value is None: self._val = value elif isinstance(value, np.ndarray) and value.ndim >= 1: if self.nu is not None and len(self.nu) != value.shape[-1]: raise ValueError("the last dimension of the value array should match the length of the nu array (expected {0} but found {1})".format(len(self.nu), value.shape[-1])) else: if hasattr(self, 'unc') and self.unc is not None: if value.shape != self.unc.shape: raise ValueError("dimensions should match that of unc") self._val = value else: raise TypeError("val should be a multi-dimensional array") @property def unc(self): return self._unc @unc.setter def unc(self, value): if type(value) in [list, tuple]: value = np.array(value) if value is None: self._unc = value elif isinstance(value, np.ndarray) and value.ndim >= 1: if self.nu is not None and len(self.nu) != value.shape[-1]: raise ValueError("the last dimension of the unc array should match the length of the nu array (expected {0} but found {1})".format(len(self.nu), value.shape[-1])) else: if hasattr(self, 'val') and self.val is not None: if value.shape != self.val.shape: raise ValueError("dimensions should match that of val") self._unc = value else: raise TypeError("unc should be a multi-dimensional array") @property def unit(self): return self._unit @unit.setter def unit(self, value): if value is None or isinstance(value, six.string_types): self._unit = value else: raise ValueError("unit should be a string") @property def wav(self): if self.nu is None: return None else: return c / self.nu * 1e4 def __iter__(self): if self.unc is None: return (x for x in [self.wav, self.val]) else: return (x for x in [self.wav, self.val, self.unc]) @property def x_min(self): return self._x_min @x_min.setter def x_min(self, value): if value is None or (np.isscalar(value) and np.isreal(value)): self._x_min = value else: raise ValueError("x_min should be a real scalar value") @property def x_max(self): return self._x_max @x_max.setter def x_max(self, value): if value is None or (np.isscalar(value) and np.isreal(value)): self._x_max = value else: raise ValueError("x_max should be a real scalar value") @property def y_min(self): return self._y_min @y_min.setter def y_min(self, value): if value is None or (np.isscalar(value) and np.isreal(value)): self._y_min = value else: raise ValueError("y_min should be a real scalar value") @property def y_max(self): return self._y_max @y_max.setter def y_max(self, value): if value is None or (np.isscalar(value) and np.isreal(value)): self._y_max = value else: raise ValueError("y_max should be a real scalar value") @property def 
lon_min(self): return self._lon_min @lon_min.setter def lon_min(self, value): if value is None or (np.isscalar(value) and np.isreal(value)): self._lon_min = value else: raise ValueError("lon_min should be a real scalar value") @property
BSD 2-Clause Simplified License
shallowtoil/drol
pysot/utils/bbox.py
cxy_wh_2_rect1
python
def cxy_wh_2_rect1(pos, sz): return np.array([pos[0]-sz[0]/2+1, pos[1]-sz[1]/2+1, sz[0], sz[1]])
Convert (cx, cy, w, h) to (x1, y1, w, h), 1-indexed.
https://github.com/shallowtoil/drol/blob/4aebe575394bc035e9924c8711c7d5d76bfef37a/pysot/utils/bbox.py#L97-L100
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from collections import namedtuple import numpy as np Corner = namedtuple('Corner', 'x1 y1 x2 y2') BBox = Corner Center = namedtuple('Center', 'x y w h') def corner2center(corner): if isinstance(corner, Corner): x1, y1, x2, y2 = corner return Center((x1 + x2) * 0.5, (y1 + y2) * 0.5, (x2 - x1), (y2 - y1)) else: x1, y1, x2, y2 = corner[0], corner[1], corner[2], corner[3] x = (x1 + x2) * 0.5 y = (y1 + y2) * 0.5 w = x2 - x1 h = y2 - y1 return x, y, w, h def center2corner(center): if isinstance(center, Center): x, y, w, h = center return Corner(x - w * 0.5, y - h * 0.5, x + w * 0.5, y + h * 0.5) else: x, y, w, h = center[0], center[1], center[2], center[3] x1 = x - w * 0.5 y1 = y - h * 0.5 x2 = x + w * 0.5 y2 = y + h * 0.5 return x1, y1, x2, y2 def IoU(rect1, rect2): x1, y1, x2, y2 = rect1[0], rect1[1], rect1[2], rect1[3] tx1, ty1, tx2, ty2 = rect2[0], rect2[1], rect2[2], rect2[3] xx1 = np.maximum(tx1, x1) yy1 = np.maximum(ty1, y1) xx2 = np.minimum(tx2, x2) yy2 = np.minimum(ty2, y2) ww = np.maximum(0, xx2 - xx1) hh = np.maximum(0, yy2 - yy1) area = (x2-x1) * (y2-y1) target_a = (tx2-tx1) * (ty2 - ty1) inter = ww * hh iou = inter / (area + target_a - inter) return iou def cxy_wh_2_rect(pos, sz): return np.array([pos[0]-sz[0]/2, pos[1]-sz[1]/2, sz[0], sz[1]]) def rect_2_cxy_wh(rect): return np.array([rect[0]+rect[2]/2, rect[1]+rect[3]/2]), np.array([rect[2], rect[3]])
Apache License 2.0
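A worked example for the record above (assuming the function is imported from pysot.utils.bbox):

    rect = cxy_wh_2_rect1((50, 50), (20, 10))
    print(rect)  # [41. 46. 20. 10.], since 50 - 20/2 + 1 = 41 and 50 - 10/2 + 1 = 46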
gabstopper/smc-python
smc/core/interfaces.py
QoS.dscp_marking_and_throttling
python
def dscp_marking_and_throttling(self, qos_policy): self._interface.data.update(qos_limit=-1, qos_mode='dscp', qos_policy_ref=qos_policy.href)
Enable DSCP marking and throttling on the interface. This requires that you provide a QoS policy to which identifies DSCP tags and how to prioritize that traffic. :param QoSPolicy qos_policy: the qos policy to apply to the interface
https://github.com/gabstopper/smc-python/blob/54386c8a710727cc1acf69334a57b155d2f5408c/smc/core/interfaces.py#L369-L379
import copy from smc.base.model import SubElement, lookup_class, ElementCache from smc.api.exceptions import InterfaceNotFound from smc.core.route import del_invalid_routes from smc.core.sub_interfaces import ( NodeInterface, SingleNodeInterface, ClusterVirtualInterface, InlineInterface, CaptureInterface, get_sub_interface, SubInterfaceCollection) from smc.compat import string_types from smc.elements.helpers import zone_helper, logical_intf_helper from smc.elements.network import Zone from smc.base.structs import BaseIterable from smc.policy.qos import QoSPolicy class InterfaceOptions(object): def __init__(self, engine): self._engine = engine self.interface = InterfaceEditor(engine) @property def primary_mgt(self): return self.interface.find_mgmt_interface('primary_mgt') @property def backup_mgt(self): return self.interface.find_mgmt_interface('backup_mgt') @property def primary_heartbeat(self): return self.interface.find_mgmt_interface('primary_heartbeat') @property def backup_heartbeat(self): return self.interface.find_mgmt_interface('backup_heartbeat') @property def outgoing(self): return self.interface.find_mgmt_interface('outgoing') @property def auth_request(self): return self.interface.find_mgmt_interface('auth_request') def set_auth_request(self, interface_id, address=None): self.interface.set_auth_request(interface_id, address) self._engine.update() def set_primary_heartbeat(self, interface_id): self.interface.set_unset(interface_id, 'primary_heartbeat') self._engine.update() def set_backup_heartbeat(self, interface_id): self.interface.set_unset(interface_id, 'backup_heartbeat') self._engine.update() def set_primary_mgt(self, interface_id, auth_request=None, address=None): intfattr = ['primary_mgt', 'outgoing'] if self.interface.engine.type in ('virtual_fw',): intfattr.remove('primary_mgt') for attribute in intfattr: self.interface.set_unset(interface_id, attribute, address) if auth_request is not None: self.interface.set_auth_request(auth_request) else: self.interface.set_auth_request(interface_id, address) self._engine.update() def set_backup_mgt(self, interface_id): self.interface.set_unset(interface_id, 'backup_mgt') self._engine.update() def set_outgoing(self, interface_id): self.interface.set_unset(interface_id, 'outgoing') self._engine.update() class QoS(object): def __init__(self, interface): self._interface = interface def disable(self): self._interface.data.update(qos_limit=-1, qos_mode='no_qos', qos_policy_ref=None) @property def qos_policy(self): return QoSPolicy.from_href(self._interface.data.get( 'qos_policy_ref', None)) @property def qos_mode(self): return self._interface.data.get('qos_mode', 'no_qos') @property def qos_limit(self): return self._interface.data.get('qos_limit', -1) def statistics_only(self): self._interface.data.update(qos_limit=-1, qos_mode='statistics_only', qos_policy_ref=None) def full_qos(self, qos_limit, qos_policy): self._interface.data.update(qos_limit=qos_limit, qos_mode='full_qos', qos_policy_ref=qos_policy.href)
Apache License 2.0
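Usage sketch for the record above. The engine/interface lookup, the policy name, and the final update step are assumptions; only the QoS wrapper and the dscp_marking_and_throttling call come from the snippet.

    from smc.policy.qos import QoSPolicy

    interface = engine.interface.get(1)   # assumed accessor on an existing engine
    qos = QoS(interface)                  # wrapper class shown in the snippet
    qos.dscp_marking_and_throttling(QoSPolicy('MyDSCPPolicy'))  # hypothetical policy name
    interface.update()                    # assumed step to persist the change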
shaohua0116/demo2program
models/baselines/model_induction.py
PoolingAttentionWrapper.__init__
python
def __init__(self, cell, attention_mechanism, attention_layer_size=None, alignment_history=False, cell_input_fn=None, output_attention=True, initial_cell_state=None, pooling='avgpool', name=None): super(PoolingAttentionWrapper, self).__init__( cell, attention_mechanism, attention_layer_size=None, alignment_history=alignment_history, cell_input_fn=cell_input_fn, output_attention=output_attention, initial_cell_state=initial_cell_state, name=name) self.pooling = pooling if attention_layer_size is not None: if not isinstance(attention_layer_size, (int, float)): raise ValueError('attention_layer_size should be a single number.') self._attention_layers = tuple( layers_core.Dense( attention_layer_size, name="attention_layer", use_bias=False) for _ in range(len(self._attention_mechanisms))) self._attention_layer_size = attention_layer_size else: attention_layer_sizes = [ attention_mechanism.values.get_shape()[-1].value for attention_mechanism in self._attention_mechanisms] if max(attention_layer_sizes) != min(attention_layer_sizes): raise ValueError('When attention_layer_size is None, ' 'attention mechanisms should output with the ' 'same dimension.') self._attention_layers = None self._attention_layer_size = min(attention_layer_sizes)
Construct the `PoolingAttentionWrapper`. Args: attention_layer_size: a single Python integer. Otherwise, this is the same as the AttentionWrapper.
https://github.com/shaohua0116/demo2program/blob/23464a69bfbf6fac9752fd423d14b03d37d1d1c6/models/baselines/model_induction.py#L59-L105
from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import numpy as np import tensorflow as tf import tensorflow.contrib.rnn as rnn import tensorflow.contrib.seq2seq as seq2seq from tensorflow.python.framework import ops from tensorflow.python.layers import core as layers_core from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from collections import namedtuple from models.util import log from models.ops import fc, conv2d SequenceLossOutput = namedtuple( 'SequenceLossOutput', 'mask loss output token_acc seq_acc syntax_acc ' + 'is_correct_syntax pred_tokens is_same_seq') def _compute_attention(attention_mechanism, cell_output, previous_alignments, attention_layer, reuse=False): with tf.variable_scope('compute_attention', reuse=reuse): alignments = attention_mechanism( cell_output, previous_alignments=previous_alignments) expanded_alignments = array_ops.expand_dims(alignments, 1) context = math_ops.matmul(expanded_alignments, attention_mechanism.values) context = array_ops.squeeze(context, [1]) if attention_layer is not None: with tf.variable_scope('compute_attention', reuse=reuse): attention = attention_layer(array_ops.concat([cell_output, context], 1)) else: attention = context return attention, alignments class PoolingAttentionWrapper(seq2seq.AttentionWrapper):
MIT License
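Construction sketch for the record above (TF1-style graph code matching the snippet's imports); the pooling itself happens in call(), which is not shown, and the shapes are illustrative.

    import tensorflow as tf
    import tensorflow.contrib.rnn as rnn
    import tensorflow.contrib.seq2seq as seq2seq

    memory1 = tf.placeholder(tf.float32, [None, 20, 64])  # two attention memories
    memory2 = tf.placeholder(tf.float32, [None, 30, 64])
    cell = rnn.BasicLSTMCell(128)
    mechanisms = [seq2seq.LuongAttention(64, memory1),
                  seq2seq.LuongAttention(64, memory2)]
    wrapped = PoolingAttentionWrapper(cell, mechanisms,
                                      attention_layer_size=64,
                                      pooling='avgpool')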
locustio/locust
locust/stats.py
StatsEntry.extend
python
def extend(self, other): old_last_request_timestamp = self.last_request_timestamp if self.last_request_timestamp is not None and other.last_request_timestamp is not None: self.last_request_timestamp = max(self.last_request_timestamp, other.last_request_timestamp) elif other.last_request_timestamp is not None: self.last_request_timestamp = other.last_request_timestamp self.start_time = min(self.start_time, other.start_time) self.num_requests = self.num_requests + other.num_requests self.num_none_requests = self.num_none_requests + other.num_none_requests self.num_failures = self.num_failures + other.num_failures self.total_response_time = self.total_response_time + other.total_response_time self.max_response_time = max(self.max_response_time, other.max_response_time) if self.min_response_time is not None and other.min_response_time is not None: self.min_response_time = min(self.min_response_time, other.min_response_time) elif other.min_response_time is not None: self.min_response_time = other.min_response_time self.total_content_length = self.total_content_length + other.total_content_length for key in other.response_times: self.response_times[key] = self.response_times.get(key, 0) + other.response_times[key] for key in other.num_reqs_per_sec: self.num_reqs_per_sec[key] = self.num_reqs_per_sec.get(key, 0) + other.num_reqs_per_sec[key] for key in other.num_fail_per_sec: self.num_fail_per_sec[key] = self.num_fail_per_sec.get(key, 0) + other.num_fail_per_sec[key] if self.use_response_times_cache: last_time = self.last_request_timestamp and int(self.last_request_timestamp) or None if last_time and last_time > (old_last_request_timestamp and int(old_last_request_timestamp) or 0): self._cache_response_times(last_time)
Extend the data from the current StatsEntry with the stats from another StatsEntry instance.
https://github.com/locustio/locust/blob/58487b526075826584690cdd2ad4ab1218afa146/locust/stats.py#L424-L467
import datetime import hashlib import time from collections import namedtuple, OrderedDict from copy import copy from itertools import chain import os import csv import gevent from .exception import StopUser, CatchResponseError import logging console_logger = logging.getLogger("locust.stats_logger") try: STATS_NAME_WIDTH = max(min(os.get_terminal_size()[0] - 80, 80), 0) except OSError: STATS_NAME_WIDTH = 80 STATS_TYPE_WIDTH = 8 CONSOLE_STATS_INTERVAL_SEC = 2 HISTORY_STATS_INTERVAL_SEC = 5 CSV_STATS_INTERVAL_SEC = 1 CSV_STATS_FLUSH_INTERVAL_SEC = 10 CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW = 10 CachedResponseTimes = namedtuple("CachedResponseTimes", ["response_times", "num_requests"]) PERCENTILES_TO_REPORT = [0.50, 0.66, 0.75, 0.80, 0.90, 0.95, 0.98, 0.99, 0.999, 0.9999, 1.0] class RequestStatsAdditionError(Exception): pass def get_readable_percentiles(percentile_list): return [ f"{int(percentile * 100) if (percentile * 100).is_integer() else round(100 * percentile, 6)}%" for percentile in percentile_list ] def calculate_response_time_percentile(response_times, num_requests, percent): num_of_request = int((num_requests * percent)) processed_count = 0 for response_time in sorted(response_times.keys(), reverse=True): processed_count += response_times[response_time] if num_requests - processed_count <= num_of_request: return response_time return 0 def diff_response_time_dicts(latest, old): new = {} for t in latest: diff = latest[t] - old.get(t, 0) if diff: new[t] = diff return new class RequestStats: def __init__(self, use_response_times_cache=True): self.use_response_times_cache = use_response_times_cache self.entries = {} self.errors = {} self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache) self.history = [] @property def num_requests(self): return self.total.num_requests @property def num_none_requests(self): return self.total.num_none_requests @property def num_failures(self): return self.total.num_failures @property def last_request_timestamp(self): return self.total.last_request_timestamp @property def start_time(self): return self.total.start_time def log_request(self, method, name, response_time, content_length): self.total.log(response_time, content_length) self.get(name, method).log(response_time, content_length) def log_error(self, method, name, error): self.total.log_error(error) self.get(name, method).log_error(error) key = StatsError.create_key(method, name, error) entry = self.errors.get(key) if not entry: entry = StatsError(method, name, error) self.errors[key] = entry entry.occurred() def get(self, name, method): entry = self.entries.get((name, method)) if not entry: entry = StatsEntry(self, name, method, use_response_times_cache=self.use_response_times_cache) self.entries[(name, method)] = entry return entry def reset_all(self): self.total.reset() self.errors = {} for r in self.entries.values(): r.reset() self.history = [] def clear_all(self): self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache) self.entries = {} self.errors = {} self.history = [] def serialize_stats(self): return [ self.entries[key].get_stripped_report() for key in self.entries.keys() if not (self.entries[key].num_requests == 0 and self.entries[key].num_failures == 0) ] def serialize_errors(self): return dict([(k, e.to_dict()) for k, e in self.errors.items()]) class StatsEntry: name = None method = None num_requests = None num_none_requests = None num_failures = None total_response_time = None 
min_response_time = None max_response_time = None num_reqs_per_sec = None num_fail_per_sec = None response_times = None use_response_times_cache = False response_times_cache = None total_content_length = None start_time = None last_request_timestamp = None def __init__(self, stats, name, method, use_response_times_cache=False): self.stats = stats self.name = name self.method = method self.use_response_times_cache = use_response_times_cache self.reset() def reset(self): self.start_time = time.time() self.num_requests = 0 self.num_none_requests = 0 self.num_failures = 0 self.total_response_time = 0 self.response_times = {} self.min_response_time = None self.max_response_time = 0 self.last_request_timestamp = None self.num_reqs_per_sec = {} self.num_fail_per_sec = {} self.total_content_length = 0 if self.use_response_times_cache: self.response_times_cache = OrderedDict() self._cache_response_times(int(time.time())) def log(self, response_time, content_length): current_time = time.time() t = int(current_time) if self.use_response_times_cache and self.last_request_timestamp and t > int(self.last_request_timestamp): self._cache_response_times(t - 1) self.num_requests += 1 self._log_time_of_request(current_time) self._log_response_time(response_time) self.total_content_length += content_length def _log_time_of_request(self, current_time): t = int(current_time) self.num_reqs_per_sec[t] = self.num_reqs_per_sec.setdefault(t, 0) + 1 self.last_request_timestamp = current_time def _log_response_time(self, response_time): if response_time is None: self.num_none_requests += 1 return self.total_response_time += response_time if self.min_response_time is None: self.min_response_time = response_time self.min_response_time = min(self.min_response_time, response_time) self.max_response_time = max(self.max_response_time, response_time) if response_time < 100: rounded_response_time = round(response_time) elif response_time < 1000: rounded_response_time = round(response_time, -1) elif response_time < 10000: rounded_response_time = round(response_time, -2) else: rounded_response_time = round(response_time, -3) self.response_times.setdefault(rounded_response_time, 0) self.response_times[rounded_response_time] += 1 def log_error(self, error): self.num_failures += 1 t = int(time.time()) self.num_fail_per_sec[t] = self.num_fail_per_sec.setdefault(t, 0) + 1 @property def fail_ratio(self): try: return float(self.num_failures) / self.num_requests except ZeroDivisionError: if self.num_failures > 0: return 1.0 else: return 0.0 @property def avg_response_time(self): try: return float(self.total_response_time) / (self.num_requests - self.num_none_requests) except ZeroDivisionError: return 0 @property def median_response_time(self): if not self.response_times: return 0 median = median_from_dict(self.num_requests - self.num_none_requests, self.response_times) or 0 if median > self.max_response_time: median = self.max_response_time elif median < self.min_response_time: median = self.min_response_time return median @property def current_rps(self): if self.stats.last_request_timestamp is None: return 0 slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 0)) reqs = [ self.num_reqs_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2) ] return avg(reqs) @property def current_fail_per_sec(self): if self.stats.last_request_timestamp is None: return 0 slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 
0)) reqs = [ self.num_fail_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2) ] return avg(reqs) @property def total_rps(self): if not self.stats.last_request_timestamp or not self.stats.start_time: return 0.0 try: return self.num_requests / (self.stats.last_request_timestamp - self.stats.start_time) except ZeroDivisionError: return 0.0 @property def total_fail_per_sec(self): if not self.stats.last_request_timestamp or not self.stats.start_time: return 0.0 try: return self.num_failures / (self.stats.last_request_timestamp - self.stats.start_time) except ZeroDivisionError: return 0.0 @property def avg_content_length(self): try: return self.total_content_length / self.num_requests except ZeroDivisionError: return 0
MIT License
deepchem/torchchem
contrib/MolDQN/utils.py
contains_scaffold
python
def contains_scaffold(mol, scaffold):
    pattern = Chem.MolFromSmiles(scaffold)
    matches = mol.GetSubstructMatches(pattern)
    return bool(matches)
Returns whether mol contains the given scaffold.

NOTE: This is more advanced than simply computing scaffold equality
(i.e. scaffold(mol_a) == scaffold(mol_b)). This method allows the target
scaffold to be a subset of the (possibly larger) scaffold in mol.

Args:
    mol: RDKit Mol.
    scaffold: String scaffold SMILES.

Returns:
    Boolean whether scaffold is found in mol.
https://github.com/deepchem/torchchem/blob/b4cee54088c2d1d52c349c3ed67126bc86940ba8/contrib/MolDQN/utils.py#L80-L96
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from rdkit import Chem
from rdkit.Chem import Descriptors
from rdkit.Chem.Scaffolds import MurckoScaffold
import torch.nn as nn
from rdkit import DataStructs
from rdkit.Chem import AllChem
from rdkit.Chem import RDConfig
import numpy as np
import hyp
import sys
import os

sys.path.append(os.path.join(RDConfig.RDContribDir, "SA_Score"))
import sascorer


def get_fingerprint(smiles, fingerprint_length, fingerprint_radius):
    if smiles is None:
        return np.zeros((hyp.fingerprint_length,))
    molecule = Chem.MolFromSmiles(smiles)
    if molecule is None:
        return np.zeros((hyp.fingerprint_length,))
    fingerprint = AllChem.GetMorganFingerprintAsBitVect(
        molecule, hyp.fingerprint_radius, hyp.fingerprint_length)
    arr = np.zeros((1,))
    DataStructs.ConvertToNumpyArray(fingerprint, arr)
    return arr


def atom_valences(atom_types):
    periodic_table = Chem.GetPeriodicTable()
    return [
        max(list(periodic_table.GetValenceList(atom_type)))
        for atom_type in atom_types
    ]


def get_scaffold(mol):
    return Chem.MolToSmiles(
        MurckoScaffold.GetScaffoldForMol(mol), isomericSmiles=True)
MIT License
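A minimal usage sketch for the record above; the SMILES strings and the import path are illustrative, not part of the source:

from rdkit import Chem
from utils import contains_scaffold  # hypothetical import of the function above

mol = Chem.MolFromSmiles("c1ccccc1CCN")    # phenethylamine
print(contains_scaffold(mol, "c1ccccc1"))  # True: the benzene scaffold is a substructure
print(contains_scaffold(mol, "c1ccncc1"))  # False: no pyridine scaffold present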
dallinger/dallinger
demos/dlgr/demos/chatroom/experiment.py
CoordinationChatroom.__init__
python
def __init__(self, session=None):
    super(CoordinationChatroom, self).__init__(session)
    if session:
        self.setup()
Initialize the experiment.
https://github.com/dallinger/dallinger/blob/2bc309c422935d372a7568cc18340e3b5b3f6a21/demos/dlgr/demos/chatroom/experiment.py#L30-L34
import logging

from dallinger import networks
from dallinger.compat import unicode
from dallinger.config import get_config
from dallinger.experiment import Experiment
from dallinger.nodes import Agent

try:
    from .bots import Bot
    Bot = Bot
except ImportError:
    pass

logger = logging.getLogger(__file__)


def extra_parameters():
    config = get_config()
    config.register("network", unicode)
    config.register("repeats", int)
    config.register("n", int)


class CoordinationChatroom(Experiment):
MIT License
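The `if session:` guard above matters because Dallinger can instantiate experiment classes without a database session (for example, when only reading configuration); a sketch under that assumption, with a hypothetical session factory:

from dlgr.demos.chatroom.experiment import CoordinationChatroom  # import path inferred from the record

config_only = CoordinationChatroom()                 # no session: setup() is skipped
live = CoordinationChatroom(session=make_session())  # make_session() is hypothetical; setup() runs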
qiskit/qiskit-aqua
qiskit/optimization/converters/quadratic_program_to_qubo.py
QuadraticProgramToQubo.convert
python
def convert(self, problem: QuadraticProgram) -> QuadraticProgram:
    msg = self.get_compatibility_msg(problem)
    if len(msg) > 0:
        raise QiskitOptimizationError('Incompatible problem: {}'.format(msg))
    problem_ = self._ineq_to_eq.convert(problem)
    problem_ = self._int_to_bin.convert(problem_)
    problem_ = self._penalize_lin_eq_constraints.convert(problem_)
    return problem_
Convert a problem with linear equality constraints into a new one in QUBO form.

Args:
    problem: The problem with linear equality constraints to be solved.

Returns:
    The problem converted in QUBO format.

Raises:
    QiskitOptimizationError: In case of an incompatible problem.
https://github.com/qiskit/qiskit-aqua/blob/5ccf0e20129880e78a57f2f78c59b9a362ebb208/qiskit/optimization/converters/quadratic_program_to_qubo.py#L50-L78
from typing import Optional, Union, List

import numpy as np

from .quadratic_program_converter import QuadraticProgramConverter
from ..exceptions import QiskitOptimizationError
from ..problems.quadratic_program import QuadraticProgram


class QuadraticProgramToQubo(QuadraticProgramConverter):

    def __init__(self, penalty: Optional[float] = None) -> None:
        from ..converters.integer_to_binary import IntegerToBinary
        from ..converters.inequality_to_equality import InequalityToEquality
        from ..converters.linear_equality_to_penalty import LinearEqualityToPenalty

        self._int_to_bin = IntegerToBinary()
        self._ineq_to_eq = InequalityToEquality(mode='integer')
        self._penalize_lin_eq_constraints = LinearEqualityToPenalty(penalty=penalty)
Apache License 2.0
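An illustrative end-to-end conversion against the qiskit-aqua-era API shown above (variable names and the penalty value are made up):

from qiskit.optimization import QuadraticProgram
from qiskit.optimization.converters import QuadraticProgramToQubo

qp = QuadraticProgram()
qp.binary_var('x')
qp.binary_var('y')
qp.linear_constraint(linear={'x': 1, 'y': 1}, sense='==', rhs=1, name='one_hot')
qp.minimize(linear={'x': -1, 'y': -2})

qubo = QuadraticProgramToQubo(penalty=10).convert(qp)
# The equality constraint is now folded into the objective as a quadratic penalty term.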
chaingreenorg/chaingreen-blockchain
chaingreen/consensus/block_rewards.py
calculate_pool_reward
python
def calculate_pool_reward(height: uint32) -> uint64:
    if height == 0:
        return uint64(0)
    elif height < 4 * _blocks_per_year:
        return uint64(int((7 / 8) * 500 * _mio_per_chaingreen))
    elif height < 8 * _blocks_per_year:
        return uint64(int((7 / 8) * 250 * _mio_per_chaingreen))
    elif height < 12 * _blocks_per_year:
        return uint64(int((7 / 8) * 125 * _mio_per_chaingreen))
    elif height < 16 * _blocks_per_year:
        return uint64(int((7 / 8) * 72.5 * _mio_per_chaingreen))
    elif height < 20 * _blocks_per_year:
        return uint64(int((7 / 8) * 31.25 * _mio_per_chaingreen))
    elif height < 24 * _blocks_per_year:
        return uint64(int((7 / 8) * 15.625 * _mio_per_chaingreen))
    elif height < 28 * _blocks_per_year:
        return uint64(int((7 / 8) * 7.8125 * _mio_per_chaingreen))
    else:
        return uint64(0)
Returns the pool reward at a certain block height.

The pool earns 7/8 of the reward in each block. If the farmer is solo
farming, they act as the pool, and therefore earn the entire block reward.

These halving events will not be hit at the exact times (3 years, etc),
due to fluctuations in difficulty. They will likely come early, if the
network space and VDF rates increase continuously.
https://github.com/chaingreenorg/chaingreen-blockchain/blob/43a0cd41fdefc93a891e70d2430ed330730715f3/chaingreen/consensus/block_rewards.py#L8-L33
from chaingreen.util.ints import uint32, uint64

_mio_per_chaingreen = 1000000000000
_blocks_per_year = 1681920
Apache License 2.0
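A quick arithmetic check of the first reward era. Note, as recorded in the source above, that the fourth era's 72.5 does not follow a strict halving of 125 (that would be 62.5), even though the later eras do halve:

_mio_per_chaingreen = 1000000000000
pool_share = int((7 / 8) * 500 * _mio_per_chaingreen)    # 437500000000000
farmer_share = int((1 / 8) * 500 * _mio_per_chaingreen)  # 62500000000000
assert pool_share + farmer_share == 500 * _mio_per_chaingreen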
walkerning/aw_nas
aw_nas/rollout/mutation.py
MutationRollout.random_sample
python
def random_sample(cls, population, parent_index, num_mutations=1, primitive_prob=0.5):
    search_space = population.search_space
    base_arch = search_space.rollout_from_genotype(
        population.get_model(parent_index).genotype).arch
    mutations = []
    primitive_choices = collections.defaultdict(list)
    primitive_mutated = collections.defaultdict(int)
    node_choices = collections.defaultdict(list)
    node_mutated = collections.defaultdict(int)
    for _ in range(num_mutations):
        mutation_type = CellMutation.PRIMITIVE if np.random.rand() < primitive_prob \
            else CellMutation.NODE
        cell = np.random.randint(low=0, high=search_space.num_cell_groups)
        step = np.random.randint(low=0, high=search_space.num_steps)
        connection = np.random.randint(low=0, high=search_space.num_node_inputs)
        if mutation_type == CellMutation.PRIMITIVE:
            if (cell, step, connection) in primitive_choices:
                choices = primitive_choices[(cell, step, connection)]
            else:
                ori = base_arch[cell][1][search_space.num_node_inputs * step + connection]
                num_prims = search_space._num_primitives \
                    if not search_space.cellwise_primitives \
                    else search_space._num_primitives_list[cell]
                choices = list(range(num_prims))
                choices.remove(ori)
                primitive_choices[(cell, step, connection)] = choices
            expect(choices,
                   ("There are no non-duplicate primitive mutation available"
                    " anymore for ({}, {}, {}) after {} mutations").format(
                        cell, step, connection,
                        primitive_mutated[(cell, step, connection)]))
            new_choice = np.random.choice(choices)
            choices.remove(new_choice)
            base_arch[cell][1][search_space.num_node_inputs * step + connection] = new_choice
            primitive_mutated[(cell, step, connection)] += 1
        else:
            if (cell, step, connection) in node_choices:
                choices = node_choices[(cell, step, connection)]
            else:
                ori = base_arch[cell][0][search_space.num_node_inputs * step + connection]
                choices = list(range(search_space.num_init_nodes + step))
                choices.remove(ori)
                node_choices[(cell, step, connection)] = choices
            expect(choices,
                   ("There are no non-duplicate input node mutation available"
                    " anymore for ({}, {}, {}) after {} mutations").format(
                        cell, step, connection,
                        node_mutated[(cell, step, connection)]))
            new_choice = np.random.choice(choices)
            choices.remove(new_choice)
            base_arch[cell][0][search_space.num_node_inputs * step + connection] = new_choice
            node_mutated[(cell, step, connection)] += 1
        mutations.append(CellMutation(search_space, mutation_type, cell, step, connection,
                                      modified=new_choice))
    return cls(population, parent_index, mutations, search_space)
Randomly sample a MutationRollout with mutations.

Duplication is checked for when multiple mutations are sampled.
https://github.com/walkerning/aw_nas/blob/8a32196ce342b8ad9e3885895735d1286e25beba/aw_nas/rollout/mutation.py#L346-L406
import os import copy import glob import shutil import collections import six import yaml import numpy as np from aw_nas import utils from aw_nas.utils import expect from aw_nas.base import Component from aw_nas.rollout.base import BaseRollout from aw_nas.common import get_genotype_substr, genotype_from_str, ConfigTemplate from aw_nas.utils import getLogger class ModelRecord(object): def __init__(self, genotype, config, search_space, info_path=None, checkpoint_path=None, finished=False, confidence=None, perfs=None): self._genotype = genotype self.search_space = search_space self.config = config self.info_path = info_path self.checkpoint_path = checkpoint_path self.finished = finished self.confidence = confidence self.perfs = perfs def __repr__(self): return ("ModelRecord({_genotype}, info_path={info_path}, ckpt_path={ckpt_path}, " "finished={finished}, perfs={perfs}").format( _genotype=self._genotype, info_path=self.info_path, ckpt_path=self.checkpoint_path, finished=self.finished, perfs=self.perfs) @property def genotype(self): return genotype_from_str(self._genotype, self.search_space) def save(self, path): meta_info = collections.OrderedDict() meta_info["genotypes"] = get_genotype_substr(str(self.genotype)) meta_info["config"] = dict(self.config) meta_info["checkpoint_path"] = self.checkpoint_path meta_info["finished"] = self.finished meta_info["confidence"] = self.confidence meta_info["perfs"] = self.perfs self.info_path = path with open(path, "w") as o_stream: yaml.safe_dump(meta_info, stream=o_stream, default_flow_style=False) def save_config(self, fname): with open(fname, "w") as c_f: yaml.safe_dump(dict(self.config), c_f) @classmethod def init_from_file(cls, path, search_space): with open(path, "r") as meta_f: meta_info = yaml.safe_load(meta_f) record = cls( str(genotype_from_str(meta_info["genotypes"], search_space)), meta_info["config"], search_space, os.path.abspath(path), meta_info["checkpoint_path"], finished=meta_info["finished"], confidence=meta_info.get("confidence", None), perfs=meta_info["perfs"]) return record class Population(Component): def __init__(self, search_space, model_records, cfg_template, next_index=None): super(Population, self).__init__(schedule_cfg=None) self.search_space = search_space self._model_records = model_records self.genotype_records = collections.OrderedDict([ (ind, genotype_from_str( record.genotype, self.search_space)) for ind, record in six.iteritems(self._model_records)]) self._size = len(model_records) self.cfg_template = cfg_template if next_index is None: self._next_index = np.max(list(model_records.keys())) + 1 if model_records else 0 else: self._next_index = next_index self.start_save_index = self._next_index def __getstate__(self): state = super(Population, self).__getstate__().copy() del state["genotype_records"] return state def __setstate__(self, state): super(Population, self).__setstate__(state) self.genotype_records = collections.OrderedDict([ (ind, genotype_from_str( record.genotype, self.search_space)) for ind, record in six.iteritems(self._model_records)]) @property def model_records(self): return self._model_records @property def next_index(self): return self._next_index @property def size(self): return self._size def get_model(self, index): return self.model_records[index] def add_model(self, model_record, index=None): index = self._next_index if index is None else index self.model_records[index] = model_record self.genotype_records[index] = genotype_from_str(model_record.genotype, self.search_space) self._next_index += 1 
self._size += 1 return index def save(self, path, start_index=None): path = utils.makedir(path) backuped = 0 saved = 0 start_save_index = self.start_save_index if start_index is None else start_index for ind, record in six.iteritems(self.model_records): if ind < start_save_index: continue save_path = os.path.join(path, "{}.yaml".format(ind)) if os.path.exists(save_path): backup_dir = utils.makedir(os.path.join(path, "overwrite_backup")) backup_path = os.path.join(backup_dir, "{}.yaml".format(ind)) self.logger.warning("%s already exists; overwrite and backup to %s", save_path, backup_path) shutil.copyfile(save_path, backup_path) backuped += 1 record.save(save_path) saved += 1 self.logger.info("Saving start from index %d. %d/%d records saved " "(%d records overwrited and backuped). By default " "next save will start from index %d.", self.start_save_index, saved, len(self.model_records), backuped, self._next_index) self.start_save_index = self._next_index return saved def contain_rollout(self, rollout): return rollout.genotype in self.genotype_records.values() def remove_age(self, args): def remove_perf(self, args): def __repr__(self): return "{}(size={}, search_space={}, next_index={})".format( self.__class__.__name__, self.size, self.search_space, self.next_index) @classmethod def init_from_dirs(cls, dirs, search_space=None, cfg_template_file=None): assert dirs, "No dirs specified!" if cfg_template_file is None: cfg_template_file = os.path.join(dirs[0], "template.yaml") with open(cfg_template_file, "r") as cfg_f: cfg_template = ConfigTemplate(yaml.safe_load(cfg_f)) getLogger("population").info("Read the template config from %s", cfg_template_file) model_records = collections.OrderedDict() if search_space is None: from aw_nas.common import get_search_space search_space = get_search_space(cfg_template["search_space_type"], **cfg_template["search_space_cfg"]) for _, dir_ in enumerate(dirs): meta_files = glob.glob(os.path.join(dir_, "*.yaml")) for fname in meta_files: if "template.yaml" in fname: continue index = int(os.path.basename(fname).rsplit(".", 1)[0]) expect(index not in model_records, "There are duplicate index: {}. 
rename or soft-link the files".format(index)) model_records[index] = ModelRecord.init_from_file(fname, search_space) getLogger("population").info("Parsed %d directories, total %d model records loaded.", len(dirs), len(model_records)) return Population(search_space, model_records, cfg_template) class CellMutation(object): NODE = 0 PRIMITIVE = 1 def __init__(self, search_space, mutation_type, cell, step, connection, modified=None): assert mutation_type in {CellMutation.PRIMITIVE, CellMutation.NODE}, "invalid mutation_type" self.search_space = search_space self.mutation_type = mutation_type self.cell = cell self.step = step self.connection = connection self.modified = modified self.node = modified if self.mutation_type == CellMutation.NODE else None self.primitive = modified if self.mutation_type == CellMutation.PRIMITIVE else None if self.primitive: self.primitive_str = search_space.cell_shared_primitives[self.cell][self.primitive] if search_space.cellwise_primitives else search_space.shared_primitives[self.primitive] def apply(self, arch): arch[self.cell][self.mutation_type] [self.search_space.num_node_inputs * self.step + self.connection] = self.modified return arch def __repr__(self): return "Mutation({}, {}, {}, {}, {}{})".format( self.cell, self.step, self.connection, "primitive" if self.mutation_type == CellMutation.PRIMITIVE else "node", self.modified, ", {}".format(self.primitive_str) if self.mutation_type == CellMutation.PRIMITIVE else "" ) class MutationRollout(BaseRollout): NAME = "mutation" def __init__(self, population, parent_index, mutations, search_space, candidate_net=None): super(MutationRollout, self).__init__() self.population = population self.parent_index = parent_index self.mutations = mutations self.search_space = search_space self.candidate_net = candidate_net self.arch = self.apply_mutation( self.search_space, self.search_space.rollout_from_genotype( self.population.get_model(parent_index).genotype).arch, self.mutations ) self._genotype = None self.model_record = ModelRecord( str(self.genotype), self.population.cfg_template.create_cfg(self.genotype), search_space, perfs=self.perf) def __getstate__(self): state = self.__dict__.copy() if "_genotype" in state: del state["_genotype"] return state @classmethod def apply_mutation(cls, search_space, arch, mutations): arch = copy.deepcopy(arch) for mutation in mutations: mutation.apply(arch) return arch def set_candidate_net(self, c_net): self.candidate_net = c_net def set_ckpt_path(self, path): assert self.model_record is not None self.model_record.checkpoint_path = path def set_perf(self, value, name="reward"): assert self.model_record self.perf[name] = value if not self.model_record.perfs is self.perf: self.model_record.perfs[name] = value return self def genotype_list(self): return list(self.genotype._asdict().items()) def plot_arch(self, filename, label="", edge_labels=None): return self.search_space.plot_arch(self.genotype_list(), filename, label=label, edge_labels=edge_labels) @property def genotype(self): if self._genotype is None: self._genotype = self.search_space.genotype(self.arch) return self._genotype @classmethod
MIT License
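An illustrative call pattern; it presupposes an initialized aw_nas Population with at least one model record, which is not constructed here:

from aw_nas.rollout.mutation import MutationRollout

rollout = MutationRollout.random_sample(
    population,          # an aw_nas Population instance (assumed to exist)
    parent_index=0,      # index of the parent model to mutate
    num_mutations=2,     # two non-duplicate mutations are sampled
    primitive_prob=0.5,  # probability of a primitive (vs. input-node) mutation
)
print(rollout.genotype)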
rapid7/vm-console-client-python
rapid7vmconsole/models/syslog_alert.py
SyslogAlert.notification
python
def notification(self):
    return self._notification
Gets the notification of this SyslogAlert.  # noqa: E501

The type of alert.  # noqa: E501

:return: The notification of this SyslogAlert.  # noqa: E501
:rtype: str
https://github.com/rapid7/vm-console-client-python/blob/55e1f573967bce27cc9a2d10c12a949b1142c2b3/rapid7vmconsole/models/syslog_alert.py#L250-L258
import pprint
import re

import six


class SyslogAlert(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'enabled': 'bool',
        'enabled_scan_events': 'ScanEvents',
        'enabled_vulnerability_events': 'VulnerabilityEvents',
        'id': 'int',
        'links': 'list[Link]',
        'maximum_alerts': 'int',
        'name': 'str',
        'notification': 'str',
        'server': 'str'
    }

    attribute_map = {
        'enabled': 'enabled',
        'enabled_scan_events': 'enabledScanEvents',
        'enabled_vulnerability_events': 'enabledVulnerabilityEvents',
        'id': 'id',
        'links': 'links',
        'maximum_alerts': 'maximumAlerts',
        'name': 'name',
        'notification': 'notification',
        'server': 'server'
    }

    def __init__(self, enabled=None, enabled_scan_events=None, enabled_vulnerability_events=None, id=None, links=None, maximum_alerts=None, name=None, notification=None, server=None):
        self._enabled = None
        self._enabled_scan_events = None
        self._enabled_vulnerability_events = None
        self._id = None
        self._links = None
        self._maximum_alerts = None
        self._name = None
        self._notification = None
        self._server = None
        self.discriminator = None
        self.enabled = enabled
        if enabled_scan_events is not None:
            self.enabled_scan_events = enabled_scan_events
        if enabled_vulnerability_events is not None:
            self.enabled_vulnerability_events = enabled_vulnerability_events
        if id is not None:
            self.id = id
        if links is not None:
            self.links = links
        if maximum_alerts is not None:
            self.maximum_alerts = maximum_alerts
        self.name = name
        self.notification = notification
        self.server = server

    @property
    def enabled(self):
        return self._enabled

    @enabled.setter
    def enabled(self, enabled):
        if enabled is None:
            raise ValueError("Invalid value for `enabled`, must not be `None`")
        self._enabled = enabled

    @property
    def enabled_scan_events(self):
        return self._enabled_scan_events

    @enabled_scan_events.setter
    def enabled_scan_events(self, enabled_scan_events):
        self._enabled_scan_events = enabled_scan_events

    @property
    def enabled_vulnerability_events(self):
        return self._enabled_vulnerability_events

    @enabled_vulnerability_events.setter
    def enabled_vulnerability_events(self, enabled_vulnerability_events):
        self._enabled_vulnerability_events = enabled_vulnerability_events

    @property
    def id(self):
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def links(self):
        return self._links

    @links.setter
    def links(self, links):
        self._links = links

    @property
    def maximum_alerts(self):
        return self._maximum_alerts

    @maximum_alerts.setter
    def maximum_alerts(self, maximum_alerts):
        if maximum_alerts is not None and maximum_alerts < 1:
            raise ValueError("Invalid value for `maximum_alerts`, must be a value greater than or equal to `1`")
        self._maximum_alerts = maximum_alerts

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")
        self._name = name

    @property
MIT License
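Illustrative construction and property access (all values are made up; the required arguments follow the __init__ in the context above):

from rapid7vmconsole.models.syslog_alert import SyslogAlert

alert = SyslogAlert(
    enabled=True,
    name='scan-complete',
    notification='SYSLOG',
    server='syslog.example.com:514',
)
print(alert.notification)  # 'SYSLOG'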
babylonhealth/simba
simba/utils/vmf.py
fit_concentration
python
def fit_concentration(X):
    X = np.array(X)
    n, d = X.shape
    R = la.norm(X.sum(axis=0)) / n
    Rs = R**2
    return R * (d - Rs) / (1.0 - Rs)
Computes the vMF MLE solution for the concentration parameter.

NOTE: This is an approximate solution to a transcendental equation.

:param X [nxd ndarray]: Design matrix of normalised word vectors
:return [float]: MLE concentration parameter solution
https://github.com/babylonhealth/simba/blob/b0124281c0efd59b088520e28add36d1038bce3b/simba/utils/vmf.py#L27-L38
from scipy.special import ive
import numpy as np
import numpy.linalg as la


def to_cartesian(phi):
    d = len(phi)
    X = np.cos(phi[0])
    for i in range(1, d):
        X += np.prod(np.sin(phi[:i]), axis=1) * np.cos(phi[i])
    X += np.prod(np.sin(phi))
    return X


def fit_mean_direction(X):
    return X.sum(axis=0) / la.norm(X.sum(axis=0))
Apache License 2.0
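A numerical sanity check of the closed-form estimate kappa ≈ R(d - R²)/(1 - R²), where R is the mean resultant length; the synthetic data below is not from the source:

import numpy as np
import numpy.linalg as la

rng = np.random.default_rng(0)
X = rng.normal(loc=[3.0, 0.0, 0.0], size=(1000, 3))
X /= la.norm(X, axis=1, keepdims=True)  # project rows onto the unit sphere

n, d = X.shape
R = la.norm(X.sum(axis=0)) / n          # mean resultant length
kappa = R * (d - R**2) / (1.0 - R**2)
print(kappa)  # a large kappa means the samples concentrate tightly around the mean direction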
hewlettpackard/python-hponeview
hpOneView/resources/storage/storage_volume_attachments.py
StorageVolumeAttachments.remove_extra_presentations
python
def remove_extra_presentations(self, resource, timeout=-1):
    uri = self.URI + "/repair"
    custom_headers = {'Accept-Language': 'en_US'}
    return self._client.create(resource, uri=uri, timeout=timeout,
                               custom_headers=custom_headers)
Removes extra presentations from a specified server profile.

Args:
    resource (dict): Object to create
    timeout:
        Timeout in seconds. Wait for task completion by default.
        The timeout does not abort the operation in OneView; it just
        stops waiting for its completion.

Returns:
    dict: Associated storage attachment resource.
https://github.com/hewlettpackard/python-hponeview/blob/678d53b338f6bc7af7adb63153d7d8d99dc94ac0/hpOneView/resources/storage/storage_volume_attachments.py#L98-L113
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future import standard_library

standard_library.install_aliases()

from hpOneView.resources.resource import ResourceClient


class StorageVolumeAttachments(object):
    URI = '/rest/storage-volume-attachments'

    def __init__(self, con):
        self._connection = con
        self._client = ResourceClient(con, self.URI)

    def get_all(self, start=0, count=-1, filter='', sort=''):
        return self._client.get_all(start, count, filter=filter, sort=sort)

    def get_extra_unmanaged_storage_volumes(self, start=0, count=-1, filter='', sort=''):
        uri = self.URI + "/repair?alertFixType=ExtraUnmanagedStorageVolumes"
        return self._client.get_all(start=start, count=count, filter=filter, sort=sort, uri=uri)
MIT License
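An illustrative call, assuming an already-authenticated OneView client object (`oneview_client` is not constructed here); the payload shape is an assumption based on the ExtraUnmanagedStorageVolumes fix type visible in the context above, and the profile URI is a placeholder:

info = {
    "type": "ExtraUnmanagedStorageVolumes",
    "resourceUri": "/rest/server-profiles/<profile-id>",  # placeholder URI
}
attachment = oneview_client.storage_volume_attachments.remove_extra_presentations(info)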
breakingbytes/simkit
simkit/core/data_readers.py
NumPyGenFromTxtReader.load_data
python
def load_data(self, filename, *args, **kwargs):
    header_param = self.parameters.get('header')
    data_param = self.parameters['data']
    dtype = data_param.get('dtype')
    _utf8_list_to_ascii_tuple(dtype) if dtype else None
    delimiter = data_param.get('delimiter')
    skip_header = data_param.get('skip_header')
    usecols = data_param.get('usecols')
    names = data_param.get('names')
    names = [str(_) for _ in names] if names else None
    excludelist = data_param.get('excludelist')
    deletechars = data_param.get('deletechars')
    data_units = data_param.get('units', {})
    if not (dtype or names):
        raise UnnamedDataError(filename)
    data = {}
    with open(filename, 'r') as fid:
        if header_param:
            data.update(_read_header(fid, header_param))
            fid.seek(0)
        data_data = np.genfromtxt(fid, dtype, delimiter=delimiter,
                                  skip_header=skip_header, usecols=usecols,
                                  names=names, excludelist=excludelist,
                                  deletechars=deletechars)
        data.update(_apply_units(data_data, data_units, fid.name))
    return data
load data from text file.

:param filename: name of file to read
:type filename: str
:returns: data read from file using :func:`numpy.genfromtxt`
:rtype: dict
:raises: :exc:`~simkit.core.exceptions.UnnamedDataError`
https://github.com/breakingbytes/simkit/blob/c247b6ecf46d727703c03cb0d987e35fd054eaa6/simkit/core/data_readers.py#L456-L498
from past.builtins import basestring from io import StringIO from simkit.core import UREG, Q_ from simkit.core.exceptions import ( UnnamedDataError, MixedTextNoMatchError ) from xlrd import open_workbook import csv import numpy as np import json import os import time import re EFG_PATTERN = '([-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?)' RE_METH = ['search', 'match', 'findall', 'split'] class DataReader(object): is_file_reader = True def __init__(self, parameters, meta=None): self.parameters = parameters self.meta = meta def load_data(self, *args, **kwargs): raise NotImplementedError('load_data') def apply_units_to_cache(self, data): raise NotImplementedError('apply_units_to_cache') class JSONReader(DataReader): def __init__(self, parameters, meta=None): super(JSONReader, self).__init__(parameters, meta) self.orig_data_reader = meta.data_reader def load_data(self, filename, *args, **kwargs): if not filename.endswith('.json'): filename += '.json' with open(filename, 'r') as fid: json_data = json.load(fid) if (not self.orig_data_reader or isinstance(self, self.orig_data_reader)): return self.apply_units_to_cache(json_data['data']) utc_mod_time = json_data.get('utc_mod_time') orig_data_reader_obj = self.orig_data_reader(self.parameters, self.meta) if utc_mod_time: utc_mod_time = time.struct_time(utc_mod_time) orig_filename = filename[:-5] if utc_mod_time < time.gmtime(os.path.getmtime(orig_filename)): os.remove(filename) return orig_data_reader_obj.load_data(orig_filename) return orig_data_reader_obj.apply_units_to_cache(json_data['data']) def apply_units_to_cache(self, data): for k, val in self.parameters.items(): if 'units' in val: data[k] = Q_(data[k], val.get('units')) return data class XLRDReader(DataReader): def load_data(self, filename, *args, **kwargs): workbook = open_workbook(filename, verbosity=True) data = {} for param, pval in self.parameters.items(): sheet = pval['extras']['sheet'] worksheet = workbook.sheet_by_name(sheet) prng0, prng1 = pval['extras']['range'] punits = str(pval.get('units') or '') if prng0 is None: prng0 = [] if prng1 is None: prng1 = [] if isinstance(prng0, int) and isinstance(prng1, int): datum = worksheet.cell_value(prng0, prng1) elif isinstance(prng0, list) and isinstance(prng1, int): datum = worksheet.col_values(prng1, *prng0) elif isinstance(prng0, int) and isinstance(prng1, list): datum = worksheet.row_values(prng0, *prng1) else: datum = [] for col in xrange(prng0[1], prng1[1]): datum.append(worksheet.col_values(col, prng0[0], prng1[0])) try: npdatum = np.array(datum, dtype=np.float) except ValueError as err: if not datum: data[param] = None elif all(isinstance(_, basestring) for _ in datum): data[param] = datum elif all(not _ for _ in datum): data[param] = None else: raise err else: data[param] = npdatum * UREG(punits) return data def apply_units_to_cache(self, data): for param, pval in self.parameters.items(): try: data[param] *= UREG(str(pval.get('units') or '')) except TypeError: continue return data class NumPyLoadTxtReader(DataReader): def load_data(self, filename, *args, **kwargs): header_param = self.parameters.get('header') data_param = self.parameters['data'] dtype = data_param['dtype'] _utf8_list_to_ascii_tuple(dtype) if dtype else None delimiter = data_param.get('delimiter') skiprows = data_param.get('skiprows') data_units = data_param.get('units', {}) data = {} with open(filename, 'r') as fid: if header_param: data.update(_read_header(fid, header_param)) fid.seek(0) data_data = np.loadtxt(fid, dtype, delimiter=delimiter, 
skiprows=skiprows) data.update(_apply_units(data_data, data_units, fid.name)) return data def apply_units_to_cache(self, data): return _apply_units_to_numpy_data_readers(self.parameters, data) class NumPyGenFromTxtReader(DataReader):
BSD 3-Clause New or Revised License
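An illustrative reader configuration; the keys under 'data' map onto the np.genfromtxt keywords forwarded above, and the file and column names are made up:

from simkit.core.data_readers import NumPyGenFromTxtReader

parameters = {
    'data': {
        'names': ['time', 'power'],  # column names passed to np.genfromtxt
        'delimiter': ',',
        'skip_header': 1,
        'units': {'power': 'W'},     # attached to the array via the pint registry (UREG)
    }
}
reader = NumPyGenFromTxtReader(parameters)
data = reader.load_data('measurements.csv')  # hypothetical CSV file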
mrtango/rispy
rispy/parser.py
BaseParser.is_header
python
def is_header(self, line: str) -> bool:
    return False
Determine whether a line is a header and should be skipped.

Only operates on lines outside of the reference.
https://github.com/mrtango/rispy/blob/d795f51264823ce666b36dd1538d3c8955f6d26f/rispy/parser.py#L242-L247
from collections import defaultdict from abc import ABC, abstractmethod from pathlib import Path from typing import Dict, List, TextIO, Union, Optional import re from .config import LIST_TYPE_TAGS, TAG_KEY_MAPPING, WOK_TAG_KEY_MAPPING, WOK_LIST_TYPE_TAGS __all__ = ["load", "loads", "BaseParser", "WokParser", "RisParser"] class NextLine(Exception): pass class BaseParser(ABC): START_TAG: str END_TAG: str = "ER" PATTERN: str DEFAULT_IGNORE: List[str] = [] DEFAULT_MAPPING: Dict DEFAULT_LIST_TAGS: List[str] def __init__( self, *, mapping: Optional[Dict] = None, list_tags: Optional[List[str]] = None, ignore: Optional[List[str]] = None, skip_missing_tags: bool = False, skip_unknown_tags: bool = False, enforce_list_tags: bool = True, ): self.pattern = re.compile(self.PATTERN) self.mapping = mapping if mapping is not None else self.DEFAULT_MAPPING self.list_tags = list_tags if list_tags is not None else self.DEFAULT_LIST_TAGS self.ignore = ignore if ignore is not None else self.DEFAULT_IGNORE self.skip_missing_tags = skip_missing_tags self.skip_unknown_tags = skip_unknown_tags self.enforce_list_tags = enforce_list_tags def parse(self, text: str) -> List[Dict]: clean_body = self.clean_text(text) lines = clean_body.split("\n") return list(self._parse_lines(lines)) def _parse_lines(self, lines): self.in_ref = False self.current = {} self.last_tag = None for line_number, line in enumerate(lines): if not line.strip(): continue if self.is_tag(line): try: yield self._parse_tag(line, line_number) self.current = {} self.in_ref = False self.last_tag = None except NextLine: continue else: try: yield self._parse_other(line, line_number) except NextLine: continue def _parse_tag(self, line, line_number): tag = self.get_tag(line) if tag in self.ignore: raise NextLine if tag == self.END_TAG: return self.current if tag == self.START_TAG: if self.in_ref: raise IOError(f"Missing end of record tag in line {line_number}:\n {line}") self._add_tag(tag, line) self.in_ref = True raise NextLine if not self.in_ref: raise IOError(f"Invalid start tag in line {line_number}:\n {line}") if tag in self.mapping: self._add_tag(tag, line) raise NextLine elif not self.skip_unknown_tags: self._add_unknown_tag(tag, line) raise NextLine raise NextLine def _parse_other(self, line, line_number): if self.skip_missing_tags: raise NextLine if self.in_ref: if self.last_tag is None: raise IOError(f"Expected tag in line {line_number}:\n {line}") self._add_tag(self.last_tag, line, all_line=True) raise NextLine if self.is_header(line): raise NextLine raise IOError(f"Expected start tag in line {line_number}:\n {line}") def _add_single_value(self, name, value, is_multi=False): if not is_multi: if self.enforce_list_tags or name not in self.current: ignore_this_if_has_one = value self.current.setdefault(name, ignore_this_if_has_one) else: self._add_list_value(name, value) return value_must_exist_or_is_bug = self.current[name] self.current[name] = " ".join((value_must_exist_or_is_bug, value)) def _add_list_value(self, name, value): try: self.current[name].append(value) except KeyError: self.current[name] = [value] except AttributeError: if not isinstance(self.current[name], str): raise must_exist = self.current[name] self.current[name] = [must_exist] + [value] def _add_tag(self, tag, line, all_line=False): self.last_tag = tag name = self.mapping[tag] if all_line: new_value = line.strip() else: new_value = self.get_content(line) if tag not in self.list_tags: self._add_single_value(name, new_value, is_multi=all_line) return self._add_list_value(name, 
new_value) def _add_unknown_tag(self, tag, line): name = self.mapping["UK"] value = self.get_content(line) if name not in self.current: self.current[name] = defaultdict(list) self.current[name][tag].append(value) def clean_text(self, text: str) -> str: text = text.lstrip("\ufeff") return text def get_tag(self, line: str) -> str: return line[0:2] def is_tag(self, line: str) -> bool: return bool(self.pattern.match(line)) @abstractmethod def get_content(self, line: str) -> str: raise NotImplementedError
MIT License
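An illustrative override: since the base implementation always returns False, a subclass can use this hook to skip banner lines that some exports place between references. RisParser is taken from this module's __all__ and is assumed to be a concrete parser; the banner pattern is hypothetical:

from rispy.parser import RisParser

class BannerTolerantParser(RisParser):
    def is_header(self, line: str) -> bool:
        # Skip hypothetical 'Record N of M' export banners outside of references.
        return line.startswith('Record ')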
wikimedia/pywikibot
pywikibot/site/_apisite.py
APISite.logout
python
def logout(self):
    if self.is_oauth_token_available():
        pywikibot.warning('Using OAuth suppresses logout function')
    req_params = {'action': 'logout'}
    with suppress(Error):
        req_params['token'] = self.tokens['csrf']
    uirequest = self._simple_request(**req_params)
    uirequest.submit()
    self._loginstatus = _LoginStatus.NOT_LOGGED_IN
    del self.userinfo
    self.tokens = TokenWallet(self)
    self._paraminfo = api.ParamInfo(self)
    api._invalidate_superior_cookies(self.family)
Logout of the site and load details for the logged out user.

Also logs out of the global account if linked to the user.

https://www.mediawiki.org/wiki/API:Logout

:raises APIError: Logout is not available when OAuth enabled.
https://github.com/wikimedia/pywikibot/blob/5097f5b9a7ef9d39f35f17edd11faf3086a01d1d/pywikibot/site/_apisite.py#L407-L432
import datetime import mimetypes import os import re import time import typing from collections import OrderedDict, defaultdict, namedtuple from collections.abc import Iterable from contextlib import suppress from textwrap import fill from typing import Optional, Union from warnings import warn import pywikibot import pywikibot.family from pywikibot.backports import List from pywikibot.comms.http import get_authentication from pywikibot.data import api from pywikibot.exceptions import ( AbuseFilterDisallowedError, APIError, ArticleExistsConflictError, CaptchaError, CascadeLockedPageError, CircularRedirectError, EditConflictError, Error, InconsistentTitleError, InterwikiRedirectPageError, IsNotRedirectPageError, LockedNoPageError, LockedPageError, NoCreateError, NoPageError, NoUsernameError, PageCreatedConflictError, PageDeletedConflictError, PageRelatedError, PageSaveRelatedError, SiteDefinitionError, SpamblacklistError, TitleblacklistError, UnknownExtensionError, UploadError, ) from pywikibot.login import LoginStatus as _LoginStatus from pywikibot.site._basesite import BaseSite from pywikibot.site._decorators import need_right, need_version from pywikibot.site._extensions import ( EchoMixin, FlowMixin, GeoDataMixin, GlobalUsageMixin, LinterMixin, PageImagesMixin, ProofreadPageMixin, ThanksFlowMixin, ThanksMixin, UrlShortenerMixin, WikibaseClientMixin, ) from pywikibot.site._generators import GeneratorsMixin from pywikibot.site._interwikimap import _InterwikiMap from pywikibot.site._namespace import Namespace from pywikibot.site._siteinfo import Siteinfo from pywikibot.site._tokenwallet import TokenWallet from pywikibot.tools import ( MediaWikiVersion, compute_file_hash, deprecated, issue_deprecation_warning, merge_unique_dicts, normalize_username, ) __all__ = ('APISite', ) _logger = 'wiki.apisite' _mw_msg_cache = defaultdict(dict) class APISite( BaseSite, EchoMixin, FlowMixin, GeneratorsMixin, GeoDataMixin, GlobalUsageMixin, LinterMixin, PageImagesMixin, ProofreadPageMixin, ThanksFlowMixin, ThanksMixin, UrlShortenerMixin, WikibaseClientMixin, ): def __init__(self, code, fam=None, user=None): super().__init__(code, fam, user) self._msgcache = {} self._loginstatus = _LoginStatus.NOT_ATTEMPTED self._siteinfo = Siteinfo(self) self._paraminfo = api.ParamInfo(self) self._interwikimap = _InterwikiMap(self) self.tokens = TokenWallet(self) def __getstate__(self): new = super().__getstate__() del new['tokens'] del new['_interwikimap'] return new def __setstate__(self, attrs): super().__setstate__(attrs) self._interwikimap = _InterwikiMap(self) self.tokens = TokenWallet(self) def interwiki(self, prefix): return self._interwikimap[prefix].site def interwiki_prefix(self, site): assert site is not None, 'Site must not be None' prefixes = set() for url in site._interwiki_urls(): prefixes.update(self._interwikimap.get_by_url(url)) if not prefixes: raise KeyError( "There is no interwiki prefix to '{}'".format(site)) return sorted(prefixes, key=lambda p: (len(p), p)) def local_interwiki(self, prefix): return self._interwikimap[prefix].local @classmethod def fromDBName(cls, dbname, site=None): if not site: site = pywikibot.Site('meta', 'meta') req = site._request(expiry=datetime.timedelta(days=10), parameters={'action': 'sitematrix'}) data = req.submit() for key, val in data['sitematrix'].items(): if key == 'count': continue if 'code' in val: lang = val['code'] for site in val['site']: if site['dbname'] == dbname: if site['code'] == 'wiki': site['code'] = 'wikipedia' return pywikibot.Site(lang, 
site['code']) else: for site in val: if site['dbname'] == dbname: return pywikibot.Site(url=site['url'] + '/w/index.php') raise ValueError('Cannot parse a site out of {}.'.format(dbname)) def _generator(self, gen_class, type_arg: Optional[str] = None, namespaces=None, total: Optional[int] = None, **args): req_args = {'site': self} if 'g_content' in args: req_args['g_content'] = args.pop('g_content') if 'parameters' in args: req_args.update(args) else: req_args['parameters'] = args if type_arg is not None: gen = gen_class(type_arg, **req_args) else: gen = gen_class(**req_args) if namespaces is not None: gen.set_namespace(namespaces) gen.set_maximum_items(total) return gen @staticmethod def _request_class(kwargs): if 'expiry' in kwargs and kwargs['expiry'] is not None: return api.CachedRequest return api.Request def _request(self, **kwargs): if 'expiry' in kwargs and kwargs['expiry'] is None: del kwargs['expiry'] return self._request_class(kwargs)(site=self, **kwargs) def _simple_request(self, **kwargs): return self._request_class({'parameters': kwargs}).create_simple( self, **kwargs) def logged_in(self): if not hasattr(self, '_userinfo'): return False if 'anon' in self.userinfo or not self.userinfo.get('id'): return False if not self.userinfo.get('name'): return False if self.userinfo['name'] != self.username(): return False return True def is_oauth_token_available(self): auth_token = get_authentication(self.base_url('')) return auth_token is not None and len(auth_token) == 4 def login(self, autocreate: bool = False, user: Optional[str] = None): if self._loginstatus == _LoginStatus.IN_PROGRESS: pywikibot.log( '{!r}.login() called when a previous login was in progress.' .format(self)) if self.logged_in(): self._loginstatus = _LoginStatus.AS_USER return self._loginstatus = _LoginStatus.IN_PROGRESS if user: self._username = normalize_username(user) try: del self.userinfo if self.userinfo['name'] == self.user(): return except APIError: pass except NoUsernameError as e: if not autocreate: raise e if self.is_oauth_token_available(): if self.userinfo['name'] == self.username(): error_msg = ('Logging in on {} via OAuth failed' .format(self)) elif self.username() is None: error_msg = ('No username has been defined in your ' 'user-config.py: you have to add in this ' 'file the following line:\n' 'usernames[{family!r}][{lang!r}]= {username!r}' .format(family=self.family, lang=self.lang, username=self.userinfo['name'])) else: error_msg = ('Logged in on {site} via OAuth as {wrong}, but ' 'expect as {right}' .format(site=self, wrong=self.userinfo['name'], right=self.username())) raise NoUsernameError(error_msg) login_manager = api.LoginManager(site=self, user=self.username()) if login_manager.login(retry=True, autocreate=autocreate): self._username = login_manager.username del self.userinfo assert self.userinfo['name'] == self.username(), '{} != {}'.format(self.userinfo['name'], self.username()) self._loginstatus = _LoginStatus.AS_USER else: self._loginstatus = _LoginStatus.NOT_LOGGED_IN def _relogin(self): del self.userinfo self._loginstatus = _LoginStatus.NOT_LOGGED_IN self.login()
MIT License
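A typical session sketch (requires network access and configured credentials; the site and family names are examples):

import pywikibot

site = pywikibot.Site('en', 'wikipedia')
site.login()
# ... authenticated API work ...
site.logout()            # per the code above, only warns when OAuth tokens are in use
print(site.logged_in())  # False after a cookie-based logout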
ethereum/py-evm
eth/tools/fixtures/loading.py
load_json_fixture
python
def load_json_fixture(fixture_path: str) -> Dict[str, Any]:
    with open(fixture_path) as fixture_file:
        file_fixtures = json.load(fixture_file)
    return file_fixtures
Loads a fixture file, caching the most recent files it loaded.
https://github.com/ethereum/py-evm/blob/21759ee681315f7099b14893b6ac6d1a5e659bc0/eth/tools/fixtures/loading.py#L54-L60
import functools
import json
import os

from typing import (
    Any,
    Callable,
    Dict,
    Iterable,
    Tuple,
)

from eth_utils.toolz import (
    curry,
    identity,
)
from eth_utils import to_tuple

from ._utils import (
    recursive_find_files,
    require_pytest,
)


def find_fixture_files(fixtures_base_dir: str) -> Iterable[str]:
    all_fixture_paths = recursive_find_files(fixtures_base_dir, "*.json")
    return all_fixture_paths


@to_tuple
def find_fixtures(fixtures_base_dir: str) -> Iterable[Tuple[str, str]]:
    all_fixture_paths = find_fixture_files(fixtures_base_dir)
    for fixture_path in sorted(all_fixture_paths):
        with open(fixture_path) as fixture_file:
            fixtures = json.load(fixture_file)
        for fixture_key in sorted(fixtures.keys()):
            yield (fixture_path, fixture_key)


@functools.lru_cache(maxsize=16)
MIT License
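Because the function is decorated with functools.lru_cache(maxsize=16) (see the end of the context above), repeated loads of the same path are served from memory; the path below is a placeholder:

fixtures = load_json_fixture('fixtures/example.json')
fixtures = load_json_fixture('fixtures/example.json')  # served from the cache
print(load_json_fixture.cache_info())  # hits=1, misses=1, maxsize=16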
q-stream/q-stream
Player.py
Ui_Form.mediaStateChanged
python
def mediaStateChanged(self, state):
    print('[ ! CHANGING MEDIA STATE ]')
    if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
        self.play_button.setProperty('play', False)
        self.play_button.setStyle(self.play_button.style())
    else:
        self.play_button.setProperty('play', True)
        self.play_button.setStyle(self.play_button.style())
Toggle the play button between its play and pause states to match the `Video` playback state.
https://github.com/q-stream/q-stream/blob/461249f203c09c4343052158890d8bb2af0b8423/Player.py#L965-L973
from lib import playlist from PyQt5 import QtCore, QtGui, QtWidgets, QtMultimediaWidgets, QtMultimedia from PyQt5.QtGui import QFocusEvent, QIcon, QFont, QPalette, QColor, QMoveEvent, QKeySequence, QPainter, QImage from PyQt5.QtCore import QDir, QModelIndex, QUrl, QSize, Qt, QPoint, QRect, pyqtSignal from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer, QVideoFrame, QAbstractVideoBuffer, QVideoSurfaceFormat, QAbstractVideoSurface from PyQt5.QtMultimediaWidgets import QVideoWidget from PyQt5.QtWidgets import (QApplication, QFileDialog, QHBoxLayout, QLabel, QPushButton, QSizePolicy, QSlider, QStyle, QVBoxLayout, QWidget, QStatusBar, QShortcut, QDialog) import os import requests, json, pickle import pyautogui, uuid, getpass class window(QWidget): import time @staticmethod def leaveEvent(event): if ui.isMini: ui.frame_2.hide() ui.pos_frame.hide() ui.frame.hide() @staticmethod def enterEvent(event): ui.frame_2.show() ui.pos_frame.show() ui.frame.show() class Videowidget(QVideoWidget): def __init__(self,master): super().__init__(parent = master) @staticmethod def mouseMoveEvent (event): if event.buttons() == Qt.LeftButton: Form.move(event.globalPos() - QPoint(int(Form.geometry().width() / 2), int(Form.geometry().height() / 2))) event.accept() @staticmethod def mouseDoubleClickEvent (event): if event.buttons() == Qt.LeftButton: ui.fullscreen_video() class PosSlider(QSlider): def __init__(self,master): super().__init__(parent = master) def mousePressEvent (self, event): if event.buttons() == Qt.LeftButton: ui.mediaPlayer.setPosition(QStyle.sliderValueFromPosition(self.minimum(), self.maximum(), event.x(), self.width())) def mouseMoveEvent (self, event): if event.buttons() == Qt.LeftButton: ui.mediaPlayer.setPosition(QStyle.sliderValueFromPosition(self.minimum(), self.maximum(), event.x(), self.width())) class VolSlider(QSlider): def __init__(self,master): super().__init__(parent = master) def mousePressEvent (self, event): if event.buttons() == Qt.LeftButton: self.setValue(QStyle.sliderValueFromPosition(self.minimum(), self.maximum(), event.x(), self.width())) def mouseMoveEvent (self, event): if event.buttons() == Qt.LeftButton: self.setValue(QStyle.sliderValueFromPosition(self.minimum(), self.maximum(), event.x(), self.width())) class Ui_Form(object): currentMedia = dict() playlist = [] def setupUi(self, Form): Form.setObjectName("Form") Form.resize(640, 400) Form.setMinimumSize(QtCore.QSize(200, 199)) Form.setStyleSheet("background-color:black;") self.isOnline = False self.isMini = False self.isOnTop = True self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface) self.verticalLayout_2 = QtWidgets.QVBoxLayout(Form) self.verticalLayout_2.setContentsMargins(0, 0, 0, 0) self.verticalLayout_2.setSpacing(0) self.verticalLayout_2.setObjectName("verticalLayout_2") self.mainFrame = QtWidgets.QFrame(Form) self.mainFrame.setMinimumSize(QtCore.QSize(200, 82)) self.mainFrame.setFrameShape(QtWidgets.QFrame.StyledPanel) self.mainFrame.setFrameShadow(QtWidgets.QFrame.Raised) self.mainFrame.setObjectName("mainFrame") self.verticalLayout = QtWidgets.QVBoxLayout(self.mainFrame) self.verticalLayout.setContentsMargins(0, 0, 0, 0) self.verticalLayout.setSpacing(0) self.verticalLayout.setObjectName("verticalLayout") self.frame_3 = QtWidgets.QFrame(self.mainFrame) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) 
sizePolicy.setHeightForWidth(self.frame_3.sizePolicy().hasHeightForWidth()) self.frame_3.setSizePolicy(sizePolicy) self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel) self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_3.setObjectName("frame_3") self.horizontalLayout_6 = QtWidgets.QHBoxLayout(self.frame_3) self.horizontalLayout_6.setContentsMargins(0, 0, 0, 0) self.horizontalLayout_6.setObjectName("horizontalLayout_6") self.Player_name = QtWidgets.QLabel(self.frame_3) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.Player_name.sizePolicy().hasHeightForWidth()) self.Player_name.setSizePolicy(sizePolicy) self.Player_name.setStyleSheet("QLabel\n" " {\n" " font: 12pt \"Helvetica\";\n" " color: white;\n" " border: 0px solid #076100;\n" " }") self.Player_name.setObjectName("status_label") self.horizontalLayout_6.addWidget(self.Player_name) spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_6.addItem(spacerItem) self.minimize_button = QtWidgets.QPushButton(self.frame_3) self.minimize_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.minimize_button.setText("") icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap("icon_sets/minimize.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.minimize_button.setIconSize(QSize(25,17)) self.minimize_button.setMaximumSize(QSize(25,17)) self.minimize_button.setIcon(icon) self.minimize_button.setIconSize(QtCore.QSize(27, 20)) self.minimize_button.setObjectName("minimize_button") self.horizontalLayout_6.addWidget(self.minimize_button) self.maximize_button = QtWidgets.QPushButton(self.frame_3) self.maximize_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.maximize_button.setText("") icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap("icon_sets/maximize.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.maximize_button.setIcon(icon1) self.maximize_button.setIconSize(QSize(25,17)) self.maximize_button.setMaximumSize(QSize(25,17)) self.maximize_button.setIconSize(QtCore.QSize(27, 20)) self.maximize_button.setObjectName("maximize_button") self.horizontalLayout_6.addWidget(self.maximize_button) self.cross_button = QtWidgets.QPushButton(self.frame_3) self.cross_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.cross_button.setText("") self.cross_button.setIconSize(QSize(25,17)) self.cross_button.setMaximumSize(QSize(25,17)) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap("icon_sets/cross.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.cross_button.setIcon(icon2) self.cross_button.setIconSize(QtCore.QSize(27, 20)) self.cross_button.setObjectName("cross_button") self.horizontalLayout_6.addWidget(self.cross_button) self.verticalLayout.addWidget(self.frame_3) self.video_playback = Videowidget(self.frame_3) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(200) sizePolicy.setHeightForWidth(self.video_playback.sizePolicy().hasHeightForWidth()) self.video_playback.setSizePolicy(sizePolicy) self.video_playback.setMinimumSize(QtCore.QSize(200, 100)) self.video_playback.setMouseTracking(False) self.video_playback.setTabletTracking(False) self.video_playback.setAcceptDrops(False) self.video_playback.setAutoFillBackground(False) self.video_playback.setObjectName("video_playback") 
self.video_playback.setStyleSheet("background-color:grey") self.verticalLayout.addWidget(self.video_playback) self.pos_frame = QtWidgets.QFrame(self.mainFrame) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(10) sizePolicy.setHeightForWidth(self.pos_frame.sizePolicy().hasHeightForWidth()) self.pos_frame.setSizePolicy(sizePolicy) self.pos_frame.setFrameShape(QtWidgets.QFrame.StyledPanel) self.pos_frame.setFrameShadow(QtWidgets.QFrame.Raised) self.pos_frame.setObjectName("pos_frame") self.horizontalLayout = QtWidgets.QHBoxLayout(self.pos_frame) self.horizontalLayout.setObjectName("horizontalLayout") self.position_slider = PosSlider(self.pos_frame) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(100) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.position_slider.sizePolicy().hasHeightForWidth()) self.position_slider.setSizePolicy(sizePolicy) self.position_slider.setMouseTracking(True) self.position_slider.setAutoFillBackground(False) self.position_slider.setStyleSheet("QSlider::handle:horizontal \n" " {\n" " background: transparent;\n" " width: 8px;\n" " }\n" "QSlider::groove:horizontal {\n" " border: 1px solid white;\n" " height: 8px;\n" " background: qlineargradient(y1: 0, y2: 1,stop: 0 #2e3436, stop: 1.0 #353941);\n" "}\n" " QSlider::sub-page:horizontal {\n" " background:qlineargradient( y1: 0, y2: 1,\n" " stop: 0 #42a6db, stop: 1 #0074e0); \n" " border: 1px solid white;\n" " height: 8px;\n" "}\n" "QSlider::handle:horizontal:hover {\n" " background: black;\n" " height: 8px;\n" " width: 8px;\n" " border: 1px solid white;\n" " }\n" "") self.position_slider.setOrientation(QtCore.Qt.Horizontal) self.position_slider.setInvertedAppearance(False) self.position_slider.setInvertedControls(False) self.position_slider.setObjectName("position_slider") self.horizontalLayout.addWidget(self.position_slider) self.time_status = QtWidgets.QLabel(self.pos_frame) font = QtGui.QFont() font.setFamily("Arial Rounded MT Bold") font.setPointSize(8) font.setBold(False) font.setItalic(False) font.setWeight(50) self.time_status.setFont(font) self.time_status.setStyleSheet("QLabel\n" " {\n" " \n" " font: 8pt \"Arial Rounded MT Bold\";\n" " color: white;\n" " border: 0px solid #076100;\n" "\n" " }") self.time_status.setObjectName("time_status") self.horizontalLayout.addWidget(self.time_status) self.backslash = QtWidgets.QLabel(self.pos_frame) font = QtGui.QFont() font.setFamily("Arial Rounded MT Bold") font.setPointSize(8) font.setBold(False) font.setItalic(False) font.setWeight(50) self.backslash.setFont(font) self.backslash.setStyleSheet("QLabel\n" " {\n" " \n" " font: 8pt \"Arial Rounded MT Bold\";\n" " color: white;\n" " border: 0px solid #076100;\n" "\n" " }") self.backslash.setObjectName("backslash") self.horizontalLayout.addWidget(self.backslash) self.duration_status = QtWidgets.QLabel(self.pos_frame) font = QtGui.QFont() font.setFamily("Arial Rounded MT Bold") font.setPointSize(8) font.setBold(False) font.setItalic(False) font.setWeight(50) self.duration_status.setFont(font) self.duration_status.setStyleSheet("QLabel\n" " {\n" " \n" " font: 8pt \"Arial Rounded MT Bold\";\n" " color: white;\n" " border: 0px solid #076100;\n" "\n" " }") self.duration_status.setObjectName("duration_status") self.horizontalLayout.addWidget(self.duration_status) self.verticalLayout.addWidget(self.pos_frame) self.frame_2 
= QtWidgets.QFrame(self.mainFrame) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(22) self.frame_2.setSizePolicy(sizePolicy) self.frame_2.setContentsMargins(0,0,0,0) self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel) self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised) self.frame_2.setObjectName("frame_2") self.frame_2.setStyleSheet("") self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.frame_2) self.horizontalLayout_4.setObjectName("horizontalLayout_4") self.play_button = QtWidgets.QPushButton(self.frame_2) self.play_button.setEnabled(False) self.play_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.play_button.setStyleSheet("QPushButton[play=true]{image:url(icon_sets/play/play.png);width:22px;height:22px}\n" "QPushButton[play=false]{image:url(icon_sets/pause/pause.png);width:22px;height:22px }\n" ) self.play_button.setProperty("play", True) self.play_button.setText("") self.play_button.setObjectName("play_button") self.horizontalLayout_4.addWidget(self.play_button) self.playback_button = QtWidgets.QPushButton(self.frame_2) self.playback_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.playback_button.setStyleSheet("QPushButton{image:url(icon_sets/playback/playback.png);width:22px;height:22px }\n") self.playback_button.setText("") self.playback_button.setObjectName("playback_button") self.horizontalLayout_4.addWidget(self.playback_button) self.always_on_top_button = QtWidgets.QPushButton(self.frame_2) self.always_on_top_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.always_on_top_button.setStyleSheet("QPushButton[top=false]{image : url(icon_sets/always_on_top/top_off.png) }\n" "QPushButton[top=true]{image : url(icon_sets/always_on_top/top_on.png) }\n" ) self.always_on_top_button.setProperty("top", True) self.always_on_top_button.setText("") self.always_on_top_button.setObjectName("always_on_top_button") self.horizontalLayout_4.addWidget(self.always_on_top_button) self.miniplayer_button = QtWidgets.QPushButton(self.frame_2) self.miniplayer_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.miniplayer_button.setStyleSheet("QPushButton[mini=false]{image : url(icon_sets/standard_player/standard.png) }\n" "QPushButton[mini=true]{image : url(icon_sets/mini_player/mini.png) }\n" ) self.miniplayer_button.setProperty("mini",False) self.miniplayer_button.setContentsMargins(0,0,0,0) self.miniplayer_button.setText("") self.miniplayer_button.setObjectName("miniplayer_button") self.horizontalLayout_4.addWidget(self.miniplayer_button) self.open_File_button = QtWidgets.QPushButton(self.frame_2) self.open_File_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.open_File_button.setStyleSheet("QPushButton{image:url(icon_sets/new_file/new_file.png);width:22px;height:22px }\n") self.open_File_button.setText("") self.open_File_button.setObjectName("playback_button") self.horizontalLayout_4.addWidget(self.open_File_button) self.add_to_playlist_button = QtWidgets.QPushButton(self.frame_2) self.add_to_playlist_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.add_to_playlist_button.setStyleSheet("QPushButton{image:url(icon_sets/playlist/add_to_playlist.png);width:22px;height:22px }\n") self.add_to_playlist_button.setText("") self.add_to_playlist_button.setObjectName("add_to_playlist_button") self.horizontalLayout_4.addWidget(self.add_to_playlist_button) self.playlist_button = QtWidgets.QPushButton(self.frame_2) 
self.playlist_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.playlist_button.setStyleSheet("QPushButton{image:url(icon_sets/playlist/playlist_icon.png);width:22px;height:22px }\n") self.playlist_button.setText("") self.playlist_button.setObjectName("playlist_button") self.horizontalLayout_4.addWidget(self.playlist_button) spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_4.addItem(spacerItem1) self.screenshot_button = QtWidgets.QPushButton(self.frame_2) self.screenshot_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.screenshot_button.setStyleSheet("QPushButton{image : url(icon_sets/snapshot/snapshot.png);width:22px;height:22px} \n" ) self.screenshot_button.setText("") self.screenshot_button.setObjectName("screenshot_button") self.horizontalLayout_4.addWidget(self.screenshot_button) ''' Video Setting button for Video subs and dubs''' self.setting_button = QtWidgets.QPushButton(self.frame_2) self.setting_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.setting_button.setStyleSheet("QPushButton{image : url(icon_sets/settings/settings.png) }\n" ) self.setting_button.setText("") self.setting_button.setObjectName("setting_button") self.horizontalLayout_4.addWidget(self.setting_button) self.Quality_box = QtWidgets.QComboBox(self.frame_2) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.Quality_box.sizePolicy().hasHeightForWidth()) self.Quality_box.setSizePolicy(sizePolicy) self.Quality_box.setStyleSheet("QComboBox\n" " {\n" " border-image :url(icon_sets/quality/border.png);\n" " color: #fcffff;\n" " font-size: 8pt;\n" " font-weight: bold;\n" " background-color: #353941;\n" " }\n" " QComboBox QAbstractItemView \n" " {\n" " background: #fcffff;\n" " border: 2px solid darkgray;\n" " selection-background-color: #353941;\n" " }\n" "QComboBox::drop-down {\n" " border: 0px;\n" " subcontrol-origin: padding;\n" " subcontrol-position: top right;\n" "\n" " border-top-right-radius: 3px;\n" " border-bottom-right-radius: 3px;\n" "}\n" ) self.Quality_box.setIconSize(QtCore.QSize(0, 0)) self.Quality_box.setDuplicatesEnabled(False) self.Quality_box.setObjectName("comboBox") self.Quality_box.addItem(" -") self.horizontalLayout_4.addWidget(self.Quality_box) self.volume_button = QtWidgets.QPushButton(self.frame_2) self.volume_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.volume_button.setText("") icon9 = QtGui.QIcon() icon9.addPixmap(QtGui.QPixmap("icon_sets/volume/volume1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.volume_button.setIcon(icon9) self.volume_button.setIconSize(QtCore.QSize(22, 22)) self.volume_button.setCheckable(True) self.volume_button.setObjectName("volume_button") self.horizontalLayout_4.addWidget(self.volume_button) self.volumeslider = VolSlider(self.frame_2) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.volumeslider.sizePolicy().hasHeightForWidth()) self.volumeslider.setSizePolicy(sizePolicy) self.volumeslider.setAutoFillBackground(False) self.volumeslider.setStyleSheet("QSlider::handle:horizontal \n" " {\n" " background: transparent;\n" " width: 5px;\n" " }\n" "QSlider::groove:horizontal {\n" " border: 1px solid #444444;\n" " height: 5px;\n" " background: 
qlineargradient(y1: 0, y2: 1,stop: 0 grey, stop: 1.0 grey);\n" "}\n" " QSlider::sub-page:horizontal {\n" " background:qlineargradient( y1: 0, y2: 1,\n" " stop: 0 #42a6db, stop: 1 #0074e0); \n" " border: 1px solid #777;\n" " height: 5px;\n" "}\n" "QSlider::handle:horizontal:hover {\n" " background: #353941;\n" " height: 5px;\n" " width: 5px;\n" " border: 1px solid #0074e0;\n" " }\n" "QSlider::sub-page:horizontal:disabled{background:qlineargradient( y1: 0, y2: 1,\n" " stop: 0 #909090, stop: 1 #A8A8A8 );}\n" "") self.volumeslider.setOrientation(QtCore.Qt.Horizontal) self.volumeslider.setObjectName("volume_slider") self.horizontalLayout_4.addWidget(self.volumeslider) self.volume_percentage = QtWidgets.QLabel(self.frame_2) self.volume_percentage.setStyleSheet("QLabel\n" " {\n" " font: 7pt \"Arial Rounded MT Bold\";\n" " color: white;\n" " border: 0px solid #076100;\n" " }") self.volume_percentage.setObjectName("volume_status") self.volume_percentage.setMinimumWidth(35) self.volume_percentage.setText(" 20 %") self.horizontalLayout_4.addWidget(self.volume_percentage) self.verticalLayout.addWidget(self.frame_2) self.frame = QtWidgets.QFrame(self.mainFrame) font = QtGui.QFont() font.setFamily("Lucida Console") self.frame.setFont(font) self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel) self.frame.setFrameShadow(QtWidgets.QFrame.Raised) self.frame.setObjectName("frame") self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.frame) self.horizontalLayout_5.setObjectName("horizontalLayout_5") sizegrip_2 = QtWidgets.QSizeGrip(Form) sizegrip_2.setStyleSheet("image:url(icon_sets/.png)") self.horizontalLayout_5.addWidget(sizegrip_2, 0, QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft) self.url_box = QtWidgets.QComboBox(self.frame) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(200) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.url_box.sizePolicy().hasHeightForWidth()) self.url_box.setSizePolicy(sizePolicy) self.url_box.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor)) self.url_box.setMouseTracking(False) self.url_box.setAcceptDrops(False) self.url_box.setWhatsThis("") self.url_box.setAccessibleDescription("") self.url_box.setAutoFillBackground(True) self.url_box.setStyleSheet("QComboBox\n" " {\n" " border: 2px solid #0074e0;\n" " color: #fcffff;\n" " font-size: 10pt;\n" " font-family: Arial;\n" " font-weight: Bold;\n" " background-color: #353941;\n" " border-radius: 5px;\n" " }\n" " QComboBox QAbstractItemView \n" " {\n" " background: #dddddd;\n" " border: 2px solid darkgray;\n" " selection-background-color: #5a5a5a;\n" " }\n" "QComboBox::down-arrow {\n" " width : 20px\n" " background-color: #5f85db\n" "}\n" "QComboBox::down-arrow:pressed\n" "{\n" "background-color : #5f85db;\n" "}\n" "QComboBox::drop-down {\n" " subcontrol-origin: padding;\n" " subcontrol-position: top right;\n" " width: 20px;\n" " \n" " border-top-right-radius: 3px;\n" " border-bottom-right-radius: 3px;\n" "}") self.url_box.setInputMethodHints(QtCore.Qt.ImhUrlCharactersOnly) self.url_box.setEditable(True) self.url_box.setMaxVisibleItems(100) self.url_box.setMaxCount(100) self.url_box.setInsertPolicy(QtWidgets.QComboBox.InsertAfterCurrent) self.url_box.setSizeAdjustPolicy(QtWidgets.QComboBox.AdjustToContentsOnFirstShow) self.url_box.setMinimumContentsLength(2) self.url_box.setIconSize(QtCore.QSize(20, 20)) self.url_box.setDuplicatesEnabled(False) self.url_box.setFrame(True) self.url_box.setObjectName("url_box") 
self.horizontalLayout_5.addWidget(self.url_box) self.playOnline_button = QtWidgets.QPushButton(self.frame) self.playOnline_button.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.playOnline_button.setStyleSheet("QPushButton{image : url(icon_sets/globe.png) }\n") self.playOnline_button.setText("") self.playOnline_button.setObjectName("playOnline_button") self.horizontalLayout_5.addWidget(self.playOnline_button) self.verticalLayout.addWidget(self.frame) sizegrip_1 = QtWidgets.QSizeGrip(Form) sizegrip_1.setStyleSheet("image:url(icon_sets/size.png);width:15; height:18;") self.horizontalLayout_5.addWidget(sizegrip_1, 0, QtCore.Qt.AlignBottom | QtCore.Qt.AlignRight) self.verticalLayout_2.addWidget(self.mainFrame) self.retranslateUi(Form) self.url_box.setCurrentIndex(-1) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): _translate = QtCore.QCoreApplication.translate Form.setWindowTitle(_translate("Form", "Q-Stream Player")) self.Player_name.setText(_translate("Form", "Q-Stream Player")) self.time_status.setText(_translate("Form", "00:00:00")) self.backslash.setText(_translate("Form", "/")) self.duration_status.setText(_translate("Form", "00:00:00")) self.volumeslider.setRange(0, 100) self.volumeslider.setValue(20) self.mediaPlayer.setVolume(20) self.position_slider.setRange(0, 100) self.miniplayer_button.clicked.connect(self.setupMiniPlayer) self.position_slider.sliderMoved.connect(self.setPosition) self.position_slider.sliderMoved.connect(self.handleLabel) self.volume_button.clicked.connect(self.mute) self.volumeslider.valueChanged.connect(self.setVolume) self.screenshot_button.clicked.connect(self.screenshot) self.playback_button.clicked.connect(self.stopplayback) self.always_on_top_button.clicked.connect(self.checkOnTop) self.play_button.clicked.connect(self.play) self.open_File_button.clicked.connect(self.openFile) self.add_to_playlist_button.clicked.connect(self.addToPlaylist) self.playlist_button.clicked.connect(self.playlistWidget) self.setting_button.clicked.connect(self.handleSetting) self.Quality_box.currentTextChanged.connect(self.handleQuality) self.cross_button.clicked.connect(self.exit) self.maximize_button.clicked.connect(self.max) self.minimize_button.clicked.connect(self.min) self.playOnline_button.clicked.connect(self.onlineThread) shortcut = QShortcut(QKeySequence('Esc'), self.video_playback) shortcut.activated.connect(self.EscFun) shortcut = QShortcut(QKeySequence('Space'), self.video_playback) shortcut.activated.connect(self.play) shortcut = QShortcut(QKeySequence('f'), self.video_playback) shortcut.activated.connect(self.fullscreen_video) shortcut = QShortcut(QKeySequence('c'), self.video_playback) shortcut.activated.connect(self.setupMiniPlayer) shortcut = QShortcut(QKeySequence('o'), self.video_playback) shortcut.activated.connect(self.openFile) shortcut = QShortcut(QKeySequence('a'), self.video_playback) shortcut.activated.connect(self.checkOnTop) shortcut = QShortcut(QKeySequence("Return"), self.video_playback) shortcut.activated.connect(self.onlineThread) shortcut = QShortcut(QKeySequence('m'), self.video_playback) shortcut.activated.connect(self.mute) shortcut = QShortcut(QKeySequence(Qt.Key_Right), self.video_playback) shortcut.activated.connect(self.forwardSlider) shortcut = QShortcut(QKeySequence(Qt.Key_Left), self.video_playback) shortcut.activated.connect(self.backSlider) self.volupshortcut = QShortcut(QKeySequence(Qt.Key_Up), self.video_playback) self.volupshortcut.activated.connect(self.volumeUp) self.voldownshortcut = 
QShortcut(QKeySequence(Qt.Key_Down), self.video_playback) self.voldownshortcut.activated.connect(self.volumeDown) shortcut = QShortcut(QKeySequence(Qt.ControlModifier + Qt.Key_Right), self.video_playback) shortcut.activated.connect(self.forwardSlider10) shortcut = QShortcut(QKeySequence(Qt.ControlModifier + Qt.Key_Left), self.video_playback) shortcut.activated.connect(self.backSlider10) shortcut = QShortcut(QKeySequence(Qt.AltModifier + Qt.Key_Left), self.video_playback) shortcut.activated.connect(self.backSlider5) shortcut = QShortcut(QKeySequence(Qt.AltModifier + Qt.Key_Right), self.video_playback) shortcut.activated.connect(self.forwardSlider5) items = self.load() self.url_box.addItems(items) self.url_box.setCurrentIndex(-1) btnSize = QSize(22,22) self.play_button.setMaximumSize(btnSize) self.playback_button.setMaximumSize(btnSize) self.screenshot_button.setMaximumSize(btnSize) self.always_on_top_button.setMaximumSize(btnSize) self.miniplayer_button.setMaximumSize(btnSize) self.setting_button.setMaximumSize(btnSize) self.volume_button.setMaximumSize(btnSize) self.horizontalLayout.setContentsMargins(10, 5, 9, 0) self.horizontalLayout_4.setContentsMargins(9, 0, 9, 0) self.horizontalLayout_4.setSpacing(4) self.horizontalLayout_5.setContentsMargins(0, 5, 5, 5) self.horizontalLayout_6.setContentsMargins(9, 0, 9, 0) self.mediaPlayer.setVideoOutput(self.video_playback) self.mediaPlayer.stateChanged.connect(self.mediaStateChanged) self.mediaPlayer.positionChanged.connect(self.positionChanged) self.mediaPlayer.positionChanged.connect(self.handleLabel) self.mediaPlayer.durationChanged.connect(self.durationChanged) def handleLabel(self): self.time_status.clear() mtime = QtCore.QTime(0, 0, 0, 0) self.time = mtime.addMSecs(self.mediaPlayer.position()) self.time_status.setText(self.time.toString()) def hide_all(self): self.frame_3.close() self.url_box.close() self.playOnline_button.close() self.playback_button.close() self.screenshot_button.close() self.Quality_box.close() self.volume_button.close() def show_all(self): self.frame_3.show() self.url_box.show() self.playOnline_button.show() self.playback_button.show() self.screenshot_button.show() self.Quality_box.show() self.volume_button.show() self.setting_button.show() def checkOnTop(self): self.isOnTop = not self.isOnTop if self.isOnTop: self.always_on_top_button.setProperty("top", True) self.always_on_top_button.setStyle(self.always_on_top_button.style()) Form.setWindowFlags(Form.windowFlags() | QtCore.Qt.WindowStaysOnTopHint) else: self.always_on_top_button.setProperty("top", False) self.always_on_top_button.setStyle(self.always_on_top_button.style()) Form.setWindowFlags(Form.windowFlags() & ~QtCore.Qt.WindowStaysOnTopHint) Form.show() def miniProperty(self): self.video_playback.setMinimumSize(QSize(200,100)) Form.setMinimumSize(QSize(200,175)) self.mainFrame.setMinimumSize(QSize(200,60)) def standardProperty(self): self.video_playback.setMinimumSize(QSize(200,100)) self.mainFrame.setMinimumSize(QSize(200,82)) Form.setMinimumSize(QSize(200,202)) def setupMiniPlayer(self): self.isMini = not self.isMini if self.isMini : self.miniplayer_button.setProperty("mini",True) self.miniplayer_button.setStyle(self.miniplayer_button.style()) self.hide_all() self.miniProperty() else: self.miniplayer_button.setProperty("mini",False) self.miniplayer_button.setStyle(self.miniplayer_button.style()) self.standardProperty() self.show_all() @staticmethod def load(): scorefile = "db.bat" if os.path.exists(scorefile): with open(scorefile, 'rb') as sf: scores = 
pickle.load(sf) else: scores = [] with open(scorefile, "wb") as sf: pickle.dump(scores, sf) return scores @staticmethod def scor_func(url): scorefile = "db.bat" if os.path.exists(scorefile): with open(scorefile, 'rb') as sf: scores = pickle.load(sf) else: scores = [] scores.append(url) with open(scorefile, "wb") as sf: if len(scores) > 100: print("here", scores) scores = scores[1:] pickle.dump(scores, sf) return scores def mute(self): if self.mediaPlayer.isMuted() : print('[ ! Full Volume]') self.mediaPlayer.setMuted(False) icon9 = QtGui.QIcon() icon9.addPixmap(QtGui.QPixmap("icon_sets/volume/volume1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.volume_button.setIcon(icon9) self.volume_button.setIconSize(QSize(22,22)) self.volumeslider.setEnabled(True) self.volupshortcut.setEnabled(True) self.voldownshortcut.setEnabled(True) else: print('[ ! Mute Volume]') self.mediaPlayer.setMuted(True) icon9 = QtGui.QIcon() icon9.addPixmap(QtGui.QPixmap("icon_sets/volume/volume2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.volume_button.setIcon(icon9) self.volume_button.setIconSize(QSize(22,22)) self.volumeslider.setEnabled(False) self.volupshortcut.setEnabled(False) self.voldownshortcut.setEnabled(False) def play(self): if self.mediaPlayer.isVideoAvailable(): if self.mediaPlayer.state() == QMediaPlayer.PlayingState: print("[ ! PAUSE PRESSED ]") self.mediaPlayer.pause() else: print("[ ! PLAY PRESSED ]") self.mediaPlayer.play() def onlineThread(self): import threading urlThread = threading.Thread(target= self.playOnline) print("Url Thread Start ") urlThread.start() print("Url Thread waiting to complete ") print("Url Thread complete ") def playOnline(self, playlist=False): if playlist: self.currentMedia = self.playlist[self.currentItem] self.currentMedia["error"] = False else: self.currentMedia = {"type": "url", "src": self.url_box.currentText(), "error": False} print('[ ! GETTING VIDEO ONLINE ]') fileName = self.currentMedia["src"] res = requests.get('https://q-stream-media-player.herokuapp.com/', params={"key": fileName}) try: self.streams = json.loads(res.text) try: self.mediaPlayer.setMedia(QMediaContent(QUrl(self.streams['best']))) self.play_video() self.isOnline = True self.addQuality() if self.url_box.findText(fileName, Qt.MatchExactly) < 0: self.url_box.addItem(fileName) self.scor_func(fileName) except KeyError: self.currentMedia["error"] = True print("[ ! Error Video Not Supported By platform]") except json.JSONDecodeError: self.currentMedia["error"] = True print("[ ! Error NoPluginError]") finally: self.url_box.clearEditText() self.url_box.clearFocus() def openFile(self): print('[ ! 
OPEN FILE ]') username = getpass.getuser() if sys.platform == 'win32': path = 'C:/Users/' + username + '/Videos/' elif sys.platform == 'linux' or sys.platform == 'Darwin': path = '/home/' + username + '/Videos/' fileName, _ = QFileDialog.getOpenFileName(self.video_playback, "Select media file", path, "Video Files (*.mp3 *.mp4 *.flv *.ts *.mts *.avi *.mkv)") if fileName: self.currentMedia = {"type": "file", "src": fileName, "error": False} self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(fileName))) self.play_video() def addToPlaylist(self): if self.currentMedia == {}: print("Select a media file/url first!!") return None elif self.currentMedia.get("error"): print("Current Media cannot be added in the playlist!!") elif self.currentMedia != {} and self.currentMedia.get("error", None) == None: print("Media has already been added to the playlist") else: del self.currentMedia["error"] self.playlist.append(self.currentMedia) def play_video(self): print('[ ! PLAYING VIDEO ]') self.play_button.setEnabled(True) if self.mediaPlayer.state() == QMediaPlayer.PlayingState: self.mediaPlayer.pause() else: self.mediaPlayer.play()
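The context above wires a long series of QShortcut bindings to player slots. A minimal, self-contained sketch of that binding pattern, assuming PyQt5 (the widget and slot here are illustrative placeholders, not the author's code):

import sys
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeySequence
from PyQt5.QtWidgets import QApplication, QShortcut, QWidget

app = QApplication(sys.argv)
window = QWidget()

def on_play():
    # Placeholder slot; the real player toggles QMediaPlayer here.
    print("play/pause toggled")

# Same pattern as the context: bind a key sequence to a slot on a widget.
play_shortcut = QShortcut(QKeySequence('Space'), window)
play_shortcut.activated.connect(on_play)

# Modifier combinations are built by adding the modifier and key enums.
seek_shortcut = QShortcut(QKeySequence(Qt.ControlModifier + Qt.Key_Right), window)
seek_shortcut.activated.connect(lambda: print("seek forward"))

window.show()
sys.exit(app.exec_())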
MIT License
openstack/horizon
horizon/tabs/views.py
TabbedTableView.load_tabs
python
def load_tabs(self):
    tab_group = self.get_tabs(self.request, **self.kwargs)
    tabs = tab_group.get_tabs()
    for tab in [t for t in tabs if issubclass(t.__class__, TableTab)]:
        self.table_classes.extend(tab.table_classes)
        for table in tab._tables.values():
            self._table_dict[table._meta.name] = {'table': table, 'tab': tab}
Load the tab group and compile the table instances for each table attached to any :class:`horizon.tabs.TableTab` instances in the tab group. This step is necessary before processing any tab or table actions.
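For orientation, a minimal sketch of a view built on this class (the tab group, tab list, and template name are illustrative assumptions, not taken from Horizon):

from horizon import tabs
from horizon.tabs.views import TabbedTableView

class MyTabGroup(tabs.TabGroup):
    # Hypothetical tab group; real groups list TableTab subclasses here.
    slug = "mytabs"
    tabs = ()

class MyTabbedView(TabbedTableView):
    tab_group_class = MyTabGroup
    template_name = "myapp/tabs.html"

# During request handling, load_tabs() fills self.table_classes and
# self._table_dict from every TableTab before any table action runs.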
https://github.com/openstack/horizon/blob/5e405d71926764b8aa60c75794b62f668f4e8122/horizon/tabs/views.py#L80-L93
from django import http from horizon import exceptions from horizon import tables from horizon.tabs.base import TableTab from horizon import views class TabView(views.HorizonTemplateView): tab_group_class = None _tab_group = None def __init__(self): if not self.tab_group_class: raise AttributeError("You must set the tab_group_class attribute " "on %s." % self.__class__.__name__) def get_tabs(self, request, **kwargs): if self._tab_group is None: self._tab_group = self.tab_group_class(request, **kwargs) return self._tab_group def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) try: tab_group = self.get_tabs(self.request, **kwargs) context["tab_group"] = tab_group context["tab_group"].load_tab_data() except Exception: exceptions.handle(self.request) return context def handle_tabbed_response(self, tab_group, context): if self.request.is_ajax(): if tab_group.selected: return http.HttpResponse(tab_group.selected.render()) return http.HttpResponse(tab_group.render()) return self.render_to_response(context) def get(self, request, *args, **kwargs): context = self.get_context_data(**kwargs) return self.handle_tabbed_response(context["tab_group"], context) class TabbedTableView(tables.MultiTableMixin, TabView): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.table_classes = [] self._table_dict = {}
Apache License 2.0
wuzheng-sjtu/fastfpn
libs/nets/pyramid_network.py
my_sigmoid
python
def my_sigmoid(x):
    return (tf.nn.sigmoid(x) - tf.cast(0.5, tf.float32)) * 6.0
Add an activation function for the box output layer that is approximately linear around 0.
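A NumPy sketch of the same curve to make the claim concrete: the sigmoid's slope at 0 is 0.25, so the shifted and scaled output has slope 1.5 near the origin and saturates in (-3, 3):

import numpy as np

def my_sigmoid_np(x):
    # NumPy equivalent of the TensorFlow op above.
    return (1.0 / (1.0 + np.exp(-x)) - 0.5) * 6.0

print(my_sigmoid_np(0.0))    # 0.0 -- centered at the origin
print(my_sigmoid_np(0.1))    # ~0.15, i.e. slope ~1.5 near 0
print(my_sigmoid_np(100.0))  # ~3.0 -- output is bounded in (-3, 3)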
https://github.com/wuzheng-sjtu/fastfpn/blob/a60a618665b11481e95bd184073a2ac09febc9d4/libs/nets/pyramid_network.py#L77-L79
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf import tensorflow.contrib.slim as slim from libs.boxes.roi import roi_cropping from libs.layers import anchor_encoder from libs.layers import anchor_decoder from libs.layers import roi_encoder from libs.layers import roi_decoder from libs.layers import mask_encoder from libs.layers import mask_decoder from libs.layers import gen_all_anchors from libs.layers import ROIAlign from libs.layers import sample_rpn_outputs from libs.layers import sample_rpn_outputs_with_gt from libs.layers import assign_boxes _networks_map = { 'resnet50': {'C1':'resnet_v1_50/conv1/Relu:0', 'C2':'resnet_v1_50/block1/unit_2/bottleneck_v1', 'C3':'resnet_v1_50/block2/unit_3/bottleneck_v1', 'C4':'resnet_v1_50/block3/unit_5/bottleneck_v1', 'C5':'resnet_v1_50/block4/unit_3/bottleneck_v1', }, 'resnet101': {'C1': '', 'C2': '', 'C3': '', 'C4': '', 'C5': '', } } def _extra_conv_arg_scope_with_bn(weight_decay=0.00001, activation_fn=None, batch_norm_decay=0.997, batch_norm_epsilon=1e-5, batch_norm_scale=True): batch_norm_params = { 'decay': batch_norm_decay, 'epsilon': batch_norm_epsilon, 'scale': batch_norm_scale, 'updates_collections': tf.GraphKeys.UPDATE_OPS_EXTRA, } with slim.arg_scope( [slim.conv2d], weights_regularizer=slim.l2_regularizer(weight_decay), weights_initializer=slim.variance_scaling_initializer(), activation_fn=tf.nn.relu, normalizer_fn=slim.batch_norm, normalizer_params=batch_norm_params): with slim.arg_scope([slim.batch_norm], **batch_norm_params): with slim.arg_scope([slim.max_pool2d], padding='SAME') as arg_sc: return arg_sc def _extra_conv_arg_scope(weight_decay=0.00001, activation_fn=None, normalizer_fn=None): with slim.arg_scope( [slim.conv2d, slim.conv2d_transpose], padding='SAME', weights_regularizer=slim.l2_regularizer(weight_decay), weights_initializer=tf.truncated_normal_initializer(stddev=0.001), activation_fn=activation_fn, normalizer_fn=normalizer_fn,) as arg_sc: with slim.arg_scope( [slim.fully_connected], weights_regularizer=slim.l2_regularizer(weight_decay), weights_initializer=tf.truncated_normal_initializer(stddev=0.001), activation_fn=activation_fn, normalizer_fn=normalizer_fn) as arg_sc: return arg_sc
Apache License 2.0
blockchain-etl/polygon-etl
airflow/dags/polygonetl_airflow/gcs_utils.py
download_from_gcs
python
def download_from_gcs(bucket, object, filename):
    from google.cloud import storage

    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucket)

    blob_meta = bucket.get_blob(object)

    if blob_meta.size > 10 * MEGABYTE:
        blob = bucket.blob(object, chunk_size=10 * MEGABYTE)
    else:
        blob = bucket.blob(object)

    blob.download_to_filename(filename)
Download a file from GCS. Unlike gcs_hook.download, which saves the whole file in memory first, this can download big files.
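A hedged usage sketch; the bucket and object names are placeholders, and running it requires google-cloud-storage credentials:

# Objects larger than 10 MB are streamed in 10 MB chunks instead of
# being held in memory.
download_from_gcs(
    bucket='my-polygon-etl-bucket',        # placeholder bucket name
    object='export/blocks/000000000.csv',  # placeholder object path
    filename='/tmp/blocks.csv',
)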
https://github.com/blockchain-etl/polygon-etl/blob/c1a8e09b6725f9f3c063097ba1069224b361d5ea/airflow/dags/polygonetl_airflow/gcs_utils.py#L45-L59
import logging
import os

MEGABYTE = 1024 * 1024


def upload_to_gcs(gcs_hook, bucket, object, filename, mime_type='application/octet-stream'):
    from apiclient.http import MediaFileUpload
    from googleapiclient import errors

    service = gcs_hook.get_conn()

    if os.path.getsize(filename) > 10 * MEGABYTE:
        media = MediaFileUpload(filename, mime_type, resumable=True)
        try:
            request = service.objects().insert(bucket=bucket, name=object, media_body=media)
            response = None
            while response is None:
                status, response = request.next_chunk()
                if status:
                    logging.info("Uploaded %d%%." % int(status.progress() * 100))
            return True
        except errors.HttpError as ex:
            if ex.resp['status'] == '404':
                return False
            raise
    else:
        media = MediaFileUpload(filename, mime_type)
        try:
            service.objects().insert(bucket=bucket, name=object, media_body=media).execute()
            return True
        except errors.HttpError as ex:
            if ex.resp['status'] == '404':
                return False
            raise
MIT License
scalyr/scalyr-agent-2
scalyr_agent/json_lib/objects.py
JsonObject.__setitem__
python
def __setitem__(self, key, value):
    self.__map[key] = value
    return self
Set the specified key to the specified value.

@param key: The name of the field to set.
@param value: The value for the field.

@return: This object.
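Usage sketch. Python's subscript syntax discards the return value, so the `return self` only matters when the method is called directly:

obj = JsonObject(a=1)
obj['b'] = 2                      # normal subscript assignment; return value ignored
obj.__setitem__('c', 3)['d'] = 4  # direct call lets you chain via the returned self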
https://github.com/scalyr/scalyr-agent-2/blob/6d32b861889078f044c9ab3f1f7157f2c89ba04a/scalyr_agent/json_lib/objects.py#L91-L99
from __future__ import unicode_literals from __future__ import absolute_import __author__ = "czerwin@scalyr.com" if False: from typing import List from typing import Any import six from six.moves import range from scalyr_agent.json_lib.exceptions import JsonConversionException from scalyr_agent.json_lib.exceptions import JsonMissingFieldException class JsonObject(object): def __init__(self, content=None, **key_values): if content is None: self.__map = {} else: self.__map = content for key, value in six.iteritems(key_values): self.__map[six.ensure_text(key)] = value def to_json(self): def __repr__(self): return repr(self.__map) def __len__(self): return len(self.__map)
Apache License 2.0
beijbom/coralnet
project/images/models.py
Source.get_member_role
python
def get_member_role(self, user):
    perms = get_perms(user, self)
    for permType in [Source.PermTypes.ADMIN,
                     Source.PermTypes.EDIT,
                     Source.PermTypes.VIEW]:
        if permType.code in perms:
            return permType.verbose
Get a user's conceptual "role" in the source.

If they have admin perms, their role is admin.
Otherwise, if they have edit perms, their role is edit.
Otherwise, if they have view perms, their role is view.
Role is None if user is not a Source member.
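A sketch of the intended call pattern (assumes a Source whose members were granted guardian permissions; the variables are illustrative):

# 'source' is a Source instance, 'user' a django.contrib.auth User.
role = source.get_member_role(user)
if role is None:
    print("Not a member of this source")
elif role == Source.PermTypes.ADMIN.verbose:  # "Admin"
    print("User can administer this source")
else:
    print("User role:", role)                 # "Edit" or "View"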
https://github.com/beijbom/coralnet/blob/1f47f666a783f5ed4bcb5057513a4ae76e3d2d8c/project/images/models.py#L217-L232
import posixpath from django.conf import settings from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist from django.core.mail import mail_admins from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models from easy_thumbnails.fields import ThumbnailerImageField from guardian.shortcuts import ( get_objects_for_user, get_users_with_perms, get_perms, assign_perm, remove_perm) from .model_utils import PointGen from accounts.utils import is_robot_user from annotations.model_utils import AnnotationAreaUtils from labels.models import LabelSet from lib.utils import rand_string class SourceManager(models.Manager): def get_by_natural_key(self, name): return self.get(name=name) class Source(models.Model): objects = SourceManager() class VisibilityTypes: PUBLIC = 'b' PUBLIC_VERBOSE = 'Public' PRIVATE = 'v' PRIVATE_VERBOSE = 'Private' name = models.CharField(max_length=200, unique=True) VISIBILITY_CHOICES = ( (VisibilityTypes.PUBLIC, VisibilityTypes.PUBLIC_VERBOSE), (VisibilityTypes.PRIVATE, VisibilityTypes.PRIVATE_VERBOSE), ) visibility = models.CharField( max_length=1, choices=VISIBILITY_CHOICES, default=VisibilityTypes.PUBLIC) create_date = models.DateTimeField( 'Date created', auto_now_add=True, editable=False) description = models.TextField() affiliation = models.CharField(max_length=200) labelset = models.ForeignKey( LabelSet, on_delete=models.PROTECT, null=True) key1 = models.CharField('Aux. metadata 1', max_length=50, default="Aux1") key2 = models.CharField('Aux. metadata 2', max_length=50, default="Aux2") key3 = models.CharField('Aux. metadata 3', max_length=50, default="Aux3") key4 = models.CharField('Aux. metadata 4', max_length=50, default="Aux4") key5 = models.CharField('Aux. metadata 5', max_length=50, default="Aux5") POINT_GENERATION_CHOICES = ( (PointGen.Types.SIMPLE, PointGen.Types.SIMPLE_VERBOSE), (PointGen.Types.STRATIFIED, PointGen.Types.STRATIFIED_VERBOSE), (PointGen.Types.UNIFORM, PointGen.Types.UNIFORM_VERBOSE), ) default_point_generation_method = models.CharField( "Point generation method", help_text=( "When we create annotation points for uploaded images, this is how" " we'll generate the point locations. Note that if you change this" " setting later on, it will NOT apply to images that are already" " uploaded."), max_length=50, default=PointGen.args_to_db_format( point_generation_type=PointGen.Types.SIMPLE, simple_number_of_points=200) ) image_annotation_area = models.CharField( "Default image annotation area", help_text=( "This defines a rectangle of the image where annotation points are" " allowed to be generated.\n" "For example, X boundaries of 10% and 95% mean that the leftmost" " 10% and the rightmost 5% of the image will not have any points." 
" Decimals like 95.6% are allowed.\n" "Later, you can also set these boundaries as pixel counts on a" " per-image basis; for images that don't have a specific value" " set, these percentages will be used."), max_length=50, null=True ) cpce_code_filepath = models.CharField( "Local absolute path to the CPCe code file", max_length=1000, default='', ) cpce_image_dir = models.CharField( "Local absolute path to the directory with image files", help_text="Ending slash can be present or not", max_length=1000, default='', ) confidence_threshold = models.IntegerField( "Confidence threshold (%)", validators=[MinValueValidator(0), MaxValueValidator(100)], default=100, ) enable_robot_classifier = models.BooleanField( "Enable robot classifier", default=True, help_text=( "With this option on, the automatic classification system will" " go through your images and add unconfirmed annotations to them." " Then when you enter the annotation tool, you will be able to" " start from the system's suggestions instead of from a blank" " slate."), ) FEATURE_EXTRACTOR_CHOICES = ( ('efficientnet_b0_ver1', "EfficientNet (default)"), ('vgg16_coralnet_ver1', "VGG16 (legacy)"), ) feature_extractor_setting = models.CharField( "Feature extractor", max_length=50, choices=FEATURE_EXTRACTOR_CHOICES, default='efficientnet_b0_ver1') longitude = models.CharField(max_length=20, blank=True) latitude = models.CharField(max_length=20, blank=True) class Meta: permissions = ( ('source_view', 'View'), ('source_edit', 'Edit'), ('source_admin', 'Admin'), ) class PermTypes: class ADMIN: code = 'source_admin' fullCode = 'images.' + code verbose = 'Admin' class EDIT: code = 'source_edit' fullCode = 'images.' + code verbose = 'Edit' class VIEW: code = 'source_view' fullCode = 'images.' + code verbose = 'View' @property def feature_extractor(self) -> str: if settings.FORCE_DUMMY_EXTRACTOR: return 'dummy' return self.feature_extractor_setting @staticmethod def get_public_sources(): return Source.objects.filter(visibility=Source.VisibilityTypes.PUBLIC) .order_by('name') @staticmethod def get_sources_of_user(user): if user.is_authenticated: return get_objects_for_user(user, Source.PermTypes.VIEW.fullCode) .order_by('name') else: return Source.objects.none() @staticmethod def get_other_public_sources(user): return Source.get_public_sources() .exclude(pk__in=Source.get_sources_of_user(user)) def has_member(self, user): return user in self.get_members() def get_members(self): return get_users_with_perms(self).order_by('username')
BSD 2-Clause Simplified License
flora-network/flora-blockchain
tests/core/util/test_config.py
write_config
python
def write_config(root_path: Path, config: Dict):
    sleep(random.random())
    save_config(root_path=root_path, filename="config.yaml", config_data=config)
Wait for a random amount of time and write out the config data.

With a large config, we expect save_config() to require multiple writes.
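The surrounding test presumably fans this out across processes to race concurrent writers; a minimal sketch of that pattern under the same assumptions (chia config helpers importable; the root path is a placeholder rather than the real tmp-dir fixture):

from multiprocessing import Pool
from pathlib import Path

root = Path("/tmp/chia_test_root")  # placeholder; the real test uses a fixture
big_config = {"key%d" % i: "x" * 1024 for i in range(1000)}

# Several writers racing on the same config.yaml exercises save_config()'s
# write behavior under contention.
with Pool(processes=4) as pool:
    pool.starmap(write_config, [(root, big_config)] * 4)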
https://github.com/flora-network/flora-blockchain/blob/8557b3bd6f71a0d45160e155b3a4731bc5cfd0aa/tests/core/util/test_config.py#L23-L31
import asyncio
import copy
import pytest
import random
import yaml

from chia.util.config import create_default_chia_config, initial_config_file, load_config, save_config
from chia.util.path import mkdir
from multiprocessing import Pool
from pathlib import Path
from threading import Thread
from time import sleep
from typing import Dict
Apache License 2.0
optibus/playback
playback/interception/files/file_interception.py
FileInterception._deserialize_file
python
def _deserialize_file(serialized_file):
    file_content = serialized_file['file_content']
    if file_content != FileInterception.ABOVE_LIMIT_CONTENT:
        if six.PY2:
            file_content = file_content.decode('base64')
        else:
            file_content = base64.b64decode(file_content)
    return serialized_file['file_path'], file_content
Deserialize the content into file path and file content.

:param serialized_file: Serialized form of file
:type serialized_file: dict[str, str]
:return: File path and file content
:rtype: str, str
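A round-trip sketch pairing it with the `_serialize_file` helper shown in the context below (Python 3 path; the file path is a placeholder):

content = b"some recorded bytes"
serialized = FileInterception._serialize_file(content, "/tmp/example.bin")
path, restored = FileInterception._deserialize_file(serialized)
assert path == "/tmp/example.bin" and restored == content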
https://github.com/optibus/playback/blob/7e8ea29f764a52753aef4e3334fd9bcf7826e082/playback/interception/files/file_interception.py#L143-L159
from __future__ import absolute_import import logging import os import base64 import six from playback.utils.timing_utils import Timed _logger = logging.getLogger(__name__) class FileInterception(object): ABOVE_LIMIT_CONTENT = six.b('above interception limit') def __init__(self, file_path_arg_index, file_path_arg_name, intercepted_size_limit=None): self.file_path_arg_index = file_path_arg_index self.file_path_arg_name = file_path_arg_name self.intercepted_size_limit = self._calculate_max_intercepted_size_limit(intercepted_size_limit) @staticmethod def _mb_size(size): return size / (1024.0 * 1024.0) @staticmethod def _calculate_max_intercepted_size_limit(intercepted_size_limit): if intercepted_size_limit is not None: return intercepted_size_limit return int(float(os.getenv('PLAYBACK_INTERCEPTED_FILE_SIZE_LIMIT', "500"))) def _get_file_path(self, args, kwargs): file_path = kwargs.get(self.file_path_arg_name) if not file_path: file_path = args[self.file_path_arg_index] return file_path def _intercept_file(self, args, kwargs): file_path = self._get_file_path(args, kwargs) if self._is_file_above_size_limit(file_path): return self._above_limit_result(file_path) _logger.info(u'Reading intercepted file ({})'.format(file_path)) with Timed() as timed: with open(file_path, "rb") as f: content = f.read() _logger.info(u'Done reading content size is {:.2f}MB ({})'.format(self._mb_size(len(content)), file_path)) result = self._serialize_file(content, file_path) _logger.info(u'Done preparing file for recording with in {:.2f}s ({})'.format(timed.duration, file_path)) return result def _is_file_above_size_limit(self, file_path): if self.intercepted_size_limit is not None: file_size_in_mb = self._mb_size(os.path.getsize(file_path)) if file_size_in_mb > self.intercepted_size_limit: _logger.info(u'Intercepted file is {:.2f}MB which is above interception limit of {:.2f}MB, ' u'ignoring content in file {}'.format( file_size_in_mb, self.intercepted_size_limit, file_path)) return True return False @staticmethod def _above_limit_result(file_path): return { 'file_path': file_path, 'file_content': FileInterception.ABOVE_LIMIT_CONTENT } @staticmethod def _serialize_file(content, file_path): if six.PY2: encoded_content = content.encode('base64') else: encoded_content = base64.b64encode(content) return { 'file_path': file_path, 'file_content': encoded_content } @staticmethod
BSD 3-Clause New or Revised License
acq4/acq4
acq4/devices/MultiClamp/multiclamp.py
MultiClampTask.getConfigOrder
python
def getConfigOrder(self):
    return ([], [self.dev.getDAQName("primary")])
Return lists of devices that should be configured (before, after) this device.
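The returned pair says no devices need configuring before this one, while the DAQ serving the 'primary' channel must be configured after it. A hedged illustration of how a task runner might consume it (configure() is a hypothetical helper, not part of acq4):

before, after = task.getConfigOrder()
for dev in before:
    configure(dev)    # dependencies first
configure(task.dev)   # then this device
for dev in after:
    configure(dev)    # e.g. the DAQ named by getDAQName("primary")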
https://github.com/acq4/acq4/blob/4c0d9cdaf4740359023fd323f671e9af3c115d2e/acq4/devices/MultiClamp/multiclamp.py#L373-L375
from __future__ import print_function from __future__ import with_statement import time import numpy as np from acq4.Manager import logMsg from acq4.devices.PatchClamp import PatchClamp from pyqtgraph import multiprocess from acq4.util.Mutex import Mutex from pyqtgraph.metaarray import MetaArray, axis from .DeviceGui import MCDeviceGui from .taskGUI import MultiClampTaskGui from ..Device import DeviceTask from ...util.debug import printExc class MultiClamp(PatchClamp): proc = None def __init__(self, dm, config, name): PatchClamp.__init__(self, dm, config, name) self.config = config self.index = None self.devRackGui = None self.mc = None self._paramCache = {} self.mode_dependent_params = [ 'PrimarySignal', 'SecondarySignal', 'PrimarySignalGain', 'SecondarySignalGain', 'Holding', 'HoldingEnable', 'PipetteOffset', ] self.stateLock = Mutex(Mutex.Recursive) self.lastState = {} self.lastMode = None self._switchingToMode = None self.holding = { 'VC': -50e-3, 'IC': 0.0, 'I=0': 0.0 } executable = self.config.get('pythonExecutable', None) if executable is not None: if MultiClamp.proc is False: raise Exception("Already connected to multiclamp locally; cannot connect via remote process at the same time.") if MultiClamp.proc is None: MultiClamp.proc = multiprocess.Process(executable=executable, copySysPath=False) try: self.proc.mc_mod = self.proc._import('acq4.drivers.MultiClamp') self.proc.mc_mod._setProxyOptions(deferGetattr=False) except: MultiClamp.proc.close() MultiClamp.proc = None raise mcmod = self.proc.mc_mod else: if MultiClamp.proc not in (None, False): raise Exception("Already connected to multiclamp via remote process; cannot connect locally at the same time.") else: MultiClamp.proc = False try: import acq4.drivers.MultiClamp as MultiClampDriver except RuntimeError as exc: if "32-bit" in exc.message: raise Exception("MultiClamp commander does not support access by 64-bit processes. 
To circumvent this problem, " "Use the 'pythonExecutable' device configuration option to connect via a 32-bit python instead.") else: raise mcmod = MultiClampDriver dllPath = self.config.get('dllPath', None) if dllPath is not None: mcmod.getAxlib(dllPath) mc = mcmod.MultiClamp.instance() if executable is not None: self.mc = mc.getChannel(self.config['channelID'], multiprocess.proxy(self.mcUpdate, callSync='off')) else: self.mc = mc.getChannel(self.config['channelID'], self.mcUpdate) start = time.time() while self.mc.getState() is None: time.sleep(0.1) if time.time() - start > 10: raise Exception("Timed out waiting for first update from multi clamp commander.") print("Created MultiClamp device", self.config['channelID']) if 'vcHolding' in self.config: self.holding['VC'] = self.config['vcHolding'] if 'icHolding' in self.config: self.holding['IC'] = self.config['icHolding'] defaults = self.config.get('defaults', self.config.get('settings', None)) for mode in ['IC', 'VC']: self.setMode(mode) if defaults is not None and mode in defaults: self.mc.setParams(defaults[mode]) self.setMode('I=0') dm.declareInterface(name, ['clamp'], self) def listChannels(self): chans = {} for ch in ['commandChannel', 'primaryChannel', 'secondaryChannel']: chans[ch] = self.config[ch].copy() return chans def quit(self): if self.mc is not None: self.mc.mc.quit() def mcUpdate(self, state=None, mode=None): with self.stateLock: self._paramCache = {} if state is None: state = self.lastState[mode] mode = state['mode'] state['holding'] = self.holding[mode] self.lastState[mode] = state.copy() if self.lastMode != state['mode']: if self.lastMode is not None and state['mode'] != self._switchingToMode and state['mode'] != 'I=0': self.setHolding(state['mode']) logMsg("Warning: MultiClamp mode should be changed from ACQ4, not from the MultiClamp Commander window.", msgType='error') self.lastMode = state['mode'] self._switchingToMode = None self.sigStateChanged.emit(state) def getLastState(self, mode=None): if mode is None: mode = self.mc.getMode() with self.stateLock: if mode in self.lastState: return self.lastState[mode] def extCmdScale(self, mode): s = self.getLastState(mode) if s is not None: return s['extCmdScale'] else: if mode == 'VC': return 50 else: return 2.5e9 def getState(self): return self.mc.getState() def getParam(self, param): if param not in self._paramCache: val = self.mc.getParam(param) if self.config.get('enableParameterCache', False): self._paramCache[param] = val return self._paramCache[param] def setParam(self, param, value): if self.config.get('enableParameterCache', False): if param in self._paramCache and self._paramCache[param] == value: return if param == 'PrimarySignal': self.mc.setPrimarySignal(value) elif param == 'SecondarySignal': self.mc.setSecondarySignal(value) else: self.mc.setParam(param, value) self._paramCache.pop(param) self.getParam(param) else: self.mc.setParam(param, value) def deviceInterface(self, win): return MCDeviceGui(self, win) def taskInterface(self, taskRunner): return MultiClampTaskGui(self, taskRunner) def createTask(self, cmd, parentTask): return MultiClampTask(self, cmd, parentTask) def getHolding(self, mode=None): if mode is None: mode = self.mc.getMode() if mode == 'I=0': return 0.0 else: return self.holding[mode] def setHolding(self, mode=None, value=None): with self.dm.reserveDevices([self, self.config['commandChannel']['device']]): currentMode = self.mc.getMode() if mode is None: mode = currentMode if mode == 'I=0': return if mode == 'I=0': raise Exception("Can't set 
holding value for I=0 mode.") if value is not None: if self.holding[mode] == value: return self.holding[mode] = value state = self.lastState[mode] state['holding'] = value if mode == currentMode: self.sigStateChanged.emit(state) self.sigHoldingChanged.emit(self, mode) if mode != currentMode and currentMode != 'I=0': return holding = self.holding[mode] daq = self.getDAQName('command') chan = self.config['commandChannel']['channel'] daqDev = self.dm.getDevice(daq) s = self.extCmdScale(mode) if s == 0: if holding == 0.0: s = 1.0 else: raise Exception('Can not set holding value for multiclamp--external command sensitivity is disabled by commander.') scale = 1.0 / s daqDev.setChannelValue(chan, holding*scale, block=False) def autoPipetteOffset(self): with self.dm.reserveDevices([self]): self.mc.autoPipetteOffset() def autoBridgeBalance(self): with self.dm.reserveDevices([self]): self.mc.autoBridgeBal() def autoCapComp(self): with self.dm.reserveDevices([self]): self.mc.autoFastComp() self.mc.autoSlowComp() def listSignals(self, mode): return self.mc.listSignals(mode) def getMode(self): return self.mc.getMode() def setMode(self, mode): mode = mode.upper() if mode not in ['VC', 'IC', 'I=0']: raise Exception('MultiClamp mode "%s" not recognized.' % mode) for param in self.mode_dependent_params: self._paramCache.pop(param, None) with self.dm.reserveDevices([self, self.config['commandChannel']['device']]): mcMode = self.mc.getMode() if mcMode == mode: return if (mcMode=='IC' and mode=='VC') or (mcMode=='VC' and mode=='IC'): self._switchingToMode = 'I=0' self.mc.setMode('I=0') mcMode = 'I=0' if mcMode=='I=0': self.setHolding(mode) self._switchingToMode = mode self.mc.setMode(mode) time.sleep(0.5) def getDAQName(self, channel): return self.config[channel + 'Channel']['device'] class MultiClampTask(DeviceTask): recordParams = [ 'BridgeBalEnable', 'BridgeBalResist', 'FastCompCap', 'FastCompTau', 'Holding', 'HoldingEnable', 'LeakSubEnable', 'LeakSubResist', 'NeutralizationCap', 'NeutralizationEnable', 'OutputZeroAmplitude', 'OutputZeroEnable', 'PipetteOffset', 'PrimarySignalHPF', 'PrimarySignalLPF', 'RsCompBandwidth', 'RsCompCorrection', 'RsCompEnable', 'SlowCompCap', 'SlowCompTau', 'WholeCellCompCap', 'WholeCellCompEnable', 'WholeCellCompResist', ] def __init__(self, dev, cmd, parentTask): DeviceTask.__init__(self, dev, cmd, parentTask) self.cmd = cmd self.usedChannels = None self.daqTasks = {} if ('mode' not in self.cmd) or (type(self.cmd['mode']) is not str) or (self.cmd['mode'].upper() not in ['IC', 'VC', 'I=0']): raise Exception("Multiclamp command must specify clamp mode (IC, VC, or I=0)") self.cmd['mode'] = self.cmd['mode'].upper() for ch in ['primary', 'secondary']: if ch not in self.cmd: self.cmd[ch] = None
MIT License