code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def mode(self): if self._resources is None: self.__init() if "mode" in self._resources: url = self._url + "/mode" return _mode.Mode(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, ...
returns an object to work with the site mode
def convert_out(self, obj):
    """Write an EMIRUUID header on the reduced product if it is missing."""
    newobj = super(ProcessedImageProduct, self).convert_out(obj)
    if newobj:
        header = newobj.open()[0].header
        if 'EMIRUUID' not in header:
            header['EMIRUUID'] = str(uuid.uuid1())
    return newobj
Write EMIRUUID header on reduction
def directories(self):
    """Return the names of directories to be created."""
    base = self.project_name
    return [base, base + '/conf', base + '/static']
Return the names of directories to be created.
def filter_extant_exports(client, bucket, prefix, days, start, end=None): end = end or datetime.now() try: tag_set = client.get_object_tagging(Bucket=bucket, Key=prefix).get('TagSet', []) except ClientError as e: if e.response['Error']['Code'] != 'NoSuchKey': raise tag_se...
Filter days where the bucket already has extant export keys.
def find_netmiko_dir(): try: netmiko_base_dir = os.environ["NETMIKO_DIR"] except KeyError: netmiko_base_dir = NETMIKO_BASE_DIR netmiko_base_dir = os.path.expanduser(netmiko_base_dir) if netmiko_base_dir == "/": raise ValueError("/ cannot be netmiko_base_dir") netmiko_full_dir...
Check environment first, then default dir
def parse_trailer(header): pos = 0 names = [] while pos < len(header): name, pos = expect_re(re_token, header, pos) if name: names.append(name) _, pos = accept_ws(header, pos) _, pos = expect_lit(',', header, pos) _, pos = accept_ws(header, pos) return...
Parse the "Trailer" header.
def on_error(self, status_code):
    """Handle a non-200 status code from Twitter: record it and stop the stream."""
    self.error = status_code
    logger.error('Twitter returned error code %s', status_code)
    return False
Called when a non-200 status code is returned
def FUNCTIONNOPROTO(self, _cursor_type):
    """Handle a function type that has no prototype."""
    result_type = self.parse_cursor_type(_cursor_type.get_result())
    # No prototype means no attributes to record.
    obj = typedesc.FunctionType(result_type, [])
    self.set_location(obj, None)
    return obj
Handles function with no prototype.
def draw_segments(image, segments, color=(255, 0, 0), line_width=1):
    """Draw a rectangle outline on *image* for each (x, y, w, h) segment."""
    for x, y, w, h in segments:
        cv2.rectangle(image, (x, y), (x + w, y + h), color, line_width)
draws segments on image
def delete(cls, id):
    """Destroy a Union object identified by *id* via the API client."""
    api = cls._new_api_client()
    return api.make_request(cls, 'delete', url_params={'id': id})
Destroy a Union object
def add(image_path, file_name=None):
    """Copy an image into the GUI img library.

    When *file_name* is given, the copy is renamed to it while keeping the
    source file's extension. Does nothing if *image_path* is not a regular file.
    """
    if file_name is None:
        dst_path = IMG_DIR
    else:
        renamed = str(Path(file_name).stem + Path(image_path).suffix)
        dst_path = os.path.join(IMG_DIR, renamed)
    if os.path.isfile(image_path):
        shutil.copy2(image_path, dst_path)
Add an image to the GUI img library.
def stage_http_response2(self, payload): if not self._http_response_version and not payload: return if self.enabled and self.http_detail_level is not None and \ self.httplogger.isEnabledFor(logging.DEBUG): if self._http_response_headers: header_str...
Log complete http response, including response1 and payload
def _maximize(self): if not self.space.is_observations_valid(): return None y_max = self.space.y.max() self.utility_function.gaussian_process.fit(self.space.x, self.space.y) return self.utility_function.max_compute(y_max=y_max, ...
Find argmax of the acquisition function.
def _do_help(self, cmd, args): print(self.doc_string()) print() data_unsorted = [] cls = self.__class__ for name in dir(cls): obj = getattr(cls, name) if iscommand(obj): cmds = [] for cmd in getcommands(obj): ...
Display doc strings of the shell and its commands.
def add_subcommands(parser, commands): "Add commands to a parser" subps = parser.add_subparsers() for cmd, cls in commands: subp = subps.add_parser(cmd, help=cls.__doc__) add_args = getattr(cls, 'add_arguments', None) if add_args: add_args(subp) handler = getattr(...
Add commands to a parser
def _replace_token_range(tokens, start, end, replacement): tokens = tokens[:start] + replacement + tokens[end:] return tokens
For a range indicated from start to end, replace with replacement.
def merge_from(self, other):
    """Merge non-None fields from another PhoneNumberDesc object into this one."""
    for attr in ('national_number_pattern', 'example_number'):
        value = getattr(other, attr)
        if value is not None:
            setattr(self, attr, value)
Merge information from another PhoneNumberDesc object into this one.
def _count_pixels_on_line(self, y, p):
    """Count the number of pixels rendered on this line."""
    rendered = line(y, self._effective_thickness(p), 0.0)
    return rendered.sum()
Count the number of pixels rendered on this line.
def _compare_match(dict1, dict2): for karg, warg in six.iteritems(dict1): if karg in dict2 and dict2[karg] != warg: return False return True
Compare two dictionaries and return a boolean value if their values match.
def map_overview_header_element(feature, parent):
    """Retrieve the map overview header string from definitions."""
    # Parameters are required by the caller's expression-function signature
    # but are not used here.
    _ = feature, parent
    return map_overview_header['string_format'].capitalize()
Retrieve map overview header string from definitions.
async def observer_evaluate(self, message): observer_id = message['observer'] throttle_rate = get_queryobserver_settings()['throttle_rate'] if throttle_rate <= 0: await self._evaluate(observer_id) return cache_key = throttle_cache_key(observer_id) try: ...
Execute observer evaluation on the worker or throttle.
def visit_list(self, node, parent):
    """Visit a List node by returning a fresh instance of it."""
    newnode = nodes.List(
        ctx=self._get_context(node),
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=parent,
    )
    newnode.postinit([self.visit(child, newnode) for child in node.elts])
    return newnode
visit a List node by returning a fresh instance of it
def atom_by_serialnumber(self):
    """Provide a dictionary mapping serial numbers to their atom objects."""
    # Later duplicates overwrite earlier ones, matching an explicit loop.
    return {atom.serialNumber: atom for atom in self.model.atoms}
Provides a dictionary mapping serial numbers to their atom objects.
def server_list(endpoint_id): endpoint, server_list = get_endpoint_w_server_list(endpoint_id) if server_list == "S3": server_list = {"s3_url": endpoint["s3_url"]} fields = [("S3 URL", "s3_url")] text_format = FORMAT_TEXT_RECORD else: fields = ( ("ID", "id"), ...
Executor for `globus endpoint server list`
def Get(self, attribute, default=None): if attribute is None: return default elif isinstance(attribute, str): attribute = Attribute.GetAttributeByName(attribute) if "r" not in self.mode and (attribute not in self.new_attributes and attribute not in self.synced_at...
Gets the attribute from this object.
def sample_frame_single_env(self, batch_size, forward_steps=1): if self.current_size < self.buffer_capacity: return np.random.choice(self.current_size - forward_steps, batch_size, replace=False) else: candidate = np.random.choice(self.buffer_capacity, batch_size, replace=False) ...
Return indices of a random set of frames from the buffer that have enough history and future
def DosDateTimeToTimeTuple(dosDateTime): dos_date = dosDateTime >> 16 dos_time = dosDateTime & 0xffff day = dos_date & 0x1f month = (dos_date >> 5) & 0xf year = 1980 + (dos_date >> 9) second = 2 * (dos_time & 0x1f) minute = (dos_time >> 5) & 0x3f hour = dos_time >> 11 return time.loc...
Convert an MS-DOS format date time to a Python time tuple.
def batch_contains_deleted(self): "Check if current batch contains already deleted images." if not self._duplicates: return False imgs = [self._all_images[:self._batch_size][0][1], self._all_images[:self._batch_size][1][1]] return any(img in self._deleted_fns for img in imgs)
Check if current batch contains already deleted images.
def _parent_foreign_key_mappings(cls):
    """Get a mapping from foreign key name to the local name of foreign keys."""
    parent_rel = cls.__mapper__.relationships.get(cls.export_parent)
    if not parent_rel:
        return {}
    return {local.name: remote.name
            for (local, remote) in parent_rel.local_remote_pairs}
Get a mapping of foreign name to the local name of foreign keys
def _cancel_orphan_orders(self, orderId): orders = self.ibConn.orders for order in orders: order = orders[order] if order['parentId'] != orderId: self.ibConn.cancelOrder(order['id'])
cancel child orders when parent is gone
def authenticate(self, transport, account_name, password=None):
    """Authenticate the account using the SOAP method.

    Falls back to pre-authentication when no password is supplied.
    """
    Authenticator.authenticate(self, transport, account_name, password)
    # `is None` replaces the non-idiomatic `== None` comparison.
    if password is None:
        return self.pre_auth(transport, account_name)
    return self.auth(transport, account_name, password)
Authenticates account using soap method.
def _get_products(self): products_request = self.account_products() if products_request['error']: raise Exception(products_request['error']) product_ids = [] for product in products_request["json"]["entries"]: product_ids.append(product['productId']) self....
a method to retrieve account product details at initialization
def get(key, profile=None):
    """Get a value from the Redis SDB; returns False when no profile is given."""
    if not profile:
        return False
    kwargs = dict(profile)
    kwargs.pop('driver')
    return redis.StrictRedis(**kwargs).get(key)
Get a value from the Redis SDB.
def _convert_key_to_str(key):
    """Encode *key* to str on Python 2 unicode input; pass through otherwise.

    (Stolen completely from boto.providers.)
    """
    if six.PY2 and isinstance(key, unicode):
        return salt.utils.data.encode(key)
    return key
Stolen completely from boto.providers
def _no_op(name, **kwargs): return dict(name=name, result=True, changes={}, comment='')
No-op state to support state config via the stateconf renderer.
def bootstrap(self, config): pg_hba = config.get('pg_hba', []) method = config.get('method') or 'initdb' self._running_custom_bootstrap = method != 'initdb' and method in config and 'command' in config[method] if self._running_custom_bootstrap: do_initialize = self._custom_bo...
Initialize a new node from scratch and start it.
def _construct_form(self, i, **kwargs): if not settings.HIDE_LANGUAGE: self._construct_available_languages() form = super(TranslationFormSet, self)._construct_form(i, **kwargs) if settings.HIDE_LANGUAGE: form.instance.language_code = settings.DEFAULT_LANGUAGE else...
Construct the form, overriding the initial value for `language_code`.
def __isValidFilename(self, filename):
    """Determine whether *filename* is a valid archive file name."""
    if not filename or not isinstance(filename, string_types):
        return False
    if not re.match(r'^[\w\d\_\-\.]+$', filename, re.I):
        return False
    return bool(self.__isValidTGZ(filename) or self.__isValidZIP(filename))
Determine whether filename is valid
def _get_cache(self):
    """Return (and lazily create) the cache used for thundering-herd protection."""
    cache = self._cache
    if not cache:
        cache = self._cache = get_cache(self.app)
    return cache
Return the cache to use for thundering herd protection, etc.
def remove_non_magic_cols(self):
    """Remove all non-MagIC columns from all tables."""
    for table in self.tables.values():
        table.remove_non_magic_cols_from_table()
Remove all non-MagIC columns from all tables.
def AddLabels(self, labels_names, owner=None): if owner is None and not self.token: raise ValueError("Can't set label: No owner specified and " "no access token available.") if isinstance(labels_names, string_types): raise ValueError("Label list can't be string.") owner = ...
Add labels to the AFF4Object.
def _default_capacity(self, value):
    """Resolve the ReturnConsumedCapacity setting from *value* or instance defaults."""
    if value is not None:
        return value
    return INDEXES if (self.default_return_capacity or self.rate_limiters) else NONE
Get the value for ReturnConsumedCapacity from provided value
def run_to_states(self):
    """Property accessor for the _run_to_states field, read under the engine lock."""
    # A `with` block releases the lock even if reading the field raises,
    # unlike the manual acquire/release pair.
    with self.execution_engine_lock:
        return self._run_to_states
Property for the _run_to_states field
def main(host='localhost', port=8086): now = datetime.datetime.today() points = [] for angle in range(0, 360): y = 10 + math.sin(math.radians(angle)) * 10 point = { "measurement": 'foobar', "time": int(now.strftime('%s')) + angle, "fields": { ...
Define function to generate the sin wave.
def list_contributors(self, project_id=None, language_code=None):
    """Return the list of contributors for the given project/language."""
    response = self._run(
        url_path="contributors/list",
        id=project_id,
        language=language_code,
    )
    return response['result'].get('contributors', [])
Returns the list of contributors
def parse_list(self): try: return List([self.parse() for _ in self.collect_tokens_until('CLOSE_BRACKET')]) except IncompatibleItemType as exc: raise self.error(f'Item {str(exc.item)!r} is not a ' f'{exc.subtype.__name__} tag')...
Parse a list from the token stream.
def parse_metrics(self, f): headers = None for l in f['f'].splitlines(): s = l.strip().split("\t") if headers is None: headers = s else: s_name = s[ headers.index('Sample') ] data = dict() for idx, h in e...
Parse the metrics.tsv file from RNA-SeQC
def do_verify(marfile, keyfiles=None): try: with open(marfile, 'rb') as f: with MarReader(f) as m: errors = m.get_errors() if errors: print("File is not well formed: {}".format(errors)) sys.exit(1) if keyfile...
Verify the MAR file.
def environment_session_entity_type_path(cls, project, environment, user, session, entity_type): return google.api_core.path_template.expand( 'projects/{project}/agent/environments/{environment}/users/{user}/sessions/{session}/entityTypes/{entity_type}', ...
Return a fully-qualified environment_session_entity_type string.
def jflatten(j):
    """Flatten a 3-D Jacobian into a 2-D block-diagonal array.

    :param j: array of shape (nobs, nf, nargs); each observation's
        (nf, nargs) Jacobian is placed on the block diagonal.
    :return: zeros-filled 2-D array of shape (nf*nobs, nargs*nobs)
    """
    nobs, nf, nargs = j.shape
    nrows, ncols = nf * nobs, nargs * nobs
    jflat = np.zeros((nrows, ncols))
    # range() replaces the Python-2-only xrange(), which is a NameError on
    # Python 3; behavior is otherwise identical.
    for n in range(nobs):
        r, c = n * nf, n * nargs
        jflat[r:(r + nf), c:(c + nargs)] = j[n]
    return jflat
Flatten a 3-D Jacobian into 2-D.
def float16(val):
    """Decode a packed 16-bit float (1 sign, 5 exponent, 10 fraction bits).

    NOTE(review): the exponent bias (16) and the denormal scale (2**9) differ
    from IEEE 754 half precision (bias 15, denormal scale 2**24) — presumably
    a device-specific format; confirm against the producer's specification.
    """
    frac = val & 0x03ff
    exp = (val >> 10) & 0x1F
    sign = val >> 15
    if exp:
        magnitude = 2.0 ** (exp - 16) * (1.0 + frac / 2.0 ** 10)
    else:
        magnitude = frac / 2.0 ** 9
    return -magnitude if sign else magnitude
Convert a 16-bit floating point value to a standard Python float.
def getTimes(dataTasks): global begin_time start_time, end_time = float('inf'), 0 for fichier, vals in dataTask.items(): try: if hasattr(vals, 'values'): tmp_start_time = min([a['start_time'] for a in vals.values()])[0] if tmp_start_time < start_time: ...
Get the start time and the end time of data in milliseconds
def zDDEClose(self): if _PyZDDE.server and not _PyZDDE.liveCh: _PyZDDE.server.Shutdown(self.conversation) _PyZDDE.server = 0 elif _PyZDDE.server and self.connection and _PyZDDE.liveCh == 1: _PyZDDE.server.Shutdown(self.conversation) self.connection = False...
Close the DDE link with Zemax server
def open(safe_file):
    """Return a SentinelDataSet object for *safe_file*.

    :raises IOError: if the path is neither an existing file nor a directory.
    """
    if not (os.path.isdir(safe_file) or os.path.isfile(safe_file)):
        raise IOError("file not found: %s" % safe_file)
    return SentinelDataSet(safe_file)
Return a SentinelDataSet object.
def getLocalDateAndTime(date, time, *args, **kwargs):
    """Get the date and, when *time* is given, the time in the local timezone."""
    local_dt = getLocalDatetime(date, time, *args, **kwargs)
    local_time = local_dt.timetz() if time is not None else None
    return (local_dt.date(), local_time)
Get the date and time in the local timezone from date and optionally time
def run(self): input = self._consume() put_item = self._que_out.put try: if input is None: res = self._callable(*self._args, **self._kwargs) else: res = self._callable(input, *self._args, **self._kwargs) if res != None: ...
Execute the task on all the input and send the needed number of EXIT at the end
def swap(self, c2):
    """Order two currencies per market convention.

    Returns (inverted, lower, higher) where *inverted* is True when the
    operands had to be swapped.
    """
    if self.order > c2.order:
        return True, c2, self
    return False, self, c2
put the order of currencies as market standard
def update(self, instance): assert isinstance(instance, UnitOfWork) if instance.db_id: query = {'_id': ObjectId(instance.db_id)} else: query = {unit_of_work.PROCESS_NAME: instance.process_name, unit_of_work.TIMEPERIOD: instance.timeperiod, ...
Find the unit_of_work record and change its status.
def parse_table_name(self, table):
    """Parse a possibly schema-qualified table name.

    :param table: "schema.table" or a bare "table" name
    :return: (schema, table) tuple; schema is None when unqualified
    """
    if "." in table:
        # Split only on the first dot so names containing further dots
        # (e.g. "db.schema.table") no longer raise ValueError from the
        # unpacking; everything after the first dot stays in `table`.
        schema, table = table.split(".", 1)
    else:
        schema = None
    return (schema, table)
Parse schema qualified table name
def fetch(cls, client, _id, symbol): url = "https://api.robinhood.com/options/chains/" params = { "equity_instrument_ids": _id, "state": "active", "tradability": "tradable" } data = client.get(url, params=params) def filter_func(x): ...
fetch option chain for instrument
def result_consumed(self, task_id):
    """Report *task_id*'s result as successfully consumed."""
    logger.debug('Sending result consumed message.')
    payload = {'task_ids': task_id}
    return self._perform_post_request(self.results_consumed_endpoint,
                                      payload, self.token_header)
Report the result as successfully consumed.
def previous_friday(dt):
    """If the holiday falls on Saturday or Sunday, use the previous Friday instead."""
    # weekday(): Saturday == 5, Sunday == 6.
    days_back = {5: 1, 6: 2}.get(dt.weekday())
    return dt - timedelta(days_back) if days_back else dt
If holiday falls on Saturday or Sunday, use previous Friday instead.
def _density_seaborn_(self, label=None, style=None, opts=None): try: fig = sns.kdeplot(self.df[self.x], self.df[self.y]) fig = self._set_with_height(fig, opts) return fig except Exception as e: self.err(e, self.density_, "Can not draw ...
Returns a Seaborn density chart
def objc_type_encoding(self): if not hasattr(self, '_objc_type_encoding'): self._objc_type_encoding = \ conf.lib.clang_getDeclObjCTypeEncoding(self) return self._objc_type_encoding
Return the Objective-C type encoding as a str.
def readTableFromDelimited(f, separator="\t"): rowNames = [] columnNames = [] matrix = [] first = True for line in f.readlines(): line = line.rstrip() if len(line) == 0: continue row = line.split(separator) if first: columnNames = row[1:] ...
Reads a table object from given plain delimited file.
def setup_runner(self):
    """Create and return the ApplicationRunner instance for this object."""
    return ApplicationRunner(
        url=self.config['transport_host'],
        realm=u'realm1',
        extra={'config': self.config, 'handlers': self.handlers},
    )
Set up and return an ApplicationRunner instance.
def send_to_address(recipient_address, amount, private_key, blockchain_client=BlockchainInfoClient(), fee=STANDARD_FEE, change_address=None): signed_tx = make_send_to_address_tx(recipient_address, amount, private_key, blockchain_client, fee=fee, change_address=change_address) res...
Builds, signs, and dispatches a "send to address" transaction.
def event_return(events):
    """Forward each event's data to the SMTP returner."""
    for event in events:
        data = event.get('data', False)
        if data:
            returner(data)
Return event data via SMTP
def create_client(access_token): url = 'http://keycloak:8080/auth/admin/realms/dci-test/clients' r = requests.post(url, data=json.dumps(client_data), headers=get_auth_headers(access_token)) if r.status_code in (201, 409): print('Keycloak client dci created...
Create the dci client in the master realm.
def add_subparser(subparsers):
    """Register the ``version`` subcommand for exporting software versions."""
    version_parser = subparsers.add_parser(
        "version",
        help="Export versions of used software to stdout or a file ",
    )
    version_parser.add_argument(
        "--workdir",
        default=None,
        help="Directory export programs to in workdir/provenance/programs.txt",
    )
Add command line option for exporting version information.
def save_load(jid, clear_load, minions=None):
    """Write the load to every returner configured for multi_returner."""
    for name in __opts__[CONFIG_KEY]:
        # _mminion() is called per returner, as in the original.
        _mminion().returners['{0}.save_load'.format(name)](jid, clear_load)
Write load to all returners in multi_returner
def tilequeue_rawr_enqueue(cfg, args): from tilequeue.stats import RawrTileEnqueueStatsHandler from tilequeue.rawr import make_rawr_enqueuer_from_cfg msg_marshall_yaml = cfg.yml.get('message-marshall') assert msg_marshall_yaml, 'Missing message-marshall config' msg_marshaller = make_message_marshall...
command to take tile expiry path and enqueue for rawr tile generation
def add(self, piece_uid, index): if self.occupancy[index]: raise OccupiedPosition if self.exposed_territory[index]: raise VulnerablePosition klass = PIECE_CLASSES[piece_uid] piece = klass(self, index) territory = piece.territory for i in self.index...
Add a piece to the board at the provided linear position.
def shell(no_ipython): banner = "Interactive Werkzeug Shell" namespace = make_shell() if not no_ipython: try: try: from IPython.frontend.terminal.embed import InteractiveShellEmbed sh = InteractiveShellEmbed.instance(banner1=banner) except Impo...
Start a new interactive python session.
def artifact_cache_dir(self):
    """Return the artifact cache dir for this task.

    Note that this is unrelated to the general pants artifact cache.
    """
    configured = self.get_options().artifact_cache_dir
    return configured or os.path.join(self.scratch_dir, 'artifacts')
Note that this is unrelated to the general pants artifact cache.
def extended_blank_lines(logical_line, blank_lines, blank_before, indent_level, previous_logical): if previous_logical.startswith('def '): if blank_lines and pycodestyle.DOCSTRING_REGEX.match(logical_line): ...
Check for missing blank lines after class declaration.
def load(self, **kwargs): coordsys = kwargs.get('coordsys', 'CEL') extdir = kwargs.get('extdir', self.extdir) srcname = kwargs.get('srcname', None) self.clear() self.load_diffuse_srcs() for c in self.config['catalogs']: if isinstance(c, catalog.Catalog): ...
Load both point source and diffuse components.
def widen(self):
    """Increase the interval size around the current time."""
    center = self.time
    half = self.half_duration * self.scaling_coeff_x
    self.set_interval((center - half, center + half))
Increase the interval size.
def start(ctx, **kwargs):
    """Start a vaping process; run in the foreground when debugging or no_fork is set."""
    update_context(ctx, kwargs)
    daemon = mk_daemon(ctx)
    foreground = ctx.debug or kwargs['no_fork']
    (daemon.run if foreground else daemon.start)()
start a vaping process
def update_params(params, updates):
    """Return a new dict of *params* merged with *updates* (updates win).

    Non-dict *params* (e.g. None) is treated as an empty dict; the input
    is never mutated.
    """
    merged = dict(params) if isinstance(params, dict) else {}
    merged.update(updates)
    return merged
Merges updates into params
def to_prj(self, filename):
    """Save the prj WKT to the given file."""
    with open(filename, "w") as out:
        out.write(self.prj)
Saves prj WKT to given file.
def aggregate_key(self, aggregate_key): aggregation = self.data_dict[aggregate_key] data_dict_keys = {y for x in aggregation for y in x.keys()} for key in data_dict_keys: stacked = np.stack([d[key] for d in aggregation], axis=0) self.data_dict[key] = np.mean(stacked, axis...
Aggregate values from key and put them into the top-level dictionary
def create(cls, api, run_id=None, project=None, username=None): run_id = run_id or util.generate_id() project = project or api.settings.get("project") mutation = gql( ) variables = {'entity': username, 'project': project, 'name': run_id} res = api.client.exec...
Create a run for the given project
def apply(funcs, stack):
    """Thread *stack* through each function in *funcs*, returning the final stack."""
    current = stack
    for fn in funcs:
        current = fn(current)
    return current
Apply functions to the stack, passing the resulting stack to next state.
def filter_by_domain(self, domain):
    """Apply the given domain to a copy of this query and return the copy."""
    filtered = self._copy()
    filtered.domain = domain
    return filtered
Apply the given domain to a copy of this query
def flag_values_dict(self):
    """Return a dictionary that maps flag names to flag values."""
    # dict.items() replaces the Python-2-only six.iteritems shim; identical
    # behavior on Python 3 with no third-party dependency.
    return {name: flag.value for name, flag in self._flags().items()}
Returns a dictionary that maps flag names to flag values.
def _execution(self): did_start_executing = False if self.state == STATE_DEFAULT: did_start_executing = True self.state = STATE_EXECUTING def close(): if did_start_executing and self.state == STATE_EXECUTING: self.state = STATE_DEFAULT ...
Context manager for executing some JavaScript inside a template.
def _spec(self, name): "Return the named spec." for s in self._framespec: if s.name == name: return s raise ValueError("Unknown spec: " + name)
Return the named spec.
def create_secret(*args, **kwargs):
    """Return a secure key generated from the user and the object.

    As elements of any class are loaded from user input, this prevents the
    user from specifying an arbitrary class.

    NOTE(review): hmac.new() requires bytes key/message on Python 3 —
    confirm SECRET_FOR_SIGNS and the joined payload are bytes there.
    """
    to_sign = '-!'.join(args) + '$$'.join(kwargs.values())
    digest = hmac.new(settings.SECRET_FOR_SIGNS, to_sign, sha1).digest()
    return re.sub(r'[\W_]+', '', binascii.b2a_base64(digest))
Return a secure key generated from the user and the object. As we load elements from any class from user input, this prevents the user from specifying an arbitrary class.
def unfreeze_extensions(self):
    """Remove a previously frozen list of extensions.

    :raises ExternalError: when no frozen extension list exists.
    """
    frozen_path = os.path.join(_registry_folder(), 'frozen_extensions.json')
    if not os.path.isfile(frozen_path):
        raise ExternalError("There is no frozen extension list")
    os.remove(frozen_path)
    ComponentRegistry._frozen_extensions = None
Remove a previously frozen list of extensions.
def scale_cb(self, setting, value):
    """Handle a callback related to image scaling."""
    level = self.zoom.calc_level(value)
    self.t_.set(zoomlevel=level)
    self.redraw(whence=0)
Handle callback related to image scaling.
def _keyring_equivalent(keyring_one, keyring_two): def keyring_extract_key(file_path): with open(file_path) as f: for line in f: content = line.strip() if len(content) == 0: continue split_line = content.split('=') ...
Check two keyrings are identical
def objectsFromPEM(pemdata): certificates = [] keys = [] blobs = [b""] for line in pemdata.split(b"\n"): if line.startswith(b'-----BEGIN'): if b'CERTIFICATE' in line: blobs = certificates else: blobs = keys blobs.append(b'') ...
Load some objects from a PEM.
def dweet_for(thing_name, payload, key=None, session=None):
    """Send a dweet to dweet.io for a thing with a known name."""
    params = {'key': key} if key is not None else None
    return _send_dweet(payload, '/dweet/for/{0}'.format(thing_name),
                       params=params, session=session)
Send a dweet to dweet.io for a thing with a known name
def partition(molList, options): status_field = options.status_field active_label = options.active_label decoy_label = options.decoy_label activeList = [] decoyList = [] for mol in molList: if mol.GetProp(status_field) == active_label: activeList.append(mol) elif mol....
Partition molList into activeList and decoyList
def cursor(self):
    """The position of the cursor in the text.

    Clamps the stored position into [0, len(self)] before returning it.
    """
    # NOTE(review): assigning to self.cursor relies on a matching property
    # setter (defined in the enclosing class, not visible here) that updates
    # self._cursor — confirm before refactoring.
    if self._cursor < 0:
        self.cursor = 0
    if self._cursor > len(self):
        self.cursor = len(self)
    return self._cursor
The position of the cursor in the text.
def load_json_file_contents(path: str) -> str:
    """Load a JSON file and return its canonically formatted contents."""
    assert isinstance(path, str)
    raw = fileutils.read_text_from_file(os.path.abspath(path))
    # Round-trip through json to normalize key order and indentation.
    return json.dumps(json.loads(raw), sort_keys=True, indent=4)
Loads contents from a json file
def _no_primary(max_staleness, selection): smax = selection.secondary_with_max_last_write_date() if not smax: return selection.with_server_descriptions([]) sds = [] for s in selection.server_descriptions: if s.server_type == SERVER_TYPE.RSSecondary: staleness = (smax.last_wri...
Apply max_staleness, in seconds, to a Selection with no known primary.
def experiment_list(args): experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() if not experiment_dict: print('There is no experiment running...') exit(1) update_experiment() experiment_id_list = [] if args.all and args.all == 'all': ...
get the information of all experiments
def _is_last_child(self, tagname, attributes=None): children = self.cur_node.getchildren() if children: result = self._is_node(tagname, attributes, node=children[-1]) return result return False
Check if last child of cur_node is tagname with attributes