code: string, 4 – 4.48k chars
docstring: string, 1 – 6.45k chars
_id: string, 24 chars
def generate_stickers(self, default_width, do_create_numbers=True):
    def uvedge_priority(uvedge):
        face = uvedge.uvface.face
        return face.calc_area() / face.calc_perimeter()

    def add_sticker(uvedge, index, target_uvedge):
        uvedge.sticker = Sticker(uvedge, default_width, index, target_uvedge)
        uvedge.uvface.island.add_marker(uvedge.sticker)

    def is_index_obvious(uvedge, target):
        if uvedge in (target.neighbor_left, target.neighbor_right):
            return True
        if uvedge.neighbor_left.loop.edge is target.neighbor_right.loop.edge and uvedge.neighbor_right.loop.edge is target.neighbor_left.loop.edge:
            return True
        return False

    for edge in self.edges.values():
        index = None
        if edge.is_main_cut and len(edge.uvedges) >= 2 and edge.vector.length_squared > 0:
            target, source = edge.uvedges[:2]
            if uvedge_priority(target) < uvedge_priority(source):
                target, source = source, target
            target_island = target.uvface.island
            if do_create_numbers:
                for uvedge in [source] + edge.uvedges[2:]:
                    if not is_index_obvious(uvedge, target):
                        target_island.sticker_numbering += 1
                        index = str(target_island.sticker_numbering)
                        if is_upsidedown_wrong(index):
                            index += "."
                        target_island.add_marker(Arrow(target, default_width, index))
                        break
            add_sticker(source, index, target)
        elif len(edge.uvedges) > 2:
            target = edge.uvedges[0]
        if len(edge.uvedges) > 2:
            for source in edge.uvedges[2:]:
                add_sticker(source, index, target)
Add sticker faces where they are needed.
625941b83346ee7daa2b2bca
def get_number_of_unique_terms_for_accounts(filter_query: ES_Q, field: str, is_nested: bool = True) -> int:
    search = AccountSearch().filter(filter_query)
    cardinality_aggregation = A("cardinality", field=field, precision_threshold=11000)
    if is_nested:
        nested_agg = A("nested", path="financial_accounts_by_award")
        nested_agg.metric("field_count", cardinality_aggregation)
        search.aggs.metric("financial_account_agg", nested_agg)
    else:
        search.aggs.metric("financial_account_agg", cardinality_aggregation)
    response = search.handle_execute()
    response_dict = response.aggs.to_dict()
    return response_dict.get("financial_account_agg", {}).get("field_count", {"value": 0})["value"]
Returns the count for a specific filter_query. NOTE: Counts below the precision_threshold are expected to be close to accurate (per the Elasticsearch documentation). Since aggregations do not support more than 10k buckets this value is hard coded to 11k to ensure that endpoints using Elasticsearch do not cross the 10k threshold. Elasticsearch endpoints should be implemented with a safeguard in case this count is above 10k.
625941b8ab23a570cc24ffe0
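The entry above builds a capped cardinality aggregation; a minimal standalone sketch of the same pattern with elasticsearch-dsl follows (the index and field names are placeholders, not the project's real ones, and a live connection is assumed):

# Sketch of a cardinality aggregation with an 11k precision threshold.
from elasticsearch_dsl import A, Search

search = Search(index="my-index")
agg = A("cardinality", field="my_field", precision_threshold=11000)
search.aggs.metric("field_count", agg)
response = search.execute()
print(response.aggregations.field_count.value)  # approximate distinct count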
def test_varios_contratos():
    contratos = [
        Contrato(125, 'DEV', 'H'),
        Contrato(15000, 'DEV', 'E', horas_estimadas=100),
        Contrato(100, 'CAFE', 'H')
    ]
    custos_medios = custo_medio_por_tipo_servico(contratos)
    assert custos_medios == {
        'DEV': (275, 2),
        'CAFE': (100, 1),
    }
Tests the scenario where several contracts are provided
625941b832920d7e50b2802d
def print_product(num1, num2):
    return print(num1 * num2)
>>> print_product(3, 5)
15
625941b8c432627299f04aa4
def fit(self, durations, event_observed=None, timeline=None, entry=None, label='KM-estimate',
        alpha=None, left_censorship=False, ci_labels=None):
    estimate_name = 'survival_function_' if not left_censorship else 'cumulative_density_'
    v = preprocess_inputs(durations, event_observed, timeline, entry)
    self.durations, self.event_observed, self.timeline, self.entry, self.event_table = v
    log_survival_function, cumulative_sq_ = _additive_estimate(self.event_table, self.timeline,
                                                               self._additive_f, self._additive_var,
                                                               left_censorship)
    if entry is not None:
        n = self.event_table.shape[0]
        net_population = (self.event_table['entrance'] - self.event_table['removed']).cumsum()
        if net_population.iloc[:int(n / 2)].min() == 0:
            ix = net_population.iloc[:int(n / 2)].argmin()
            raise StatError("There are too few early truncation times and too many events. "
                            "S(t)==0 for all t>%.1f. Recommend BFH estimator." % ix)
    setattr(self, estimate_name, pd.DataFrame(np.exp(log_survival_function), columns=[label]))
    self.__estimate = getattr(self, estimate_name)
    self.confidence_interval_ = self._bounds(cumulative_sq_[:, None], alpha if alpha else self.alpha, ci_labels)
    self.median_ = median_survival_times(self.__estimate)
    self.predict = _predict(self, estimate_name, label)
    self.subtract = _subtract(self, estimate_name)
    self.divide = _divide(self, estimate_name)
    self.plot = plot_estimate(self, estimate_name)
    setattr(self, "plot_" + estimate_name, self.plot)
    return self
Parameters:
  durations: an array, or pd.Series, of length n -- duration subject was observed for
  timeline: return the best estimate at the values in timeline (positively increasing)
  event_observed: an array, or pd.Series, of length n -- True if the death was observed,
      False if the event was lost (right-censored). Defaults to all True if event_observed==None
  entry: an array, or pd.Series, of length n -- relative time when a subject entered the
      study. This is useful for left-truncated observations, i.e. the birth event was not
      observed. If None, defaults to all 0 (all birth events observed.)
  label: a string to name the column of the estimate.
  alpha: the alpha value in the confidence intervals. Overrides the initializing alpha
      for this call to fit only.
  left_censorship: True if durations and event_observed refer to left-censorship events.
      Default False
  ci_labels: add custom column names to the generated confidence intervals as a length-2
      list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha>
Returns:
  self, with new properties like 'survival_function_'.
625941b8ff9c53063f47c05e
def getAxes(self, *args, **kwargs):
    graph_ancestor = utils.searchFirstOccurrence(self, "graph", -1)
    return graph_ancestor.getAxes(*args, **kwargs)
Needed to allow children to draw.
625941b8b830903b967e9777
def obj_create(self, bundle, request=None, **kwargs):
    return super(CloudletResource, self).obj_create(bundle, request, **kwargs)
called for POST
625941b8377c676e9127200b
def get_data_consents(self, instance):
    data_consents = get_or_create_data_consent_users(instance)
    return DataConsentUserSerializer(instance=data_consents, many=True).data
Get the DataConsentUser objects associated with the basket via coupon and product
625941b8956e5f7376d70cda
def _cleanup_cat(self, cats):
    for cat in CONFIG.options("Categories"):
        if cat not in cats:
            CONFIG.remove_option("Categories", cat)
    default = CONFIG.get("General", "default_category")
    if not CONFIG.has_option("Categories", default):
        CONFIG.set("General", "default_category", CONFIG.options("Categories")[0])
        cst.save_config()
Remove categories not in cats
625941b8a4f1c619b28afea2
def test_search_alias_exceptions(self):
    self.assertRaises(AttributeError, getattr, Article.objects, 'bsearch_no_such_alias')
    self.assertRaises(NotImplementedError, Article.objects.bsearch_invalidalias)
    self.assertRaises(ValueError, getattr,
                      Article.objects.search.bsearch_title('title query').bsearch_titlefilter('title filter'),
                      'bsearch_noupdatedmdlonly')
Tests that invalid aliases raise exceptions.
625941b87c178a314d6ef2b9
def _nselected_components(self):
    if self._pca is None:
        return 0
    if self.ncomponents == 0:
        max_comp = len(self._variance_ratio)
    else:
        max_comp = self.ncomponents
    var_max = self._cumulative[max_comp - 1]
    if var_max != numpy.floor(self.variance_covered / 100.0):
        cut = max_comp
        self.variance_covered = var_max * 100
    else:
        cut = numpy.searchsorted(self._cumulative, self.variance_covered / 100.0)
        self.ncomponents = cut
    return cut
Return the number of selected components.
625941b850485f2cf553cbf9
def _get_withholdable_amounts(self, withholding_amount_type, withholding_advances):
    self.ensure_one()
    if self.state == 'posted':
        untaxed_field = 'matched_amount_untaxed'
        total_field = 'matched_amount'
    else:
        untaxed_field = 'selected_debt_untaxed'
        total_field = 'selected_debt'
    if withholding_amount_type == 'untaxed_amount':
        withholdable_invoiced_amount = self[untaxed_field]
    else:
        withholdable_invoiced_amount = self[total_field]
    withholdable_advanced_amount = 0.0
    if self.withholdable_advanced_amount < 0.0 and self.to_pay_move_line_ids and self.state != 'posted':
        withholdable_advanced_amount = 0.0
        sign = self.partner_type == 'supplier' and -1.0 or 1.0
        sorted_to_pay_lines = sorted(
            self.to_pay_move_line_ids,
            key=lambda a: a.date_maturity or a.date)
        partial_line = sorted_to_pay_lines[-1]
        if sign * partial_line.amount_residual < sign * self.withholdable_advanced_amount:
            raise ValidationError(_(
                'Seleccionó deuda por %s pero aparentente desea pagar '
                ' %s. En la deuda seleccionada hay algunos comprobantes de'
                ' mas que no van a poder ser pagados (%s). Deberá quitar '
                ' dichos comprobantes de la deuda seleccionada para poder '
                'hacer el correcto cálculo de las retenciones.' % (
                    self.selected_debt,
                    self.to_pay_amount,
                    partial_line.move_id.display_name,
                )))
        if withholding_amount_type == 'untaxed_amount' and partial_line.invoice_id:
            invoice_factor = partial_line.invoice_id._get_tax_factor()
        else:
            invoice_factor = 1.0
        withholdable_invoiced_amount -= (
            sign * self.unreconciled_amount * invoice_factor)
    elif withholding_advances:
        if self.state == 'posted':
            if self.unreconciled_amount and self.withholdable_advanced_amount:
                withholdable_advanced_amount = self.unmatched_amount * (
                    self.withholdable_advanced_amount / self.unreconciled_amount)
            else:
                withholdable_advanced_amount = self.unmatched_amount
        else:
            withholdable_advanced_amount = self.withholdable_advanced_amount
    return (withholdable_advanced_amount, withholdable_invoiced_amount)
Helper method for getting the withholding amounts used by account.tax
625941b845492302aab5e121
def test_channels(self):
    self.assertTrue(dionaea_capture.DionaeaCaptures.channels)
Test that the channel variable exists.
625941b8c432627299f04aa5
def extract_UK():
    f = open('jawiki-country.json')
    lines = f.readlines()
    f.close()
    for line in lines:
        data_json = json.loads(line)
        if data_json['title'] == 'イギリス':  # fixed: was 'イタリア' (Italy), which contradicted the function's purpose
            return data_json['text']
    raise ValueError('イギリスの記事が見つからない')
Get the body of the article about the UK. Return value: the text of the UK article.
625941b84527f215b584c2bc
def commonChars(self, A):
    self.li = []
    for ch in range(97, 123):
        self.li.append(chr(ch))
    self.linum = [101] * 26
    for st in A:
        self.extract(st)
    out = []
    for i in range(26):
        if self.linum[i] != 101:  # fixed: compare the per-letter count, not the whole list
            out += self.li[i] * self.linum[i]
    return out
:type A: List[str]
:rtype: List[str]
625941b8dc8b845886cb5395
def merge_with_database(base, database, app_name, customer, environment):
    if not isinstance(base, dict) or not isinstance(database, dict):
        raise AnsibleFilterError("input database is empty")
    if not isinstance(environment, dict):
        raise AnsibleFilterError("input environment must be a dictionary")
    if "engine" not in database:
        raise AnsibleFilterError("input database should define an 'engine' key")
    if "release" not in database:
        raise AnsibleFilterError("input database should define a 'release' key")
    if "code" not in environment:
        raise AnsibleFilterError("environment dict should define a 'code' key")
    if "name" not in environment:
        raise AnsibleFilterError("environment dict should define a 'name' key")
    result = deepcopy(base)
    database_name = "_".join([environment.get("code"), customer, app_name])
    new_database = {
        "application": app_name,
        "password": random_password(),
        "name": database_name,
        "user": database_name,
    }
    engine = database.get("engine")
    if engine not in result:
        result[database.get("engine")] = [
            {"release": database.get("release"), "databases": [new_database]}
        ]
        return result
    for defined_engine in result[engine]:
        if defined_engine.get("release", None) == database.get("release"):
            for defined_database in defined_engine.get("databases"):
                if defined_database.get("application") == app_name:
                    break
            else:
                defined_engine.get("databases").append(new_database)
        else:
            result[engine].append(
                {"release": database.get("release"), "databases": [new_database]}
            )
    return result
Merge database information into an already existing database structure. If the database already exists, the new one is ignored.
625941b8d99f1b3c44c673f7
def test_update_node_set_dynamic_driver_and_interfaces(self):
    self._set_config_interface_options_hardware_type()
    for iface in drivers_base.ALL_INTERFACES:
        iface_name = '%s_interface' % iface
        node_kwargs = {'uuid': uuidutils.generate_uuid()}
        node = obj_utils.create_test_node(self.context, driver='fake', **node_kwargs)
        i_name = self._get_valid_default_interface_name(iface)
        setattr(node, iface_name, i_name)
        node.driver = 'fake-hardware'
        driver_factory.check_and_update_node_interfaces(node)
        self.assertEqual(i_name, getattr(node, iface_name))
Update driver to dynamic and interfaces specified
625941b8d99f1b3c44c673f8
def select_rays(self, idxs):
    self._idxs = idxs
    self._backside = N.nonzero(self._backside[idxs])[0]
    v = self._working_bundle.get_vertices()[:, idxs]
    d = self._working_bundle.get_directions()[:, idxs]
    p = self._params[idxs]
    del self._params
    self._global = v + p[None, :] * d
Inform the geometry manager that only the given rays are to be used, so that internal data size is kept small.

Arguments:
idxs - an index array stating which rays of the working bundle are active.
625941b8435de62698dfdab5
def __init__(self, recipe_id):
    super().__init__()
    self.recipe_id = recipe_id
    self.detectable = True
    self.detected = True
    self.dead = False
Initialize a doodad. :param recipe_id: ID of the recipe from which this doodad is/was created.
625941b8dc8b845886cb5396
def _update_rule_position(self, rule_elt):
    current_position = self._get_rule_position()
    expected_position = self._get_expected_rule_position()
    if current_position == expected_position:
        self.position_changed = False
        return False
    self.root_elt.remove(rule_elt)
    self._insert(rule_elt)
    self.position_changed = True
    return True
Move the rule within the XML if required.
625941b830c21e258bdfa2fe
def model_admin(traverse_id: str) -> type:
    assert all(c in ALLOWED_TRAVERSE_ID_CHARACTERS for c in traverse_id), \
        "traverse_id may only contain lowercase letters, digits and a dash: {}".format(traverse_id)

    def _inner(cls):
        def register(scanner, name, wrapped):
            config = scanner.config
            model = getattr(cls, "model", None)
            assert model, "Class {} must declare model attribute".format(cls)
            registry = config.registry
            registry.unregisterAdapter(required=(IRequest,), provided=IModelAdmin)
            registry.registerAdapter(cls, required=(IRequest,), provided=IModelAdmin, name=traverse_id)
            registry.model_admin_ids_by_model[model] = traverse_id

        venusian.attach(cls, register, category='websauna')
        return cls

    return _inner
Class decorator to mark the class to become part of model admins.

``Configure.scan()`` must be run on this module for the implementation to be picked up. If there is already an existing model admin with the same ``model``, the existing model admin is overwritten.

:param traverse_id: Under which URL id this model appears in the admin interface. Allowed to contain lowercase letters, dash and digits. This will be available as the ``ModelAdmin.__name__`` instance attribute.

The decorated class must declare a ``model`` class attribute naming the model class this admin resource is controlling.
625941b85e10d32532c5ed90
def multioutput_fscore(y_true, y_pred, beta=1):
    score_list = []
    if isinstance(y_pred, pd.DataFrame):
        y_pred = y_pred.values
    if isinstance(y_true, pd.DataFrame):
        y_true = y_true.values
    for column in range(0, y_true.shape[1]):
        score = fbeta_score(y_true[:, column], y_pred[:, column], beta, average='weighted')
        score_list.append(score)
    f1score_numpy = np.asarray(score_list)
    f1score_numpy = f1score_numpy[f1score_numpy < 1]
    f1score = gmean(f1score_numpy)
    return f1score
MultiOutput Fscore

This is a performance metric of my own creation. It is a sort of geometric mean of the fbeta_score computed on each label. It is compatible with multi-label and multi-class problems. It features some peculiarities (geometric mean, 100% removal...) to exclude trivial solutions and deliberately under-estimate a standard fbeta_score average. The aim is avoiding issues when dealing with multi-class/multi-label imbalanced cases.

It can be used as a scorer for GridSearchCV:
    scorer = make_scorer(multioutput_fscore, beta=1)

Arguments:
    y_true -> labels
    y_pred -> predictions
    beta -> beta value of the fscore metric
Output:
    f1score -> customized fscore
625941b8e8904600ed9f1d8a
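An illustrative call with made-up labels; this sketch assumes the entry's own imports (numpy as np, sklearn's fbeta_score, scipy.stats.gmean) are in scope:

import numpy as np
from sklearn.metrics import make_scorer

y_true = np.array([[1, 0], [0, 1], [1, 1]])
y_pred = np.array([[1, 0], [0, 0], [1, 1]])
print(multioutput_fscore(y_true, y_pred, beta=1))  # column 0 scores 1.0 and is dropped

# and as a GridSearchCV scorer, exactly as the docstring suggests:
scorer = make_scorer(multioutput_fscore, beta=1)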
def main():
    import os
    nome = str(input("Nome do jogador: "))
    gols = int(input("Gols no campeonato: "))
    os.system('cls')
    print(jogador(nome, gols))
[Main function]
625941b8be7bc26dc91cd466
def log_p_x(self, x, x_embedded, x_embedded_dropped, z, all_embeddings):
    S = T.cast(z.shape[0] / x.shape[0], 'int32')
    x_rep = T.tile(x, (S, 1))
    x_rep_padding_mask = T.switch(T.lt(x_rep, 0), 0, 1)
    x_embedded_rep = T.tile(x_embedded, (S, 1, 1))
    x_embedded_dropped_rep = T.tile(x_embedded_dropped, (S, 1, 1))
    probs = self.get_probs(x_embedded_rep, x_embedded_dropped_rep, z, all_embeddings, mode='true')
    probs += T.cast(1.e-15, 'float32')
    log_p_x = T.sum(x_rep_padding_mask * T.log(probs), axis=-1)
    L = T.sum(x_rep_padding_mask, axis=1)
    return log_p_x
:param x: N * max(L) tensor
:param x_embedded: N * max(L) * E tensor
:param x_embedded_dropped: N * max(L) * E tensor
:param z: (S*N) * dim(z) matrix
:param all_embeddings: D * E matrix
:return log_p_x: (S*N) length vector
625941b885dfad0860c3acba
def run_lines(self, commands_str, **kwargs):
    output = []
    for cmd_line in [line.strip() for line in commands_str.split("\n")]:
        if cmd_line:
            output.append(self.run(cmd_line, **kwargs))
    return ''.join(output)
Run each newline-separated line from commands_str.

:param commands_str: commands to run, may be separated with newlines.
:param kwargs: arguments to pass to the run command.
:returns: the output of the commands
:rtype: str
625941b8b57a9660fec336e1
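A hypothetical call, where `shell` stands for whatever object provides the wrapped `run()`:

# Blank lines are skipped; each remaining line goes through shell.run().
output = shell.run_lines("""
    echo one
    echo two
""")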
def populate_interface(self, types=None):
    for s in self.services:
        logger.debug("populating '%s.%s (%s)' types...", s.__module__, s.__name__, s.get_service_key())
        for method in s.public_methods.values():
            if method.in_header is None:
                method.in_header = s.__in_header__
            if method.out_header is None:
                method.out_header = s.__out_header__
            if method.aux is None:
                method.aux = s.__aux__
            if method.aux is not None:
                method.aux.methods.append(_generate_method_id(s, method))
            if not self.check_method(method):
                logger.debug("method %s' discarded by check_method", method.class_key)
                continue
            logger.debug("  enumerating classes for method '%s'", method.class_key)
            for cls in self.add_method(method):
                self.add_class(cls)
    for s in self.services:
        s.__tns__ = self.get_tns()
        logger.debug("populating '%s.%s' routes...", s.__module__, s.__name__)
        for method in s.public_methods.values():
            self.process_method(s, method)
    for cls, descriptor in self.member_methods:
        self.process_method(cls.__orig__ or cls, descriptor)
    self.method_descriptor_id_to_key = dict(((id(v[0]), k) for k, v in self.service_method_map.items()))
    logger.debug("From this point on, you're not supposed to make any "
                 "changes to the class and method structure of the exposed "
                 "services.")
Harvests the information stored in individual classes' _type_info dictionaries. It starts from function definitions and includes only the used objects.
625941b8091ae35668666dc7
def insert_jscheck_details(chname, chdesc):
    numrows_chname = get_number_jschecks_with_chname(chname)
    if numrows_chname == 0:
        q = """INSERT INTO sbmCHECKS (chname,chdesc,cd,md,chefi1,chefi2) VALUES (%s,%s,CURDATE(),CURDATE(),NULL,NULL)"""
        run_sql(q, (chname, chdesc))
        return 0
    else:
        return 1
Insert details of a new JavaScript Check into the WebSubmit database IF there are not already Checks with the same Check-name (chname).

@param chname: unique check id/name (chname)
@param chdesc: Check description (the JavaScript code body that is the Check) (chdesc)
@return: 0 (ZERO) if insert is performed; 1 (ONE) if insert not performed due to rows existing for the given Check name/id.
625941b8099cdd3c635f0abe
def twoCitySchedCost(self, costs):
    N = len(costs) // 2
    costs.sort(key=lambda pair: pair[0] - pair[1])
    res = 0
    for pair in costs:
        if N:
            res += pair[0]
            N -= 1
        else:
            res += pair[1]
    return res
:type costs: List[List[int]]
:rtype: int
625941b87cff6e4e811177e7
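A worked example of the greedy above (sort by cost_A - cost_B; the N people who save the most go to city A), assuming the method lives on a Solution class as is usual for this kind of entry:

costs = [[10, 20], [30, 200], [400, 50], [30, 20]]
# sorted by a - b: [30, 200], [10, 20], [30, 20], [400, 50]
# first N=2 people fly to city A (30 + 10), the rest to city B (20 + 50)
print(Solution().twoCitySchedCost(costs))  # 110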
def find_all_compounds(self):
    elems = self.nominal_composition.get_element_ids()
    fracs = self.nominal_composition.get_element_fractions()
    gen = PhaseDiagramCompositionEntryGenerator()
    gen.set_elements_by_index(elems)
    gen.set_even_spacing(False)
    gen.set_order(1, len(elems))
    gen.set_size(self.max_formula_unit_size)
    all_possibilities = gen.generate_entries()
    hits = []
    for entry in all_possibilities:
        dist = 0.0
        for e in range(len(elems)):
            dist += abs(fracs[e] - entry.get_element_fraction(id=elems[e]))
        if dist > self.maximum_distance:
            continue
        ox_g = OxidationStateGuesser()
        en = LookUpData.load_property("Electronegativity")
        os = LookUpData.load_property("OxidationStates")
        ox_g.set_electronegativity(en)
        ox_g.set_oxidationstates(os)
        can_form_ionic = len(ox_g.get_possible_states(entry)) > 0
        if can_form_ionic:
            hits.append((dist, entry))
    hits.sort()
    accepted = [i[1] for i in hits]
    return accepted
Function to find all the compounds in the vicinity of the target composition.

Returns
-------
accepted : array-like
    A list of CompositionEntry's.
625941b876d4e153a657e992
@pytest.fixture
def edges_between_molecule():
    molecule = vermouth.molecule.Molecule()
    molecule.add_edges_from((
        (0, 1), (1, 2), (1, 3), (3, 4), (4, 5), (5, 6),
        (5, 7), (7, 8), (9, 10), (10, 11), (11, 12),
    ))
    for node1, node2, attributes in molecule.edges(data=True):
        attributes['arbitrary'] = '{} - {}'.format(min(node1, node2), max(node1, node2))
    return molecule
Build an empty molecule with known connectivity.

The molecule does not have any node attribute nor any molecule metadata. It only has a bare graph with a few nodes and edges.

The graph looks like::

    0 - 1 - 3 - 4 - 5 - 7 - 8    9 - 10 - 11 - 12
        |           |
        2           6
625941b8f7d966606f6a9e69
def edge_length(ch, edges):
    edge_verts = np.array([np.intersect1d(x[0], x[1]) for x in ch.simplices[edges]])
    return np.sqrt(np.sum((ch.points[edge_verts][:, 1, :2] - ch.points[edge_verts][:, 0, :2])**2, axis=1))
Get the length of each edge (in the Delaunay triangulation).
625941b8796e427e537b0424
def load(fp: TextIO, *args, **kwargs) -> Dict:
    pass
Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a Kickstart document) to a Python object.

[extended_summary]

:param fp: [description]
:type fp: TextIO
:return: [description]
:rtype: Dict
625941b8b57a9660fec336e2
def convert_to_rdotmap(row):
    return pd.Series({'Total': row['P0050001'],
                      'White': row['P0050003'],
                      'Black': row['P0050004'],
                      'Asian': row['P0050006'],
                      'Hispanic': row['P0050010'],
                      'Other': row['P0050005'] + row['P0050007'] + row['P0050008'] + row['P0050009'],
                      }, index=['Total', 'White', 'Black', 'Hispanic', 'Asian', 'Other'])
Takes the P005 variables and maps them to a series with Total, White, Black, Asian, Hispanic, Other.
625941b8097d151d1a222cbe
def test_body_status_code(self):
    for body, response, status_code in zip(self.bodies, self.responses, self.status_codes):
        if (body == ['Incorrect data type'] and self.cfg.pulp_version < Version('2.8')):
            continue
        with self.subTest(body=body):
            self.assertEqual(response.json()['http_status'], status_code)
Assert that each response body has the expected HTTP status code.
625941b823849d37ff7b2ef3
def compute_separable_model(self):
    F = self.function_field()
    p = F.characteristic()
    if p == 0:
        self._is_separable = True
        return
    F0 = F.base_field()
    if F is F0:
        self._is_separable = True
        return
    G = F.polynomial()
    q = ZZ(1)
    while q < G.degree():
        if all([G[i].is_zero() for i in range(G.degree() + 1) if not (p * q).divides(i)]):
            q = q * p
        else:
            break
    if q.is_one():
        self._is_separable = True
        return
    self._is_separable = False
    self._degree_of_inseparability = q
    R1 = PolynomialRing(F0, F.variable_name() + '_s')
    G1 = R1([G[q * i] for i in range((G.degree() / q).floor() + 1)])
    F1 = F0.extension(G1, R1.variable_name())
    self._separable_model = SmoothProjectiveCurve(F1)
    self._phi = F1.hom([F.gen()**q])
    self._degree_of_inseparability = q
Compute a separable model of the curve (if necessary).

OUTPUT: ``None``

This function has to be called only once. It decides whether or not the function field of the curve is given as a separable extension of the base field. If it is not separable then we compute a separable model, which is a triple `(Y_1, \phi, q)` where

- `Y_1` is a smooth projective curve over the same constant base field `k` as the curve `Y` itself, and which is given by a separable extension,
- `\phi` is a field homomorphism from the function field of `Y_1` into the function field of `Y` corresponding to a purely inseparable extension,
- `q` is the degree of the extension given by `\phi`, i.e. the degree of inseparability of the map `Y \to Y_1` given by `\phi`.

Note that `q` is a power of the characteristic of `k`.
625941b85166f23b2e1a4fbb
def move_cars(self):
    car = self.periph.cars.next
    while car:
        self.periph.move_car(car)
        x, y = Peripherique.x_y_to_circle(car.pos, Peripherique.INNER_RADIUS + Peripherique.ROAD_WIDTH / 2)
        self.graph.RelocateFigure(car.image, x - car.RADIUS, y + car.RADIUS)
        car = car.next
Move all the cars according to their behavior.
625941b8d6c5a10208143ea9
@core.flake8ext
def no_mutable_default_args(logical_line):
    msg = "N322: Method's default argument shouldn't be mutable!"
    if mutable_default_args.match(logical_line):
        yield (0, msg)
Check that a mutable object isn't used as a default argument.

N322: Method's default argument shouldn't be mutable
625941b8498bea3a759b9913
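Illustrative inputs for the check above (the `mutable_default_args` regex is assumed to match literal list/dict/set defaults):

# def f(x, y=[]):        -> flagged as N322 (mutable default)
# def g(x, y={}):        -> flagged as N322
# def h(x, y=None):      -> ok; create the container inside the body:
#     y = y if y is not None else []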
def run_tests(steps):
    global results
    passed = True
    failed = []
    for step in steps:
        util.pexpect_close_all()
        t1 = time.time()
        print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
        try:
            if run_step(step):
                results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
                print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
                check_logs(step)
            else:
                print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
                passed = False
                failed.append(step)
                results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
        except Exception as msg:
            passed = False
            failed.append(step)
            print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
            traceback.print_exc(file=sys.stdout)
            results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
            check_logs(step)
    if not passed:
        print("FAILED %u tests: %s" % (len(failed), failed))
    util.pexpect_close_all()
    write_fullresults()
    return passed
Run a list of steps.
625941b871ff763f4b5494f0
def ElasticToGraph(args):
    client = eccemotus.GetClient(args.host, args.port)
    generator = eccemotus.ElasticDataGenerator(client, args.indices, args.verbose)
    CreateGraph(generator, args)
Computes lateral graph based on data from elastic-search.

Args:
    args (argparse.Namespace): command line arguments.
625941b8cc0a2c11143dccfa
def requires_auth(f: Callable):
    @wraps(f)
    def decorated(*args, **kwargs) -> Callable:
        isValid = (args[0].json.indent_json() and args[0].userKey and args[0].serverIP)
        if not isValid:
            return None
        return f(*args, **kwargs)
    return decorated
Decorator function to ensure that the JSON is valid and that there is a valid key and server IP address
625941b87b25080760e392bc
def test_2_sample(self):
    self.fail("Failing")
@created 2013-05-10 12:00:00
@modified 2013-05-15 12:00:00
@tags tag3
625941b85fdd1c0f98dc0093
def add_product(self, name, product, index=None, requires=[]):
    index1 = None
    inds = [self._products.index(r) for r in requires]
    if len(inds) > 0:
        index1 = max(inds)
    if index1 is not None and index is not None:
        if index1 < index:
            logger.warn(f'Cannot insert product {index} before its'
                        ' requirements.'
                        f' Overwriting to {index1}')
            index = index1
    if index is None:
        self._products[name] = product
    else:
        self._products.insert_at(index, name, product)
Add a new product to this Manager.

Parameters:
    name : string
        Identifying name for this product. Uniqueness checked.
    product : `Product`
        Valid `Product` instance.
    index : int
        Insert the product at a given index.
    requires : list(string)
        List of products to be processed first.
625941b83346ee7daa2b2bcb
@window.event
def on_mouse_motion(x, y, dx, dy):
    app.change_mouse_pos(x, y)
Save the mouse position.
625941b8046cf37aa974cbac
def test_require_collection_wrong_version() -> None:
    subprocess.check_output(
        [
            "ansible-galaxy",
            "collection",
            "install",
            "containers.podman",
            "-p",
            "~/.ansible/collections",
        ]
    )
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        prerun.require_collection("containers.podman", '9999.9.9')
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
Tests behaviour of require_collection.
625941b891af0d3eaac9b876
def full_log_likelihood(params, data):
    m, b, sigma_intrinsic, y_outlier, sigma_outlier, outlier_fraction = params
    return np.sum(np.log(
        (1. - outlier_fraction) * likelihood_line([m, b, sigma_intrinsic], data)
        + outlier_fraction * outlier_distribution([y_outlier, sigma_outlier], data)))
The log-likelihood of the data given the full mixture model of the linear function & the outlier distribution.

Args:
    (1) np.ndarray or list, params = [slope, intercept, intrinsic scatter, outlier mean, outlier standard deviation, outlier fraction]
    (2) np.ndarray, data. Should have shape (N,4) (if no covariances on errors) or (N,5) (if covariant errors). Should be in the order (x,y,dx,dy) or (x,y,dx,dy,dxy).

Returns:
    (1) float, the likelihood of the data given this set of model parameters.
625941b8b5575c28eb68de5f
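A hedged sketch of the two component densities the mixture above assumes; the real likelihood_line / outlier_distribution may differ (for instance in how they propagate the x-errors), so treat these as illustrative Gaussians:

import numpy as np

def likelihood_line(params, data):
    m, b, sigma_in = params
    x, y, dx, dy = data[:, 0], data[:, 1], data[:, 2], data[:, 3]
    var = dy**2 + sigma_in**2 + (m * dx)**2   # assumed error propagation
    return np.exp(-0.5 * (y - (m * x + b))**2 / var) / np.sqrt(2 * np.pi * var)

def outlier_distribution(params, data):
    y0, sigma_out = params
    y, dy = data[:, 1], data[:, 3]
    var = dy**2 + sigma_out**2
    return np.exp(-0.5 * (y - y0)**2 / var) / np.sqrt(2 * np.pi * var)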
def set_clipboard_text(text):
    data = wx.TextDataObject()
    data.SetText(text)  # fixed: was hard-coded 'asdf', ignoring the argument
    if wx.TheClipboard.Open():
        wx.TheClipboard.SetData(data)
        wx.TheClipboard.Close()
    else:
        wx.MessageBox('Unable to open the clipboard', 'Error')
Set the given text on the clipboard.
625941b876e4537e8c3514d9
def secondsToNanos(self, seconds):
    return long(seconds * self._NANOS_IN_ONE_SECOND)
Converts seconds into nanoseconds
625941b897e22403b379cdfb
def __init__(self, block, subtype):
    Descriptor.__init__(self, block, TYPE_DISPLAY_RANGE_LIMITS)
    self._subtype = subtype
Create a DisplayRangeDescriptor object.

Args:
    block: A list of 18 bytes that make up this descriptor.
    subtype: A string that indicates the specific type of DisplayRangeDescriptor.
625941b80fa83653e4656e1f
def _check_max(self, reading, state):
    max_value = self._readings[reading].get(CONF_MAXIMUM)
    if max_value and state > float(max_value):
        return "{} high".format(reading)
If configured, check the value against the defined maximum value.
625941b8c4546d3d9de72892
def hello_world(param=2):
    today = datetime.date.today()
    to_string = "hello world to {} in {}".format(param, today)
    print(to_string)
    return to_string
this is to test how to use sphinx.ext.autodoc

:param param: (int) just a test param (default is 2)
:return: (str) hello world time str
625941b8236d856c2ad44640
def _get_logger(self):
    log_queue_handler = QueueHandler(self.log_queue)
    logger = logging.getLogger(self.name)
    logger.setLevel(conf.LOG['log_level'])
    logger.addHandler(log_queue_handler)
    fmt = "%(asctime)-15s %(levelname)s %(lineno)d %(message)s"
    datefmt = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(fmt, datefmt)
    log_path = conf.LOG['log_path']
    fh = logging.FileHandler(log_path)
    fh.setFormatter(formatter)
    log_queue_listener = QueueListener(self.log_queue, fh)
    return log_queue_listener, logger
log_queue_listener is passed a queue and some handlers, and it fires up an internal thread which listens to its queue for LogRecords sent from QueueHandlers. For more information, see https://docs.python.org/3/howto/logging-cookbook.html -'Dealing with handlers that block'
625941b8187af65679ca4f7f
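A usage sketch for the pair returned above; the owner object and calling convention are assumptions:

# worker = SomeWorker(...)                # hypothetical owner of _get_logger
# listener, logger = worker._get_logger()
# listener.start()                        # drains the queue into the file handler
# logger.info("hello from a worker")      # non-blocking: the record only hits the queue
# listener.stop()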
def sorted_grasps(self, key, metric, gripper='pr2', stable_pose_id=None):
    grasps = self.grasps(key, gripper=gripper, stable_pose_id=stable_pose_id)
    if len(grasps) == 0:
        return [], []
    grasp_metrics = self.grasp_metrics(key, grasps, gripper=gripper, stable_pose_id=stable_pose_id)
    if metric not in grasp_metrics[grasp_metrics.keys()[0]].keys():
        raise ValueError('Metric %s not recognized' % (metric))
    grasps_and_metrics = [(g, grasp_metrics[g.id][metric]) for g in grasps]
    grasps_and_metrics.sort(key=lambda x: x[1], reverse=True)
    sorted_grasps = [g[0] for g in grasps_and_metrics]
    sorted_metrics = [g[1] for g in grasps_and_metrics]
    return sorted_grasps, sorted_metrics
Returns the list of grasps for the given graspable sorted by decreasing quality according to the given metric.

Parameters
----------
key : :obj:`str`
    key of object to check metrics for
metric : :obj:`str`
    name of metric to use for sorting
gripper : :obj:`str`
    name of gripper
stable_pose_id : :obj:`str`
    id of stable pose

Returns
-------
:obj:`list` of :obj:`dexnet.grasping.ParallelJawPtGrasp3D`
    stored grasps for the object and gripper sorted by metric in descending order, empty list if gripper not found
:obj:`list` of float
    values of metrics for the grasps sorted in descending order, empty list if gripper not found
625941b86fece00bbac2d59d
def check_collision_bullet_aliens(ai_settings, screen, stats, ship, bullets, aliens, score_board):
    collisions = pygame.sprite.groupcollide(bullets, aliens, True, True)
    if collisions:
        for value_aliens in collisions.values():
            stats.score += ai_settings.alien_points
            score_board.prep_score()
        check_high_score(stats, score_board)
    if len(aliens) == 0:
        update_level(ai_settings, screen, stats, aliens, ship, bullets, score_board)
Respond to bullet-alien collisions.
625941b83eb6a72ae02ec33d
def is_following(user, obj):
    check(obj)
    return get_document('actstream.Follow').objects.filter(
        user=user, follow_object=obj
    ).exists()
Checks if a "follow" relationship exists. Returns True if exists, False otherwise. Example:: is_following(request.user, group)
625941b8aad79263cf39089d
def testVehicleStatsListResponseData(self):
    inst_req_only = self.make_instance(include_optional=False)
    inst_req_and_optional = self.make_instance(include_optional=True)
Test VehicleStatsListResponseData
625941b89f2886367277a6f3
def transform(self, X, copy=True):
    if hasattr(X, 'dtype') and np.issubdtype(X.dtype, np.floating):
        X = sp.csr_matrix(X, copy=copy)
    else:
        X = sp.csr_matrix(X, dtype=np.float64, copy=copy)
    # Scale each vocabulary column by its BNS score. The original sliced a
    # fresh csc copy and multiplied that, which left X untouched; applying
    # the weights through a per-column factor actually scales the matrix.
    scales = np.ones(X.shape[1])
    for word, word_index in self.vocab.items():
        scales[word_index] = self.bns_scores[word]
    X = X.multiply(scales)
    return sp.coo_matrix(X, dtype=np.float64)
Transform a count matrix to a bns or tf-bns representation.

Parameters
----------
X : sparse matrix, [n_samples, n_features]
    a matrix of term/token counts

Returns
-------
vectors : sparse matrix, [n_samples, n_features]
625941b892d797404e303fec
def _MergeField(tokenizer, message):
    message_descriptor = message.DESCRIPTOR
    if tokenizer.TryConsume(b('[')):
        name = [tokenizer.ConsumeIdentifier()]
        while tokenizer.TryConsume(b('.')):
            name.append(tokenizer.ConsumeIdentifier())
        name = '.'.join(name)
        if not message_descriptor.is_extendable:
            raise tokenizer.ParseErrorPreviousToken(
                'Message type "%s" does not have extensions.' % message_descriptor.full_name)
        field = message.Extensions._FindExtensionByName(name)
        if not field:
            raise tokenizer.ParseErrorPreviousToken(
                'Extension "%s" not registered.' % name)
        elif message_descriptor != field.containing_type:
            raise tokenizer.ParseErrorPreviousToken(
                'Extension "%s" does not extend message type "%s".' % (
                    name, message_descriptor.full_name))
        tokenizer.Consume(b(']'))
    else:
        name = tokenizer.ConsumeIdentifier()
        field = message_descriptor.fields_by_name.get(name, None)
        if not field:
            field = message_descriptor.fields_by_name.get(name.lower(), None)
            if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
                field = None
        if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
                field.message_type.name != name):
            field = None
        if not field:
            raise tokenizer.ParseErrorPreviousToken(
                'Message type "%s" has no field named "%s".' % (
                    message_descriptor.full_name, name))
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        tokenizer.TryConsume(b(':'))
        if tokenizer.TryConsume(b('<')):
            end_token = b('>')
        else:
            tokenizer.Consume(b('{'))
            end_token = b('}')
        if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
            if field.is_extension:
                sub_message = message.Extensions[field].add()
            else:
                sub_message = getattr(message, field.name).add()
        else:
            if field.is_extension:
                sub_message = message.Extensions[field]
            else:
                sub_message = getattr(message, field.name)
            sub_message.SetInParent()
        while not tokenizer.TryConsume(end_token):
            if tokenizer.AtEnd():
                raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % bytestr_to_string((end_token)))
            _MergeField(tokenizer, sub_message)
    else:
        _MergeScalarField(tokenizer, message, field)
Merges a single protocol message field into a message.

Args:
    tokenizer: A tokenizer to parse the field name and values.
    message: A protocol message to record the data.

Raises:
    ParseError: In case of ASCII parsing problems.
625941b88a349b6b435e7fd7
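For context, this is the per-field worker behind protobuf's text-format parser; callers normally reach it through the public entry point (the message type below is a placeholder):

from google.protobuf import text_format

# msg = MyMessage()                                      # hypothetical generated type
# text_format.Merge('name: "x" child { value: 3 }', msg)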
def wait_gone(self, timeout=None, raise_error=True):
    start_time = time.time()
    if timeout is None or timeout <= 0:
        timeout = self._timeout
    while start_time + timeout > time.time():
        if not self.exists:
            return True
    if not raise_error:
        return False
    raise WDAElementNotDisappearError("element not gone")
Args:
    timeout (float): default timeout
    raise_error (bool): return bool or raise error

Returns:
    bool: works when raise_error is False

Raises:
    WDAElementNotDisappearError
625941b82c8b7c6e89b35626
def deletePlayers():
    DB = connect()
    cur = DB.cursor()
    cur.execute("""delete from players""")
    DB.commit()
    DB.close()
Remove all the player records from the database without any constraint
625941b8460517430c393ff0
def check_first_result(self, ref):
    first_link = self.driver.find_element(*SearchResultsPageLocators.FIRST_LINK)
    link_text = first_link.text
    assert link_text == ref, 'Oops, incorrect result'
Checks that the first result matches the query.
625941b84527f215b584c2bd
def tag_file(self, globaltag, package, **kwargs):
    all_params = ['globaltag', 'package']
    all_params.append('callback')
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method tag_file" % key
            )
        params[key] = val
    del params['kwargs']
    if ('globaltag' not in params) or (params['globaltag'] is None):
        raise ValueError("Missing the required parameter `globaltag` when calling `tag_file`")
    if ('package' not in params) or (params['package'] is None):
        raise ValueError("Missing the required parameter `package` when calling `tag_file`")
    resource_path = '/expert/calibration/tag'.replace('{format}', 'json')
    method = 'POST'
    path_params = {}
    query_params = {}
    if 'globaltag' in params:
        query_params['globaltag'] = params['globaltag']
    if 'package' in params:
        query_params['package'] = params['package']
    header_params = {}
    form_params = {}
    files = {}
    body_params = None
    header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/xml'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    auth_settings = []
    response = self.api_client.call_api(resource_path, method,
                                        path_params, query_params, header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=files,
                                        response_type='GlobalTag',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
Tag a package.

This method will create a global tag for every file created in the given package. The tag name is in general hidden to the user, and is automatically generated by the 'commit' command. The description field is generated, and the global tag will have maximum snapshot time before the locking step.

This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response.

>>> def callback_function(response):
>>>     pprint(response)
>>>
>>> thread = api.tag_file(globaltag, package, callback=callback_function)

:param callback function: The callback function for asynchronous request. (optional)
:param str globaltag: global tag name; it should start with the package name and have the format xxx-version-subversion (required)
:param str package: package name (required)
:return: GlobalTag
    If the method is called asynchronously, returns the request thread.
625941b8d8ef3951e324339f
def _map_symbols_on_current_thread(self, mapped_files):
    frame = gdb.newest_frame()
    while frame and frame.is_valid():
        if frame.name() is None:
            m = self._find_mapping_for_address(mapped_files, frame.pc())
            if m is not None and self._try_to_map(m):
                _gdb_execute("info threads")
                frame = gdb.newest_frame()
                assert frame.is_valid()
        if (frame.older() is not None and frame.older().is_valid() and
                frame.older().pc() != frame.pc()):
            frame = frame.older()
        else:
            frame = None
Updates the symbols for the current thread using files from mapped_files.
625941b885dfad0860c3acbb
def getPropertyCSSValue(self, name, normalize=True):
    nname = self._normalize(name)
    if nname in self._SHORTHANDPROPERTIES:
        self._log.info('CSSValue for shorthand property "%s" should be '
                       'None, this may be implemented later.' % nname,
                       neverraise=True)
    p = self.getProperty(name, normalize)
    if p:
        return p.propertyValue
    else:
        return None
:param name: of the CSS property, always lowercase (even if not normalized)
:param normalize: if ``True`` (DEFAULT) name will be normalized (lowercase, no simple escapes) so "color", "COLOR" or "C\olor" will all be equivalent

    If ``False`` may return **NOT** the effective value but the effective one for the unnormalized name.
:returns: :class:`~css_parser.css.CSSValue`, the value of the effective property if it has been explicitly set for this declaration block.

(DOM) Used to retrieve the object representation of the value of a CSS property if it has been explicitly set within this declaration block. Returns None if the property has not been set.

(This method returns None if the property is a shorthand property. Shorthand property values can only be accessed and modified as strings, using the getPropertyValue and setProperty methods.)

**css_parser currently always returns a CSSValue if the property is set.**

For more on shorthand properties see http://www.dustindiaz.com/css-shorthand/
625941b823849d37ff7b2ef4
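A usage sketch, assuming css_parser keeps the module-level parseStyle helper of the cssutils codebase it derives from:

import css_parser

style = css_parser.parseStyle('color: red; font: 12px sans-serif')
print(style.getPropertyCSSValue('color'))  # value object for an explicit property
print(style.getPropertyCSSValue('font'))   # shorthand: a message is logged, a value is still returned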
@router_songs.patch(
    "/uninvolve",
    summary="Uninvolve a person from some songs.",
    status_code=204,
    responses={
        **responses.login_error,
        404: {"description": "Role / person not found"},
    }
)
def edit_multiple_uninvolve(
    ls: dependencies.LoginSession = f.Depends(dependencies.dependency_login_session),
    song_ids: List[int] = f.Query(..., description="The ids of the songs to uninvolve the person from."),
    person_id: int = f.Query(..., description="The id of the person to uninvolve."),
    role_id: int = f.Query(..., description="The id of the role of the involvement."),
):
    role = ls.get(tables.Role, role_id)
    person = ls.get(tables.Person, person_id)
    for song in ls.group(tables.Song, song_ids):
        tables.SongInvolvement.unmake(session=ls.session, role=role, song=song, person=person)
        ls.log("song.edit.multiple.uninvolve", obj=song.id)
    ls.session.commit()
    return f.Response(status_code=204)
The opposite of _involve_: delete the connection between the specified person and the specified songs that has the specified role. Non-existing `song_ids` passed to the method will be silently skipped, while a 404 error will be raised for non-existing people or roles. Involvements that don't exist will be silently ignored.
625941b838b623060ff0ac51
@spiceErrorCheck
def unload(filename):
    if isinstance(filename, list):
        for f in filename:
            libspice.unload_c(stypes.stringToCharP(f))
        return  # fixed: without this, the list itself fell through to stringToCharP below
    filename = stypes.stringToCharP(filename)
    libspice.unload_c(filename)
Unload a SPICE kernel.

http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unload_c.html

:param filename: The name of a kernel to unload.
:type filename: str
625941b8d53ae8145f87a0d9
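A usage sketch with SpiceyPy's public API; the kernel filenames are illustrative:

import spiceypy as spice

# spice.furnsh("naif0012.tls")            # load a kernel
# spice.unload("naif0012.tls")            # unload it again
# spice.unload(["a.bsp", "b.bsp"])        # a list unloads each kernel in turn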
def minimal_node_degree(self, degree):
    nodedeletion.minimal_node_degree(self, degree)
Remove all nodes with a degree less than requested.

@attention: for documentation see nodedeletion.minimal_node_degree()
625941b89b70327d1c4e0c36
def contains(self, word):
    return word in self.gazetteer
Returns whether the Gazetteer contains the provided word.

Args:
    word: The word to check.

Returns:
    True if the word is contained in the Gazetteer, False otherwise.
625941b891f36d47f21ac359
def test_higher_priority_low_high(self):
    self.heap.array = [(1, 'important'), (2, 'not important')]
    self.assertFalse(
        self.heap._higher_priority(1, 0),
        "_higher_priority priority should return False when comparing {0} to {1}"
        .format(self.heap.array[1], self.heap.array[0])
    )
_higher_priority returns False when comparing an element with a lower priority to an element with a higher priority. (1p)
625941b8f9cc0f698b140468
def total(self, a, fpr, n, y):
    assert a > 0, "a==%f <= 0" % a
    s = self.bf_num_bytes(fpr, n)
    if ceil(a) + y - 1 >= len(self.params):
        tmp = (ceil(a) + y) * 1.362549
        rows = ceil(tmp)
        i = rows * 12
    else:
        rows = self.params[ceil(a) + y - 1][3]
        i = rows * 12
    total = s + i
    return total, rows
Total cost of graphene for given parameters
625941b87d43ff24873a2b06
def process_modisL2_TSM(l2_lac_productPath, L2_TSM_productpath):
    processing_call = gpt + ' ' + TSM_graph_file + ' -SsourceProduct=' + l2_lac_productPath + ' -t ' + L2_TSM_productpath
    print("Executing: ", processing_call)
    system(processing_call)
    if exists(L2_TSM_productpath):
        return True
    else:
        return False
:param l2_lac_productPath: the input product to the gpt processor (-SsourceProduct)
:param L2_TSM_productpath: the output (target) product
:return: True if the product has been created, False if not
625941b8fbf16365ca6f601f
def set_joint_positions(self, positions, joint_ids=None, velocities=None, kps=None, kds=None, forces=None):
    if self.is_publishing:
        q = self.subscriber.get_joint_positions()
        dq = self.subscriber.get_joint_velocities()
        if q is not None and len(q) > 0:
            q_indices = None if joint_ids is None else self.q_indices[joint_ids]
            if q_indices is not None:
                q[q_indices] = positions[:len(q_indices)]
                if velocities is not None:
                    dq[q_indices] = velocities[:len(q_indices)]
            self.arm_point.positions = q[:len(q_indices)]
            self.arm_point.velocities = dq[:len(q_indices)]
            self.arm_point.time_from_start.secs = 0
            self.arm_point.time_from_start.nsecs = 200000000
            self.arm_publisher.msg.points = [self.arm_point]
            self.arm_publisher.publish()
Set the position of the given joint(s) (using position control). Args: positions (float, np.array[float[N]]): desired position, or list of desired positions [rad] joint_ids (int, list[int], None): joint id, or list of joint ids. velocities (None, float, np.array[float[N]]): desired velocity, or list of desired velocities [rad/s] kps (None, float, np.array[float[N]]): position gain(s) kds (None, float, np.array[float[N]]): velocity gain(s) forces (None, float, np.array[float[N]]): maximum motor force(s)/torque(s) used to reach the target values.
625941b83539df3088e2e1ae
def merge(self, *args): <NEW_LINE> <INDENT> return _osgUtil.DelaunayConstraint_merge(self, *args)
merge(DelaunayConstraint self, DelaunayConstraint dco)
625941b8d10714528d5ffb42
def scalar_mul(v, alpha): <NEW_LINE> <INDENT> return Vec(v.D, {i:getitem(v,i) * alpha for i in v.D})
Returns the scalar-vector product alpha times v
625941b8187af65679ca4f80
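A quick usage sketch, assuming the Vec class (a domain set plus a sparse function dict) and the getitem helper from the course's vec module:

v = Vec({'a', 'b', 'c'}, {'a': 1, 'b': 2})   # entries missing from the dict default to 0
w = scalar_mul(v, 3)
# w.f == {'a': 3, 'b': 6, 'c': 0}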
def keypress(self, key): <NEW_LINE> <INDENT> if key.upper() in COLUMN_KEYS: <NEW_LINE> <INDENT> i = COLUMN_KEYS.index(key.upper()) <NEW_LINE> if i >= len( self.col_list ): <NEW_LINE> <INDENT> raise CalcEvent(E_no_such_column % key.upper()) <NEW_LINE> <DEDENT> self.columns.set_focus_column( i ) <NEW_LINE> return <NEW_LINE> <DEDENT> elif key == "(": <NEW_LINE> <INDENT> if len( self.col_list ) >= len(COLUMN_KEYS): <NEW_LINE> <INDENT> raise CalcEvent(E_no_more_columns) <NEW_LINE> <DEDENT> i = self.columns.get_focus_column() <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> col = self.col_list[i] <NEW_LINE> new_letter = COLUMN_KEYS[len(self.col_list)] <NEW_LINE> parent, child = col.create_child( new_letter ) <NEW_LINE> if child is None: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> self.col_list.append(child) <NEW_LINE> self.set_link( parent, col, child ) <NEW_LINE> self.columns.set_focus_column(len(self.col_list)-1) <NEW_LINE> <DEDENT> elif key == ")": <NEW_LINE> <INDENT> i = self.columns.get_focus_column() <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> col = self.col_list[i] <NEW_LINE> parent, pcol = self.get_parent( col ) <NEW_LINE> if parent is None: <NEW_LINE> <INDENT> raise CalcEvent(E_no_parent_column) <NEW_LINE> <DEDENT> new_i = self.col_list.index( pcol ) <NEW_LINE> self.columns.set_focus_column( new_i ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return key
Handle a keystroke.
625941b8d164cc6175782bb0
def get_ix_name(net, debugging): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> url = 'https://www.peeringdb.com/api/ix/{0:s}'.format(net) <NEW_LINE> json_obj = urllib.request.urlopen(url) <NEW_LINE> str_json_obj = json_obj.read().decode('utf-8') <NEW_LINE> output = json.loads(str_json_obj) <NEW_LINE> return str(output['data'][0]['name']) <NEW_LINE> <DEDENT> except urllib.error.HTTPError: <NEW_LINE> <INDENT> if debugging: <NEW_LINE> <INDENT> print('DEBUG: Could not find IX name from peeringDB. Setting ' 'to \'Exchange_Number_{0:s}\''.format(net)) <NEW_LINE> <DEDENT> return ('Exchange_Number_' + net)
This function accesses the PeeringDB API and pulls back the 'name' string of the given IX
625941b8d7e4931a7ee9dd7e
def remove_resi(head): <NEW_LINE> <INDENT> rhead = [] <NEW_LINE> delhead = [] <NEW_LINE> for line in head: <NEW_LINE> <INDENT> line = line.split() <NEW_LINE> try: <NEW_LINE> <INDENT> if line[0][:4] in RESTRAINT_CARDS: <NEW_LINE> <INDENT> line[0] = line[0].split('_')[0] <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> line = ' '.join(line) <NEW_LINE> delhead.append(line) <NEW_LINE> <DEDENT> for line in delhead: <NEW_LINE> <INDENT> line = line.strip(' \n\r').upper() <NEW_LINE> if line.startswith('RESI'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> rhead.append(line) <NEW_LINE> <DEDENT> return rhead
removes all resi commands and classes from restraints :param head: database header of the current fragment :type head: list :return: header lines with RESI commands removed and residue classes stripped from the restraint cards :rtype: list
625941b89f2886367277a6f4
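A hypothetical before/after for the record above, assuming 'DFIX' and 'SADI' are among the prefixes in RESTRAINT_CARDS:

head = ['RESI 1 CF3', 'DFIX_CF3 1.328 C1 F1', 'SADI_CF3 F1 F2 F2 F3']
remove_resi(head)
# -> ['DFIX 1.328 C1 F1', 'SADI F1 F2 F2 F3']   (RESI line dropped, _CF3 suffixes stripped)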
def get_dft(self, unit=None, validate=True): <NEW_LINE> <INDENT> if validate is True: <NEW_LINE> <INDENT> if unit is not None: <NEW_LINE> <INDENT> assert(UNITS[unit].quantity == self.gmt) <NEW_LINE> <DEDENT> <DEDENT> frequencies, amplitudes = fft( self._time_delta, self.get_amplitudes(unit)) <NEW_LINE> return DFT(frequencies, amplitudes, unit or self._unit)
Discrete Fourier transform (DFT).
625941b826068e7796caeb3b
def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkInPlaceImageFilterIUL2IRGBUS2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj
New() -> itkInPlaceImageFilterIUL2IRGBUS2 Create a new object of the class itkInPlaceImageFilterIUL2IRGBUS2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. Ex: itkInPlaceImageFilterIUL2IRGBUS2.New( reader, Threshold=10 ) is (most of the time) equivalent to: obj = itkInPlaceImageFilterIUL2IRGBUS2.New() obj.SetInput( 0, reader.GetOutput() ) obj.SetThreshold( 10 )
625941b8b7558d58953c4d7e
def get_demand(self): <NEW_LINE> <INDENT> return self.demand
Return the user demand.
625941b85fcc89381b1e1527
def get(self, request, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = self.get_queryset().get(pk=int(pk)) <NEW_LINE> <DEDENT> except get_user_model().DoesNotExist: <NEW_LINE> <INDENT> return Response({}, status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> user_data = self.get_serializer_class()(user, context={'request': request}).data <NEW_LINE> return Response(user_data)
Get user's data by its id
625941b8dc8b845886cb5397
def updatedState(self): <NEW_LINE> <INDENT> self.observe_index = (self.observe_index + 1) % (2**24) <NEW_LINE> for o in self.observers.values(): <NEW_LINE> <INDENT> o.trigger()
Call this whenever the resource was updated, and a notification should be sent to observers.
625941b84f6381625f1148a9
def testAddToGroupOK(self): <NEW_LINE> <INDENT> pass
Verify that if a ``Group`` instance is passed, the parametric role gets assigned to that group
625941b8d99f1b3c44c673f9
def wallsAndGates(self, rooms): <NEW_LINE> <INDENT> if not any(rooms): return <NEW_LINE> queue = collections.deque() <NEW_LINE> for i, row in enumerate(rooms): <NEW_LINE> <INDENT> for j, val in enumerate(row): <NEW_LINE> <INDENT> if val == 0: <NEW_LINE> <INDENT> queue.append((i, j, 0)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> m, n = len(rooms), len(rooms[0]) <NEW_LINE> while queue: <NEW_LINE> <INDENT> i, j, dis = queue.popleft() <NEW_LINE> dis += 1 <NEW_LINE> for di, dj in ((-1, 0), (1, 0), (0, 1), (0, -1)): <NEW_LINE> <INDENT> newi, newj = i + di, j + dj <NEW_LINE> if 0 <= newi < m and 0 <= newj < n and rooms[newi][newj] != -1: <NEW_LINE> <INDENT> if rooms[newi][newj] > dis: <NEW_LINE> <INDENT> rooms[newi][newj] = dis <NEW_LINE> queue.append((newi, newj, dis))
:type rooms: List[List[int]] :rtype: void Do not return anything, modify rooms in-place instead.
625941b830c21e258bdfa300
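A usage sketch for the record above, assuming the method sits on the usual LeetCode Solution class and collections is imported at module level; this is the canonical LeetCode 286 example (gates are 0, walls are -1, empty rooms are 2147483647):

INF = 2147483647
rooms = [[INF,  -1,   0, INF],
         [INF, INF, INF,  -1],
         [INF,  -1, INF,  -1],
         [  0,  -1, INF, INF]]
Solution().wallsAndGates(rooms)
# rooms is now [[3, -1, 0, 1], [2, 2, 1, -1], [1, -1, 2, -1], [0, -1, 3, 4]]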
def __init__(self, access_token: str, download_path='', working_directory='/', mode='auto'): <NEW_LINE> <INDENT> assert isinstance(download_path, str), 'path should be str type.' <NEW_LINE> assert working_directory.startswith('/'), 'working directory should starts with \'/\'' <NEW_LINE> assert mode == 'auto' or mode == 'manual', 'mode should be \'auto\' or \'manual\'' <NEW_LINE> self.dbx = dropbox.Dropbox(access_token) <NEW_LINE> self._download_path = download_path <NEW_LINE> self._working_directory = working_directory <NEW_LINE> self._mode = mode
:param str access_token: dropbox API token. :param str download_path: path to folder for providing data (should include file name). :param str working_directory: path to working directory on dropbox where monefy data should be placed. :param str mode: used to switch the workflow; can take only two values: 'auto' - when the dropbox working directory is refreshed automatically, 'manual' - when the user refreshes the dropbox working directory manually. Warning: 'download_path' is relative to the module where the method is called. :raises: AssertionError, TypeError.
625941b830bbd722463cbc26
def drifting(self): <NEW_LINE> <INDENT> return [n for n in self if n.drifting]
Get list of drifting times
625941b815baa723493c3dd5
def forward(self, inputs): <NEW_LINE> <INDENT> outputs = self.conv1(inputs) <NEW_LINE> outputs = self.conv2(outputs) <NEW_LINE> if not (self.drop is None): <NEW_LINE> <INDENT> outputs = self.drop(outputs) <NEW_LINE> <DEDENT> return outputs
Do a forward pass
625941b8379a373c97cfa9ad
def __testDeleteRelatedCounting(self): <NEW_LINE> <INDENT> NP = 20 <NEW_LINE> N = Instrument.objects.all().count() + NP <NEW_LINE> self.makePositions(NP) <NEW_LINE> Instrument.objects.all().delete() <NEW_LINE> self.assertEqual(Instrument.objects.all().count(),0) <NEW_LINE> self.assertEqual(Position.objects.all().count(),0)
Test delete on models with related models. This is a crucial test as it involves lots of operations and consistency checks.
625941b867a9b606de4a7d20
def test_stepresult_exists(self): <NEW_LINE> <INDENT> sr = self.F.StepResultFactory() <NEW_LINE> self.assertEqual( self.result_for( sr.result, sr.step, "{{ stepresult.id }}"), str(sr.id) )
If the step result already exists, it is returned.
625941b829b78933be1e551c
def mode(arr): <NEW_LINE> <INDENT> uniques, counts = np.unique(arr, return_counts=True) <NEW_LINE> return uniques[np.argmax(counts)]
Return the mode, i.e. most common value, of NumPy array <arr>
625941b8a17c0f6771cbdeb7
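A quick usage example for the record above:

import numpy as np

arr = np.array([1, 2, 2, 3, 3, 3])
print(mode(arr))   # -> 3; on ties the smallest value wins, because np.unique
                   # returns sorted values and np.argmax takes the first maximum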
def discriminator_loss(real_output, fake_output): <NEW_LINE> <INDENT> real_loss = cross_entropy(tf.ones_like(real_output), real_output) <NEW_LINE> fake_loss = cross_entropy(tf.zeros_like(fake_output), fake_output) <NEW_LINE> total_loss = real_loss + fake_loss <NEW_LINE> return total_loss
Discriminator loss.
625941b807d97122c41786ee
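A minimal sketch for the record above, assuming `cross_entropy` is the usual module-level tf.keras.losses.BinaryCrossentropy(from_logits=True) from the DCGAN tutorial this snippet appears to follow; the logits are made up:

import tensorflow as tf

cross_entropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)

real_output = tf.constant([[1.2], [0.8]])    # discriminator logits for real images
fake_output = tf.constant([[-0.4], [0.1]])   # discriminator logits for generated images
loss = discriminator_loss(real_output, fake_output)   # real part targets 1s, fake part targets 0s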
def test_data_range_overlap_strings(self): <NEW_LINE> <INDENT> lesser = DataRange("cat", "horse") <NEW_LINE> greater = DataRange("dog", "fish") <NEW_LINE> self.assertTrue(greater.overlaps(lesser))
Test that we detect overlapping string DataRanges.
625941b80c0af96317bb804c
def create_joint_angle_cmd(self, angle): <NEW_LINE> <INDENT> joint_cmd = ArmJointAnglesGoal() <NEW_LINE> joint_cmd.angles.joint1 = self.convert_to_degree(angle[0]) <NEW_LINE> joint_cmd.angles.joint2 = self.convert_to_degree(angle[1]) <NEW_LINE> joint_cmd.angles.joint3 = self.convert_to_degree(angle[2]) <NEW_LINE> joint_cmd.angles.joint4 = self.convert_to_degree(angle[3]) <NEW_LINE> joint_cmd.angles.joint5 = self.convert_to_degree(angle[4]) <NEW_LINE> joint_cmd.angles.joint6 = self.convert_to_degree(angle[5]) <NEW_LINE> if self.n_joints == 6: <NEW_LINE> <INDENT> joint_cmd.angles.joint7 = 0. <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> joint_cmd.angles.joint7 = self.convert_to_degree(angle[6]) <NEW_LINE> <DEDENT> return joint_cmd
Creates a joint angle command with the target joint angles. Planning is done in the base of the robot. :param angle: goal position of the waypoint, angles are in radians :type angle: list :return: joint angle command :rtype: ArmJointAnglesGoal
625941b850485f2cf553cbfc
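A hedged usage sketch for the record above; `arm` is a hypothetical wrapper instance for a 6-joint Kinova arm with n_joints == 6 and a convert_to_degree helper:

import math

goal = arm.create_joint_angle_cmd([0.0, math.pi / 2, 0.0, 0.0, 0.0, 0.0])
# goal.angles.joint2 == 90.0 (the driver expects degrees); joint7 is zeroed for 6-joint arms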
def set_status(self, status, error=None): <NEW_LINE> <INDENT> if status in ['ready', 'pending', 'finished', 'error']: <NEW_LINE> <INDENT> payload = {'_status': status} <NEW_LINE> if error is not None: <NEW_LINE> <INDENT> payload.update({'_issues': error}) <NEW_LINE> <DEDENT> r = requests.patch('%s/%s' % (self.api_url, self._id), data=json.dumps(payload, cls=EveJSONEncoder), headers=self._get_headers_etag()) <NEW_LINE> if r.status_code == 200: <NEW_LINE> <INDENT> r_json = r.json() <NEW_LINE> if '_etag' in r_json: <NEW_LINE> <INDENT> self.change['_etag'] = r_json['_etag'] <NEW_LINE> self._etag = self.change['_etag'] <NEW_LINE> self.change['_status'] = status <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> elif r.status_code == 412: <NEW_LINE> <INDENT> new = requests.get('%s/%s' % (self.api_url, self._id), headers=API_HEADERS) <NEW_LINE> if new.status_code == 200: <NEW_LINE> <INDENT> new_json = new.json() <NEW_LINE> if new_json['_status'] == status: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif '_etag' in new_json: <NEW_LINE> <INDENT> self._etag = new_json['_etag'] <NEW_LINE> self.set_status(status, error) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return False
Sets the status of a change item
625941b8925a0f43d2549cd7
def decode_args(nvim, args): <NEW_LINE> <INDENT> encoding = nvim.eval('&encoding') <NEW_LINE> return [arg.decode(encoding) if isinstance(arg, bytes) else arg for arg in args]
Helper function to decode from `bytes` to `str` `neovim` has some issues with encoding in Python3.
625941b885dfad0860c3acbc
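A hypothetical call for the record above; `nvim` is a pynvim session whose &encoding evaluates to 'utf-8', and non-bytes arguments pass through unchanged:

decode_args(nvim, [b'caf\xc3\xa9', 'plain str', 42])
# -> ['café', 'plain str', 42]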
def test_location(self, strain): <NEW_LINE> <INDENT> if isinstance(strain, str) and "/" in strain: <NEW_LINE> <INDENT> location = strain.split('/')[1] <NEW_LINE> if self.determine_location(location) is None: <NEW_LINE> <INDENT> print("Couldn't determine location for this strain, consider adding to flu_fix_location_label.tsv", location, strain)
Check that strains come from known locations; if not, print a suggestion to add the location to flu_fix_location_label.tsv.
625941b89b70327d1c4e0c37
def write_hal_choice(self, current_board, move): <NEW_LINE> <INDENT> if Board().check_for_draw(current_board): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_board[move] = "O" <NEW_LINE> return current_board
Takes HAL's move and writes it to the board if there is an open space
625941b8097d151d1a222cbf
def fetch_billing_account_iam_policy(self, account_id): <NEW_LINE> <INDENT> resource = self.dao.fetch_cai_asset( ContentTypes.iam_policy, 'cloudbilling.googleapis.com/BillingAccount', '//cloudbilling.googleapis.com/{}'.format(account_id), self.engine) <NEW_LINE> if resource: <NEW_LINE> <INDENT> return resource <NEW_LINE> <DEDENT> return {}, None
Gets IAM policy of a Billing Account from Cloud Asset data. Args: account_id (str): id of the billing account to get policy. Returns: dict: Billing Account IAM policy if found in the CAI data; otherwise the pair ({}, None).
625941b8099cdd3c635f0ac0
def writeConfigStorage(butlerLocation, obj): <NEW_LINE> <INDENT> filename = os.path.join(butlerLocation.getStorage().root, butlerLocation.getLocations()[0]) <NEW_LINE> with SafeFilename(filename) as locationString: <NEW_LINE> <INDENT> logLoc = LogicalLocation(locationString, butlerLocation.getAdditionalData()) <NEW_LINE> obj.save(logLoc.locString())
Writes an lsst.pex.config.Config object to a location specified by ButlerLocation. Parameters ---------- butlerLocation : ButlerLocation The location for the object to be written. obj : object instance The object to be written.
625941b863f4b57ef0000f85
def doc_contains_tags_only(self): <NEW_LINE> <INDENT> pass
Tags: foo, bar
625941b85f7d997b871748ff
def OnExportItemGraphButton(self): <NEW_LINE> <INDENT> self.ExportGraph(self.ContextualButtonsItem) <NEW_LINE> self.DismissContextualButtons()
Function called when contextual menu Export button is pressed
625941b896565a6dacc8f539