Columns: code (string, length 4 to 4.48k), docstring (string, length 1 to 6.45k), _id (string, length 24)
def preprocess(spark, slide_nums, folder="data", training=True, tile_size=1024, overlap=0, tissue_threshold=0.9, sample_size=256, grayscale=False, normalize_stains=True, num_partitions=20000): <NEW_LINE> <INDENT> slides = (spark.sparkContext .parallelize(slide_nums) .filter(lambda slide: open_slide(slide, folder, training) is not None)) <NEW_LINE> tile_indices = (slides.flatMap( lambda slide: process_slide(slide, folder, training, tile_size, overlap))) <NEW_LINE> tile_indices = tile_indices.repartition(num_partitions) <NEW_LINE> tile_indices.cache() <NEW_LINE> tiles = tile_indices.map(lambda tile_index: process_tile_index(tile_index, folder, training)) <NEW_LINE> filtered_tiles = tiles.filter(lambda tile: keep_tile(tile, tile_size, tissue_threshold)) <NEW_LINE> samples = filtered_tiles.flatMap(lambda tile: process_tile(tile, sample_size, grayscale)) <NEW_LINE> if normalize_stains: <NEW_LINE> <INDENT> samples = samples.map(lambda sample: normalize_staining(sample)) <NEW_LINE> <DEDENT> samples = samples.map(lambda sample: flatten_sample(sample)) <NEW_LINE> if training: <NEW_LINE> <INDENT> labels_df = get_labels_df(folder) <NEW_LINE> samples_with_labels = (samples.map( lambda tup: (int(tup[0]), int(labels_df.at[tup[0],"tumor_score"]), float(labels_df.at[tup[0],"molecular_score"]), Vectors.dense(tup[1])))) <NEW_LINE> df = samples_with_labels.toDF(["slide_num", "tumor_score", "molecular_score", "sample"]) <NEW_LINE> df = df.select(df.slide_num.astype("int"), df.tumor_score.astype("int"), df.molecular_score, df["sample"]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> df = samples.toDF(["slide_num", "sample"]) <NEW_LINE> df = df.select(df.slide_num.astype("int"), df["sample"]) <NEW_LINE> <DEDENT> return df
Preprocess a set of whole-slide images. Preprocess a set of whole-slide images as follows: 1. Tile the slides into tiles of size (tile_size, tile_size, 3). 2. Filter out tiles that do not contain sufficient tissue. 3. Cut the remaining tiles into samples of size (sample_size, sample_size, ch), where `ch` is 1 if `grayscale` is true, or 3 otherwise. Args: spark: SparkSession. slide_nums: List of whole-slide numbers to process. folder: Local directory in which the slides folder and ground truth file are stored, as a string. This should contain a `training_image_data` folder with images in the format `TUPAC-TR-###.svs`, as well as a `training_ground_truth.csv` file containing the ground truth "tumor_score" and "molecular_score" labels for each slide. Alternatively, the folder should contain a `testing_image_data` folder with images in the format `TUPAC-TE-###.svs`. training: Boolean for training or testing datasets. tile_size: The width and height of a square tile to be generated. overlap: Number of pixels by which to overlap the tiles. tissue_threshold: Tissue percentage threshold for filtering. sample_size: The new width and height of the square samples to be generated. grayscale: Whether or not to generate grayscale samples, rather than RGB. normalize_stains: Whether or not to apply stain normalization. num_partitions: Number of partitions to use during processing. Returns: A Spark DataFrame in which each row contains the slide number, tumor score, molecular score, and the sample stretched out into a Vector.
625941b8b545ff76a8913c70
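A minimal sketch of the tissue-threshold filter that step 2 above relies on, assuming tiles arrive as (H, W, 3) uint8 RGB arrays; the near-white background heuristic and the tissue_percentage name are illustrative stand-ins for the module's keep_tile helper, not its actual implementation.

import numpy as np

def tissue_percentage(tile, background_cutoff=200):
    # A pixel counts as tissue if any channel is darker than the cutoff;
    # slide background scans as near-white (all channels close to 255).
    is_tissue = np.any(tile < background_cutoff, axis=-1)
    return is_tissue.mean()

tile = np.full((1024, 1024, 3), 255, dtype=np.uint8)  # all background
tile[:256, :, :] = 120                                # one quarter tissue
print(tissue_percentage(tile))         # 0.25
print(tissue_percentage(tile) >= 0.9)  # False: this tile would be dropped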
def ExportGetFatherComponent(self, ObjectId, FatherComponentId): <NEW_LINE> <INDENT> pass
ExportGetFatherComponent(self: DelegateFake, ObjectId: int, FatherComponentId: int) -> (int, int)
625941b8d164cc6175782b9f
def __connections(user, content_type_id, current_level, target_level): <NEW_LINE> <INDENT> target_position = target_level <NEW_LINE> target_company = 1 <NEW_LINE> shortlisted_profiles = UserProfile.objects.get_all_profiles_by(content_type_id, target_position) <NEW_LINE> aggregated_data = __aggregate_statistics(shortlisted_profiles) <NEW_LINE> user_exp = user.userprofiles.get_default_profile().total_experience() <NEW_LINE> result="" <NEW_LINE> if user_exp < aggregated_data.get("avg_exp"): <NEW_LINE> <INDENT> result= "You need approximately " + str(aggregated_data["avg_exp"]) + " of experience." <NEW_LINE> <DEDENT> for skill, count in aggregated_data.get("skill_set").iteritems(): <NEW_LINE> <INDENT> result+=" Required Skill: " + skill <NEW_LINE> <DEDENT> return result
1. identify target organization 2. identify target position 3. Identify list of people in target position 3.1 Gather profile data 3.1.1 Number of years of work experience 3.1.2 Age 3.1.3 Highest degree 3.2 Aggregate data 4 return data
625941b89b70327d1c4e0c25
def __init__(self): <NEW_LINE> <INDENT> self.identifier = 0 <NEW_LINE> self.name = '' <NEW_LINE> self.level = 10 <NEW_LINE> self.created_at = '' <NEW_LINE> self.avatar_id = '' <NEW_LINE> self.post_count = 0 <NEW_LINE> self.del_post_count = 0 <NEW_LINE> self.edit_count = 0 <NEW_LINE> self.favorite_count = 0 <NEW_LINE> self.wiki_count = 0 <NEW_LINE> self.forum_post_count = 0 <NEW_LINE> self.note_count = 0 <NEW_LINE> self.edit_count = 0 <NEW_LINE> self.comment_count = 0 <NEW_LINE> self.blip_count = 0 <NEW_LINE> self.set_count = 0 <NEW_LINE> self.pool_update_count = 0 <NEW_LINE> self.pos_user_records = 0 <NEW_LINE> self.neutral_user_records = 0 <NEW_LINE> self.neg_user_records = 0 <NEW_LINE> self.artist_tag = []
Initialize properties
625941b80a366e3fb873e668
def update_multi_precision(self, index, weight, grad, state): <NEW_LINE> <INDENT> if self.multi_precision and weight.dtype == numpy.float16: <NEW_LINE> <INDENT> weight_master_copy = state[0] <NEW_LINE> original_state = state[1] <NEW_LINE> grad32 = grad.astype(numpy.float32) <NEW_LINE> self.update(index, weight_master_copy, grad32, original_state) <NEW_LINE> cast(weight_master_copy, dtype=weight.dtype, out=weight) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.update(index, weight, grad, state)
Updates the given parameter using the corresponding gradient and state. Mixed precision version. Parameters ---------- index : int The unique index of the parameter into the individual learning rates and weight decays. Learning rates and weight decay may be set via `set_lr_mult()` and `set_wd_mult()`, respectively. weight : NDArray The parameter to be updated. grad : NDArray The gradient of the objective with respect to this parameter. state : any obj The state returned by `create_state()`.
625941b80fa83653e4656e0e
def _render_grid_horlines(self, w, h, x2, y2, w2, h2, min_div_hpix=50.): <NEW_LINE> <INDENT> assert h > 1. <NEW_LINE> ch = self.channels[0] <NEW_LINE> if abs(h2) < 0.000001: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> v1 = y2 <NEW_LINE> v2 = y2 + h2 <NEW_LINE> if v2 < v1: <NEW_LINE> <INDENT> v1, v2 = v2, v1 <NEW_LINE> <DEDENT> volts_per_min_div = (v2 - v1) / (h - 1.) * min_div_hpix <NEW_LINE> pixels_per_volt = (h - 1.) / (v2 - v1) <NEW_LINE> v_step = math.pow(2, math.ceil(math.log(volts_per_min_div, 2))) <NEW_LINE> v_begin = math.floor(v1 / v_step) * v_step + v_step <NEW_LINE> px_begin = self._sample_to_pixel(v_begin, y2, h2, h) <NEW_LINE> px_end = self._sample_to_pixel(v2, y2, h2, h) <NEW_LINE> sign = 1 if px_end > px_begin else -1 <NEW_LINE> px_step = pixels_per_volt * v_step * sign <NEW_LINE> gl.glBegin(gl.GL_LINES) <NEW_LINE> px = px_begin <NEW_LINE> while px * sign < px_end: <NEW_LINE> <INDENT> gl.glVertex3f( 0., px, 0. ) <NEW_LINE> gl.glVertex3f( w, px, 0. ) <NEW_LINE> px += px_step <NEW_LINE> <DEDENT> gl.glEnd()
min_div_hpix : minimum division height (grid line distance) in pixels
625941b899fddb7c1c9de1e4
def testApproachSpeedDoNotExceed(self): <NEW_LINE> <INDENT> self.controller.approachSpeed = 3.99 <NEW_LINE> self.controller._approach(0.5) <NEW_LINE> self.assertEqual(self.controller.approachSpeed,4)
Test that the incremented speed doesn't exceed approachSpeed
625941b8e64d504609d74692
def __init__(self, config): <NEW_LINE> <INDENT> self.contextualStructure = config["contextualStructure"] <NEW_LINE> self.contextualDependencies = {} <NEW_LINE> self.contextualGenerationProcess = {} <NEW_LINE> if "contextualDependencies" in config: <NEW_LINE> <INDENT> self.contextualDependencies = config["contextualDependencies"] <NEW_LINE> <DEDENT> if "contextualGenerationProcess" in config: <NEW_LINE> <INDENT> self.contextualGenerationProcess = config["contextualGenerationProcess"] <NEW_LINE> for key in self.contextualGenerationProcess: <NEW_LINE> <INDENT> if not isinstance(self.contextualGenerationProcess[key], dict): <NEW_LINE> <INDENT> self.contextualGenerationProcess[key] = self.__getNormalizedDistribution(self.contextualGenerationProcess[key]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for innerKey in self.contextualGenerationProcess[key]: <NEW_LINE> <INDENT> self.contextualGenerationProcess[key][innerKey] = self.__getNormalizedDistribution(self.contextualGenerationProcess[key][innerKey])
config is json and must declare: contextualStructure: iterable where each entry is a contextual variable. entries > 0 indicate a categorical variable with that number of options. entries = -1 indicate a continuous variable. May also declare: contextualDependencies, contextualGenerationProcess which impact generateContext. contextualDependencies: dictionary with string keys corresponding to any variables that are dependent on the value of other variables. We require that the original structure be partially ordered such that a contextual variable that is dependent on other variables comes later in the structure than those it's dependent on. (So variable 0 is always lacking dependencies on other variables.) contextualGenerationProcess: dictionary with string keys, where if a variable appears as a key, the value specifies the distribution for generating context. (currently only categorical distributions are allowed.) If the variable is not specified, its value is generated uniformly at random. If the variable is dependent on another variable, then the keys are of the form "i-v" where i is the index of the variable and v is the value. If it's dependent on multiple variables, these are comma separated (e.g., "i-v1,j-v2").
625941b876d4e153a657e981
def flush(self): <NEW_LINE> <INDENT> if self._flush_passthru: <NEW_LINE> <INDENT> return bytearray() <NEW_LINE> <DEDENT> return self._comp.flush()
Flush the data internally if required.
625941b87cff6e4e811177d7
def get_libris_edition(raw): <NEW_LINE> <INDENT> control_number = raw["controlNumber"] <NEW_LINE> if is_libris_edition_id(control_number): <NEW_LINE> <INDENT> return control_number <NEW_LINE> <DEDENT> return None
Extract old-format Libris ID. @param raw: json object of a Libris edition @type raw: dictionary
625941b8046cf37aa974cb9c
def _calculate_eac(self): <NEW_LINE> <INDENT> self.workpackage.eac = self.workpackage.etc_costs + self.workpackage.ac_costs
Calculates the EAC
625941b8377c676e91271ffc
def _format_data(self, data): <NEW_LINE> <INDENT> return [spectrum for spectrum in sorted(data if isinstance(data, (list, tuple)) else [data], key=lambda x: x.disp[0]) if np.any(np.isfinite(spectrum.flux))]
Sort the data from blue wavelengths to red, and ignore any spectra that have entirely non-finite or negative fluxes.
625941b863d6d428bbe44341
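A self-contained sketch of the sorting and filtering logic above; the namedtuple here is a stand-in for the real spectrum class, which is only assumed to expose .disp (wavelength array) and .flux.

from collections import namedtuple
import numpy as np

Spectrum = namedtuple("Spectrum", ["disp", "flux"])

red  = Spectrum(np.array([600., 700.]), np.array([1.0, 2.0]))
blue = Spectrum(np.array([400., 500.]), np.array([0.5, 0.7]))
bad  = Spectrum(np.array([300., 350.]), np.array([np.nan, np.nan]))

ordered = [s for s in sorted([red, blue, bad], key=lambda s: s.disp[0])
           if np.any(np.isfinite(s.flux))]
print([s.disp[0] for s in ordered])  # [400.0, 600.0]: blue first, all-NaN dropped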
def updateKeyCombinations(self): <NEW_LINE> <INDENT> label = self.grabKeyLabel <NEW_LINE> label.setText('Shortcut to paste the resized image: ') <NEW_LINE> [label.setText(label.text() + k + '+') for k, v in Setup.config['keys'].items() if k != 'Extra' and v == True] <NEW_LINE> if Setup.config['keys'].get('Extra'): <NEW_LINE> <INDENT> label.setText(label.text() + Setup.config['keys'].get('Extra')) <NEW_LINE> <DEDENT> logger.debug('shortcut is updated: {}'.format(Setup.config['keys']))
update the key combination label in the settings window according to Setup.config
625941b8462c4b4f79d1d522
def get_direction(position, next_position): <NEW_LINE> <INDENT> x, y = position <NEW_LINE> nx, ny = next_position <NEW_LINE> if x == nx: <NEW_LINE> <INDENT> if y < ny: <NEW_LINE> <INDENT> return constants.Action.Right <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return constants.Action.Left <NEW_LINE> <DEDENT> <DEDENT> elif y == ny: <NEW_LINE> <INDENT> if x < nx: <NEW_LINE> <INDENT> return constants.Action.Down <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return constants.Action.Up <NEW_LINE> <DEDENT> <DEDENT> raise constants.InvalidAction("We did not receive a valid position transition.")
Get the direction such that position --> next_position. We assume that they are adjacent.
625941b8baa26c4b54cb0f75
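A self-contained variant of get_direction for experimentation; the Action enum below stands in for the project's constants module (an assumption), and the coordinate convention follows the code above: within a column (x == nx), larger y means Right, and within a row (y == ny), larger x means Down.

from enum import Enum

class Action(Enum):
    Up = 0
    Down = 1
    Left = 2
    Right = 3

def get_direction(position, next_position):
    x, y = position
    nx, ny = next_position
    if x == nx:
        return Action.Right if y < ny else Action.Left
    if y == ny:
        return Action.Down if x < nx else Action.Up
    raise ValueError("positions are not adjacent")

print(get_direction((0, 0), (0, 1)))  # Action.Right
print(get_direction((0, 0), (1, 0)))  # Action.Down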
def show(*args): <NEW_LINE> <INDENT> context = XSCRIPTCONTEXT.getComponentContext() <NEW_LINE> shell = context.ServiceManager.createInstanceWithContext( "com.sun.star.system.SystemShellExecute", context ) <NEW_LINE> fileAccess = context.ServiceManager.createInstance( "com.sun.star.ucb.SimpleFileAccess" ) <NEW_LINE> tempFile = tempfile.NamedTemporaryFile( delete=False, prefix="help-", suffix=".html" ) <NEW_LINE> tempFileUrl = uno.systemPathToFileUrl(tempFile.name) <NEW_LINE> helpFileUrl = "vnd.sun.star.tdoc:/{}/Scripts/python/doc/help.html".format( XSCRIPTCONTEXT.getDocument().RuntimeUID ) <NEW_LINE> tempFile.close() <NEW_LINE> fileAccess.copy(helpFileUrl, tempFileUrl) <NEW_LINE> shell.execute( "file://" + tempFile.name, "", 0 )
Show the help manual.
625941b882261d6c526ab2f5
def copy(self): <NEW_LINE> <INDENT> attrib = ItemAttribute() <NEW_LINE> for k, v in self._attribs(): <NEW_LINE> <INDENT> setattr(attrib, k, v.copy() if hasattr(v, 'copy') else v) <NEW_LINE> <DEDENT> return attrib
Performs a deep copy of the ItemAttribute, including all values of any dynamically added attributes. :return:
625941b830c21e258bdfa2ef
def DbGetDeviceExportedList(self, argin): <NEW_LINE> <INDENT> self.debug_stream("In " + self.get_name() + ".DbGetDeviceExportedList()") <NEW_LINE> argout = [''] <NEW_LINE> return argout
Get a list of exported devices whose names satisfy the filter (wildcard is '*') :param argin: filter :type: PyTango.DevString :return: list of exported devices :rtype: PyTango.DevVarStringArray
625941b8a219f33f346287c6
def GetOutputHistogram(self): <NEW_LINE> <INDENT> return _itkHistogramMatchingImageFilterPython.itkHistogramMatchingImageFilterID3ID3_GetOutputHistogram(self)
GetOutputHistogram(self) -> itkHistogramD
625941b8498bea3a759b9903
def mta013(self): <NEW_LINE> <INDENT> fromaddr = self.testaddr <NEW_LINE> toaddr = self.pconfig.get( 'noeaito') <NEW_LINE> pmsg = self.domsg('eaisubj', From=fromaddr, To=toaddr, submit=True, eaiflag=True, getrmt=True, maxcheck=3) <NEW_LINE> if pmsg: <NEW_LINE> <INDENT> return ('Fail', "Message sent anyway") <NEW_LINE> <DEDENT> pmsg = self.getmori(maxcheck=1, maxage=180) <NEW_LINE> if pmsg: <NEW_LINE> <INDENT> (dhdrs, lhdrs, body) = self.parsemsg(pmsg) <NEW_LINE> dl = tuple(l for l in body if 'Diagnostic' in l) <NEW_LINE> if dl: <NEW_LINE> <INDENT> bm = dl[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bm = dhdrs['subject'][0] <NEW_LINE> <DEDENT> return ('Pass', "Test message not received, likely bounce "+bm) <NEW_LINE> <DEDENT> return ('Pass', "Test message not received")
Check that an EAI message to a non-EAI server fails: submit locally, check on the home server, then check for a local bounce.
625941b85f7d997b871748ed
def negotiate_session_async( self, session_compression: str, session_encryption: str ) -> Awaitable[Session]: <NEW_LINE> <INDENT> self.ensure_state([SessionState.NEGOTIATING], True) <NEW_LINE> loop = get_running_loop() <NEW_LINE> future = loop.create_future() <NEW_LINE> self.on_session_authenticating = future.set_result <NEW_LINE> self.__on_session_failed = future.set_exception <NEW_LINE> session = Session( SessionState.NEGOTIATING, encryption=session_encryption, compression=session_compression ) <NEW_LINE> session.id = self.session_id <NEW_LINE> self.send_session(session) <NEW_LINE> return future
Handle session in negotiating state. Args: session_compression (str): session compression type session_encryption (str): session encryption type Returns: Future: A negotiated Session
625941b80fa83653e4656e0f
def _calculate_bilinear_cost( op, coeff, num_alive_inputs, num_alive_outputs, batch_size): <NEW_LINE> <INDENT> if op.type == 'DepthwiseConv2dNative': <NEW_LINE> <INDENT> return batch_size * coeff * num_alive_outputs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return batch_size * coeff * num_alive_inputs * num_alive_outputs
Calculates bilinear cost for an op. Args: op: A tf.Operation. coeff: A float coefficient for the bilinear function. num_alive_inputs: Scalar Tensor indicating how many input channels are considered alive. num_alive_outputs: Scalar Tensor indicating how many output channels are considered alive. batch_size: Integer batch size to calculate cost/loss for. Returns: Tensor with the cost of the op.
625941b88e71fb1e9831d5ff
def set_bit_rate(self, rate): <NEW_LINE> <INDENT> if rate == 12: <NEW_LINE> <INDENT> self.__adc1_conf = self.__adc1_conf & ~(1 << 2) & ~(1 << 3) <NEW_LINE> self.__adc2_conf = self.__adc2_conf & ~(1 << 2) & ~(1 << 3) <NEW_LINE> self.__bitrate = 12 <NEW_LINE> self.__lsb = 0.0005 <NEW_LINE> <DEDENT> elif rate == 14: <NEW_LINE> <INDENT> self.__adc1_conf = self.__adc1_conf & ~(1 << 3) | (1 << 2) <NEW_LINE> self.__adc2_conf = self.__adc2_conf & ~(1 << 3) | (1 << 2) <NEW_LINE> self.__bitrate = 14 <NEW_LINE> self.__lsb = 0.000125 <NEW_LINE> <DEDENT> elif rate == 16: <NEW_LINE> <INDENT> self.__adc1_conf = self.__adc1_conf & ~(1 << 2) | (1 << 3) <NEW_LINE> self.__adc2_conf = self.__adc2_conf & ~(1 << 2) | (1 << 3) <NEW_LINE> self.__bitrate = 16 <NEW_LINE> self.__lsb = 0.00003125 <NEW_LINE> <DEDENT> elif rate == 18: <NEW_LINE> <INDENT> self.__adc1_conf = self.__adc1_conf | (1 << 2) | (1 << 3) <NEW_LINE> self.__adc2_conf = self.__adc2_conf | (1 << 2) | (1 << 3) <NEW_LINE> self.__bitrate = 18 <NEW_LINE> self.__lsb = 0.0000078125 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('set_bit_rate: rate out of range') <NEW_LINE> <DEDENT> self.__bus.write_byte(self.__adc1_address, self.__adc1_conf) <NEW_LINE> self.__bus.write_byte(self.__adc2_address, self.__adc2_conf) <NEW_LINE> return
Sample rate and resolution: 12 = 12 bit (240 SPS max), 14 = 14 bit (60 SPS max), 16 = 16 bit (15 SPS max), 18 = 18 bit (3.75 SPS max)
625941b8a8370b77170526f2
def diff(a, b, segmenter=None): <NEW_LINE> <INDENT> a, b = list(a), list(b) <NEW_LINE> segmenter = segmenter or SEGMENTER <NEW_LINE> a_segments = segmenter.segment(a) <NEW_LINE> b_segments = segmenter.segment(b) <NEW_LINE> return diff_segments(a_segments, b_segments)
Performs a diff comparison between two sequences of tokens (`a` and `b`) using `segmenter` to cluster and match :class:`deltas.MatchableSegment`. :Example: >>> from deltas import segment_matcher, text_split >>> >>> a = text_split.tokenize("This is some text. This is some other text.") >>> b = text_split.tokenize("This is some other text. This is some text.") >>> operations = segment_matcher.diff(a, b) >>> >>> for op in operations: ... print(op.name, repr(''.join(a[op.a1:op.a2])), ... repr(''.join(b[op.b1:op.b2]))) ... equal 'This is some other text.' 'This is some other text.' insert '' ' ' equal 'This is some text.' 'This is some text.' delete ' ' '' :Parameters: a : `list`(:class:`deltas.tokenizers.Token`) Initial sequence b : `list`(:class:`deltas.tokenizers.Token`) Changed sequence segmenter : :class:`deltas.Segmenter` A segmenter to use on the tokens. :Returns: An `iterable` of operations.
625941b8283ffb24f3c5575e
def get_crosslingual_wordsim_scores(lang1, word2id1, embeddings1, lang2, word2id2, embeddings2, lower=True): <NEW_LINE> <INDENT> if lang1 > lang2: <NEW_LINE> <INDENT> return get_crosslingual_wordsim_scores(lang2, word2id2, embeddings2, lang1, word2id1, embeddings1, lower) <NEW_LINE> <DEDENT> dirpath = os.path.join(SEMEVAL17_EVAL_PATH, '%s-%s' % (lang1, lang2)) <NEW_LINE> if not os.path.isdir(dirpath): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> scores = {} <NEW_LINE> separator = "=" * (30 + 1 + 10 + 1 + 13 + 1 + 12) <NEW_LINE> pattern = "%30s %10s %13s %12s" <NEW_LINE> logger.info(separator) <NEW_LINE> logger.info(pattern % ("Dataset", "Found", "Not found", "Rho")) <NEW_LINE> logger.info(separator) <NEW_LINE> for filename in list(os.listdir(dirpath)): <NEW_LINE> <INDENT> if 'SEMEVAL17' not in filename: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> filepath = os.path.join(dirpath, filename) <NEW_LINE> assert len(filename.split('_')) >= 2 <NEW_LINE> split = filename.split('_')[0].split('-') <NEW_LINE> assert len(split) == 2 <NEW_LINE> if split[0] == lang1.upper() and split[1] == lang2.upper(): <NEW_LINE> <INDENT> coeff, found, not_found = get_spearman_rho( word2id1, embeddings1, filepath, lower, word2id2, embeddings2 ) <NEW_LINE> <DEDENT> elif split[0] == lang2.upper() and split[1] == lang1.upper(): <NEW_LINE> <INDENT> coeff, found, not_found = get_spearman_rho( word2id2, embeddings2, filepath, lower, word2id1, embeddings1 ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Unexpected parse: %s' % filename) <NEW_LINE> <DEDENT> logger.info(pattern % (filename[:-4], str(found), str(not_found), "%.4f" % coeff)) <NEW_LINE> scores[filename[:-4]] = coeff <NEW_LINE> <DEDENT> if not scores: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> logger.info(separator) <NEW_LINE> return scores
Return cross-lingual word similarity scores.
625941b88a43f66fc4b53ebb
def check_jmespath_match(parsed_response, query, expected=None): <NEW_LINE> <INDENT> actual = jmespath.search(query, parsed_response) <NEW_LINE> msg = "JMES path '{}' not found in response".format(query) <NEW_LINE> if actual is None: <NEW_LINE> <INDENT> raise exceptions.JMESError(msg) <NEW_LINE> <DEDENT> if expected is not None: <NEW_LINE> <INDENT> check_keys_match_recursive(expected, actual, [], True) <NEW_LINE> <DEDENT> elif not actual and not (actual == expected): <NEW_LINE> <INDENT> raise exceptions.JMESError(msg) <NEW_LINE> <DEDENT> return actual
Check that the JMES path given in 'query' is present in the given response Args: parsed_response (dict, list): Response list or dict query (str): JMES query expected (str, optional): Possible value to match against. If None, 'query' will just check that _something_ is present
625941b894891a1f4081b8fa
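For context, this is how the underlying jmespath query behaves (requires the jmespath package); a query that resolves to nothing returns None, which is the case check_jmespath_match turns into a JMESError:

import jmespath

resp = {"users": [{"name": "ada"}, {"name": "bob"}]}
print(jmespath.search("users[0].name", resp))  # 'ada'
print(jmespath.search("users[5].name", resp))  # None -> raises JMESError above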
def email_results(message, recipients): <NEW_LINE> <INDENT> if not settings.FR_EMAIL_ENABLED: <NEW_LINE> <INDENT> print("Email not configured.") <NEW_LINE> return <NEW_LINE> <DEDENT> email_from = "FireRoad <{}>".format(settings.EMAIL_HOST_USER) <NEW_LINE> send_mail("Daily update", message, email_from, recipients)
Sends an email to the given recipients with the given message. Prints to the console if email is not set up.
625941b8507cdc57c6306b25
def gps_raw_encode(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg): <NEW_LINE> <INDENT> msg = MAVLink_gps_raw_message(usec, fix_type, lat, lon, alt, eph, epv, v, hdg) <NEW_LINE> msg.pack(self) <NEW_LINE> return msg
The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the system, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right-handed, Z-axis up (GPS frame) usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t) lat : Latitude in degrees (float) lon : Longitude in degrees (float) alt : Altitude in meters (float) eph : GPS HDOP (float) epv : GPS VDOP (float) v : GPS ground speed (float) hdg : Compass heading in degrees, 0..360 degrees (float)
625941b8656771135c3eb6c4
def getblockheader(self, block_hash, verbose=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> block_hash = b2lx(block_hash) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise TypeError('%s.getblockheader(): block_hash must be bytes; got %r instance' % (self.__class__.__name__, block_hash.__class__)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> r = self._call('getblockheader', block_hash, verbose) <NEW_LINE> <DEDENT> except InvalidAddressOrKeyError as ex: <NEW_LINE> <INDENT> raise IndexError('%s.getblockheader(): %s (%d)' % (self.__class__.__name__, ex.error['message'], ex.error['code'])) <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> nextblockhash = None <NEW_LINE> if 'nextblockhash' in r: <NEW_LINE> <INDENT> nextblockhash = lx(r['nextblockhash']) <NEW_LINE> <DEDENT> return {'confirmations':r['confirmations'], 'height':r['height'], 'mediantime':r['mediantime'], 'nextblockhash':nextblockhash, 'chainwork':x(r['chainwork'])} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CBlockHeader.deserialize(unhexlify_str(r))
Get block header <block_hash> verbose - If true a dict is returned with the values returned by getblockheader that are not in the block header itself (height, nextblockhash, etc.) Raises IndexError if block_hash is not valid.
625941b88e7ae83300e4ae1d
def extend(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) > 1: <NEW_LINE> <INDENT> raise TypeError('expected at most 1 arguments, got %d' % len(args)) <NEW_LINE> <DEDENT> iterable = args[0] if args else None <NEW_LINE> if iterable: <NEW_LINE> <INDENT> if isinstance(iterable, Mapping) or hasattr(iterable, 'items'): <NEW_LINE> <INDENT> for key, value in iterable.items(): <NEW_LINE> <INDENT> self.append(key, value) <NEW_LINE> <DEDENT> <DEDENT> elif hasattr(iterable, 'keys'): <NEW_LINE> <INDENT> for key in iterable.keys(): <NEW_LINE> <INDENT> self.append(key, iterable[key]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for key, value in iterable: <NEW_LINE> <INDENT> self.append(key, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for key, value in kwargs.items(): <NEW_LINE> <INDENT> self.append(key, value)
Add key/value pairs from an iterable or from keyword arguments.
625941b8b5575c28eb68de4f
def _sell_limit(self, amount, price): <NEW_LINE> <INDENT> res = self.trade_client.place_order( str(amount), str(price), 'sell', 'exchange limit', symbol=self.pair_code) <NEW_LINE> return res['order_id']
Create a sell limit order
625941b8d10714528d5ffb31
@pytest.mark.skip(reason='Test is flaky.') <NEW_LINE> @pytest.mark.jira('ASC-891') <NEW_LINE> @pytest.mark.test_id('747b5458-aafb-11e8-bfa2-0025227c8120') <NEW_LINE> def test_skip(): <NEW_LINE> <INDENT> assert True is False
Verify that a test can be skipped.
625941b876e4537e8c3514c9
def get_attribute(self, attribute_name: str) -> str: <NEW_LINE> <INDENT> if hasattr(self, '__' + attribute_name): <NEW_LINE> <INDENT> return getattr(self, '__' + attribute_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Get an attribute, e.g. ClassAsClass.get_attribute('summary')
625941b8de87d2750b85fbe0
def get_invalid_double( self, custom_headers={}, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/number/invaliddouble' <NEW_LINE> query_parameters = {} <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> raise models.ErrorException(self._deserialize, response) <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('float', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Get invalid double Number value :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :rtype: float :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true
625941b8d18da76e23532324
def compute_numerial_gradient(cost_func, theta): <NEW_LINE> <INDENT> numgrad = np.zeros(theta.size) <NEW_LINE> perturb = np.zeros(theta.size) <NEW_LINE> e = 1e-4 <NEW_LINE> for p in range(theta.size): <NEW_LINE> <INDENT> perturb[p] = e <NEW_LINE> loss1, grad1 = cost_func(theta-perturb) <NEW_LINE> loss2, grad2 = cost_func(theta+perturb) <NEW_LINE> numgrad[p] = (loss2-loss1)/(2*e) <NEW_LINE> perturb[p] = 0 <NEW_LINE> <DEDENT> return numgrad
numerically compute gradients
625941b84527f215b584c2ad
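A quick sanity check for the finite-difference routine above, assuming it is in scope: for f(theta) = sum(theta**2) the analytic gradient is 2*theta, and for a quadratic the central difference matches it to floating-point precision.

import numpy as np

def cost_func(theta):
    return np.sum(theta ** 2), 2 * theta  # (loss, analytic gradient)

theta = np.array([1.0, -2.0, 3.0])
numgrad = compute_numerial_gradient(cost_func, theta)
print(np.max(np.abs(numgrad - 2 * theta)))  # ~0: gradients agree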
def lemonade_change(self, bills: List[int]) -> bool: <NEW_LINE> <INDENT> five_money, ten_money = 0, 0 <NEW_LINE> for bill in bills: <NEW_LINE> <INDENT> if bill == 5: <NEW_LINE> <INDENT> five_money += 1 <NEW_LINE> <DEDENT> if bill == 10: <NEW_LINE> <INDENT> if five_money > 0: <NEW_LINE> <INDENT> five_money -= 1 <NEW_LINE> ten_money += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if bill == 20: <NEW_LINE> <INDENT> if five_money > 0 and ten_money > 0: <NEW_LINE> <INDENT> five_money -= 1 <NEW_LINE> ten_money -= 1 <NEW_LINE> <DEDENT> elif five_money > 2: <NEW_LINE> <INDENT> five_money -= 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True
Determine whether correct change can be given for every bill. Args: bills: the list of bills received. Returns: a boolean.
625941b8a17c0f6771cbdea6
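Usage sketch for the method above, assuming it lives on a LeetCode-style Solution class (the class name is an assumption):

sol = Solution()
print(sol.lemonade_change([5, 5, 5, 10, 20]))   # True: change is always available
print(sol.lemonade_change([5, 5, 10, 10, 20]))  # False: cannot change the 20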
def __init__(self, dataset: DiscreteDataset, **kwargs): <NEW_LINE> <INDENT> super(TFBind10Oracle, self).__init__( dataset, is_batched=False, internal_batch_size=1, internal_measurements=1, expect_normalized_y=False, expect_normalized_x=False, expect_logits=False, **kwargs) <NEW_LINE> self.sequence_to_score = dict() <NEW_LINE> self.internal_dataset._disable_transform = True <NEW_LINE> for x, y in self.internal_dataset.iterate_samples(): <NEW_LINE> <INDENT> self.sequence_to_score[tuple(x.tolist())] = y <NEW_LINE> <DEDENT> self.internal_dataset._disable_transform = False
Initialize the ground truth score function f(x) for a model-based optimization problem, which involves loading the parameters of an oracle model and estimating its computational cost Arguments: dataset: DiscreteDataset an instance of a subclass of the DatasetBuilder class which has a set of design values 'x' and prediction values 'y', and defines batching and sampling methods for those attributes
625941b8a79ad161976cbf98
def testCSApiResponseBoolean(self): <NEW_LINE> <INDENT> pass
Test CSApiResponseBoolean
625941b8dd821e528d63affd
def latestKarma(quantity=25): <NEW_LINE> <INDENT> pass
Return the latest karma actions for this person. Return no more than the number given as quantity.
625941b86aa9bd52df036bf4
def task_2(): <NEW_LINE> <INDENT> mylist_42 = [1, 2, 4, 8] <NEW_LINE> print(mylist_42) <NEW_LINE> mylist_42.append(16) <NEW_LINE> print(mylist_42)
2) You have at your disposal a list mylist_42 consisting of four values: 1, 2, 4, 8. Run the command to define the list and print it. Write the command to append the value 16 to this list, then print the list again.
625941b8e8904600ed9f1d7b
def to_intel64(self): <NEW_LINE> <INDENT> intel64.check_ideep_available() <NEW_LINE> d = self.__dict__ <NEW_LINE> for name in self._params: <NEW_LINE> <INDENT> d[name].to_intel64() <NEW_LINE> <DEDENT> for name in self._persistent: <NEW_LINE> <INDENT> value = d[name] <NEW_LINE> if isinstance(value, cuda.ndarray): <NEW_LINE> <INDENT> value = value.get() <NEW_LINE> <DEDENT> if (isinstance(value, numpy.ndarray) and value.ndim in (1, 2, 4)): <NEW_LINE> <INDENT> value = intel64.ideep.array( value, itype=intel64.ideep.wgt_array) <NEW_LINE> <DEDENT> d[name] = value <NEW_LINE> <DEDENT> self._cpu = True <NEW_LINE> self._device_id = None <NEW_LINE> return self
Copies parameter variables and persistent values to CPU.
625941b815fb5d323cde095c
def _stats_veloc_forward(physics): <NEW_LINE> <INDENT> return player.walker.observables.veloc_forward(physics)
Player's forward velocity.
625941b84a966d76dd550e5e
def __iter__(self): <NEW_LINE> <INDENT> for tag, name in self.__tags_to_names.items(): <NEW_LINE> <INDENT> yield (tag, name, self.__tags_to_types[tag], self.__flags[tag])
Iterates over all fields.
625941b8be8e80087fb20aa2
def __init__(self): <NEW_LINE> <INDENT> super(SISR, self).__init__() <NEW_LINE> self.model = nn.Sequential( nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, stride=1, padding=1), nn.ReLU(), nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, stride=1, padding=1), nn.ReLU(), nn.Conv2d(in_channels=32, out_channels=12, kernel_size=3, stride=1, padding=1), nn.ReLU(), nn.PixelShuffle(2) )
Build a small single-image super-resolution CNN: three convolutional layers followed by 2x PixelShuffle upsampling.
625941b8b830903b967e9769
def step_into(self): <NEW_LINE> <INDENT> self.main_window.set_debugging_status(4) <NEW_LINE> self.debugger.step_into()
Issue a step into continuation command on the debugger This gets called when the "Step into" action button is pressed.
625941b84c3428357757c17d
def build_url(self): <NEW_LINE> <INDENT> if self.by == "friendly_string": <NEW_LINE> <INDENT> self.base_url = "https://api.companieshouse.gov.uk/search/companies?q=" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.base_url = "https://api.companieshouse.gov.uk/company/" <NEW_LINE> <DEDENT> self.request_url = self.base_url + self.company_query_string
Creates self.request_url and adds parameters to request_kwargs if self.by is set to "friendly_string". Called when the parent class is instantiated.
625941b81f037a2d8b946051
def tradfri_get_lightbulb(hubip, apiuser, apikey, deviceid): <NEW_LINE> <INDENT> tradfriHub = 'coaps://{}:5684/15001/{}' .format(hubip, deviceid) <NEW_LINE> api = '{} -m get -u "{}" -k "{}" "{}" -B {} 2> /dev/null' .format(coap, apiuser, apikey, tradfriHub, timeout) <NEW_LINE> if os.path.exists(coap): <NEW_LINE> <INDENT> result = os.popen(api) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sys.stderr.write('[-] libcoap: could not find libcoap.\n') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> return json.loads(result.read().strip('\n'))
function for getting tradfri lightbulb information
625941b8ab23a570cc24ffd2
def destroy(self, request, uuid): <NEW_LINE> <INDENT> draft_repo = DraftRepo(SnapshotRepo()) <NEW_LINE> draft_repo.delete(uuid) <NEW_LINE> return super().destroy(request, uuid)
This removes any files that were staged along with the database entry.
625941b8ff9c53063f47c050
def random_element(self, num_bound=None, den_bound=None, *args, **kwds): <NEW_LINE> <INDENT> global ZZ <NEW_LINE> if ZZ is None: <NEW_LINE> <INDENT> from . import integer_ring <NEW_LINE> ZZ = integer_ring.ZZ <NEW_LINE> <DEDENT> if num_bound is None: <NEW_LINE> <INDENT> num = ZZ.random_element(*args, **kwds) <NEW_LINE> den = ZZ.random_element(*args, **kwds) <NEW_LINE> while den == 0: den = ZZ.random_element(*args, **kwds) <NEW_LINE> return self((num, den)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if num_bound == 0: <NEW_LINE> <INDENT> num_bound = 2 <NEW_LINE> <DEDENT> if den_bound is None: <NEW_LINE> <INDENT> den_bound = num_bound <NEW_LINE> if den_bound < 1: <NEW_LINE> <INDENT> den_bound = 2 <NEW_LINE> <DEDENT> <DEDENT> num = ZZ.random_element(-num_bound, num_bound+1, *args, **kwds) <NEW_LINE> den = ZZ.random_element(1, den_bound+1, *args, **kwds) <NEW_LINE> while den == 0: den = ZZ.random_element(1, den_bound+1, *args, **kwds) <NEW_LINE> return self((num,den))
Return a random element of `\QQ`. Elements are constructed by randomly choosing integers for the numerator and denominator, not necessarily coprime. INPUT: - ``num_bound`` -- a positive integer, specifying a bound on the absolute value of the numerator. If absent, no bound is enforced. - ``den_bound`` -- a positive integer, specifying a bound on the value of the denominator. If absent, the bound for the numerator will be reused. Any extra positional or keyword arguments are passed through to :meth:`sage.rings.integer_ring.IntegerRing_class.random_element`. EXAMPLES:: sage: QQ.random_element() -4 sage: QQ.random_element() 0 sage: QQ.random_element() -1/2 In the following example, the resulting numbers range from -5/1 to 5/1 (both inclusive), while the smallest possible positive value is 1/10:: sage: QQ.random_element(5, 10) -2/7 Extra positional or keyword arguments are passed through:: sage: QQ.random_element(distribution='1/n') 0 sage: QQ.random_element(distribution='1/n') -1
625941b8a4f1c619b28afe94
def get_name() -> str: <NEW_LINE> <INDENT> global used_names <NEW_LINE> name = names.get_full_name() <NEW_LINE> while name in used_names: <NEW_LINE> <INDENT> name = names.get_full_name() <NEW_LINE> <DEDENT> used_names.append(name) <NEW_LINE> return name
Used to generate a full name that has not been previously used
625941b8507cdc57c6306b26
def InstallImportant(self): <NEW_LINE> <INDENT> self.whitelist = [entry for entry in self.states if not self.states[entry]] <NEW_LINE> if not self.setup['file']: <NEW_LINE> <INDENT> if self.setup['decision'] == 'whitelist': <NEW_LINE> <INDENT> dwl = self.setup['decision_list'] <NEW_LINE> w_to_rem = [e for e in self.whitelist if not matches_white_list(e, dwl)] <NEW_LINE> if w_to_rem: <NEW_LINE> <INDENT> self.logger.info("In whitelist mode: suppressing installation of:") <NEW_LINE> self.logger.info(["%s:%s" % (e.tag, e.get('name')) for e in w_to_rem]) <NEW_LINE> self.whitelist = [x for x in self.whitelist if x not in w_to_rem] <NEW_LINE> <DEDENT> <DEDENT> elif self.setup['decision'] == 'blacklist': <NEW_LINE> <INDENT> b_to_rem = [e for e in self.whitelist if not passes_black_list(e, self.setup['decision_list'])] <NEW_LINE> if b_to_rem: <NEW_LINE> <INDENT> self.logger.info("In blacklist mode: suppressing installation of:") <NEW_LINE> self.logger.info(["%s:%s" % (e.tag, e.get('name')) for e in b_to_rem]) <NEW_LINE> self.whitelist = [x for x in self.whitelist if x not in b_to_rem] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not self.dryrun: <NEW_LINE> <INDENT> for cfile in self.config.findall(".//Path"): <NEW_LINE> <INDENT> if (cfile.get('name') not in self.__important__ or cfile.get('type') != 'file' or cfile not in self.whitelist): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> parent = cfile.getparent() <NEW_LINE> if ((parent.tag == "Bundle" and ((self.setup['bundle'] and parent.get("name") not in self.setup['bundle']) or (self.setup['skipbundle'] and parent.get("name") in self.setup['skipbundle']))) or (parent.tag == "Independent" and (self.setup['bundle'] or self.setup['skipindep']))): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> tl = [t for t in self.tools if t.handlesEntry(cfile) and t.canVerify(cfile)] <NEW_LINE> if tl: <NEW_LINE> <INDENT> if self.setup['interactive'] and not promptFilter("Install %s: %s? (y/N):", [cfile]): <NEW_LINE> <INDENT> self.whitelist.remove(cfile) <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.states[cfile] = tl[0].InstallPath(cfile) <NEW_LINE> if self.states[cfile]: <NEW_LINE> <INDENT> tl[0].modified.append(cfile) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.error("Unexpected tool failure", exc_info=1) <NEW_LINE> <DEDENT> cfile.set('qtext', '') <NEW_LINE> if tl[0].VerifyPath(cfile, []): <NEW_LINE> <INDENT> self.whitelist.remove(cfile)
Install important entries We also process the decision mode stuff here because we want to prevent non-whitelisted/blacklisted 'important' entries from being installed prior to determining the decision mode on the client.
625941b891f36d47f21ac349
def __init__(self, other_block=-1, other_face=-1, orientation=0, filename="udf.lua", is_wall=0, sets_conv_flux=0, sets_visc_flux=0, reorient_vector_quantities=False, Rmatrix=[1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0], label=""): <NEW_LINE> <INDENT> BoundaryCondition.__init__(self, type_of_BC=ADJACENT_PLUS_UDF, other_block=other_block, other_face=other_face, orientation=orientation, filename=filename, is_wall=is_wall, sets_conv_flux=sets_conv_flux, sets_visc_flux=sets_visc_flux, reorient_vector_quantities=reorient_vector_quantities, Rmatrix=Rmatrix, label=label) <NEW_LINE> return
Construct a connecting boundary condition that also has some user-defined behaviour. :param other_block: index to an adjacent block, if any. A value of -1 will indicate that there is no adjacent block. :param other_face: index of the adjacent face of the other block, if any. :param orientation: for 3D connections the other block face can have one of 4 rotational orientations. :param filename: Name of the file containing the Lua functions. :param is_wall: Flag to indicate that various parts of the simulation code should treat this boundary as a solid wall. :param sets_conv_flux: For this boundary, the fluxes are computed directly. Typically, this relates to using a user-supplied Lua script which provides a convective_flux() function. This pretty much ignores the ghost-cell data, however, it does not relieve the user of supplying a suitable function for setting that data. :param sets_visc_flux: As for sets_conv_flux except that this relates to setting the viscous component of flux due to the effect of the boundary. :param reorient_vector_quantities: for exchange of vector quantities between adjacent boundaries :param Rmatrix: the 9 elements of the rotation matrix :param label: A string that may be used to assist in identifying the boundary in the post-processing phase of a simulation.
625941b87d43ff24873a2af6
def calculate_best_confidence(choices, metadata): <NEW_LINE> <INDENT> best_confidence = 0 <NEW_LINE> for path, line in choices: <NEW_LINE> <INDENT> confidence = calculate_confidence(path, line, metadata) <NEW_LINE> best_confidence = max(confidence, best_confidence) <NEW_LINE> <DEDENT> return best_confidence
:type choices: tuple[tuple[str, int]] :type metadata: Metadata :rtype: int
625941b8d10714528d5ffb32
def test_doc_equality(self): <NEW_LINE> <INDENT> text = "My name is Inigo Montoya." <NEW_LINE> doc1 = API.annotate(text) <NEW_LINE> doc2 = API.annotate(text) <NEW_LINE> self.assertEqual(doc1, doc2, "two .annotate calls on same text did not produce equivalent Documents") <NEW_LINE> self.assertEqual(doc1, Document.load_from_JSON(json.loads(doc2.to_JSON())), "loading JSON dumped from one Document should produce an equivalent Document")
Two calls to API.annotate using the same text should produce equivalent Documents
625941b8627d3e7fe0d68ca1
def test_Square(self): <NEW_LINE> <INDENT> s1 = Square(1) <NEW_LINE> self.assertTrue(isinstance(s1, Base))
Check that a Square is an instance of Base.
625941b8baa26c4b54cb0f76
def define_xml_path(path, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with salt.utils.files.fopen(path, 'r') as fp_: <NEW_LINE> <INDENT> return define_xml_str( salt.utils.stringutils.to_unicode(fp_.read()), **kwargs ) <NEW_LINE> <DEDENT> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> return False
Define a persistent domain based on the XML-file path passed to the function :param path: path to a file containing the libvirt XML definition of the domain :param connection: libvirt connection URI, overriding defaults .. versionadded:: Fluorine :param username: username to connect with, overriding defaults .. versionadded:: Fluorine :param password: password to connect with, overriding defaults .. versionadded:: Fluorine CLI Example: .. code-block:: bash salt '*' virt.define_xml_path <path to XML file on the node>
625941b87c178a314d6ef2ac
def resetStopOnReset(self, software_reset = None): <NEW_LINE> <INDENT> logging.debug("reset stop on Reset") <NEW_LINE> self.halt() <NEW_LINE> demcr = self.readMemory(DEMCR) <NEW_LINE> self.writeMemory(DEMCR, demcr | VC_CORERESET) <NEW_LINE> self.reset(software_reset) <NEW_LINE> while (self.getState() == TARGET_RUNNING): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.writeMemory(DEMCR, demcr)
perform a reset and stop the core on the reset handler
625941b88c3a873295158211
def fib_gen(): <NEW_LINE> <INDENT> first_no = 0 <NEW_LINE> second_no = 1 <NEW_LINE> yield first_no <NEW_LINE> while True: <NEW_LINE> <INDENT> yield second_no <NEW_LINE> second_no, first_no = second_no + first_no, second_no
Method: generator. The yield statement is immediately recognized by the interpreter, which treats this function as a generator. One cannot have return and yield statements in the same function (in Python 2). .next() in a for loop is another special call that the generator recognizes. :return:
625941b826068e7796caeb2c
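Since the generator above is infinite, take a bounded prefix with itertools.islice rather than list():

from itertools import islice

print(list(islice(fib_gen(), 10)))  # [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]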
def set_thread_by_index(self, thread_id): <NEW_LINE> <INDENT> result = self.debugger.communicator.send( "-thread-select {0}".format(thread_id)).is_success() <NEW_LINE> if result: <NEW_LINE> <INDENT> self.debugger.on_thread_changed.notify( self.get_thread_info().selected_thread) <NEW_LINE> self.debugger.on_frame_changed.notify(self.get_current_frame()) <NEW_LINE> util.Logger.debug("Changed to thread with id {0}".format( thread_id)) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
@type thread_id: int @rtype: bool
625941b8dc8b845886cb5388
def serialize(self, view): <NEW_LINE> <INDENT> return collections.OrderedDict([ ('id', self.id), ('flavor_id', self.flavor_id), ('chassis_model_id', self.chassis_model_id), ('schedule_priority', self.schedule_priority), ('deleted', self.deleted) ])
Turn a FlavorProvider into a dict.
625941b8fff4ab517eb2f28d
@app.get('/form') <NEW_LINE> def form(request: Request, csrf_protect:CsrfProtect = Depends()): <NEW_LINE> <INDENT> csrf_token = csrf_protect.generate_csrf() <NEW_LINE> response = templates.TemplateResponse('form.html', { 'request': request, 'csrf_token': csrf_token }) <NEW_LINE> return response
Returns form template.
625941b8d6c5a10208143e9a
def queryset(self, request, queryset): <NEW_LINE> <INDENT> field_key = 'class_day' <NEW_LINE> if queryset.model == Registration: <NEW_LINE> <INDENT> field_key = 'classoffer__' + field_key <NEW_LINE> <DEDENT> return queryset.filter(**{field_key: self.value()}) if self.value() else queryset
Returns the filtered queryset based on value provided in the query string, retrievable via self.value().
625941b8442bda511e8be279
def build_params_from_flags(): <NEW_LINE> <INDENT> FLAGS = flags.FLAGS <NEW_LINE> if FLAGS.task_mode == 'evals': <NEW_LINE> <INDENT> assert not FLAGS.reset_output_dir, '`eval` tasks cannot `reset_output_dir`' <NEW_LINE> <DEDENT> output_dir = FLAGS.output_dir <NEW_LINE> logging.info(f'Checkpoints are at: {output_dir}') <NEW_LINE> if not gfile.IsDirectory(output_dir): <NEW_LINE> <INDENT> logging.info(f'Path `{output_dir}` does not exist. Creating') <NEW_LINE> gfile.MakeDirs(output_dir) <NEW_LINE> <DEDENT> elif FLAGS.reset_output_dir: <NEW_LINE> <INDENT> logging.info(f'Path `{output_dir}` exists. Removing') <NEW_LINE> gfile.DeleteRecursively(output_dir) <NEW_LINE> gfile.MakeDirs(output_dir) <NEW_LINE> <DEDENT> global _flags <NEW_LINE> params = HParams( inf=float('inf'), output_dir=output_dir, ) <NEW_LINE> for flag_name in _flags: <NEW_LINE> <INDENT> flag_value = getattr(FLAGS, flag_name) <NEW_LINE> if flag_name not in params: <NEW_LINE> <INDENT> params.add_hparam(flag_name, flag_value) <NEW_LINE> <DEDENT> <DEDENT> _deduce_num_classes(params) <NEW_LINE> pretty_print_params = params.to_json(indent=2) <NEW_LINE> logging.info(pretty_print_params) <NEW_LINE> if params.task_mode not in ['inference', 'evals', 'eval_forever']: <NEW_LINE> <INDENT> params_filename = os.path.join(params.output_dir, 'hparams.json') <NEW_LINE> if not gfile.Exists(params_filename): <NEW_LINE> <INDENT> with gfile.GFile(params_filename, 'w') as fout: <NEW_LINE> <INDENT> fout.write(pretty_print_params) <NEW_LINE> fout.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return params
Build and return a `tf.HParams` object.
625941b830c21e258bdfa2f0
def _recover_auth_meta(self, auth_id, auth_meta): <NEW_LINE> <INDENT> remove_subvolumes = [] <NEW_LINE> for subvol, subvol_data in auth_meta['subvolumes'].items(): <NEW_LINE> <INDENT> if not subvol_data['dirty']: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> (group_name, subvol_name) = subvol.split('/') <NEW_LINE> group_name = group_name if group_name != 'None' else Group.NO_GROUP_NAME <NEW_LINE> access_level = subvol_data['access_level'] <NEW_LINE> with self.auth_mdata_mgr.subvol_metadata_lock(group_name, subvol_name): <NEW_LINE> <INDENT> subvol_meta = self.auth_mdata_mgr.subvol_metadata_get(group_name, subvol_name) <NEW_LINE> if not subvol_meta or auth_id not in subvol_meta['auths']: <NEW_LINE> <INDENT> remove_subvolumes.append(subvol) <NEW_LINE> continue <NEW_LINE> <DEDENT> want_auth = { 'access_level': access_level, 'dirty': False, } <NEW_LINE> if subvol_meta['auths'][auth_id] == want_auth: <NEW_LINE> <INDENT> auth_meta['subvolumes'][subvol]['dirty'] = False <NEW_LINE> self.auth_mdata_mgr.auth_metadata_set(auth_id, auth_meta) <NEW_LINE> continue <NEW_LINE> <DEDENT> client_entity = "client.{0}".format(auth_id) <NEW_LINE> ret, out, err = self.mgr.mon_command( { 'prefix': 'auth get', 'entity': client_entity, 'format': 'json' }) <NEW_LINE> if ret == 0: <NEW_LINE> <INDENT> existing_caps = json.loads(out) <NEW_LINE> <DEDENT> elif ret == -errno.ENOENT: <NEW_LINE> <INDENT> existing_caps = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.error(err) <NEW_LINE> raise VolumeException(ret, err) <NEW_LINE> <DEDENT> self._authorize_subvolume(auth_id, access_level, existing_caps) <NEW_LINE> <DEDENT> auth_meta['subvolumes'][subvol]['dirty'] = False <NEW_LINE> self.auth_mdata_mgr.auth_metadata_set(auth_id, auth_meta) <NEW_LINE> <DEDENT> for subvol in remove_subvolumes: <NEW_LINE> <INDENT> del auth_meta['subvolumes'][subvol] <NEW_LINE> <DEDENT> if not auth_meta['subvolumes']: <NEW_LINE> <INDENT> self.fs.unlink(self.auth_mdata_mgr._auth_metadata_path(auth_id)) <NEW_LINE> return <NEW_LINE> <DEDENT> auth_meta['dirty'] = False <NEW_LINE> self.auth_mdata_mgr.auth_metadata_set(auth_id, auth_meta)
Call me after locking the auth meta file.
625941b8de87d2750b85fbe1
def load_pkl( file_name ) : <NEW_LINE> <INDENT> with open( file_name, "rb" ) as f : <NEW_LINE> <INDENT> return pickle.load( f )
Load a python object back from the pickle file.
625941b80fa83653e4656e10
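A round-trip sketch showing why the binary modes matter: pickle files must be written with "wb" and read back with "rb", which is the mode load_pkl above uses.

import pickle

data = {"answer": 42}
with open("obj.pkl", "wb") as f:
    pickle.dump(data, f)
print(load_pkl("obj.pkl"))  # {'answer': 42}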
def find_kth_node_from_end(self, k): <NEW_LINE> <INDENT> node1 = self.head <NEW_LINE> node2 = self.head <NEW_LINE> i = 0 <NEW_LINE> while i < k: <NEW_LINE> <INDENT> node1 = node1.next <NEW_LINE> i += 1 <NEW_LINE> if node1.next is None: <NEW_LINE> <INDENT> print('%d is larger than length of LinkedList '%k) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if node1.next is not None: <NEW_LINE> <INDENT> while node1 is not None: <NEW_LINE> <INDENT> node1 = node1.next <NEW_LINE> node2 = node2.next <NEW_LINE> <DEDENT> print('value of kth node from end is %d'%node2.value) <NEW_LINE> return node2.value
Given a linked list, find the nth node from the end for a given value of n (n > 0). Idea: advance one pointer k nodes ahead, then move both pointers one node at a time. Complexity is O(n) and extra space used is O(1). http://www.ideserve.co.in/learn/find-nth-node-from-the-end-of-linked-list
625941b899fddb7c1c9de1e6
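A self-contained version of the same two-pointer idea, with a minimal Node class standing in for the list implementation above (an assumption); it returns None instead of printing when k exceeds the list length.

class Node:
    def __init__(self, value, next=None):
        self.value = value
        self.next = next

def kth_from_end(head, k):
    lead = follow = head
    for _ in range(k):          # advance the lead pointer k nodes
        if lead is None:
            return None         # k exceeds the list length
        lead = lead.next
    while lead is not None:     # then move both until lead falls off the end
        lead = lead.next
        follow = follow.next
    return follow.value

head = Node(1, Node(2, Node(3, Node(4, Node(5)))))
print(kth_from_end(head, 2))  # 4, the second node from the end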
def abort(self, jobid, reason, code): <NEW_LINE> <INDENT> self.execute('UPDATE jobs SET abort_code = ?, aborted_by = ?, abort_time = ? WHERE jobid = ?', (code, reason, _now_ts(), jobid))
codes: 0 - error during submission; 1 - error with parent; 2 - got killed by SGE/job scheduler
625941b876d4e153a657e983
def main(): <NEW_LINE> <INDENT> opt = docopt(__doc__, options_first=True) <NEW_LINE> if opt['--day'] is None: <NEW_LINE> <INDENT> thisday = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> thisday = int(opt['--day']) <NEW_LINE> <DEDENT> jbt.get_background(float(opt['<ra>']), float(opt['<dec>']), float(opt['<wavelength>']), thresh=float(opt['--thresh']), thisday=thisday, showsubbkgs=opt['--showsubbkgs'], background_file=opt['--background_file'], bathtub_file=opt['--bathtub_file'])
Main CLI entrypoint.
625941b8e64d504609d74694
def _mapped_to_this_conductor(self, node_uuid, driver): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ring = self.ring_manager[driver] <NEW_LINE> <DEDENT> except exception.DriverNotFound: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.host in ring.get_nodes( node_uuid.encode('utf-8'), replicas=CONF.hash_distribution_replicas)
Check that node is mapped to this conductor. Note that because mappings are eventually consistent, it is possible for two conductors to simultaneously believe that a node is mapped to them. Any operation that depends on exclusive control of a node should take out a lock.
625941b80c0af96317bb803d
def Dispose(self): <NEW_LINE> <INDENT> pass
Dispose(self: FamilyTypeSetIterator, A_0: bool)
625941b8091ae35668666db9
def _upsample_add(self, x, y): <NEW_LINE> <INDENT> _, _, H, W = y.size() <NEW_LINE> return F.upsample(x, size=(H, W), mode='bilinear') + y
Upsample and add two feature maps.
625941b8796e427e537b0416
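A hedged sketch of the same FPN-style upsample-and-add; F.upsample is deprecated in recent PyTorch releases, so this uses the equivalent F.interpolate (the tensor shapes are illustrative):

import torch
import torch.nn.functional as F

def upsample_add(x, y):
    _, _, H, W = y.size()
    return F.interpolate(x, size=(H, W), mode="bilinear", align_corners=False) + y

x = torch.randn(1, 256, 7, 7)    # coarse top-down feature map
y = torch.randn(1, 256, 14, 14)  # finer lateral feature map
print(upsample_add(x, y).shape)  # torch.Size([1, 256, 14, 14])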
def clean_internal_terms(self): <NEW_LINE> <INDENT> for term in self.connection[self.numTerms:]: <NEW_LINE> <INDENT> self.disconnect(term) <NEW_LINE> <DEDENT> self.connection = self.connection[:self.numTerms] <NEW_LINE> self.localReference = 0
Disconnect any internal terms. Normally used before calling process_params() a second time or when an element is removed from the circuit.
625941b8d18da76e23532325
def test_get_history_dicts(self): <NEW_LINE> <INDENT> self.resource_monitor.check_resources() <NEW_LINE> cpu_dict = self.resource_monitor.get_cpu_history_dict() <NEW_LINE> self.assertIsInstance(cpu_dict, list) <NEW_LINE> memory_dict = self.resource_monitor.get_memory_history_dict() <NEW_LINE> self.assertIsInstance(memory_dict, list)
Test the CPU/memory history dictionary of a resource monitor
625941b8f548e778e58cd3cf
def test_player_get_max_payoff(self): <NEW_LINE> <INDENT> game = gambit.read_game("test_games/payoff_game.nfg") <NEW_LINE> assert game.players[0].max_payoff == fractions.Fraction(10,1) <NEW_LINE> assert game.players["Player 1"].max_payoff == fractions.Fraction(10,1) <NEW_LINE> assert game.players[1].max_payoff == fractions.Fraction(8,1) <NEW_LINE> assert game.players["Player 2"].max_payoff == fractions.Fraction(8,1)
To test getting the maximum payoff for the players
625941b8e5267d203edcdaf4
def __addRows( self, aValueList ): <NEW_LINE> <INDENT> for aValue in aValueList: <NEW_LINE> <INDENT> anIter = self.theListStore.append( ) <NEW_LINE> self.theListStore.set_value ( anIter, 0 , aValue[0] ) <NEW_LINE> self.theListStore.set_value ( anIter, 1 , aValue[1] ) <NEW_LINE> self.theListStore.set_value ( anIter, 2 , aValue[2] ) <NEW_LINE> self.theListStore.set_value ( anIter, 3 , aValue[3] ) <NEW_LINE> self.theListStore.set_value ( anIter, 4 , aValue[4] ) <NEW_LINE> self.theListStore.set_value ( anIter, 5 , aValue[5] ) <NEW_LINE> self.theListStore.set_value ( anIter, 6 , aValue[6] ) <NEW_LINE> self.theListStore.set_value ( anIter, 7 , aValue[7] ) <NEW_LINE> self.theListStore.set_value ( anIter, 8 , aValue[8] )
in: list of [Name, Value, Settable, Creator Flag]
625941b8b57a9660fec336d4
def list_virtual_machine_scale_set_vm_network_interfaces( self, resource_group_name, virtual_machine_scale_set_name, virtualmachine_index, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces' <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.NetworkInterfacePaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Gets information about all network interfaces in a virtual machine in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. :type virtual_machine_scale_set_name: str :param virtualmachine_index: The virtual machine index. :type virtualmachine_index: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of NetworkInterface :rtype: ~azure.mgmt.network.v2016_09_01.models.NetworkInterfacePaged[~azure.mgmt.network.v2016_09_01.models.NetworkInterface] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941b85166f23b2e1a4fad
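A hypothetical call site for the pager above; the client construction, resource names, and index are assumptions, not from the source:

# network_client is assumed to be an azure.mgmt.network NetworkManagementClient
pages = network_client.network_interfaces.list_virtual_machine_scale_set_vm_network_interfaces(
    resource_group_name='my-rg',
    virtual_machine_scale_set_name='my-vmss',
    virtualmachine_index='0')
for nic in pages:  # the Paged object fetches follow-up pages lazily via next_link
    print(nic.name)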
def add_tweet_enqueue_reply(output_obj, parent): <NEW_LINE> <INDENT> reply_queue = dataqueue.DataQueue('tweet-reply') <NEW_LINE> link = settings.WEB_LINK + '/?lat=%s&lng=%s&o=t' % ( output_obj['lat'], output_obj['lon'] ) <NEW_LINE> reply_data = { 'in_reply_to_status_id': output_obj['id'], 'screen_name': output_obj['screen_name'], 'link': link, 'status': "you're on the map! %s" % link, 'parent': parent, } <NEW_LINE> reply_queue.add(reply_data, output_obj['id'])
Enqueue the reply now that we've worked out where it's for.
625941b8b7558d58953c4d6f
def _execute_single_test(self, config, failure_handler, naarad_obj, test): <NEW_LINE> <INDENT> if not failure_handler.get_abort_status(): <NEW_LINE> <INDENT> test.result = constants.SKIPPED <NEW_LINE> test.message += error_messages.TEST_ABORT <NEW_LINE> logger.debug("Skipping" + test.name + "due to too many setup/teardown failures") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setup_fail = False <NEW_LINE> if not self.master_config.mapping.get("no-perf", False): <NEW_LINE> <INDENT> test.naarad_config = self.dynamic_config_module.naarad_config(config.mapping, test_name=test.name) <NEW_LINE> test.naarad_id = naarad_obj.signal_start(test.naarad_config) <NEW_LINE> <DEDENT> test.start_time = time.time() <NEW_LINE> logger.debug("Setting up test: " + test.name) <NEW_LINE> try: <NEW_LINE> <INDENT> if hasattr(self.deployment_module, 'setup'): <NEW_LINE> <INDENT> self.deployment_module.setup() <NEW_LINE> <DEDENT> <DEDENT> except BaseException: <NEW_LINE> <INDENT> test.result = constants.SKIPPED <NEW_LINE> test.message += error_messages.SETUP_FAILED + traceback.format_exc() <NEW_LINE> setup_fail = True <NEW_LINE> failure_handler.notify_failure() <NEW_LINE> logger.debug("Aborting {0} due to setup failure:\n{1}".format(test.name, traceback.format_exc())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug("Executing test: " + test.name) <NEW_LINE> while (test.current_iteration < test.total_number_iterations): <NEW_LINE> <INDENT> test.current_iteration = test.current_iteration + 1 <NEW_LINE> if ( (test.result != constants.FAILED) or (runtime.get_active_config("consecutive_failures_per_test",0) > test.consecutive_failures) ): <NEW_LINE> <INDENT> self._run_and_verify_test(test) <NEW_LINE> <DEDENT> if ((test.current_iteration % (test.total_number_iterations/int(runtime.get_active_config("loop_all_tests",1))))== 0): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logger.debug("Tearing down test: " + test.name) <NEW_LINE> try: <NEW_LINE> <INDENT> if hasattr(self.deployment_module, 'teardown'): <NEW_LINE> <INDENT> self.deployment_module.teardown() <NEW_LINE> <DEDENT> if not setup_fail: <NEW_LINE> <INDENT> failure_handler.notify_success() <NEW_LINE> <DEDENT> <DEDENT> except BaseException: <NEW_LINE> <INDENT> test.message += error_messages.TEARDOWN_FAILED + traceback.format_exc() <NEW_LINE> if not setup_fail: <NEW_LINE> <INDENT> failure_handler.notify_failure() <NEW_LINE> <DEDENT> logger.debug(test.name + "failed teardown():\n{0}".format(traceback.format_exc())) <NEW_LINE> <DEDENT> test.end_time = time.time() <NEW_LINE> if not self.master_config.mapping.get("no-display", False): <NEW_LINE> <INDENT> naarad_obj.signal_stop(test.naarad_id) <NEW_LINE> <DEDENT> logger.debug("Execution of test: " + test.name + " complete")
Evaluates a single test :param config: :param failure_handler: :param naarad_obj: :param test: :return:
625941b863d6d428bbe44343
def main(): <NEW_LINE> <INDENT> new_products_inventory_df = run_site_crawl(BRANDS_PAGE_LINK) <NEW_LINE> save_data_to_database(new_products_inventory_df) <NEW_LINE> print('Total run time:', datetime.now() - startTime) <NEW_LINE> return
Main function. 1. Runs the site crawl. 2. Saves crawled data to the database.
625941b87cff6e4e811177da
def test_gathernd(): <NEW_LINE> <INDENT> a = np.arange(12).reshape((2,2,3)) <NEW_LINE> print(a) <NEW_LINE> indice0 = [[[1],[0]]] <NEW_LINE> indice1 = [[[0,0], [1,1]]] <NEW_LINE> indice2 = [[[0,0,0], [1,1,2]]] <NEW_LINE> v0 = tf.gather_nd(a, indice0) <NEW_LINE> v1 = tf.gather_nd(a, indice1) <NEW_LINE> v2 = tf.gather_nd(a, indice2) <NEW_LINE> test_variable(v0) <NEW_LINE> test_variable(v1) <NEW_LINE> test_variable(v2)
tf.gather_nd(): the first N-1 dimensions of the indices argument carry the same meaning as in tf.gather above; the entries along the Nth (last) dimension are the index into the params array. If the last dimension holds a single element, that position of the (N-1)-dimensional result is filled with params[",".join(contents of the last dimension)]. :return:
625941b850812a4eaa59c179
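A small runnable check of that indexing rule, assuming TF 2.x eager execution:

import numpy as np
import tensorflow as tf

a = np.arange(12).reshape((2, 2, 3))
# last index dimension == 2: picks the whole rows a[0, 0] and a[1, 1]
print(tf.gather_nd(a, [[0, 0], [1, 1]]).numpy())        # [[ 0  1  2] [ 9 10 11]]
# last index dimension == 3: full indices, picks the scalars a[0, 0, 0] and a[1, 1, 2]
print(tf.gather_nd(a, [[0, 0, 0], [1, 1, 2]]).numpy())  # [ 0 11]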
def create_recurring_invoice(self, product): <NEW_LINE> <INDENT> product['is_recurring'] = True <NEW_LINE> product['client_id'] = self.client['data']['id'] <NEW_LINE> product['auto_bill'] = True <NEW_LINE> today = datetime.datetime.now().date() <NEW_LINE> product['start_date'] = today.isoformat() <NEW_LINE> if product['recurring'] == 'monthly': <NEW_LINE> <INDENT> end_date = (today + relativedelta(months=1)).isoformat() <NEW_LINE> product['end_date'] = end_date <NEW_LINE> product['frequency_id'] = self.get_frequency_id('monthly') <NEW_LINE> <DEDENT> if product['recurring'] == 'annually': <NEW_LINE> <INDENT> end_date = (today + relativedelta(years=1)).isoformat() <NEW_LINE> product['end_date'] = end_date <NEW_LINE> product['frequency_id'] = self.get_frequency_id('annually') <NEW_LINE> <DEDENT> del product['recurring'] <NEW_LINE> res = requests.post(self.url + 'invoices?include=invitations', json=product, headers=self.headers) <NEW_LINE> if res.status_code == 200: <NEW_LINE> <INDENT> self.invoice = res.json() <NEW_LINE> return self.invoice <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return res.json()
Create a recurring invoice for a client.
625941b84428ac0f6e5ba646
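The use of relativedelta(months=1) / relativedelta(years=1) rather than a fixed timedelta matters at month ends and in leap years; a quick illustration, with dates chosen for the edge cases:

from datetime import date, timedelta
from dateutil.relativedelta import relativedelta

print(date(2020, 1, 31) + relativedelta(months=1))  # 2020-02-29, clamped to month end
print(date(2020, 1, 31) + timedelta(days=31))       # 2020-03-02, a fixed offset drifts
print(date(2020, 2, 29) + relativedelta(years=1))   # 2021-02-28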
def unload(self): <NEW_LINE> <INDENT> for action in self.actions: <NEW_LINE> <INDENT> self.iface.removePluginMenu( self.tr(u"&Jacquez's Q Visualization"), action) <NEW_LINE> self.iface.removeToolBarIcon(action) <NEW_LINE> <DEDENT> del self.toolbar
Removes the plugin menu item and icon from the QGIS GUI.
625941b897e22403b379cdec
def _set_state(self,inport): <NEW_LINE> <INDENT> self.logger.error( "_set_state: Switch method should be replaced by subclass") <NEW_LINE> self.state = inport <NEW_LINE> return self.state
Stub for real device method
625941b8283ffb24f3c55760
def _schedule_update_to_recheck_turn_off_sensor(self): <NEW_LINE> <INDENT> if not self.is_on: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not self.hass: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @callback <NEW_LINE> def _scheduled_update(now): <NEW_LINE> <INDENT> self._check_for_off_update_listener = None <NEW_LINE> self._update_from_data() <NEW_LINE> if not self.is_on: <NEW_LINE> <INDENT> self.async_write_ha_state() <NEW_LINE> <DEDENT> <DEDENT> self._check_for_off_update_listener = async_call_later( self.hass, TIME_TO_RECHECK_DETECTION.total_seconds(), _scheduled_update )
Schedule an update to recheck the sensor to see if it is ready to turn off.
625941b88a43f66fc4b53ebd
def get_calib_data(self, channel): <NEW_LINE> <INDENT> slope = float(self.send_message( "?AI{{{0}}}:SLOPE".format(channel)).split('=')[1]) <NEW_LINE> offset = float(self.send_message( "?AI{{{0}}}:OFFSET".format(channel)).split('=')[1]) <NEW_LINE> return slope, offset
Query the calibration parameters slope and offset for a given channel. The returned values are only valid for the currently selected voltage range. :param channel: the analog input channel to calibrate
625941b897e22403b379cded
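The instrument replies presumably look like AI{0}:SLOPE=1.0025, so the parsing above is just a split on '='; the reply text below is a made-up example:

reply = "AI{0}:SLOPE=1.0025"       # hypothetical instrument reply
slope = float(reply.split('=')[1])
print(slope)                        # 1.0025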
def save_all(db: DB, settings: ty.Iterable[model.Setting]) -> None: <NEW_LINE> <INDENT> rows = [setting.to_row() for setting in settings] <NEW_LINE> with db.cursor() as cur: <NEW_LINE> <INDENT> cur.executemany( "DELETE FROM user_settings WHERE key=%s AND user_id=%s", [(setting.key, setting.user_id) for setting in settings], ) <NEW_LINE> cur.executemany(_INSERT_SQL, rows)
Save all settings
625941b83346ee7daa2b2bbd
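The delete-then-insert pair above is a portable upsert under executemany; a sketch with a DB-API cursor cur (as yielded by db.cursor() above), where the insert column list is an assumption since _INSERT_SQL is not shown:

rows = [("theme", 1, "dark"), ("lang", 1, "en")]    # (key, user_id, value) examples
cur.executemany("DELETE FROM user_settings WHERE key=%s AND user_id=%s",
                [(key, user_id) for key, user_id, _ in rows])
cur.executemany("INSERT INTO user_settings (key, user_id, value) VALUES (%s, %s, %s)",
                rows)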
def colorize(self, deg): <NEW_LINE> <INDENT> h, l, s = self.tohls() <NEW_LINE> h = clamp(deg * HUE_SCALE, 0.0, 1.0) <NEW_LINE> self.fromhls(h, l, s)
Colorize the color with the given hue.
625941b855399d3f05588507
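tohls/fromhls presumably wrap the standard RGB/HLS conversion; the same hue-replacement trick with the stdlib colorsys module:

import colorsys

r, g, b = 0.8, 0.2, 0.2                 # a red
h, l, s = colorsys.rgb_to_hls(r, g, b)
h = max(0.0, min(1.0, 1.0 / 3.0))       # clamp the new hue; 1/3 is roughly green
print(colorsys.hls_to_rgb(h, l, s))     # same lightness/saturation, green hue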
def execute_function(self, func): <NEW_LINE> <INDENT> logging.info('Importing the function: %s' % func) <NEW_LINE> with h5py.File(self.filename, 'a') as h5_file: <NEW_LINE> <INDENT> input_cubes = [] <NEW_LINE> for input_cube_name in func.input_cube_names: <NEW_LINE> <INDENT> input_cubes.append(h5_file[input_cube_name]) <NEW_LINE> <DEDENT> logging.info('Executing the function: %s' % func) <NEW_LINE> func.__call__(input_cubes, func.output_cube_names, func.params) <NEW_LINE> sdcubes = load_attribute(h5_file, 'sdcubes') <NEW_LINE> for output_cube_name in func.output_cube_names: <NEW_LINE> <INDENT> sdcubes[output_cube_name] = self.filename <NEW_LINE> logging.info('Set the dirty status for the output datasets') <NEW_LINE> h5_file[output_cube_name].attrs['dirty'] = True <NEW_LINE> <DEDENT> store_attribute(h5_file, 'sdcubes', sdcubes)
Executes a function and sets the output dataset status to dirty.
625941b899cbb53fe6792a3b
def collate(batch): <NEW_LINE> <INDENT> elem = batch[0] <NEW_LINE> elem_type = type(elem) <NEW_LINE> if isinstance(elem, torch.Tensor): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> out = None <NEW_LINE> if torch.utils.data.get_worker_info() is not None: <NEW_LINE> <INDENT> numel = sum([x.numel() for x in batch]) <NEW_LINE> storage = elem.storage()._new_shared(numel) <NEW_LINE> out = elem.new(storage) <NEW_LINE> <DEDENT> return torch.stack(batch, 0, out=out) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return torch.nn.utils.rnn.pad_sequence(batch, batch_first=True) <NEW_LINE> <DEDENT> <DEDENT> elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' and elem_type.__name__ != 'string_': <NEW_LINE> <INDENT> elem = batch[0] <NEW_LINE> if elem_type.__name__ == 'ndarray': <NEW_LINE> <INDENT> if np_str_obj_array_pattern.search(elem.dtype.str) is not None: <NEW_LINE> <INDENT> raise TypeError(default_collate_err_msg_format.format(elem.dtype)) <NEW_LINE> <DEDENT> return collate([torch.as_tensor(b) for b in batch]) <NEW_LINE> <DEDENT> elif elem.shape == (): <NEW_LINE> <INDENT> return torch.as_tensor(batch) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(elem, float): <NEW_LINE> <INDENT> return torch.tensor(batch, dtype=torch.float64) <NEW_LINE> <DEDENT> elif isinstance(elem, int_classes): <NEW_LINE> <INDENT> return torch.tensor(batch) <NEW_LINE> <DEDENT> elif isinstance(elem, string_classes): <NEW_LINE> <INDENT> return batch <NEW_LINE> <DEDENT> elif isinstance(elem, container_abcs.Mapping): <NEW_LINE> <INDENT> return {key: collate([d[key] for d in batch]) for key in elem} <NEW_LINE> <DEDENT> elif isinstance(elem, tuple) and hasattr(elem, '_fields'): <NEW_LINE> <INDENT> return elem_type(*(collate(samples) for samples in zip(*batch))) <NEW_LINE> <DEDENT> elif isinstance(elem, container_abcs.Sequence): <NEW_LINE> <INDENT> batch = [torch.stack(it) for it in batch] <NEW_LINE> elem_sizes = [it.shape for it in batch] <NEW_LINE> max_sizes = (max(sizes) for sizes in zip(*elem_sizes)) <NEW_LINE> batched = torch.zeros(len(batch), *max_sizes, dtype=batch[0].dtype) <NEW_LINE> for idx, (elem, elem_size) in enumerate(zip(batch, elem_sizes)): <NEW_LINE> <INDENT> size_1, size_2 = elem_size <NEW_LINE> batched[idx, :size_1, :size_2] = elem <NEW_LINE> <DEDENT> return batched <NEW_LINE> <DEDENT> raise TypeError(default_collate_err_msg_format.format(elem_type))
Puts each data field into a tensor with outer dimension batch size
625941b876e4537e8c3514cb
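The fallback path leans on pad_sequence for ragged batches; a quick check of that behavior:

import torch
from torch.nn.utils.rnn import pad_sequence

batch = [torch.ones(3), torch.ones(5)]          # ragged 1-D tensors
padded = pad_sequence(batch, batch_first=True)  # zero-pads to the longest length
print(padded.shape)                             # torch.Size([2, 5])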
def setup_class(cls): <NEW_LINE> <INDENT> Data.shared = Data.testsuites[__class__.__name__]['shared_data'] <NEW_LINE> Data.cfm2utilapi.device.disconnect() <NEW_LINE> time.sleep(2) <NEW_LINE> Data.cfm2masterutilapi.zeroize_hsm(**Data.shared) <NEW_LINE> Data.cfm2masterutilapi.login_hsm_default_co(**Data.shared) <NEW_LINE> Data.cfm2masterutilapi.init_hsm1(**Data.shared) <NEW_LINE> Data.cfm2masterutilapi.login_hsm(**Data.shared) <NEW_LINE> Data.cfm2masterutilapi.create_partition2(**Data.shared) <NEW_LINE> Data.cfm2utilapi.device.connect() <NEW_LINE> time.sleep(2) <NEW_LINE> Data.cfm2utilapi.get_hsm_info() <NEW_LINE> Data.cfm2utilapi.zeroize_hsm() <NEW_LINE> Data.cfm2utilapi.login_default() <NEW_LINE> Data.cfm2utilapi.init_hsm4(**Data.shared) <NEW_LINE> Data.cfm2utilapi.login_hsm(**Data.shared) <NEW_LINE> Data.cfm2utilapi.create_user(**Data.shared) <NEW_LINE> cu_username = ['crypto_user%s' % str(i) for i in range(1, 11)] <NEW_LINE> for crypto_user in cu_username: <NEW_LINE> <INDENT> Data.shared["create_user"].update({'username': crypto_user}) <NEW_LINE> Data.cfm2utilapi.create_user(**Data.shared) <NEW_LINE> <DEDENT> part_name = Data.shared['partition_name'] <NEW_LINE> dev = connect_session(device=Data.cfm2utilapi, part_name=part_name) <NEW_LINE> dev.get_hsm_info() <NEW_LINE> dev.zeroize_hsm() <NEW_LINE> dev.login_default() <NEW_LINE> dev.init_hsm4(**Data.shared) <NEW_LINE> dev.login_hsm(**Data.shared) <NEW_LINE> Data.shared["create_user"].update({'username': 'crypto_user'}) <NEW_LINE> dev.create_user(**Data.shared) <NEW_LINE> cu_username = ['crypto_user%s' % str(i) for i in range(1, 8)] <NEW_LINE> for crypto_user in cu_username: <NEW_LINE> <INDENT> Data.shared["create_user"].update({'username': crypto_user}) <NEW_LINE> dev.create_user(**Data.shared)
Sets the FIPS state of the HSM to 2, creates and initializes partitions, and creates crypto users.
625941b8046cf37aa974cb9f
def get_token(self): <NEW_LINE> <INDENT> url = base_url + "token/" <NEW_LINE> headers = {"Content-Type" : "application/json"} <NEW_LINE> obj = { "username" : self.username, "password" : self.password } <NEW_LINE> data = json.dumps(obj).encode("utf-8") <NEW_LINE> response = requests.post(url, data, headers=headers) <NEW_LINE> token = response.json()['token'] <NEW_LINE> return token
CALL /token
625941b82c8b7c6e89b35618
def model_dataset(input_dataset, output_types, output_shapes, algorithm=0, cpu_budget=0, name=None): <NEW_LINE> <INDENT> _ctx = _context._context or _context.context() <NEW_LINE> tld = _ctx._thread_local_data <NEW_LINE> if tld.is_eager: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _result = pywrap_tfe.TFE_Py_FastPathExecute( _ctx._context_handle, tld.device_name, "ModelDataset", name, tld.op_callbacks, input_dataset, "algorithm", algorithm, "cpu_budget", cpu_budget, "output_types", output_types, "output_shapes", output_shapes) <NEW_LINE> return _result <NEW_LINE> <DEDENT> except _core._NotOkStatusException as e: <NEW_LINE> <INDENT> _ops.raise_from_not_ok_status(e, name) <NEW_LINE> <DEDENT> except _core._FallbackException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return model_dataset_eager_fallback( input_dataset, algorithm=algorithm, cpu_budget=cpu_budget, output_types=output_types, output_shapes=output_shapes, name=name, ctx=_ctx) <NEW_LINE> <DEDENT> except _core._SymbolicException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if not isinstance(output_types, (list, tuple)): <NEW_LINE> <INDENT> raise TypeError( "Expected list for 'output_types' argument to " "'model_dataset' Op, not %r." % output_types) <NEW_LINE> <DEDENT> output_types = [_execute.make_type(_t, "output_types") for _t in output_types] <NEW_LINE> if not isinstance(output_shapes, (list, tuple)): <NEW_LINE> <INDENT> raise TypeError( "Expected list for 'output_shapes' argument to " "'model_dataset' Op, not %r." % output_shapes) <NEW_LINE> <DEDENT> output_shapes = [_execute.make_shape(_s, "output_shapes") for _s in output_shapes] <NEW_LINE> if algorithm is None: <NEW_LINE> <INDENT> algorithm = 0 <NEW_LINE> <DEDENT> algorithm = _execute.make_int(algorithm, "algorithm") <NEW_LINE> if cpu_budget is None: <NEW_LINE> <INDENT> cpu_budget = 0 <NEW_LINE> <DEDENT> cpu_budget = _execute.make_int(cpu_budget, "cpu_budget") <NEW_LINE> _, _, _op, _outputs = _op_def_library._apply_op_helper( "ModelDataset", input_dataset=input_dataset, output_types=output_types, output_shapes=output_shapes, algorithm=algorithm, cpu_budget=cpu_budget, name=name) <NEW_LINE> _result = _outputs[:] <NEW_LINE> if _execute.must_record_gradient(): <NEW_LINE> <INDENT> _attrs = ("algorithm", _op._get_attr_int("algorithm"), "cpu_budget", _op._get_attr_int("cpu_budget"), "output_types", _op.get_attr("output_types"), "output_shapes", _op.get_attr("output_shapes")) <NEW_LINE> _inputs_flat = _op.inputs <NEW_LINE> _execute.record_gradient( "ModelDataset", _inputs_flat, _attrs, _result) <NEW_LINE> <DEDENT> _result, = _result <NEW_LINE> return _result
Identity transformation that models performance. Args: input_dataset: A `Tensor` of type `variant`. A variant tensor representing the input dataset. output_types: A list of `tf.DTypes` that has length `>= 1`. output_shapes: A list of shapes (each a `tf.TensorShape` or list of `ints`) that has length `>= 1`. algorithm: An optional `int`. Defaults to `0`. cpu_budget: An optional `int`. Defaults to `0`. name: A name for the operation (optional). Returns: A `Tensor` of type `variant`.
625941b8d58c6744b4257ab5
def getVelocity(r, Xi, ws): <NEW_LINE> <INDENT> nw = len(ws) <NEW_LINE> dr = np.zeros([3,nw], dtype=complex) <NEW_LINE> v = np.zeros([3,nw], dtype=complex) <NEW_LINE> a = np.zeros([3,nw], dtype=complex) <NEW_LINE> for i in range(nw): <NEW_LINE> <INDENT> dr[:,i] = Xi[:3,i] + SmallRotate(r, Xi[3:,i]) <NEW_LINE> v[ :,i] = 1j*ws[i]*dr[:,i] <NEW_LINE> a[ :,i] = 1j*ws[i]*v[ :,i] <NEW_LINE> <DEDENT> return dr, v, a
Get node complex velocity spectrum based on platform motions and relative position from the PRP
625941b8460517430c393fe2
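The 1j*ws[i] factors encode frequency-domain differentiation: v(w) = i*w*x(w) and a(w) = i*w*v(w) = -w**2 * x(w). A one-frequency sanity check:

import numpy as np

w = 2 * np.pi * 0.1    # rad/s
x = 0.5 + 0.2j         # complex displacement amplitude
v = 1j * w * x
a = 1j * w * v
assert np.isclose(a, -(w ** 2) * x)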
def get_initial(self): <NEW_LINE> <INDENT> initial_args = self.initial.copy() <NEW_LINE> if 'username' in self.kwargs: <NEW_LINE> <INDENT> initial_args['owed_username'] = self.kwargs.get('username') <NEW_LINE> <DEDENT> initial_args['amount'] = 1 <NEW_LINE> return initial_args
Gets the initial form values, pre-filling the username from the URL if the form is going to be paid
625941b8a17c0f6771cbdea8
def decrypt_message(self): <NEW_LINE> <INDENT> testdict = {} <NEW_LINE> for s in range(27): <NEW_LINE> <INDENT> wordCount = 0 <NEW_LINE> decrypted = Message.apply_shift(self, s) <NEW_LINE> for word in decrypted.split(' '): <NEW_LINE> <INDENT> if is_word(self.valid_words, word): <NEW_LINE> <INDENT> wordCount += 1 <NEW_LINE> <DEDENT> <DEDENT> testdict[s] = wordCount <NEW_LINE> <DEDENT> bestVal = max(testdict, key=testdict.get) <NEW_LINE> answer = (bestVal, Message.apply_shift(self,bestVal)) <NEW_LINE> return answer
Decrypt self.message_text by trying every possible shift value and finding the "best" one. We will define "best" as the shift that creates the maximum number of real words when we use apply_shift(shift) on the message text. If s is the original shift value used to encrypt the message, then we would expect 26 - s to be the best shift value for decrypting it. Note: if multiple shifts are equally good such that they all create the maximum number of real words, you may choose any of those shifts (and their corresponding decrypted messages) to return Returns: a tuple of the best shift value used to decrypt the message and the decrypted message text using that shift value
625941b88e7ae83300e4ae20
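Stripped of the class machinery, the idea is: score every shift by dictionary hits and keep the argmax. A self-contained sketch; shift_text, best_shift, and the tiny WORDS set are illustrative stand-ins, not the original helpers:

import string

def shift_text(text, s):
    table = str.maketrans(string.ascii_lowercase,
                          string.ascii_lowercase[s:] + string.ascii_lowercase[:s])
    return text.lower().translate(table)

WORDS = {"hello", "world", "the", "attack"}  # stand-in dictionary

def best_shift(ciphertext):
    def score(s):
        return sum(w.strip('.,!?') in WORDS for w in shift_text(ciphertext, s).split())
    return max(range(26), key=score)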
def cmd_line_parse(iargs=None): <NEW_LINE> <INDENT> parser = create_parser() <NEW_LINE> inps = parser.parse_args(args=iargs) <NEW_LINE> atr1 = readfile.read_attribute(inps.dis_file) <NEW_LINE> atr2 = readfile.read_attribute(inps.geom_file) <NEW_LINE> coord1 = 'geo' if 'Y_FIRST' in atr1.keys() else 'radar' <NEW_LINE> coord2 = 'geo' if 'Y_FIRST' in atr2.keys() else 'radar' <NEW_LINE> proc = atr1.get('PROCESSOR', 'isce') <NEW_LINE> if coord1 == 'radar' and proc in ['gamma', 'roipac']: <NEW_LINE> <INDENT> msg = 'Radar-coded file from {} is NOT supported!'.format(proc) <NEW_LINE> msg += '\n Try to geocode the time-series and geometry files and re-run with them instead.' <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> if coord1 != coord2: <NEW_LINE> <INDENT> n = max(len(os.path.basename(i)) for i in [inps.dis_file, inps.geom_file]) <NEW_LINE> msg = 'Input time-series and geometry file are NOT in the same coordinate!' <NEW_LINE> msg += '\n file {f:<{n}} coordinate: {c}'.format(f=os.path.basename(inps.dis_file), n=n, c=coord1) <NEW_LINE> msg += '\n file {f:<{n}} coordinate: {c}'.format(f=os.path.basename(inps.geom_file), n=n, c=coord2) <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> if not inps.set_file: <NEW_LINE> <INDENT> geom_dir = os.path.dirname(inps.geom_file) <NEW_LINE> inps.set_file = os.path.join(geom_dir, 'SET.h5') <NEW_LINE> <DEDENT> if not inps.cor_dis_file: <NEW_LINE> <INDENT> dis_dir = os.path.dirname(inps.dis_file) <NEW_LINE> fbase, fext = os.path.splitext(os.path.basename(inps.dis_file)) <NEW_LINE> inps.cor_dis_file = os.path.join(dis_dir, '{}_SET{}'.format(fbase, fext)) <NEW_LINE> <DEDENT> return inps
Command line parser.
625941b860cbc95b062c639d
def info(self, task): <NEW_LINE> <INDENT> self.print_log('_______________dataX_______________') <NEW_LINE> self.print_log('dataX Shape: %s' % str(self.dataX.shape)) <NEW_LINE> self.print_log('_______________dataY_______________') <NEW_LINE> self.print_log('dataY Shape: %s' % str(self.dataY.shape)) <NEW_LINE> self.print_log('***************The %s Finished.****************\n' % task)
Print the status, dataX.shape, dataY.shape. :param task: task name. :return:
625941b8ec188e330fd5a5fa
def is_validated_english_sentence(user_input): <NEW_LINE> <INDENT> result = True <NEW_LINE> if re.search("[0-9]+", user_input) is not None: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> elif re.search(r"[^a-zA-Z|\s|\.|,|\?|!]", user_input) is not None: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> elif user_input == "" or re.fullmatch(r"[\s|\.|,|\?|!]+", user_input) is not None: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> return result
Input: - user_input : the string entered by the user Output: - False if the input falls under any of the cases below, True otherwise 1) it contains digits, 2) it contains special characters such as _@#$%^&*()-+=[]{}"';:\|`~, or 3) once sentence punctuation (.,!?) is excluded, nothing was entered or only blanks were entered Examples: >>> import morsecode as mc >>> mc.is_validated_english_sentence("Hello 123") False >>> mc.is_validated_english_sentence("Hi!") True >>> mc.is_validated_english_sentence(".!.") False >>> mc.is_validated_english_sentence("!.!") False >>> mc.is_validated_english_sentence("kkkkk... ^^;") False >>> mc.is_validated_english_sentence("This is Gachon University.") True
625941b826238365f5f0ecbe
def compute_title(src, sdate, edate): <NEW_LINE> <INDENT> if src in ["mrms", "ifc"]: <NEW_LINE> <INDENT> if sdate == edate: <NEW_LINE> <INDENT> title = sdate.strftime("%-d %B %Y") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = f"{sdate:%-d %b} to {edate:%-d %b %Y} (inclusive)" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if sdate == edate: <NEW_LINE> <INDENT> title = ( f"{(sdate - timedelta(days=1)):%-d %B %Y} ~12z to " f"{edate:%-d %B %Y} ~12z" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = ( f"{(sdate - timedelta(days=1)):%-d %B %Y} ~12z to " f"{(edate + timedelta(days=1)):%-d %B %Y} ~12z" ) <NEW_LINE> <DEDENT> <DEDENT> return title
Figure out how to label this plot.
625941b845492302aab5e114
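Note that the %-d (unpadded day) strftime directive used throughout is a glibc extension; it fails on Windows, where %#d is the equivalent. For example:

from datetime import date

print(f"{date(2023, 5, 1):%-d %B %Y}")  # '1 May 2023' on Linux/macOS
print(f"{date(2023, 5, 1):%d %B %Y}")   # '01 May 2023', the portable zero-padded form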
def send_artifact(artifact, destination=None): <NEW_LINE> <INDENT> return get_experiment().log_artifact(artifact, destination)
Save an artifact (file) in experiment storage. Alias for :meth:`~neptune.experiments.Experiment.log_artifact`
625941b86aa9bd52df036bf7
def download_program(self): <NEW_LINE> <INDENT> self.request = requests.get(self.download_url) <NEW_LINE> self.request_content = self.request.content <NEW_LINE> self.file = open(self.file_name, mode="wb") <NEW_LINE> self.file.write(self.request_content) <NEW_LINE> self.file.close()
Downloads the actual program executable
625941b892d797404e303fde
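A more defensive variant of the same download streams the body to disk and raises on HTTP errors; url and file_name are placeholders:

import requests

def download(url, file_name):
    with requests.get(url, stream=True, timeout=30) as resp:
        resp.raise_for_status()
        with open(file_name, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=8192):
                f.write(chunk)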