def measure_curvature_real(self, left_fitx, right_fitx, ploty): <NEW_LINE> <INDENT> ym_per_pix = YM_PER_PIX <NEW_LINE> xm_per_pix = XM_PER_PIX <NEW_LINE> y_eval = np.max(ploty) <NEW_LINE> left_fit_cr = np.polyfit(ploty * ym_per_pix, left_fitx*xm_per_pix, 2) <NEW_LINE> right_fit_cr = np.polyfit(ploty * ym_per_pix, right_fitx* xm_per_pix, 2) <NEW_LINE> left_curverad = ((1 + (2*left_fit_cr[0]*y_eval*ym_per_pix + left_fit_cr[1])**2)**1.5) / np.absolute(2*left_fit_cr[0]) <NEW_LINE> right_curverad = ((1 + (2*right_fit_cr[0]*y_eval*ym_per_pix + right_fit_cr[1])**2)**1.5) / np.absolute(2*right_fit_cr[0]) <NEW_LINE> return left_curverad, right_curverad
Calculates the radius of curvature of the fitted lane-line polynomials, in meters (see the sketch below).
625941b64f88993c3716be82
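A minimal self-contained sketch of the same radius-of-curvature computation on a synthetic lane line; the scale constants here are typical lane-finding values standing in for the module's YM_PER_PIX / XM_PER_PIX:

import numpy as np

YM_PER_PIX = 30 / 720    # assumed meters-per-pixel scales; the real values
XM_PER_PIX = 3.7 / 700   # live in the module-level constants

ploty = np.linspace(0, 719, 720)
left_fitx = 2e-4 * ploty**2 + 0.1 * ploty + 300   # synthetic lane-line pixels
fit_cr = np.polyfit(ploty * YM_PER_PIX, left_fitx * XM_PER_PIX, 2)
y_eval = np.max(ploty) * YM_PER_PIX
# R = (1 + (2*A*y + B)^2)^(3/2) / |2*A|, evaluated at the bottom of the image
radius = (1 + (2 * fit_cr[0] * y_eval + fit_cr[1])**2)**1.5 / np.absolute(2 * fit_cr[0])
print(f"radius of curvature: {radius:.1f} m")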
def _get_snapshot_for_path(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> path = path.rstrip(path_separator) <NEW_LINE> snapshot = self.snapshot_for_path[path] <NEW_LINE> return (path, snapshot) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> path = get_parent_dir_path(path) <NEW_LINE> if not path: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> return self._get_snapshot_for_path(path)
The FSEvents API calls back with paths within the 'watched' directory, so walk back up to the root path for which we have a snapshot and return that path and its snapshot.
625941b6287bf620b61d387e
def logistic_regression(input_dim, output_dim): <NEW_LINE> <INDENT> tf.reset_default_graph() <NEW_LINE> x = tf.placeholder(tf.float32, [None, input_dim]) <NEW_LINE> y = tf.placeholder(tf.float32, [None, output_dim]) <NEW_LINE> learning_r = tf.placeholder(tf.float32, 1)[0] <NEW_LINE> drop_out = tf.placeholder(tf.float32, 1)[0] <NEW_LINE> w_init = tf.contrib.layers.xavier_initializer() <NEW_LINE> b_init = tf.initializers.truncated_normal(mean=0.1, stddev=0.025) <NEW_LINE> w = tf.get_variable('weights1', shape=[ input_dim, output_dim], initializer=w_init) <NEW_LINE> b = tf.get_variable('bias1', shape=[output_dim], initializer=b_init) <NEW_LINE> logits = tf.matmul(tf.nn.dropout(x, keep_prob=drop_out), w) + b <NEW_LINE> y_ = tf.nn.softmax(logits) <NEW_LINE> [print(var) for var in tf.trainable_variables()] <NEW_LINE> return x, y, logits, y_, learning_r, drop_out
Simple logistic regression. Returns the x and y placeholders, the logits and y_ (y hat), plus the learning-rate and dropout placeholders.
625941b6e5267d203edcdaaf
def log1p(x, name=None): <NEW_LINE> <INDENT> _ctx = _context.context() <NEW_LINE> if _ctx.in_graph_mode(): <NEW_LINE> <INDENT> _, _, _op = _op_def_lib._apply_op_helper( "Log1p", x=x, name=name) <NEW_LINE> _result = _op.outputs[:] <NEW_LINE> _inputs_flat = _op.inputs <NEW_LINE> _attrs = ("T", _op.get_attr("T")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _attr_T, (x,) = _execute.args_to_matching_eager([x], _ctx) <NEW_LINE> _attr_T = _attr_T.as_datatype_enum <NEW_LINE> _inputs_flat = [x] <NEW_LINE> _attrs = ("T", _attr_T) <NEW_LINE> _result = _execute.execute(b"Log1p", 1, inputs=_inputs_flat, attrs=_attrs, ctx=_ctx, name=name) <NEW_LINE> <DEDENT> _execute.record_gradient( "Log1p", _inputs_flat, _attrs, _result, name) <NEW_LINE> _result, = _result <NEW_LINE> return _result
Computes natural logarithm of (1 + x) element-wise. I.e., \\(y = \log_e (1 + x)\\). Args: x: A `Tensor`. Must be one of the following types: `half`, `float32`, `float64`, `complex64`, `complex128`. name: A name for the operation (optional). Returns: A `Tensor`. Has the same type as `x`.
625941b66aa9bd52df036bb0
def get_ref_sha1(self, ref): <NEW_LINE> <INDENT> (ret, sha1) = self.call("show-ref", "--verify", "--hash", ref, raises=False) <NEW_LINE> if ret == 0: <NEW_LINE> <INDENT> return sha1
Return the sha1 from a ref. None if not found.
625941b64d74a7450ccd3fd1
def reset( self): <NEW_LINE> <INDENT> self.candidates = []
Resets the candidates for the dominant wave; this should not be necessary on a rolling platform. Args: None Returns: None Raises: None
625941b6f9cc0f698b140414
@app.route('/updateNames', methods=["PUT"]) <NEW_LINE> @hasRole <NEW_LINE> @login_required <NEW_LINE> @validUpdateNames <NEW_LINE> def updateNames(): <NEW_LINE> <INDENT> if request.json is None: <NEW_LINE> <INDENT> return jsonify(Error="An error has occurred. Please verify the submitted data."), 400 <NEW_LINE> <DEDENT> return UsersHandler().updateNames(request.json)
Route used to modify the name and last name of a user. :param: { "FName": "", "LName": "" } :return: A response object with a message stating that the update was successful
625941b630bbd722463cbbd0
def get_logger(logger_name: str) -> logging.Logger: <NEW_LINE> <INDENT> if logger_name not in logging.Logger.manager.loggerDict and logger_name not in logging.root.manager.loggerDict: <NEW_LINE> <INDENT> raise ValueError('Logger "'+str(logger_name)+'" not defined.') <NEW_LINE> <DEDENT> return logging.getLogger(logger_name)
Returns an already existing logger. Args: logger_name: Name of the logger to get. Returns: The logger object. Raises: ValueError: If the logger does not exist.
625941b6293b9510aa2c30a7
def fibonacci(n, d): <NEW_LINE> <INDENT> if n in d: <NEW_LINE> <INDENT> return d[n] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = fibonacci(n-2, d) + fibonacci(n-1, d) <NEW_LINE> d[n] = x <NEW_LINE> return x
input: n, an int >= 0; d, a memoization dict that must be pre-seeded with the base cases output: nth Fibonacci number (see the usage sketch below)
625941b6796e427e537b03d0
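A usage sketch for the memoized fibonacci above; the memo dict must be pre-seeded, and the seeding below (fib(1)=1, fib(2)=2) is one common convention for this variant:

memo = {1: 1, 2: 2}          # assumed base cases for this variant
print(fibonacci(6, memo))    # 13
print(sorted(memo.items()))  # intermediate results stay cached in the dict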
def __init__( self, parameters=None, on_open_callback=None, on_open_error_callback=None, on_close_callback=None, stop_ioloop_on_close=False, custom_ioloop=None, on_signal_callback=None ): <NEW_LINE> <INDENT> self.ioloop = custom_ioloop or pyev.default_loop() <NEW_LINE> self._on_signal_callback = on_signal_callback <NEW_LINE> self._sigint_watcher = None <NEW_LINE> self._sigterm_watcher = None <NEW_LINE> self._io_watcher = None <NEW_LINE> self._active_timers = {} <NEW_LINE> self._stopped_timers = deque() <NEW_LINE> super(LibevConnection, self).__init__( parameters, on_open_callback, on_open_error_callback, on_close_callback, self.ioloop, stop_ioloop_on_close )
Create a new instance of the LibevConnection class, connecting to RabbitMQ automatically :param pika.connection.Parameters parameters: Connection parameters :param on_open_callback: The method to call when the connection is open :type on_open_callback: method :param on_open_error_callback: Method to call if the connection can't be opened :type on_open_error_callback: method :param on_close_callback: Method to call when the connection is closed :type on_close_callback: method :param bool stop_ioloop_on_close: Call ioloop.stop() if disconnected :param custom_ioloop: Override using the default IOLoop in libev :param on_signal_callback: Method to call if SIGINT or SIGTERM occur :type on_signal_callback: method
625941b66fb2d068a760eeaf
def certify_roman_invalid_spec_asdf_schema(): <NEW_LINE> <INDENT> pass
Required Roman test: confirm that a spectroscopic asdf file that does not conform to its schema definition triggers an error in DataModels. >>> old_state = test_config.setup(url="https://roman-crds-serverless.stsci.edu", observatory="roman", cache=test_config.CRDS_TESTING_CACHE) >>> certify.certify_file("data/roman_wfi16_grism_flat_invalid_schema.asdf", "roman_0003.pmap", observatory="roman") # doctest: +ELLIPSIS CRDS - INFO - Certifying 'data/roman_wfi16_grism_flat_invalid_schema.asdf' as 'ASDF' relative to context 'roman_0003.pmap' CRDS - ERROR - data/roman_wfi16_grism_flat_invalid_schema.asdf Validation error : mismatched tags, wanted 'tag:stsci.edu:asdf/time/time-1.1.0', got 'tag:yaml.org,2002:str' ... >>> test_config.cleanup(old_state)
625941b6462c4b4f79d1d4de
def display( self ): <NEW_LINE> <INDENT> print( self.name.center(80) ) <NEW_LINE> print() <NEW_LINE> for code, item in enumerate(self.item_list): <NEW_LINE> <INDENT> print( "({0}) {1}".format(string.ascii_lowercase[code], item.title) ) <NEW_LINE> <DEDENT> print( "(z) EXIT this program" )
Display the QuickTable menu.
625941b65166f23b2e1a4f67
def setDefault(self): <NEW_LINE> <INDENT> if self.valueList: <NEW_LINE> <INDENT> if self._default is not None: <NEW_LINE> <INDENT> self.setValue(self._default) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setValue(self.valueList[0])
Reset to the default value, falling back to the first entry of valueList when no default is set.
625941b6baa26c4b54cb0f32
def kthSmallest(self, root, k): <NEW_LINE> <INDENT> current_node = root <NEW_LINE> stack = [] <NEW_LINE> while stack or current_node: <NEW_LINE> <INDENT> if current_node: <NEW_LINE> <INDENT> stack.append(current_node) <NEW_LINE> current_node = current_node.left <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_node = stack.pop() <NEW_LINE> k -= 1 <NEW_LINE> if k == 0: <NEW_LINE> <INDENT> return current_node.val <NEW_LINE> <DEDENT> current_node = current_node.right <NEW_LINE> <DEDENT> <DEDENT> return
:type root: TreeNode :type k: int :rtype: int
625941b6bf627c535bc12fe5
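A usage sketch for the iterative in-order traversal above, assuming a minimal TreeNode and that the method sits on a LeetCode-style Solution class (both are hypothetical names here):

class TreeNode:
    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None

root = TreeNode(3)                 # BST with in-order sequence [1, 2, 3, 4]
root.left = TreeNode(1)
root.right = TreeNode(4)
root.left.right = TreeNode(2)
print(Solution().kthSmallest(root, 2))   # 2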
def __init__(self, contract_id=None, address=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._contract_id = None <NEW_LINE> self._address = None <NEW_LINE> self.discriminator = None <NEW_LINE> if contract_id is not None: <NEW_LINE> <INDENT> self.contract_id = contract_id <NEW_LINE> <DEDENT> self.address = address
SmartBchContractResponse - a model defined in OpenAPI
625941b67cff6e4e81117795
def problem_5(): <NEW_LINE> <INDENT> num_divisors = [] <NEW_LINE> for divisor in range(2, 20 + 1): <NEW_LINE> <INDENT> prime_factors = list_prime_factors(divisor) <NEW_LINE> unique_factors = set(prime_factors) <NEW_LINE> for factor in unique_factors: <NEW_LINE> <INDENT> factor_count_diff = prime_factors.count( factor) - num_divisors.count(factor) <NEW_LINE> if (factor_count_diff > 0): <NEW_LINE> <INDENT> num_divisors.extend([factor] * factor_count_diff) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> num = reduce((lambda x, y: x * y), num_divisors) <NEW_LINE> return num
What is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20?
625941b6d53ae8145f87a085
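A cross-check for the prime-factor accumulation above: the same answer falls out of a pairwise lcm folded over 1..20 via math.gcd:

from functools import reduce
from math import gcd

def lcm_1_to_20():
    # lcm(a, b) = a * b // gcd(a, b), folded over the range
    return reduce(lambda a, b: a * b // gcd(a, b), range(1, 21))

print(lcm_1_to_20())   # 232792560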
def append(self,data): <NEW_LINE> <INDENT> assert len(data) == 3 <NEW_LINE> with open(self.file,'a') as f_obj: <NEW_LINE> <INDENT> new_user_id = max([int(self.peek()[usr][2]) for usr in self.peek()]) <NEW_LINE> f_obj.write(','.join([data[0],hexmd5(data[1]),str(data[2]),str(new_user_id+1),'\n'])) <NEW_LINE> f_obj.close() <NEW_LINE> <DEDENT> return(0)
Append a new user record: .append([username, password, authentication]). The password is stored as an MD5 hex digest and the next sequential user id is assigned.
625941b66e29344779a62424
def IndexOf(self, value): <NEW_LINE> <INDENT> pass
IndexOf(self: StringCollection, value: str) -> int Searches for the specified string and returns the zero-based index of the first occurrence within the System.Collections.Specialized.StringCollection. value: The string to locate. The value can be null. Returns: The zero-based index of the first occurrence of value in the System.Collections.Specialized.StringCollection, if found; otherwise, -1.
625941b60383005118ecf3f3
def header(self, content): <NEW_LINE> <INDENT> header = Element('Header', ns=envns) <NEW_LINE> header.append(content) <NEW_LINE> return header
Build the B{<Header/>} for a SOAP outbound message. @param content: The header content. @type content: L{Element} @return: the SOAP header fragment. @rtype: L{Element}
625941b63539df3088e2e15a
def create_business_rules(self, hosts, services, hostgroups, servicegroups, macromodulations, timeperiods): <NEW_LINE> <INDENT> for item in self: <NEW_LINE> <INDENT> item.create_business_rules(hosts, services, hostgroups, servicegroups, macromodulations, timeperiods)
Loop on hosts or services and call SchedulingItem.create_business_rules :param hosts: hosts to link to :type hosts: alignak.objects.host.Hosts :param services: services to link to :type services: alignak.objects.service.Services :param hostgroups: hostgroups to link to :type hostgroups: alignak.objects.hostgroup.Hostgroups :param servicegroups: servicegroups to link to :type servicegroups: alignak.objects.servicegroup.Servicegroups :param macromodulations: macromodulations to link to :type macromodulations: alignak.objects.macromodulation.Macromodulations :param timeperiods: timeperiods to link to :type timeperiods: alignak.objects.timeperiod.Timeperiods :return: None
625941b6cc40096d61595762
def get( self, resource_group_name, network_interface_name, expand=None, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}' <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> if expand is not None: <NEW_LINE> <INDENT> query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, stream=False, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('NetworkInterface', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Gets information about the specified network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param expand: Expands referenced resources. :type expand: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: NetworkInterface or ClientRawResponse if raw=true :rtype: ~azure.mgmt.network.v2016_09_01.models.NetworkInterface or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941b699fddb7c1c9de1a2
def cancel_export_task(ExportTaskId=None): <NEW_LINE> <INDENT> pass
Cancels an active export task. The request removes all artifacts of the export, including any partially-created Amazon S3 objects. If the export task is complete or is in the process of transferring the final disk image, the command fails and returns an error. See also: AWS API Documentation :example: response = client.cancel_export_task( ExportTaskId='string' ) :type ExportTaskId: string :param ExportTaskId: [REQUIRED] The ID of the export task. This is the ID returned by CreateInstanceExportTask .
625941b63c8af77a43ae35ad
def process_song_file(cur, filepath): <NEW_LINE> <INDENT> df = pd.read_json(filepath, lines=True) <NEW_LINE> song_data = list(df[['song_id','title','artist_id','year', 'duration']].values[0]) <NEW_LINE> cur.execute(song_table_insert, song_data) <NEW_LINE> artist_data = list(df[['artist_id','artist_name','artist_location','artist_latitude','artist_longitude']].values[0]) <NEW_LINE> cur.execute(artist_table_insert, artist_data)
This function processes a song data file and inserts records into the song and artist tables.
625941b6711fe17d8254218a
def bark2hz(z): <NEW_LINE> <INDENT> return 600. * numpy.sinh(z / 6.)
Converts frequencies from the Bark scale to Hertz (Hz). :param z: frequency on the Bark scale :return: frequency in Hz
625941b6090684286d50eaee
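The conversion above is the sinh-based Bark approximation, so its inverse is a one-liner; a sketch of the matching hz2bark under that assumption:

import numpy

def hz2bark(f):
    # Inverse of bark2hz above: z = 6 * asinh(f / 600)
    return 6. * numpy.arcsinh(f / 600.)

print(bark2hz(hz2bark(1000.)))   # 1000.0 (round-trips)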
def num_of_paths(from_x: int, from_y: int) -> int: <NEW_LINE> <INDENT> if from_x == 0 or from_y == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return num_of_paths(from_x - 1, from_y) + num_of_paths(from_x, from_y - 1)
Calculates the number of paths from the given point to the origin (0, 0). :param from_x: int :param from_y: int :return: int
625941b6462c4b4f79d1d4df
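A closed-form check for num_of_paths above: every path is a sequence of x steps along one axis and y along the other, so the count is the binomial coefficient C(x + y, x):

from math import comb

assert num_of_paths(3, 2) == comb(3 + 2, 3) == 10
assert num_of_paths(5, 5) == comb(10, 5) == 252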
def configure_ptagent(ptagent_config_file): <NEW_LINE> <INDENT> with open(ptagent_config_file) as fp: <NEW_LINE> <INDENT> lines = fp.readlines() <NEW_LINE> <DEDENT> configs = {"rest_ip": "0.0.0.0:8086", "rest_auth": "disabled", "monitor_restarts_on_exit": "enabled"} <NEW_LINE> lines = [line for line in lines if line.strip()] <NEW_LINE> for key in configs: <NEW_LINE> <INDENT> new_lines = lines[:] <NEW_LINE> for i, line in enumerate(lines): <NEW_LINE> <INDENT> if key in line: <NEW_LINE> <INDENT> new_lines.pop(i) <NEW_LINE> new_line = "%s=%s\n" % (key, configs[key]) <NEW_LINE> new_lines.insert(i, new_line) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> new_lines.append("%s=%s\n" % (key, configs[key])) <NEW_LINE> <DEDENT> lines = new_lines[:] <NEW_LINE> <DEDENT> with open(ptagent_config_file, "w") as fp: <NEW_LINE> <INDENT> fp.writelines(lines)
Args: ptagent_config_file(str): path to the config file. Returns: None
625941b6460517430c393f9e
def __generate_params(self, callback_url='', oauth_token='', oauth_verifier=''): <NEW_LINE> <INDENT> timestamp = int(time.time()) <NEW_LINE> nonce = generate_nonce() <NEW_LINE> print('timestamp', timestamp) <NEW_LINE> print('nonce', nonce) <NEW_LINE> print('-----------------------------------------\r\n') <NEW_LINE> params = OrderedDict() <NEW_LINE> if callback_url: <NEW_LINE> <INDENT> params['oauth_callback'] = callback_url <NEW_LINE> <DEDENT> params['oauth_consumer_key'] = self.consumer_key <NEW_LINE> params['oauth_nonce'] = nonce <NEW_LINE> params['oauth_signature_method'] = 'HMAC-SHA1' <NEW_LINE> params['oauth_timestamp'] = timestamp <NEW_LINE> if oauth_token: <NEW_LINE> <INDENT> params['oauth_token'] = oauth_token <NEW_LINE> <DEDENT> if oauth_verifier: <NEW_LINE> <INDENT> params['oauth_verifier'] = oauth_verifier <NEW_LINE> <DEDENT> params['oauth_version'] = '1.0' <NEW_LINE> return params
Generate the OAuth authorization header parameters. :param callback_url: :param oauth_token: :param oauth_verifier: :return:
625941b6e5267d203edcdab0
def load_data(): <NEW_LINE> <INDENT> data = np.load("mnist-hw1.pkl", allow_pickle=True) <NEW_LINE> train_data = tf.data.Dataset.from_tensor_slices(data["train"]) <NEW_LINE> test_data = tf.data.Dataset.from_tensor_slices(data["test"]) <NEW_LINE> return train_data, test_data
Not a perfect split, as there should be some validation data. Uses the test data as validation here since our interest is generative. :return: train data, test data as tf.data.Datasets
625941b68a349b6b435e7f83
def export_to_vtk(self, **kwds): <NEW_LINE> <INDENT> if "data" in kwds: <NEW_LINE> <INDENT> super(Experiment, self).export_to_vtk(**kwds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> recompute = kwds.get("recompute", True) <NEW_LINE> if recompute: <NEW_LINE> <INDENT> import pynoddy <NEW_LINE> import pynoddy.output <NEW_LINE> tmp_his_file = "tmp_section.his" <NEW_LINE> tmp_out_file = "tmp_section_out" <NEW_LINE> if "model_type" in kwds and (kwds['model_type'] == 'base'): <NEW_LINE> <INDENT> import copy <NEW_LINE> tmp_his = copy.deepcopy(self) <NEW_LINE> tmp_his.events = self.base_events.copy() <NEW_LINE> tmp_his.write_history(tmp_his_file) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write_history(tmp_his_file) <NEW_LINE> <DEDENT> pynoddy.compute_model(tmp_his_file, tmp_out_file) <NEW_LINE> super(Experiment, self).set_basename(tmp_out_file) <NEW_LINE> super(Experiment, self).load_model_info() <NEW_LINE> super(Experiment, self).load_geology() <NEW_LINE> <DEDENT> super(Experiment, self).export_to_vtk(**kwds)
Export model to VTK Export the geology blocks to VTK for visualisation of the entire 3-D model in an external VTK viewer, e.g. Paraview. ..Note:: Requires pyevtk, available for free on: https://github.com/firedrakeproject/firedrake/tree/master/python/evtk **Optional keywords**: - *vtk_filename* = string : filename of VTK file (default: output_name) - *data* = np.array : data array to export to VTK (default: entire block model) - *recompute* = bool : recompute the block model (default: True) - *model_type* = 'current', 'base' : model type (base "frozen" model can be plotted for comparison) ..Note:: If data is defined, the model is not recomputed and the data from this array is plotted
625941b631939e2706e4cc80
def return_file_and_folder_links(html, url): <NEW_LINE> <INDENT> soup = bs4.BeautifulSoup(html, 'html.parser') <NEW_LINE> all_links = soup.find_all('a', href=True) <NEW_LINE> relative_directories = [] <NEW_LINE> for link in all_links: <NEW_LINE> <INDENT> if link['href'].endswith('/') and link.get('href')[:1] != '/': <NEW_LINE> <INDENT> relative_directories.append(link.get('href')) <NEW_LINE> <DEDENT> <DEDENT> relative_file_links = [ link.get('href') for link in all_links if not link['href'].endswith('/')] <NEW_LINE> absolute_file_links = [ url + link if not re.match('^(https://|http://)', link) else link for link in relative_file_links] <NEW_LINE> absolute_directories = [] <NEW_LINE> for link in relative_directories: <NEW_LINE> <INDENT> if not re.match('^(https://|http://)', link) and link != url: <NEW_LINE> <INDENT> absolute_directories.append(url + link) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> absolute_directories.append(link) <NEW_LINE> <DEDENT> <DEDENT> absolute_file_links = purge_negative_links(absolute_file_links, url) <NEW_LINE> absolute_directories = purge_negative_links(absolute_directories, url) <NEW_LINE> return absolute_directories, absolute_file_links
Parse HTML and return a list of urls to directories and file links
625941b65fcc89381b1e14d3
def hash_file(filename: str) -> str: <NEW_LINE> <INDENT> func = hashlib.md5() <NEW_LINE> with open(filename, "rb") as f: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> block = f.read(1024 * func.block_size) <NEW_LINE> if not block: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> func.update(block) <NEW_LINE> <DEDENT> <DEDENT> return func.hexdigest()
https://stackoverflow.com/questions/22733826
625941b6377c676e91271fb9
def status(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Method to query status of the plugin activity
625941b666656f66f7cbbfb9
def strategy(self, opponent: Player) -> Action: <NEW_LINE> <INDENT> if not self.history: <NEW_LINE> <INDENT> return C <NEW_LINE> <DEDENT> last_round = (self.history[-1], opponent.history[-1]) <NEW_LINE> if last_round == (C, C) or last_round == (D, D): <NEW_LINE> <INDENT> return C <NEW_LINE> <DEDENT> return D
Actual strategy definition that determines player's action.
625941b63d592f4c4ed1ce90
def get_parser(language): <NEW_LINE> <INDENT> if language not in parser_routing_dict.keys(): <NEW_LINE> <INDENT> raise KeyError( f'"{language}" is not a valid option for parser language.') <NEW_LINE> <DEDENT> return parser_routing_dict[language]()
get parser function This function routes the user to the appropriate parser. Inputs: - language (str) - string specifying which wiktionary to use Possible language values: - wiktionary | en | english | enwiki | enwiktionary - english wiktionary parser - wikisłownik | wikislownik | pl | polish | plwiki | plwiktionary - polish wiktionary parser
625941b62ae34c7f2600cf41
def call(self, inputs, state): <NEW_LINE> <INDENT> input_size = inputs.get_shape()[1] <NEW_LINE> if tensor_shape.dimension_value(input_size) is None: <NEW_LINE> <INDENT> raise ValueError("Could not infer input size from inputs.get_shape()[-1]") <NEW_LINE> <DEDENT> feedforward_weight, gate_weight = array_ops.split( value=self.kernel, num_or_size_splits=[tensor_shape.dimension_value(input_size), 2 * self.units], axis=0) <NEW_LINE> feedforward = math_ops.matmul(inputs, feedforward_weight) <NEW_LINE> feedforward = self.activation(feedforward) <NEW_LINE> gate_inputs = math_ops.matmul( array_ops.concat([feedforward, state], 1), gate_weight) <NEW_LINE> gate_inputs = nn_ops.bias_add(gate_inputs, self.bias) <NEW_LINE> u = math_ops.sigmoid(gate_inputs) <NEW_LINE> new_h = u * state + (1 - u) * feedforward <NEW_LINE> return new_h, new_h
Run one step of MinimalRNN. Args: inputs: input Tensor, must be 2-D, `[batch, input_size]`. state: state Tensor, must be 2-D, `[batch, state_size]`. Returns: A tuple containing: - Output: A `2-D` tensor with shape `[batch_size, state_size]`. - New state: A `2-D` tensor with shape `[batch_size, state_size]`. Raises: ValueError: If input size cannot be inferred from inputs via static shape inference.
625941b615fb5d323cde0918
def GetSQLInput(): <NEW_LINE> <INDENT> SQLInput = "" <NEW_LINE> try: <NEW_LINE> <INDENT> SQLInput = input() <NEW_LINE> if SQLInput.startswith("--"): SQLInput = "" <NEW_LINE> while not (SQLInput.endswith(';') or SQLInput.upper().endswith('.EXIT')): <NEW_LINE> <INDENT> line = input() <NEW_LINE> if line.startswith("--"): line = "" <NEW_LINE> SQLInput += ' ' + line <NEW_LINE> <DEDENT> return SQLInput <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> SQLInput += "\n" <NEW_LINE> return SQLInput
Purpose : Get SQL input from the user at the interpreter Note : Must end with either ; or .exit (or some variation) Parameters : None Returns: A string containing the SQL input entered by the user.
625941b6dc8b845886cb5343
def replay_max_iterations(self, new_dag, label, ignore_runtime_stats=False): <NEW_LINE> <INDENT> if self.transform_dag: <NEW_LINE> <INDENT> log.info("Transforming dag") <NEW_LINE> new_dag = self.transform_dag(new_dag) <NEW_LINE> log.info("Proceeding with normal replay") <NEW_LINE> <DEDENT> for i in range(0, self.max_replays_per_subsequence): <NEW_LINE> <INDENT> bug_found = self.replay(new_dag, label, ignore_runtime_stats=ignore_runtime_stats) <NEW_LINE> if bug_found: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return (bug_found, i)
Attempt to reproduce the bug up to self.max_replays_per_subsequence times. Returns a tuple (bug found, 0-indexed iteration at which bug was found)
625941b61d351010ab85592d
def add_invalid_references(self, r: Iterable[InvalidSourceReference]): <NEW_LINE> <INDENT> self.invalid_references.extend(r)
Calls `add_invalid_reference` for each element in `r`.
625941b6099cdd3c635f0a6c
def to_dict(self): <NEW_LINE> <INDENT> result = self.get_properties() <NEW_LINE> for name in self.section_names: result[name] = self.sections[name].to_dict() <NEW_LINE> return result
Serializes this object's properties and all named sections to a dict. :return: the dict representation
625941b65166f23b2e1a4f68
def srvctl(self, service_name, action): <NEW_LINE> <INDENT> __LOG.log_d("action '%s' on service %s" % (action, service_name)) <NEW_LINE> config = retrieve_config_infos(self, service_name, action) <NEW_LINE> if config is None: <NEW_LINE> <INDENT> __LOG.log_c("action '%s' on service %s aborted" % (action, service_name)) <NEW_LINE> return False <NEW_LINE> <DEDENT> context = { 'service_name': service_name, 'action': action, 'step': None } <NEW_LINE> fapiexec = config['use_sudo'] and self.trk.fapi.sudo or self.trk.fapi.run <NEW_LINE> if config['pre'] is not None: <NEW_LINE> <INDENT> context.update({'step': 'pre'}) <NEW_LINE> run = config['pre'] % (context) <NEW_LINE> out = fapiexec(run, nocheck=True) <NEW_LINE> __LOG.log_d('pre out: %s' % (out)) <NEW_LINE> if out.failed: <NEW_LINE> <INDENT> __LOG.log_c('pre command failed: %s' % (run)) <NEW_LINE> __LOG.log_c('output message: %s' % (out)) <NEW_LINE> if config['fallback'] != None: <NEW_LINE> <INDENT> out = fapiexec(config['fallback'] % (context)) <NEW_LINE> __LOG.log_d('fallback out: %s' % (out)) <NEW_LINE> <DEDENT> return (not out.failed, out) <NEW_LINE> <DEDENT> <DEDENT> context.update({'step': 'cmd'}) <NEW_LINE> run = config['cmd'] % (context) <NEW_LINE> out = fapiexec(run, nocheck=True) <NEW_LINE> __LOG.log_d('cmd out: %s' % (out)) <NEW_LINE> if out.failed: <NEW_LINE> <INDENT> __LOG.log_c('command failed: %s' % (run)) <NEW_LINE> __LOG.log_c('output message: %s' % (out)) <NEW_LINE> if config['fallback'] is not None: <NEW_LINE> <INDENT> out = fapiexec(config['fallback'] % (context)) <NEW_LINE> __LOG.log_d('fallback out: %s' % (out)) <NEW_LINE> <DEDENT> return (not out.failed, out) <NEW_LINE> <DEDENT> if config['post'] is not None: <NEW_LINE> <INDENT> context.update({'step': 'post'}) <NEW_LINE> run = config['post'] % (context) <NEW_LINE> out = fapiexec(run, nocheck = True) <NEW_LINE> __LOG.log_d('post out: %s' % (out)) <NEW_LINE> if out.failed: <NEW_LINE> <INDENT> __LOG.log_c('post command failed: %s' % (run)) <NEW_LINE> __LOG.log_c('output message: %s' % (out)) <NEW_LINE> if config['fallback'] is not None: <NEW_LINE> <INDENT> out = fapiexec(config['fallback'] % (context)) <NEW_LINE> __LOG.log_d('fallback out: %s' % (out)) <NEW_LINE> <DEDENT> return (not out.failed, out) <NEW_LINE> <DEDENT> <DEDENT> return (not out.failed, out)
Control lambda services based on configuration files. Both arguments are strings and should refer to config entries. See also the configuration documentation for more details. This method returns a tuple containing: - the return boolean - the fabric api execution object (or None)
625941b626238365f5f0ec78
def make_acct_entries_discount(user, invoice, acct_entry, d, **kwargs): <NEW_LINE> <INDENT> myamount = d['original_invoice_total'] - invoice.total <NEW_LINE> if d['original_invoice_balance'] <= 0: <NEW_LINE> <INDENT> discount_number = d['discount_account_number'] <NEW_LINE> reverse_sale = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> discount_number = '220000' <NEW_LINE> reverse_sale = False <NEW_LINE> <DEDENT> acct = Acct.objects.get(account_number=discount_number) <NEW_LINE> AcctTran.objects.create_acct_tran(user, acct_entry, acct, myamount) <NEW_LINE> acct = Acct.objects.get(account_number=120000) <NEW_LINE> AcctTran.objects.create_acct_tran(user, acct_entry, acct, myamount * (-1)) <NEW_LINE> if reverse_sale: <NEW_LINE> <INDENT> acct = Acct.objects.get(account_number=120000) <NEW_LINE> AcctTran.objects.create_acct_tran(user, acct_entry, acct, myamount) <NEW_LINE> acct = Acct.objects.get(account_number=106000) <NEW_LINE> AcctTran.objects.create_acct_tran(user, acct_entry, acct, myamount * (-1))
Payment has now been received and we want to update the accounting entries ***in this case the original accounting entry is different than the current invoice total - adjust the discount accounts accordingly DEBIT Discount Account (L) CREDIT Accounts Receivable (A) NOTE - For the purpose of storing the amounts in tendenci, all credits will be a negative number.
625941b6498bea3a759b98c1
def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PaginatedJSONListView, self).get_context_data(**kwargs) <NEW_LINE> page = context.pop('page_obj') <NEW_LINE> paginator = context.pop('paginator') <NEW_LINE> count_only = self.get_count_only() <NEW_LINE> if paginator: <NEW_LINE> <INDENT> pages = paginator.num_pages <NEW_LINE> count = paginator.count <NEW_LINE> ispag = page.has_other_pages() <NEW_LINE> perpage = paginator.per_page <NEW_LINE> ppage = page.previous_page_number() if page.has_previous() else None <NEW_LINE> npage = page.next_page_number() if page.has_next() else None <NEW_LINE> cpage = page.number <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pages = 1 <NEW_LINE> count = self.get_queryset().count() <NEW_LINE> ispag = False <NEW_LINE> perpage = count <NEW_LINE> cpage = 1 <NEW_LINE> npage = None <NEW_LINE> ppage = None <NEW_LINE> <DEDENT> _data = {'pages': pages, 'count': count, 'per_page': perpage, 'is_paginated': ispag, 'current': cpage, 'pre': ppage, 'next': npage} <NEW_LINE> if count_only: <NEW_LINE> <INDENT> return _data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> context.update(_data) <NEW_LINE> <DEDENT> return context
Removes paginator objects and instead supplies the pages and the count data as part of the paginated framework. Leaves in the ``is_paginated`` boolean value. Also tests to see if get_count_only is True -- if so, it returns only the pages and the count rather than the entire context.
625941b629b78933be1e54c9
def shaker_sort(a: MutableSequence) -> None: <NEW_LINE> <INDENT> left = 0 <NEW_LINE> right = len(a) -1 <NEW_LINE> last = right <NEW_LINE> while left < right: <NEW_LINE> <INDENT> for j in range(right, left, -1): <NEW_LINE> <INDENT> if a[j-1] > a[j]: <NEW_LINE> <INDENT> a[j-1], a[j] = a[j], a[j-1] <NEW_LINE> last = j <NEW_LINE> <DEDENT> <DEDENT> left = last <NEW_LINE> for j in range(left, right): <NEW_LINE> <INDENT> if a[j] > a[j+1]: <NEW_LINE> <INDENT> a[j], a[j+1] = a[j+1], a[j] <NEW_LINE> last = j <NEW_LINE> <DEDENT> <DEDENT> right = last
Shaker sort (bidirectional bubble sort); see the usage sketch below.
625941b64a966d76dd550e1b
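A usage sketch for shaker_sort above; the sort runs in place on any mutable sequence:

data = [9, 1, 8, 2, 7, 3]
shaker_sort(data)
print(data)   # [1, 2, 3, 7, 8, 9]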
def sanitize_sequence(s): <NEW_LINE> <INDENT> seq = s.upper() <NEW_LINE> for a in seq: <NEW_LINE> <INDENT> if a not in configs.AAs: <NEW_LINE> <INDENT> error_exit('---- Found invalid amino acid (%s)'%(a)) <NEW_LINE> <DEDENT> <DEDENT> return seq
Function that parses protein string (one-letter amino acid sequence) and verifies each residue can be correctly dealt with
625941b6379a373c97cfa95a
def testCompTextFile (self, cName, source, projSty = None) : <NEW_LINE> <INDENT> if self.cType == 'usfm' : <NEW_LINE> <INDENT> if self.project.components[cName].usfmTextFileIsValid(source, projSty) : <NEW_LINE> <INDENT> self.project.log.writeToLog('TEXT-150', [source]) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> else : <NEW_LINE> <INDENT> self.project.log.writeToLog('TEXT-005', [self.cType]) <NEW_LINE> self.tools.dieNow()
This will direct a request to the proper validator for testing the source of a component text file.
625941b67d43ff24873a2ab3
def validate_all_keys(obj_name, obj, validation_fun): <NEW_LINE> <INDENT> for key, value in obj.items(): <NEW_LINE> <INDENT> validation_fun(obj_name, key) <NEW_LINE> if isinstance(value, dict): <NEW_LINE> <INDENT> validate_all_keys(obj_name, value, validation_fun)
Validate all (nested) keys in `obj` by using `validation_fun`. Args: obj_name (str): name for `obj` being validated. obj (dict): dictionary object. validation_fun (function): function used to validate the value of `key`. Returns: None: indicates validation successful Raises: ValidationError: `validation_fun` will raise this error on failure
625941b6507cdc57c6306ae2
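A usage sketch for validate_all_keys above with a hypothetical validation_fun; the ValidationError class here is a stand-in for the project's own exception:

class ValidationError(Exception):
    """Stand-in for the project's ValidationError."""

def no_dotted_keys(obj_name, key):
    # Reject keys containing a dot, at any nesting depth
    if '.' in key:
        raise ValidationError('{}: key {!r} may not contain "."'.format(obj_name, key))

validate_all_keys('metadata', {'a': 1, 'b': {'ok': 2}}, no_dotted_keys)   # passes
validate_all_keys('metadata', {'a': {'x.y': 3}}, no_dotted_keys)          # raises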
def stop_trigger(self, trigger: Trigger): <NEW_LINE> <INDENT> target_trigger = TriggerManager.get_tname(trigger) <NEW_LINE> self.log.debug("stopping {}".format(target_trigger)) <NEW_LINE> self.running[target_trigger].shutdown() <NEW_LINE> del self.running[target_trigger] <NEW_LINE> self.trigger_dao.delete_trigger(trigger.ensemble_id, trigger.name)
Stop a trigger thread. :param trigger: the trigger to be stopped :type trigger: Trigger
625941b60a50d4780f666c9e
def test_reject(self): <NEW_LINE> <INDENT> self.form.data = {'reject': True} <NEW_LINE> self.assertTrue(self.form.is_valid()) <NEW_LINE> edit = self.form.save() <NEW_LINE> self.assertEqual(edit.status, EditRequest.REJECTED)
Rejection updates status
625941b621a7993f00bc7af8
def rnn_wrapper(encoder, inputs, lens, cell='lstm'): <NEW_LINE> <INDENT> sorted_lens, sort_key = torch.sort(lens, descending=True) <NEW_LINE> nonzero_num, total_num = torch.sum(sorted_lens > 0).item(), sorted_lens.size(0) <NEW_LINE> sort_key = sort_key[:nonzero_num] <NEW_LINE> sorted_inputs = torch.index_select(inputs, dim=0, index=sort_key) <NEW_LINE> packed_inputs = rnn_utils.pack_padded_sequence(sorted_inputs, sorted_lens[:nonzero_num].tolist(), batch_first=True) <NEW_LINE> packed_out, sorted_h = encoder(packed_inputs) <NEW_LINE> sorted_out, _ = rnn_utils.pad_packed_sequence(packed_out, batch_first=True) <NEW_LINE> if cell.upper() == 'LSTM': <NEW_LINE> <INDENT> sorted_h, sorted_c = sorted_h <NEW_LINE> <DEDENT> out_shape = list(sorted_out.size()) <NEW_LINE> out_shape[0] = total_num <NEW_LINE> out = sorted_out.new_zeros(*out_shape).scatter_(0, sort_key.unsqueeze(-1).unsqueeze(-1).repeat(1, *out_shape[1:]), sorted_out) <NEW_LINE> h_shape = list(sorted_h.size()) <NEW_LINE> h_shape[1] = total_num <NEW_LINE> h = sorted_h.new_zeros(*h_shape).scatter_(1, sort_key.unsqueeze(0).unsqueeze(-1).repeat(h_shape[0], 1, h_shape[-1]), sorted_h) <NEW_LINE> if cell.upper() == 'LSTM': <NEW_LINE> <INDENT> c = sorted_c.new_zeros(*h_shape).scatter_(1, sort_key.unsqueeze(0).unsqueeze(-1).repeat(h_shape[0], 1, h_shape[-1]), sorted_c) <NEW_LINE> return out, (h.contiguous(), c.contiguous()) <NEW_LINE> <DEDENT> return out, h.contiguous()
@args: encoder(nn.Module): rnn series bidirectional encoder, batch_first=True inputs(torch.FloatTensor): rnn inputs, [bsize x max_seq_len x in_dim] lens(torch.LongTensor): seq len for each sample, allow length=0, padding with 0-vector, [bsize] @return: out(torch.FloatTensor): output of encoder, bsize x max_seq_len x hidden_dim*2 hidden_states([tuple of ]torch.FloatTensor): final hidden states, num_layers*2 x bsize x hidden_dim
625941b68e71fb1e9831d5bd
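A usage sketch for rnn_wrapper above with hypothetical shapes; the encoder must be bidirectional and batch_first, and zero-length rows come back as zero vectors:

import torch
import torch.nn as nn

encoder = nn.LSTM(input_size=16, hidden_size=32, batch_first=True, bidirectional=True)
inputs = torch.randn(4, 10, 16)        # bsize=4, max_seq_len=10, in_dim=16
lens = torch.tensor([10, 7, 3, 0])     # a length of 0 (all padding) is allowed
out, (h, c) = rnn_wrapper(encoder, inputs, lens, cell='lstm')
print(out.shape)   # torch.Size([4, 10, 64]) -> hidden_dim * 2 directions
print(h.shape)     # torch.Size([2, 4, 32])  -> num_layers * 2 directions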
@click.command('stations') <NEW_LINE> @click.pass_context <NEW_LINE> @cli_options.OPTION_COUNTRY <NEW_LINE> @cli_options.OPTION_ENV <NEW_LINE> @cli_options.OPTION_VERBOSITY <NEW_LINE> @click.option('--program', '-p', help='Program Affiliation') <NEW_LINE> @click.option('--station-type', '-st', help='Station type', type=click.Choice(FACILITY_TYPE_LOOKUP)) <NEW_LINE> def stations(ctx, env, program=None, country=None, station_type=None, verbosity=None): <NEW_LINE> <INDENT> if verbosity is not None: <NEW_LINE> <INDENT> logging.basicConfig(level=getattr(logging, verbosity)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.getLogger(__name__).addHandler(logging.NullHandler()) <NEW_LINE> <DEDENT> o = OSCARClient(env=env) <NEW_LINE> matching_stations = o.get_stations(program=program, country=country) <NEW_LINE> result = json.dumps(matching_stations, indent=4) <NEW_LINE> click.echo(f'Number of stations: {len(matching_stations)}\nStations:\n{result}')
get list of OSCAR stations
625941b656b00c62f0f1446c
def to_json(self, file=None): <NEW_LINE> <INDENT> def write_episodes(f): <NEW_LINE> <INDENT> for episode in self: <NEW_LINE> <INDENT> json.dump({'sequence_of_events': episode, 'abs_support': episode.abs_support, 'rel_support': episode.rel_support, 'allow_intermediate_events': episode.allow_intermediate_events}, fp=f, ensure_ascii=False) <NEW_LINE> f.write('\n') <NEW_LINE> <DEDENT> <DEDENT> if file is None: <NEW_LINE> <INDENT> f = StringIO() <NEW_LINE> write_episodes(f) <NEW_LINE> f.seek(0) <NEW_LINE> return f.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(file, mode='w') as f: <NEW_LINE> <INDENT> write_episodes(f)
Write episodes to file or return a string of episodes. If file is given, creates file *file*. Each line of output contains episode serialized by json. The lines of the output created by to_json() and examples_to_json() correspond to each other. Parameters ---------- file: str (default: None) Name of the file of episodes. Returns ------- str If file is not given, returns string of episodes serialized by json.
625941b60fa83653e4656dcd
def _populateQueue(stream, queue): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> line = stream.readline() <NEW_LINE> if line: <NEW_LINE> <INDENT> queue.put(line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass
Collect lines from 'stream' and put them in 'queue'.
625941b626068e7796caeae7
def p_comparison_op(p): <NEW_LINE> <INDENT> p[0] = p[1]
comparison_op : GE | GT | LE | LT | NE | EQUAL | EQUAL_NULL
625941b626068e7796caeae8
def getParams(self,xs,ys,parnames=None,contracty=True): <NEW_LINE> <INDENT> if len(xs) != len(ys): <NEW_LINE> <INDENT> raise ValueError("xs and ys don't match") <NEW_LINE> <DEDENT> return np.array([self.getParam(x,y,parnames,contracty) for x,y in zip(xs,ys)])
Get parameters for an array of inputs - `xs` and `ys` should be matching sequences where each item matches the input and output types of the models. For more info, see ModelSequence.getParam
625941b6dc8b845886cb5344
def computeMinExtentLattice(self,minextent=0): <NEW_LINE> <INDENT> extent=self.context.attributesPrime(set()) <NEW_LINE> intent=self.context.objectsPrime(extent) <NEW_LINE> curConcept=formalConcept(extent,intent,self.context.indexList(intent)) <NEW_LINE> self.concepts=[curConcept] <NEW_LINE> self.extentToConceptDict[curConcept.extent]=curConcept <NEW_LINE> curConceptIndex=0 <NEW_LINE> numComputedConcepts=0 <NEW_LINE> while True: <NEW_LINE> <INDENT> lowerNeighbours=self.computeLowerNeighbours(curConcept,minextent) <NEW_LINE> for lowerNeighbour in lowerNeighbours: <NEW_LINE> <INDENT> lowerNeighbourIndex=bisect.bisect(self.concepts,lowerNeighbour) <NEW_LINE> if lowerNeighbourIndex==0 or self.concepts[lowerNeighbourIndex-1]!=lowerNeighbour: <NEW_LINE> <INDENT> self.concepts.insert(lowerNeighbourIndex,lowerNeighbour) <NEW_LINE> <DEDENT> curConcept.lowerNeighbours+=[lowerNeighbour] <NEW_LINE> lowerNeighbour.upperNeighbours+=[curConcept] <NEW_LINE> <DEDENT> curConceptIndex+=1 <NEW_LINE> if curConceptIndex>=len(self.concepts): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> curConcept=self.concepts[curConceptIndex] <NEW_LINE> numComputedConcepts+=1 <NEW_LINE> if numComputedConcepts % 100 == 0: <NEW_LINE> <INDENT> print("Computed lower neighbours of %d concepts" % numComputedConcepts,gc.collect()) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> self.numberConceptsAndComputeIntroduced()
Computes concepts and lattice. After completion, self.concepts contains a lectically ordered list of concepts.
625941b650812a4eaa59c135
def check_permissions(self): <NEW_LINE> <INDENT> error = None <NEW_LINE> install_dir = self.site.install_dir <NEW_LINE> if os.path.exists(install_dir): <NEW_LINE> <INDENT> if os.listdir(install_dir) != []: <NEW_LINE> <INDENT> error = ( 'The directory already contains files. Make sure you ' 'are providing a path for a new site directory.' ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> temp_file = os.path.join(install_dir, '.rb-site-tmp') <NEW_LINE> with open(temp_file, 'w'): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> os.unlink(temp_file) <NEW_LINE> <DEDENT> except (IOError, OSError): <NEW_LINE> <INDENT> error = ( "The directory could not be written to. Make sure " "it has the correct permissions for your user, " "or that you're running as an administrator." ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.mkdir(install_dir) <NEW_LINE> os.rmdir(install_dir) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> error = ( "The directory could not be created. Make sure the " "parent directory is writable by your user, or that " "you're running as an administrator." ) <NEW_LINE> <DEDENT> <DEDENT> if error: <NEW_LINE> <INDENT> console.error('Unable to install a new Review Board site in %s. %s' % (install_dir, error)) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
Check that permissions are usable. If not, this will show an error to the user.
625941b67c178a314d6ef267
def win(self): <NEW_LINE> <INDENT> title = 'پیروزی!' <NEW_LINE> message = 'شما با موفقیت به 2048 رسیدید' <NEW_LINE> btn_go_on = {'text': 'ادامه دادن', 'type': 'InfoButton', 'on_press': 'app.stop_win_sound'} <NEW_LINE> popup = dynamic_popup(title, message, [btn_go_on]) <NEW_LINE> if self.is_stopped_background_music is False: <NEW_LINE> <INDENT> self.toggle_music() <NEW_LINE> popup.on_dismiss = self.toggle_music <NEW_LINE> <DEDENT> self.winSound = SoundLoader.load('data/audio/win.mp3') <NEW_LINE> self.winSound.play()
Shows the victory message ('You have successfully reached 2048') in a popup; if the background music is playing it is paused while the popup is open, and the victory sound is played.
625941b676d4e153a657e93f
def _diff_set(self, level): <NEW_LINE> <INDENT> t1_hashtable = self._create_hashtable(level, 't1') <NEW_LINE> t2_hashtable = self._create_hashtable(level, 't2') <NEW_LINE> t1_hashes = set(t1_hashtable.keys()) <NEW_LINE> t2_hashes = set(t2_hashtable.keys()) <NEW_LINE> hashes_added = t2_hashes - t1_hashes <NEW_LINE> hashes_removed = t1_hashes - t2_hashes <NEW_LINE> items_added = [t2_hashtable[i].item for i in hashes_added] <NEW_LINE> items_removed = [t1_hashtable[i].item for i in hashes_removed] <NEW_LINE> for item in items_added: <NEW_LINE> <INDENT> if self._count_diff() is StopIteration: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> change_level = level.branch_deeper( notpresent, item, child_relationship_class=SetRelationship) <NEW_LINE> self._report_result('set_item_added', change_level) <NEW_LINE> <DEDENT> for item in items_removed: <NEW_LINE> <INDENT> if self._count_diff() is StopIteration: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> change_level = level.branch_deeper( item, notpresent, child_relationship_class=SetRelationship) <NEW_LINE> self._report_result('set_item_removed', change_level)
Difference of sets
625941b694891a1f4081b8b7
def save_source_fields(self): <NEW_LINE> <INDENT> if self.include_original_fields and len(self.predicate_uuids) > 0: <NEW_LINE> <INDENT> limit_obs = False <NEW_LINE> if isinstance(self.obs_limits, list): <NEW_LINE> <INDENT> if len(self.obs_limits) > 0: <NEW_LINE> <INDENT> limit_obs = True <NEW_LINE> <DEDENT> <DEDENT> pred_uuid_list = [] <NEW_LINE> for predicate_uuid, pred_dict in self.predicate_uuids.items(): <NEW_LINE> <INDENT> field_num = self.get_add_predicate_field_number(predicate_uuid) <NEW_LINE> pred_uuid_list.append(predicate_uuid) <NEW_LINE> <DEDENT> rows = UUIDsRowsExportTable(self.table_id).rows <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> if limit_obs: <NEW_LINE> <INDENT> item_data = Assertion.objects.filter(uuid=row['uuid'], predicate_uuid__in=pred_uuid_list, obs_num__in=self.obs_limits) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item_data = Assertion.objects.filter(uuid=row['uuid'], predicate_uuid__in=pred_uuid_list) <NEW_LINE> <DEDENT> if len(item_data) > 0: <NEW_LINE> <INDENT> self.add_source_cells(row['uuid'], row['row_num'], item_data)
Creates fields for source data, then saves records of source data for each item in the export table
625941b6d18da76e235322e1
def test_combined(self): <NEW_LINE> <INDENT> subj = hawkey.Subject(INP_FOF) <NEW_LINE> nevras = subj.nevra_possibilities() <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish', epoch=8, version='3.6.9', release='11.fc100', arch='x86_64')) <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish', epoch=8, version='3.6.9', release='11.fc100.x86_64', arch=None)) <NEW_LINE> self.assertRaises(StopIteration, next, nevras) <NEW_LINE> subj = hawkey.Subject(INP_FOF_NOEPOCH) <NEW_LINE> nevras = subj.nevra_possibilities() <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish', epoch=None, version='3.6.9', release='11.fc100', arch='x86_64')) <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish-3.6.9-11.fc100', epoch=None, version=None, release=None, arch='x86_64')) <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish-3.6.9-11.fc100.x86_64', epoch=None, version=None, release=None, arch=None)) <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish', epoch=None, version='3.6.9', release='11.fc100.x86_64', arch=None)) <NEW_LINE> self.assertEqual(next(nevras), NEVRA(name='four-of-fish-3.6.9', epoch=None, version='11.fc100.x86_64', release=None, arch=None)) <NEW_LINE> self.assertRaises(StopIteration, next, nevras)
Test we get all the possible NEVRA parses.
625941b645492302aab5e0cf
def remove_stats(self, ieee: str, handler): <NEW_LINE> <INDENT> self._extra_state_attributes.pop(ieee)
remove gateway stats
625941b6a8ecb033257d2ee6
def find_average_similarity(sen, sen_set, voting_dict): <NEW_LINE> <INDENT> result = 0 <NEW_LINE> for n in sen_set: <NEW_LINE> <INDENT> new_result = policy_compare(sen, n, voting_dict) <NEW_LINE> result += new_result <NEW_LINE> <DEDENT> return result / len(sen_set)
Input: the name of a senator, a set of senator names, and a voting dictionary. Output: the average dot-product between sen and those in sen_set. Example: >>> vd = {'Klein': [1,1,1], 'Fox-Epstein': [1,-1,0], 'Ravella': [-1,0,0]} >>> find_average_similarity('Klein', {'Fox-Epstein','Ravella'}, vd) -0.5
625941b6a219f33f34628785
def wait_for_data(self, timeout: float = 0.2) -> bool: <NEW_LINE> <INDENT> start_time = time.time() <NEW_LINE> while time.time() - start_time < timeout: <NEW_LINE> <INDENT> if self.interface.in_waiting: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> time.sleep(0.02) <NEW_LINE> <DEDENT> return False
Wait for :timeout: seconds or until there is data in the input buffer. Parameters ---------- timeout : float, optional Time in seconds to wait. The default is 0.2. Returns ------- bool True iff the input buffer is not empty.
625941b6d8ef3951e324334d
def test_parser(self): <NEW_LINE> <INDENT> parser = create_parser(['/localsytem/']) <NEW_LINE> self.assertFalse(parser.overwrite_title)
Checks that the parser is generated correctly (overwrite_title defaults to False).
625941b67c178a314d6ef268
def takeoff(self, alt_min=30, takeoff_throttle=1700, require_absolute=True, mode="STABILIZE", timeout=30): <NEW_LINE> <INDENT> self.progress("TAKEOFF") <NEW_LINE> self.change_mode(mode) <NEW_LINE> if not self.armed(): <NEW_LINE> <INDENT> self.wait_ready_to_arm(require_absolute=require_absolute) <NEW_LINE> self.zero_throttle() <NEW_LINE> self.arm_vehicle() <NEW_LINE> <DEDENT> self.set_rc(3, takeoff_throttle) <NEW_LINE> self.wait_for_alt(alt_min=alt_min, timeout=timeout) <NEW_LINE> self.hover() <NEW_LINE> self.progress("TAKEOFF COMPLETE")
Take off and climb to alt_min altitude (30 m by default).
625941b65fdd1c0f98dc0041
def ai_move(self): <NEW_LINE> <INDENT> sleep(1) <NEW_LINE> if self._lvl == "Easy": <NEW_LINE> <INDENT> possible_moves = self.get_correct_moves() <NEW_LINE> self.make_move(possible_moves[0]) <NEW_LINE> <DEDENT> elif self._lvl == "Medium": <NEW_LINE> <INDENT> possible_moves = self.get_correct_moves() <NEW_LINE> self.make_move(choice(possible_moves)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> possible_moves = self.get_correct_moves() <NEW_LINE> good_moves = [(0, 0), (0, self._field.size - 1), (self._field.size - 1, 0), (self._field.size - 1, self._field.size - 1)] <NEW_LINE> moved = False <NEW_LINE> for coords in good_moves: <NEW_LINE> <INDENT> if coords in possible_moves: <NEW_LINE> <INDENT> self.make_move(coords) <NEW_LINE> moved = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not moved: <NEW_LINE> <INDENT> greedy = dict(zip(map(lambda x: len(self.is_correct_move(x)), possible_moves), possible_moves)) <NEW_LINE> self.make_move(greedy[max(greedy.keys())])
AI makes move.
625941b615baa723493c3d82
@blog_blueprint.route('/tag/<string:tag_name>') <NEW_LINE> def tag(tag_name): <NEW_LINE> <INDENT> tag = db.session.query(Tag).filter_by(name=tag_name).first_or_404() <NEW_LINE> posts = tag.posts.order_by(Post.publish_date.desc()).all() <NEW_LINE> recent, top_tags = sidebar_data() <NEW_LINE> return render_template('blog/tag.html', tag=tag, posts=posts, recent=recent, top_tags=top_tags)
View function for tag page
625941b64e4d5625662d41ed
def clean_resume_file(self): <NEW_LINE> <INDENT> resume_file = self.cleaned_data.get('resume_file') <NEW_LINE> if get_file_mimetype(resume_file) != 'application/pdf': <NEW_LINE> <INDENT> raise forms.ValidationError('Uploaded file must be a PDF file.') <NEW_LINE> <DEDENT> return resume_file
Check if uploaded file is of an acceptable format.
625941b6b57a9660fec3368f
def download_sqlite_file(): <NEW_LINE> <INDENT> bucket_name = os.environ["BUCKET_NAME"] <NEW_LINE> try: <NEW_LINE> <INDENT> client = boto3.client( "s3", aws_access_key_id=os.environ["KEY_ID"], aws_secret_access_key=os.environ["ACCESS_KEY"], ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return "Problem initializing client {}".format(e) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> client.download_file(bucket_name, "data.db", "/tmp/datas3.db") <NEW_LINE> return "OK" <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return "Problem downloading file {}".format(e)
Fetch the SQLite database file from S3 using boto3.
625941b60a366e3fb873e627
def _generate_insert_sql(pardic, param_list, table): <NEW_LINE> <INDENT> columns = "(" <NEW_LINE> values = "(" <NEW_LINE> for param in param_list: <NEW_LINE> <INDENT> if not pardic[param] is None: <NEW_LINE> <INDENT> columns += (param + ", ") <NEW_LINE> values += "'%s', " % (pardic[param],) <NEW_LINE> <DEDENT> <DEDENT> columns = columns[:-2] + ')' <NEW_LINE> values = values[:-2] + ')' <NEW_LINE> sql = "INSERT INTO %s %s VALUES %s;" % (table, columns, values) <NEW_LINE> return sql
generate the sql for an insert command Args: pardic (dict): (same as given to calling function) param_list (list): list of names of parameters to insert table (str): name of table Returns: sql string
625941b6e5267d203edcdab1
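A usage sketch for _generate_insert_sql above; None-valued parameters are skipped, and because values are interpolated as quoted strings, a parameterized query (cursor.execute with placeholders) would be the safer pattern for untrusted input:

pardic = {'name': 'M82', 'ra': 148.97, 'dec': None}
sql = _generate_insert_sql(pardic, ['name', 'ra', 'dec'], 'objects')
print(sql)   # INSERT INTO objects (name, ra) VALUES ('M82', '148.97');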
@project.command("delete") <NEW_LINE> @click.argument( "provider", required=True, metavar="PROVIDER", type=click.Choice(Provider.choices), callback=Provider.click_callback, ) <NEW_LINE> @click.argument("project", type=str, required=True) <NEW_LINE> @click.pass_obj <NEW_LINE> @handle_client_errors(m404="Project not found.") <NEW_LINE> def project_delete(config, provider, project): <NEW_LINE> <INDENT> if config.client.project_delete(provider=provider, project=project): <NEW_LINE> <INDENT> click.echo(f"Project {provider.value}:{project} successfully deleted.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> click.secho(f"Failed to delete {provider}/{project}", fg="red")
Delete project.
625941b6596a8972360898da
def hug(self): <NEW_LINE> <INDENT> print(self.name + ' hugs you back') <NEW_LINE> return True
Hugs your friend
625941b69f2886367277a6a1
@log_with(log) <NEW_LINE> def get_token_by_otp(token_list, otp="", window=10): <NEW_LINE> <INDENT> result_token = None <NEW_LINE> result_list = [] <NEW_LINE> for token in token_list: <NEW_LINE> <INDENT> log.debug("checking token {0!r}".format(token.get_serial())) <NEW_LINE> try: <NEW_LINE> <INDENT> r = token.check_otp_exist(otp=otp, window=window) <NEW_LINE> log.debug("result = {0:d}".format(int(r))) <NEW_LINE> if r >= 0: <NEW_LINE> <INDENT> result_list.append(token) <NEW_LINE> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> log.warning("error in calculating OTP for token {0!s}: " "{1!s}".format(token.token.serial, err)) <NEW_LINE> <DEDENT> <DEDENT> if len(result_list) == 1: <NEW_LINE> <INDENT> result_token = result_list[0] <NEW_LINE> <DEDENT> elif result_list: <NEW_LINE> <INDENT> raise TokenAdminError('multiple tokens are matching this OTP value!', id=1200) <NEW_LINE> <DEDENT> return result_token
Search the token_list for the token that creates the given OTP value. The tokenobject_list would be created by get_tokens() :param token_list: the list of token objects to be investigated :type token_list: list of token objects :param otp: the otp value that needs to be found :type otp: basestring :param window: the window of search :type window: int :return: The token that creates this OTP value :rtype: Tokenobject
625941b6287bf620b61d3880
def distance_to_line(self,line): <NEW_LINE> <INDENT> if self.is_parallel(line): <NEW_LINE> <INDENT> return self.distance_to_point(line.P0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> w0=self.P0-line.P0 <NEW_LINE> u=self.vL <NEW_LINE> v=line.vL <NEW_LINE> a=u.dot(u) <NEW_LINE> b=u.dot(v) <NEW_LINE> c=v.dot(v) <NEW_LINE> d=u.dot(w0) <NEW_LINE> e=v.dot(w0) <NEW_LINE> sc=(b*e-c*d) / (a*c-b**2) <NEW_LINE> tc=(a*e-b*d) / (a*c-b**2) <NEW_LINE> Pc=self.calculate_point(sc) <NEW_LINE> Qc=line.calculate_point(tc) <NEW_LINE> return (Pc-Qc).length
Returns the distance from this line to the supplied line. :param line: A line. :type line: Line3D :return: The distance between the two lines. :rtype: float :Example: .. code-block:: python >>> l1 = Line3D(Point3D(0,0,0), Vector3D(1,0,0)) >>> l2 = Line3D(Point3D(0,1,0), Vector3D(0,0,1)) >>> result = l1.distance_to_line(l2) >>> print(result) 1 .. seealso:: `<https://geomalgorithms.com/a07-_distance.html>`_
625941b66fece00bbac2d54b
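A cross-check of the skew-line branch above using the equivalent cross-product form d = |w0 . (u x v)| / |u x v|, applied to the docstring's example:

import numpy as np

u, v = np.array([1., 0., 0.]), np.array([0., 0., 1.])
w0 = np.array([0., 0., 0.]) - np.array([0., 1., 0.])   # P0_self - P0_other
n = np.cross(u, v)
print(abs(w0 @ n) / np.linalg.norm(n))   # 1.0, matching the docstring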
def draw_tri(sideSize, turtleObject): <NEW_LINE> <INDENT> for i in range(3): <NEW_LINE> <INDENT> turtleObject.forward(sideSize) <NEW_LINE> turtleObject.right(120)
This function is used to draw an equilateral triangle of arbitrary size, with the starting position based on the position of the turtle object passed in.
625941b6046cf37aa974cb5b
def filter_pathname(val: Optional[str]) -> str: <NEW_LINE> <INDENT> return os.path.splitext(os.path.basename(val or ''))[0]
Base name of a path, without its extension.
625941b692d797404e303f9a
def run_analysis(): <NEW_LINE> <INDENT> input_dir=sys.argv[1] <NEW_LINE> file_list=os.listdir(input_dir) <NEW_LINE> input_file=input_dir+file_list[0] <NEW_LINE> Certified_Total=0.0 <NEW_LINE> occupations={} <NEW_LINE> working_states={} <NEW_LINE> with open(input_file) as jsonfile: <NEW_LINE> <INDENT> records=json.load(jsonfile) <NEW_LINE> <DEDENT> pprint(records["orders"][0]["line_items"])
The main function: reads the JSON file from the input directory and starts parsing.
625941b638b623060ff0abff
def materialize(strategy: tf.distribute.Strategy, value_or_nested_dict): <NEW_LINE> <INDENT> if isinstance(value_or_nested_dict, dict): <NEW_LINE> <INDENT> nested_dict = value_or_nested_dict <NEW_LINE> return { key: materialize(strategy, value) for key, value in nested_dict.items() } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return tf.concat( strategy.experimental_local_results(value_or_nested_dict), axis=0).numpy()
Materializes PerReplica values (possibly in a nested dict) locally. Args: strategy: The strategy that will be used to evaluate. value_or_nested_dict: Either a single `PerReplica` object, or a nested dict with `PerReplica` values at the deepest level. Returns: Same type and format as the input, with PerReplica values replaced by the corresponding concatenated numpy arrays.
625941b61f5feb6acb0c4966
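A hedged sketch of where this helper fits; the step function and tensor shapes here are illustrative:

import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()

@tf.function
def step():
    return {"logits": tf.ones([4, 2])}

per_replica = strategy.run(step)            # dict of PerReplica values
local = materialize(strategy, per_replica)  # {"logits": array of shape [4 * num_replicas, 2]}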
def clean_done(self): <NEW_LINE> <INDENT> ongoing = [] <NEW_LINE> status_file = "/tmp/farm-%d" % (os.getpid()) <NEW_LINE> os.system("squeue > %s 2>/dev/null" % status_file) <NEW_LINE> f = open(status_file) <NEW_LINE> f.readline() <NEW_LINE> me = getpass.getuser() <NEW_LINE> for l in f: <NEW_LINE> <INDENT> toks = list(filter(lambda x: x != "", l.rstrip().split(" "))) <NEW_LINE> user = toks[3] <NEW_LINE> if user != me: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> pid = toks[0] <NEW_LINE> ongoing.append(int(pid)) <NEW_LINE> <DEDENT> f.close() <NEW_LINE> my_dels = [] <NEW_LINE> for r_idx, p in enumerate(self.running): <NEW_LINE> <INDENT> if p not in ongoing: <NEW_LINE> <INDENT> my_dels.append(r_idx) <NEW_LINE> <DEDENT> <DEDENT> my_dels.reverse() <NEW_LINE> for my_del in my_dels: <NEW_LINE> <INDENT> del self.running[my_del]
Removes dead processes from the running list.
625941b67b25080760e3926c
def finalize(self): <NEW_LINE> <INDENT> utpbinder.set_stopflag(self.cobj, 1) <NEW_LINE> self.thread.join() <NEW_LINE> utpbinder.finalize(self.cobj)
Destructor: stops the thread and calls utpbinder's finalization.
625941b6f548e778e58cd38c
def makingList(self,n): <NEW_LINE> <INDENT> l = [""] <NEW_LINE> while (n>0): <NEW_LINE> <INDENT> res1 = ["1" + x for x in l] <NEW_LINE> res2 = ["0" + x for x in l] <NEW_LINE> res1.extend(res2) <NEW_LINE> l = res1 <NEW_LINE> n -= 1 <NEW_LINE> <DEDENT> return l
Returns a list of all possible binary strings of length n, built from the two choices 0 and 1, e.g. for n=2: 00, 01, 10, 11
625941b621bff66bcd684766
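The same result can be built idiomatically with itertools.product; note the ordering differs from makingList, which prepends "1"-strings before "0"-strings:

from itertools import product

def making_list(n):
    # All binary strings of length n, in lexicographic order.
    return ["".join(bits) for bits in product("01", repeat=n)]

print(making_list(2))   # ['00', '01', '10', '11']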
def size(self): <NEW_LINE> <INDENT> return self._encode_invoke(transactional_queue_size_codec)
Transactional implementation of :func:`Queue.size() <hazelcast.proxy.queue.Queue.size>` :return: (int), size of the queue.
625941b6cb5e8a47e48b78c0
def dump_obj_as_yaml_to_file(filename, obj): <NEW_LINE> <INDENT> with io.open(filename, 'w', encoding="utf-8") as output: <NEW_LINE> <INDENT> _dump_yaml(obj, output)
Writes data (a Python dict) to the given filename in YAML representation.
625941b6be8e80087fb20a5f
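`_dump_yaml` is not shown; assuming it wraps something like PyYAML's safe_dump, an equivalent standalone version looks like this:

import io
import yaml

def dump_yaml(filename, obj):
    with io.open(filename, "w", encoding="utf-8") as output:
        yaml.safe_dump(obj, output, default_flow_style=False)

dump_yaml("config.yml", {"name": "demo", "retries": 3})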
def convert(self, value, invalid, measurement_system=DisplayMeasure.metric): <NEW_LINE> <INDENT> return [FieldValue(self, value, invalid, **{self._name: self._convert_many(value, invalid)})]
Return a FieldValue as interpreted by the field's rules.
625941b616aa5153ce362289
def get_conditions_series(simulator, reactor, solution, basics= ['time','temperature','pressure','density','volume','enthalpy','internal energy']): <NEW_LINE> <INDENT> conditions = pd.Series() <NEW_LINE> if 'time' in basics: <NEW_LINE> <INDENT> conditions['time (s)'] = simulator.time <NEW_LINE> <DEDENT> if 'temperature' in basics: <NEW_LINE> <INDENT> conditions['temperature (K)'] = solution.T <NEW_LINE> <DEDENT> if 'pressure' in basics: <NEW_LINE> <INDENT> conditions['pressure (Pa)'] = solution.P <NEW_LINE> <DEDENT> if 'density' in basics: <NEW_LINE> <INDENT> conditions['density (kmol/m3)'] = solution.density_mole <NEW_LINE> <DEDENT> if 'volume' in basics: <NEW_LINE> <INDENT> conditions['volume (m3)'] = reactor.volume <NEW_LINE> <DEDENT> if 'cp' in basics: <NEW_LINE> <INDENT> conditions['heat capacity, cp (J/kmol/K)'] = solution.cp_mole <NEW_LINE> <DEDENT> if 'cv' in basics: <NEW_LINE> <INDENT> conditions['heat capacity, cv (J/kmol/K)'] = solution.cv_mole <NEW_LINE> <DEDENT> if 'enthalpy' in basics: <NEW_LINE> <INDENT> conditions['enthalpy (J/kg)'] = solution.enthalpy_mass <NEW_LINE> <DEDENT> if 'internal energy' in basics: <NEW_LINE> <INDENT> conditions['internal energy (J/kg)'] = solution.int_energy_mass <NEW_LINE> <DEDENT> return conditions
Returns the current conditions of a ReactorNet simulation (simulator) and its Solution object as a pd.Series. simulator = the ReactorNet object of the simulation solution = the Solution object to pull values from basics = a list of state variables to save The following are enabled for the conditions: * time * temperature * pressure * density * volume * cp (constant pressure heat capacity) * cv (constant volume heat capacity) * enthalpy * internal energy
625941b6e8904600ed9f1d39
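A hedged Cantera sketch showing the objects this helper expects; the mechanism file and initial state are assumptions for illustration:

import cantera as ct

gas = ct.Solution("gri30.yaml")
gas.TPX = 1200.0, ct.one_atm, "CH4:1, O2:2, N2:7.52"
reactor = ct.IdealGasReactor(gas)
sim = ct.ReactorNet([reactor])
sim.advance(1e-3)   # integrate to t = 1 ms

conditions = get_conditions_series(sim, reactor, gas,
                                   basics=["time", "temperature", "pressure"])
print(conditions)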
def parse_R718WA(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> if data[2] == 0x01: <NEW_LINE> <INDENT> return self.parse_by_format(data, [ ( "batt", self.parse_batt, 3, 4 ), ( "adc_raw_value_1", self.parse_number, 4, 6 ), ( "adc_raw_value_2", self.parse_number, 6, 8 ), ]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("ERROR: unsupported DataType {} for DeviceType {} of netvox" .format(data[2],data[1])) <NEW_LINE> return False
Device: R718IA2, R718IB2, R730IA2, R730IB2 nb_bytes Field 1 DeviceType: 0x41, 0x42, 0x76, 0x77 1 ReportType: 0x01 1 Battery, 0.1V 2 ADCRawValue1, 1mV 2 ADCRawValue2, 1mV 3 Reserved
625941b6c432627299f04a55
def test_empty_objects(self): <NEW_LINE> <INDENT> for json_obj in [{}, [], 0, '', False]: <NEW_LINE> <INDENT> obj = self.json_model.objects.create(json=json_obj) <NEW_LINE> new_obj = self.json_model.objects.get(id=obj.id) <NEW_LINE> self.assertEqual(json_obj, obj.json) <NEW_LINE> self.assertEqual(json_obj, new_obj.json)
Test storing empty objects
625941b663b5f9789fde6ef6
def __init__(self, rwe, address): <NEW_LINE> <INDENT> self.rwe = rwe <NEW_LINE> self.address = address <NEW_LINE> self.readPCI = lambda : self.rwe.readPCI(self.address.Bus, self.address.Device, self.address.Function) <NEW_LINE> self.writePCI = lambda data: rwe.writePCI(self.address.Bus, self.address.Device, self.address.Function, data) <NEW_LINE> self.getPCIClassCode = lambda : self.rwe.getPCIClassCode(self.address.Bus, self.address.Device, self.address.Function) <NEW_LINE> self.getPCIBarAddresses = lambda : self.rwe.getPCIBarAddresses(self.address.Bus, self.address.Device, self.address.Function)
Brief: Initializer for the object. Takes an RWE instance and the PCI address.
625941b64a966d76dd550e1c
def __init__(self, settings): <NEW_LINE> <INDENT> self.bugs = get_reported_bugs()
Pull bug numbers out of local reports
625941b6099cdd3c635f0a6d
def tabRemoved(self): <NEW_LINE> <INDENT> return int()
int KTabWidget.tabRemoved()
625941b656ac1b37e6263ff1
def get_source(self, fullname): <NEW_LINE> <INDENT> import tokenize <NEW_LINE> path = self.get_filename(fullname) <NEW_LINE> try: <NEW_LINE> <INDENT> source_bytes = self.get_data(path) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> raise ImportError("source not available through get_data()", name=fullname) <NEW_LINE> <DEDENT> encoding = tokenize.detect_encoding(_io.BytesIO(source_bytes).readline) <NEW_LINE> newline_decoder = _io.IncrementalNewlineDecoder(None, True) <NEW_LINE> return newline_decoder.decode(source_bytes.decode(encoding[0]))
Concrete implementation of InspectLoader.get_source.
625941b631939e2706e4cc81
def __repr__(self): <NEW_LINE> <INDENT> return self.line_chart.render()
Returns the rendered chart.
625941b6627d3e7fe0d68c5f
def validate_winner(move, game_board: GameBoard): <NEW_LINE> <INDENT> _, char = move <NEW_LINE> data_board = game_board.data_pass_delegate(validate_winner) <NEW_LINE> if data_board is None: <NEW_LINE> <INDENT> raise GameInvalidAction(GameInvalidAction.generic_action_error) <NEW_LINE> <DEDENT> if data_board[1] == data_board[2] == data_board[3] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[4] == data_board[5] == data_board[6] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[7] == data_board[8] == data_board[9] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[1] == data_board[4] == data_board[7] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[2] == data_board[5] == data_board[8] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[3] == data_board[6] == data_board[9] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[1] == data_board[5] == data_board[9] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if data_board[3] == data_board[5] == data_board[7] == char: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Validates whether the character of the given move has completed a winning line. :param move: Move tuple :param game_board: GameBoard :return: Boolean
625941b6009cb60464c631cf
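The eight win conditions above can also be expressed data-driven; a sketch using the same 1-based board keys:

WINNING_LINES = [
    (1, 2, 3), (4, 5, 6), (7, 8, 9),   # rows
    (1, 4, 7), (2, 5, 8), (3, 6, 9),   # columns
    (1, 5, 9), (3, 5, 7),              # diagonals
]

def has_won(board, char):
    # True if any winning line holds the player's character in all three cells.
    return any(all(board[i] == char for i in line) for line in WINNING_LINES)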
def cookie_is_encoded(data): <NEW_LINE> <INDENT> return data.startswith('!') and '?' in data
Tests whether or not a cookie is encoded / HMAC signed -> #bool True if encoded .. from vital.security import cookie_is_encoded cookie_is_encoded( "!YuOoKwDp8GhrwwojdjTxSCj1c2Z+7yz7r6cC7E3hBWo=?IkhlbGxvLCB3b3JsZC4i") # -> True ..
625941b6d8ef3951e324334e
def OnHandleCreated(self,*args): <NEW_LINE> <INDENT> pass
OnHandleCreated(self: MonthCalendar,e: EventArgs) Overrides the System.Windows.Forms.Control.OnHandleCreated(System.EventArgs) method. e: An System.EventArgs that contains the event data.
625941b65fcc89381b1e14d5
def __call__(self,n_samples=None,threshold=None,write=False): <NEW_LINE> <INDENT> print('the number of samples is {}'.format(n_samples)) <NEW_LINE> if threshold is not None: <NEW_LINE> <INDENT> self.threshold=threshold <NEW_LINE> <DEDENT> if n_samples is None: <NEW_LINE> <INDENT> total_samples=len(self.x_train) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> total_samples=n_samples <NEW_LINE> <DEDENT> if write: <NEW_LINE> <INDENT> with open(os.path.join(self.results_dir,'different_correlation'),'a') as fp: <NEW_LINE> <INDENT> for i in tqdm(range(int(total_samples*(total_samples-1)/2)),ascii=True,desc='Rotation Test with write'): <NEW_LINE> <INDENT> first_image_name=choice(self.x_train) <NEW_LINE> second_image_name=choice(self.x_train) <NEW_LINE> while first_image_name==second_image_name: <NEW_LINE> <INDENT> second_image_name=choice(self.x_train) <NEW_LINE> <DEDENT> first_image=self.load_image(first_image_name) <NEW_LINE> second_image=self.load_image(second_image_name) <NEW_LINE> first_image_np=self.image_np(first_image) <NEW_LINE> second_image_np=self.image_np(second_image) <NEW_LINE> logging.debug(first_image_np.shape) <NEW_LINE> logging.debug(second_image_np.shape) <NEW_LINE> first_hash=self.find_output(first_image_np/255) <NEW_LINE> second_hash=self.find_output(second_image_np/255) <NEW_LINE> corr=np.corrcoef(first_hash,second_hash)[0][1] <NEW_LINE> logging.debug('correlation coefficient is {}'.format(corr)) <NEW_LINE> first_image.close() <NEW_LINE> second_image.close() <NEW_LINE> del first_image_np,first_hash <NEW_LINE> del second_image_np,second_hash <NEW_LINE> gc.collect() <NEW_LINE> if corr>=self.threshold: <NEW_LINE> <INDENT> self.fpr_counter+=1 <NEW_LINE> <DEDENT> fp.write(str(corr)+'\n') <NEW_LINE> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l1=np.zeros((total_samples*(total_samples-1))//2) <NEW_LINE> for i in tqdm(range(int(total_samples*(total_samples-1)/2)),ascii=True,desc='Rotation Test without write'): <NEW_LINE> <INDENT> first_image_name=choice(self.x_train) <NEW_LINE> second_image_name=choice(self.x_train) <NEW_LINE> while first_image_name==second_image_name: <NEW_LINE> <INDENT> second_image_name=choice(self.x_train) <NEW_LINE> <DEDENT> first_image=self.load_image(first_image_name) <NEW_LINE> second_image=self.load_image(second_image_name) <NEW_LINE> first_image_np=self.image_np(first_image) <NEW_LINE> second_image_np=self.image_np(second_image) <NEW_LINE> logging.debug(first_image_np.shape) <NEW_LINE> logging.debug(second_image_np.shape) <NEW_LINE> first_hash=self.find_output(first_image_np/255) <NEW_LINE> second_hash=self.find_output(second_image_np/255) <NEW_LINE> corr=np.corrcoef(first_hash,second_hash)[0][1] <NEW_LINE> logging.debug('correlation coefficient is {}'.format(corr)) <NEW_LINE> first_image.close() <NEW_LINE> second_image.close() <NEW_LINE> del first_image_np,first_hash <NEW_LINE> del second_image_np,second_hash <NEW_LINE> gc.collect() <NEW_LINE> if corr>=self.threshold: <NEW_LINE> <INDENT> self.fpr_counter+=1 <NEW_LINE> <DEDENT> l1[i]=corr <NEW_LINE> <DEDENT> <DEDENT> return l1,self.fpr_counter/total_samples
n_samples is the number of samples for which the discernibility test will be performed
625941b631939e2706e4cc82
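The core of each loop iteration, in isolation: correlate two hash vectors and count a false positive when the coefficient clears the threshold (vector length and threshold value here are illustrative):

import numpy as np

first_hash = np.random.rand(64)
second_hash = np.random.rand(64)
corr = np.corrcoef(first_hash, second_hash)[0][1]   # off-diagonal entry
false_positive = corr >= 0.85                       # illustrative threshold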
def increase_exc_level(self, path, entry): <NEW_LINE> <INDENT> self._modify_exception_level(path, entry, 1)
Increase the exception level of an access exception. Parameters ---------- path : unicode Path of the node containing the exception to increase. entry : unicode Entry whose access exception should be increased.
625941b68a43f66fc4b53e7b
def getInConnections(self, layer) : <NEW_LINE> <INDENT> return list(self.inConnections[layer])
return a layer's incoming connections
625941b6377c676e91271fbb
def convert_images2PDF_more_dirs(dirPath): <NEW_LINE> <INDENT> __dirs = {} <NEW_LINE> for parent, dirnames, filenames in os.walk(dirPath): <NEW_LINE> <INDENT> for dirname in dirnames: <NEW_LINE> <INDENT> dirData = {"name": "", "pages": [], "isBook": False} <NEW_LINE> dirName = dirname.split('/')[0] <NEW_LINE> dirData['name'] = dirName <NEW_LINE> __dirs[dirName] = dirData <NEW_LINE> <DEDENT> for filename in filenames: <NEW_LINE> <INDENT> real_filename = os.path.join(parent, filename) <NEW_LINE> parentDirName = real_filename.split('/')[-2] <NEW_LINE> if parentDirName in __dirs.keys(): <NEW_LINE> <INDENT> dirJsonData = __dirs[parentDirName] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if __isAllow_file(real_filename) : <NEW_LINE> <INDENT> dirJsonData['pages'].append(real_filename) <NEW_LINE> if not dirJsonData['isBook']: <NEW_LINE> <INDENT> dirJsonData['isBook'] = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> index = 1 <NEW_LINE> for dirName in __dirs.keys(): <NEW_LINE> <INDENT> dirData = __dirs[dirName] <NEW_LINE> if dirData['isBook']: <NEW_LINE> <INDENT> print("[*][PDF conversion] : started. [name] > [%s]" % (dirName)) <NEW_LINE> beginTime = time.perf_counter() <NEW_LINE> __converted(os.path.join(dirPath,(dirData['name'] + ".pdf")) , dirData['pages']) <NEW_LINE> endTime = time.perf_counter() <NEW_LINE> print("[*][PDF conversion] : finished. [name] > [%s] , took %f s " % (dirName, (endTime - beginTime))) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> <DEDENT> print("[*][All conversions done] : scanned %d directories, converted %d PDFs in total " % (len(__dirs), index - 1))
Convert the images under a directory (one PDF per subdirectory). :param dirPath: path of the directory to scan
625941b63317a56b86939a7b
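`__converted` and `__isAllow_file` are not shown above. A minimal Pillow-based stand-in for the conversion step, assuming pages is a list of image paths:

from PIL import Image

def converted(pdf_path, pages):
    # Open every page, normalize to RGB, and write one multi-page PDF.
    images = [Image.open(p).convert("RGB") for p in sorted(pages)]
    if images:
        images[0].save(pdf_path, save_all=True, append_images=images[1:])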
def sorted_filenames(name="*.vlsv"): <NEW_LINE> <INDENT> import glob <NEW_LINE> fileNames=glob.glob(name) <NEW_LINE> fileNames.sort() <NEW_LINE> return fileNames
Gets the file names in the current directory and sorts them. :param name: Name of the file(s), for example "*.vlsv" :returns: a list of file names in sorted order
625941b62ae34c7f2600cf43
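Note that plain lexicographic sorting puts "file10.vlsv" before "file2.vlsv"; when frames are numbered, a natural-sort key avoids that (this sketch assumes uniformly named files):

import glob
import re

def natural_sorted_filenames(name="*.vlsv"):
    # Split each name into text/number runs so numeric parts compare as ints.
    key = lambda s: [int(t) if t.isdigit() else t for t in re.split(r"(\d+)", s)]
    return sorted(glob.glob(name), key=key)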