code
stringlengths
0
30.8k
source
stringclasses
6 values
language
stringclasses
9 values
__index_level_0__
int64
0
100k
def clean_uv_fits(uv_fits_path, out_fits_path, stokes, beam=None, mapsize_clean=None, mapsize_fits_path=None, pixsize_fits_path=None, pixel_per_beam=None, mapsize=None, beamsize_fits_path=None, mapsize_restore=None, path_to_script=None, shift=None): stokes = list(stokes) stokes = [stoke.upper() for stoke in stokes] stokes.sort() curdir = os.getcwd() beam_pars = None if beam is not None: beam_pars = beam if beam_pars is None and beamsize_fits_path is not None: map_info = get_fits_image_info(beamsize_fits_path) beam_pars = (map_info['bmaj'] / mas_to_rad, map_info['bmin'] / mas_to_rad, map_info['bpa'] / degree_to_rad) map_pars = None if mapsize_clean is not None: map_pars = mapsize_clean if map_pars is None and mapsize_fits_path is not None: map_info = get_fits_image_info(mapsize_fits_path) map_pars = (map_info['imsize'][0], abs(map_info['pixsize'][0]) / mas_to_rad) pixsize = None if pixel_per_beam is not None: pixsize = beam_pars[0] / pixel_per_beam if pixsize is None and pixsize_fits_path is not None: map_info = get_fits_image_info(pixsize_fits_path) pixsize = abs(map_info['pixsize'][0]) / mas_to_rad imsize = map_pars[0] * abs(map_pars[1]) / abs(pixsize) print(imsize) powers = np.array([float(imsize) / (2 ** i) for i in range(15)]) print(powers) imsize = 2**(list(np.array(powers <= 1, dtype=int)).index(1)) map_pars = (imsize, pixsize) print "Selected image and pixel size: {}".format(map_pars) for stoke in stokes: print "Cleaning stokes {}", stoke uv_fits_dir, uv_fits_fname = os.path.split(uv_fits_path) out_fits_dir, out_fits_fname = os.path.split(out_fits_path) print("Cleaning {} to {} stokes {}, mapsize_clean {}, beam_restore" " {} with shift {}".format(uv_fits_fname, os.path.join(out_fits_path, out_fits_fname), stokes, map_pars, beam_pars, shift)) os.chdir(curdir)
function
python
800
def pprint_themroles(self, vnclass, indent=""): if isinstance(vnclass, str): vnclass = self.vnclass(vnclass) pieces = [] for themrole in self.themroles(vnclass): piece = indent + "* " + themrole.get("type") modifiers = [ modifier["value"] + modifier["type"] for modifier in themrole["modifiers"] ] if modifiers: piece += "[{}]".format(" ".join(modifiers)) pieces.append(piece) return "\n".join(pieces)
function
python
801
fn mark_user_as( conn: &PgConnection, user_id: i32, is_banned: bool ) -> FieldResult<User> { let res = users.find(user_id).get_result::<User>(conn); // Poor man's Ternary operator for error output text let msg = if is_banned { "banned" } else { "not banned" }; match res { Ok(user) => { if user.banned == is_banned { let err = FieldError::new( format!("User already marked as {}", msg), // TODO: better error output graphql_value!({ "cannot_update": "confict"}), ); FieldResult::Err(err) } else { let res = diesel::update(users.find(user_id)) .set(banned.eq(is_banned)) .get_result::<User>(conn); graphql_translate(res) } } Err(e) => FieldResult::Err(FieldError::from(e)), } }
function
rust
802
[HttpPost, ActionName("Delete")] [ValidateAntiForgeryToken] public async Task<IActionResult> DeleteConfirmed(decimal id) { var order = await _context.Order.FindAsync(id); _context.Order.Remove(order); await _context.SaveChangesAsync(); return RedirectToAction(nameof(Index)); }
function
c#
803
static DrmData * newItem(void) { DrmData *d = (DrmData *)malloc(sizeof(DrmData)); if (d != NULL) { d->id = -1; d->next = NULL; } return d; }
function
c
804
def load_from_app_path(app_path): if not app_path: raise SystemEntityError( "App path must be valid to load entity recognizer config." ) if is_duckling_configured(app_path): url = get_system_entity_url_config(app_path=app_path) return DucklingRecognizer.get_instance(url) else: return NoOpSystemEntityRecognizer.get_instance()
function
python
805
protected override bool AuthorizeCore(HttpContextBase httpContext) { if (httpContext == null) throw new ArgumentNullException("httpContext"); try { if (!GetApplicationContext().IsConfigured) { return true; } var umbCtx = GetUmbracoContext(); var isLoggedIn = GetUmbracoContext().Security.ValidateCurrentUser(); if (isLoggedIn) { return true; } return false; } catch (Exception) { return false; } }
function
c#
806
public class AggregatedTestExecutor extends TestExecutor { /** * Instantiates a new AggregatedTestExecutor * * @param queryGenerationFactory a QueryGenerationFactory */ public AggregatedTestExecutor(QueryGenerationFactory queryGenerationFactory) { super(queryGenerationFactory); } /** {@inheritDoc} */ @Override protected Collection<TestCaseResult> executeSingleTest(TestSource testSource, TestCase testCase) throws TestCaseExecutionException { int total = -1, prevalence = -1; try { Query prevalenceQuery = testCase.getSparqlPrevalenceQuery(); if (prevalenceQuery != null) { prevalence = getCountNumber(testSource.getExecutionFactory(), testCase.getSparqlPrevalenceQuery(), "total"); } else { Logger.getGlobal().warning("no prevalence pattern defined"); } } catch (QueryExceptionHTTP e) { if (SparqlUtils.checkStatusForTimeout(e)) { prevalence = -2; Logger.getGlobal().warning("Query timeout"); } else { prevalence = -3; Logger.getGlobal().warning(e.toString()); } } if (prevalence != 0) { // if prevalence !=0 calculate total try { total = getCountNumber(testSource.getExecutionFactory(), queryGenerationFactory.getSparqlQuery(testCase), "total"); // total = 10; } catch (QueryExceptionHTTP e) { if (SparqlUtils.checkStatusForTimeout(e)) { total = -1; } else { total = -2; } } } else { // else total will be 0 anyway total = 0; } // No need to throw exception here, class supports status return Collections.singletonList(new AggregatedTestCaseResultImpl(testCase, total, prevalence)); } public abstract class QueryExecutionFactoryBackQuery2 implements QueryExecutionFactory { @Override public QueryExecution createQueryExecution(String queryString) { Query query = QueryFactory.create(queryString, Syntax.syntaxARQ); QueryExecution result = createQueryExecution(query); return result; } @SuppressWarnings("unchecked") @Override public <T> T unwrap(Class<T> clazz) { T result = getClass().isAssignableFrom(clazz) ? 
(T)this : null; return result; } @Override public void close() { // Noop by default } } private int getCountNumber(QueryExecutionFactory model, Query query, String var) { checkNotNull(query); checkNotNull(var); int result = 0; try ( QueryExecution qe = model.createQueryExecution(query); ) { ResultSet results = qe.execSelect(); if (results != null && results.hasNext()) { QuerySolution qs = results.next(); result = qs.get(var).asLiteral().getInt(); } } catch ( RuntimeException re) { result = -2; } // System.out.println(query); // QueryExecution qe = model.createQueryExecution(query); // try { // ResultSet results = qe.execSelect(); // if (results != null && results.hasNext()) { // QuerySolution qs = results.next(); // result = qs.get(var).asLiteral().getInt(); // } // } catch (Exception e) { // System.out.println("catch"); // e.printStackTrace(); // result = -4; // } finally { // if(qe != null) qe.close(); // } return result; } }
class
java
807
public class ObservableTextWatcher extends Observable<String> implements TextWatcher { private ObservableTextWatcher() { } public static ObservableTextWatcher create() { return new ObservableTextWatcher(); } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { // do nothing } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { // do nothing } @Override public void afterTextChanged(Editable s) { String newText = s.toString(); if (Objects.equals(newText, value)) return; set(newText); } }
class
java
808
pub fn from<I, T>(args: I) -> Options where I: IntoIterator<Item = T>, T: Into<OsString> + Clone, { // create a new parser for our args let parser = Options::create_parser(); // parse out the arguments into matching opts let options = parser.get_matches_from(args); // attempt to parse the provided filter let filter = value_t!(options.value_of("filter"), FilterKind); // create opts Options { // grab and store statistics flags statistics: options.is_present("statistics"), // grab and store inversion flags inverted: options.is_present("invert"), // store the filter to use for unique detection filter: filter.unwrap_or(FilterKind::Digest), // own all inputs inputs: options .values_of("inputs") .unwrap() .map(|s| s.to_owned()) .collect(), } }
function
rust
809
public void Declare(ColumnSymbol column, List<Diagnostic> diagnostics, SyntaxNode location, bool replace = false) { if (_declaredNames.Contains(column.Name)) { diagnostics.Add(DiagnosticFacts.GetDuplicateColumnDeclaration(column.Name).WithLocation(location)); return; } if (replace && _columnIndexMap.TryGetValue(column.Name, out var index)) { _projection[index] = column; _declaredNames.Add(column.Name); } else { var added = Add(column); if (added != null) { _declaredNames.Add(added.Name); } } }
function
c#
810
public class COModelWithoutUI { protected COModelBuilder sBuilder; /** * This method constructs a new simulation and COModelBuilder, including an argument to specify * the percentage of faults to insert (or a specific fault to activate) * @param percentageFaults (double - value of 0..1 (incl) to set % of faults to be injected * into the model OR index of single fault to be injected) * @param mapNo (long - unique identifier for results files, may include search effort, run index, R/SB differentiation) * @param inWantRandomFaults (boolean - true if faults should be activated at random at supplied frequency, false to specify one active fault) */ public COModelWithoutUI(double percentageFaults, long mapNo, boolean inWantRandomFaults) { sBuilder = new COModelBuilder(new COModel( System.nanoTime(), Constants.WorldXVal, Constants.WorldYVal, false, percentageFaults, mapNo, inWantRandomFaults)); //System.out.println("COModelWithoutUI is being called!"+ "it's state(model)is: "+ sBuilder.getSim().toString()); } /** * Dummy constructor to get around some initialisation errors in RunComparison.java * * TODO: fix this in a more elegant way */ public COModelWithoutUI() { // DO nothing } /** * This method executes a batch run with the same external seed/map - this reuses the * COModel object and as a result single log/output files are used for the entire batch. * The map is regenerated during the start routine (see below) * @param noRuns (int - number of times to repeat the simulation on this map) * @param newExternalSeed (long - external seed to use to generate the map) */ public void runBatch(int noRuns, long newExternalSeed) { for (int i=0; i<noRuns; i++) { start(newExternalSeed); } } /** * This method updates the internal random seed and resets other simulation parameters, it sets * the external seed to the supplied parameter, and then generates the corresponding map. 
The * simulation is then executed in a loop until the simulation terminates on its own, or the number * of steps reaches 5500 - note that this loop constraint is unlikely to ever be reached as the * accident detector code contains a timeout at about 5000 steps which will cause the simulation * to terminate itself earlier. * @param newExternalSeed (long - external random seed to generate the map) */ public void start(long newExternalSeed) { //System.out.println("COModelWithoutUI.start is called "+ sBuilder.sim); sBuilder.updateSeed(new SecureRandom().nextInt()); sBuilder.sim.reset(); sBuilder.sim.setExternalSeed(newExternalSeed); sBuilder.generateSimulation(); sBuilder.sim.start(); do { if (!sBuilder.sim.schedule.step(sBuilder.sim)) { System.out.println("COModelWithoutUI.start finished on its own after "+ sBuilder.sim.schedule.getSteps() + " steps."); break; } } while (sBuilder.sim.schedule.getSteps() < 5500); // Report to the console that we have completed, and call finish() on the sim System.out.println("COModelWithoutUI finished."); sBuilder.sim.finish(); } }
class
java
811
def merge_fragments( self, fragments:list[geom.Polygon]) -> list[geom.Polygon]: if len(fragments) == 1: return fragments changes_made = True tile = self.tile.geometry[0] reg_tile = self.regularised_tile.geometry[0] while changes_made: changes_made = False for v in self.vectors: next_frags = [] t_fragments = [affine.translate(f, v[0], v[1]) for f in fragments] matches = set() for i, f1 in enumerate(fragments): for j, f2, in enumerate(t_fragments): if i != j and f1.distance(f2) < 1e-3: matches.add((i, j)) fragments_to_remove = set() for i, j in matches: f1 = fragments[i] f2 = t_fragments[j] u1 = (f1.buffer(self.fudge_factor) | f2.buffer(self.fudge_factor)) u2 = affine.translate(u1, -v[0], -v[1]) if tile.intersection(u1).area > tile.intersection(u2).area: next_frags.append(u1) reg_tile = reg_tile | u1 reg_tile = reg_tile - u2 else: next_frags.append(u2) reg_tile = reg_tile | u2 reg_tile = reg_tile - u1 changes_made = True fragments_to_remove.add(i) fragments_to_remove.add(j) fragments = [f for i, f in enumerate(fragments) if not (i in fragments_to_remove)] fragments = next_frags + fragments self.regularised_tile.geometry[0] = reg_tile return fragments
function
python
812
static void ips_cpu_lower(struct ips_driver *ips) { u64 turbo_override; u16 cur_limit, new_limit; rdmsrl(TURBO_POWER_CURRENT_LIMIT, turbo_override); cur_limit = turbo_override & TURBO_TDP_MASK; new_limit = cur_limit - 8; if (new_limit < (ips->orig_turbo_limit & TURBO_TDP_MASK)) new_limit = ips->orig_turbo_limit & TURBO_TDP_MASK; thm_writew(THM_MPCPC, (new_limit * 10) / 8); turbo_override |= TURBO_TDC_OVR_EN | TURBO_TDP_OVR_EN; wrmsrl(TURBO_POWER_CURRENT_LIMIT, turbo_override); turbo_override &= ~TURBO_TDP_MASK; turbo_override |= new_limit; wrmsrl(TURBO_POWER_CURRENT_LIMIT, turbo_override); }
function
c
813
def loading(file_path): total={} file_data = [] new_time = [] print("Loading files and classifing") for root, subdirs, files in os.walk(file_path): for file in files: array = np.fromfile(file_path +"/"+ file, dtype=np.float32) for root, subdirs, files in os.walk(file_path): for file in files: match=file.split("_")[1] date = pd.to_datetime(match, format = "%Y%m%d%H").strftime('%d/%m/%Y') time = (datetime.strptime(match, "%Y%m%d%H") + timedelta(hours=6)).strftime('%H:%M') new_time.append(date + " " + time) for file in os.listdir(file_path): start = 0 step_3d = 13*9*26 end = step_3d features_3d = { "HGTprs": {'dimesiones': [13, 9, 26], 'data': None}, "CLWMRprs": {'dimesiones': [13, 9, 26], 'data': None}, "RHprs": {'dimesiones': [13, 9, 26], 'data': None}, "Velprs": {'dimesiones': [13, 9, 26], 'data': None}, "UGRDprs": {'dimesiones': [13, 9, 26], 'data': None}, "VGRDprs": {'dimesiones': [13, 9, 26], 'data': None}, "TMPprs": {'dimesiones': [13, 9, 26], 'data': None} } end = end - step_3d step_2d = 13*9 end = end +step_2d features_2d = { "HGTsfc": {'dimesiones': [13, 9, 1], 'data': None}, "MSLETmsl": {'dimesiones': [13, 9, 1], 'data': None}, "PWATclm": {'dimesiones': [13, 9, 1], 'data': None}, "RH2m": {'dimesiones': [13, 9, 1], 'data': None}, "Vel100m": {'dimesiones': [13, 9, 1], 'data': None}, "UGRD100m": {'dimesiones': [13, 9, 1], 'data': None}, "VGRD100m": {'dimesiones': [13, 9, 1], 'data': None}, "Vel80m": {'dimesiones': [13, 9, 1], 'data': None}, "UGRD80m": {'dimesiones': [13, 9, 1], 'data': None}, "VGRD80m": {'dimesiones': [13, 9, 1], 'data': None}, "Vel10m":{'dimesiones': [13, 9, 1], 'data': None}, "UGRD10m": {'dimesiones': [13, 9, 1], 'data': None}, "VGRD10m": {'dimesiones': [13, 9, 1], 'data': None}, "GUSTsfc": {'dimesiones': [13, 9, 1], 'data': None}, "TMPsfc": {'dimesiones': [13, 9, 1], 'data': None}, "TMP2m": {'dimesiones': [13, 9, 1], 'data': None}, "no4LFTXsfc":{'dimesiones': [13, 9, 1], 'data': None}, "CAPEsfc": {'dimesiones': [13, 9, 1], 'data': None}, 
"SPFH2m": {'dimesiones': [13, 9, 1], 'data': None}, "SPFH80m": {'dimesiones': [13, 9, 1], 'data': None}, } size_3d = 13*9*26 array_3d = array[:size_3d*7] for variable, length in zip(features_3d.keys(), range(len(features_3d))): features_3d[variable]["data"] = array_3d[length*size_3d:(length +1)*size_3d] size_2d = 13*9 array_2d = array[size_3d*7:] for variable, length in zip(features_2d.keys(), range(len(features_2d))): features_2d[variable]["data"] = array_2d[length*size_2d:(length +1)*size_2d] file_data.append( { "file_name": file, "var_3d": features_3d, "var_2d":features_2d, }) for i in range(len(new_time)): total.update({new_time[i]:file_data[i]}) print("It's done!") return total
function
python
814
func (c *client) RunAndWait(stop <-chan struct{}) { c.kubeInformer.Start(stop) c.dynamicInformer.Start(stop) c.metadataInformer.Start(stop) c.istioInformer.Start(stop) c.serviceapisInformers.Start(stop) c.kubeInformer.WaitForCacheSync(stop) c.dynamicInformer.WaitForCacheSync(stop) c.metadataInformer.WaitForCacheSync(stop) c.istioInformer.WaitForCacheSync(stop) c.serviceapisInformers.WaitForCacheSync(stop) }
function
go
815
def is_str_subset(s1, s2): all_indices = [find_all_indices(s2.split(" "), x) for x in s1.split()] if not all(all_indices): return False for combination in itertools.product(*all_indices): if strictly_increasing(combination): return True return False
function
python
816
public static string FormatBytes(this ulong bytes) { int i; double dblSByte = bytes; for (i = 0; i < ByteSuffixes.Length && bytes >= 1024; i++, bytes /= 1024) { dblSByte = bytes / 1024.0; } return $"{dblSByte:0.##} {ByteSuffixes[i]}"; }
function
c#
817
private static String unquote(String url) { if (url.length() < 2) return url; char first = url.charAt(0); char last = url.charAt(url.length()-1); if ((first == '\'' && last == '\'') || first == '"' && last == '"') return url.substring(1, url.length()-1); else return url; }
function
java
818
private boolean expect(PushbackInputStream is, byte[] stuff) throws IOException { int len = stuff.length; boolean inNewline = false; for (int i = 0; i < len; ) { int c = is.read(); if (c == 10 || c == 13) { if (inNewline) { continue; } else { inNewline = true; c = 10; } } else { inNewline = false; } if (c != stuff[i++]) { return false; } } if (stuff[len - 1] == 10) { int c = is.read(); if (c != -1 && c != 10 && c != 13) { is.unread(c); } } return true; }
function
java
819
func (dag *BlockDAG) UTXOConfirmations(outpoint *domainmessage.Outpoint) (uint64, bool) { dag.dagLock.RLock() defer dag.dagLock.RUnlock() utxoEntry, ok := dag.GetUTXOEntry(*outpoint) if !ok { return 0, false } confirmations := dag.SelectedTipBlueScore() - utxoEntry.BlockBlueScore() + 1 return confirmations, true }
function
go
820
public static DataTable GetCPUbyDatabase(this smo.Server s) { smo.Database d = s.Databases["master"]; string sql = @"WITH DB_CPU_Stats AS ( SELECT DatabaseID , DB_Name(DatabaseID) AS DatabaseName , SUM(total_worker_time) / 1000 AS CPU_Time_Ms FROM sys.dm_exec_query_stats AS qs (NOLOCK) CROSS APPLY (SELECT CONVERT(int, value) AS DatabaseID FROM sys.dm_exec_plan_attributes(qs.plan_handle) WHERE attribute = N'dbid') AS F_DB GROUP BY DatabaseID ) SELECT DatabaseName AS [Database Name] , CPU_Time_Ms AS [CPU Time Ms] , CAST(CPU_Time_Ms * 1.0 / SUM(CPU_Time_Ms) OVER() * 100.0 AS DECIMAL(5, 2)) AS [CPU Percent] FROM DB_CPU_Stats WHERE DatabaseID != 32767 ORDER BY ROW_NUMBER() OVER(ORDER BY CPU_Time_Ms DESC) OPTION (RECOMPILE)"; return d.ExecuteWithResults(sql).Tables[0]; }
function
c#
821
public static Object compute(CompilationInfo info, TreePath tp, boolean resolveCompileTimeConstants, boolean enhanced) { boolean save = false; ElementValue v = null; Map<Object, ElementValue> cache = null; if (tp.getParentPath() != null) { Tree parentL = tp.getParentPath().getLeaf(); switch (parentL.getKind()) { case IF: case DO_WHILE_LOOP: case CONDITIONAL_EXPRESSION: case FOR_LOOP: case ASSIGNMENT: case VARIABLE: save = true; break; case ASSERT: save = ((AssertTree)parentL).getCondition() == tp.getLeaf(); break; } if (save) { cache = VisitorImpl.getValueCache(info); v = cache.get(tp.getLeaf()); if (v != null) { if (enhanced && v.constant != null) { return v.constant == UNKNOWN ? null : v.constant; } else if (!enhanced && v.jlsConstant != null) { return v.jlsConstant == UNKNOWN ? null : v.jlsConstant; } } } } Object o; try { o = new VisitorImpl(info, resolveCompileTimeConstants, enhanced).scan(tp, null); } catch (ArithmeticException | IndexOutOfBoundsException | IllegalArgumentException ex) { o = null; } if (save) { if (v == null) { v = new ElementValue(); cache.put(tp.getLeaf(), v); } if (enhanced) { v.constant = o == null ? UNKNOWN : o; } else { v.jlsConstant = o == null ? UNKNOWN : o; } } return o; }
function
java
822
public class DBDeleteByExample extends DBDelete { private List<DBRow> savedRows = new ArrayList<DBRow>(); /** * Creates a DBDeleteByExample action for the supplied example DBRow on the * supplied database. * * @param <R> the table affected * @param row the example to be deleted */ protected <R extends DBRow> DBDeleteByExample(R row) { super(row); } private <R extends DBRow> DBDeleteByExample(DBDatabase db, R row) throws SQLException { super(row); List<R> gotRows = db.get(row); for (R gotRow : gotRows) { savedRows.add(DBRow.copyDBRow(gotRow)); } } @Override protected DBActionList execute(DBDatabase db) throws SQLException { DBRow row = getRow(); final DBDeleteByExample deleteAction = new DBDeleteByExample(row); DBActionList actions = new DBActionList(deleteAction); List<DBRow> rowsToBeDeleted = db.get(row); for (DBRow deletingRow : rowsToBeDeleted) { deleteAction.savedRows.add(DBRow.copyDBRow(deletingRow)); } DBStatement statement = db.getDBStatement(); try { for (String str : getSQLStatements(db)) { statement.execute(str); } } finally { statement.close(); } return actions; } @Override public List<String> getSQLStatements(DBDatabase db) { DBRow row = getRow(); DBDefinition defn = db.getDefinition(); String whereClause = ""; for (String clause : row.getWhereClausesWithoutAliases(db)) { whereClause += defn.beginAndLine() + clause; } ArrayList<String> strs = new ArrayList<String>(); strs.add(defn.beginDeleteLine() + defn.formatTableName(row) + defn.beginWhereClause() + defn.getWhereClauseBeginningCondition() + whereClause + defn.endDeleteLine()); return strs; } @Override protected DBActionList getRevertDBActionList() { DBActionList reverts = new DBActionList(); for (DBRow savedRow : savedRows) { reverts.add(new DBInsert(savedRow)); } return reverts; } @Override protected DBActionList getActions() {//DBRow row) { return new DBActionList(new DBDeleteByExample(getRow())); } /** * Returns the list of actions required to delete rows matching the example * supplied on the 
database supplied. * * <p> * While it is unlikely that more than one action is required to delete, all * actions return a list to allow for complex actions. * * @param db the target database * @param row the row to be deleted * @throws SQLException Database actions can throw SQLException * <p style="color: #F90;">Support DBvolution at * <a href="http://patreon.com/dbvolution" target=new>Patreon</a></p> * * @return the list of actions required to delete all the rows. */ @Override protected DBActionList getActions(DBDatabase db, DBRow row) throws SQLException { return new DBActionList(new DBDeleteByExample(db, row)); } }
class
java
823
def backup_restore_secret_sample(self): first_vault = self.create_vault() secret_name = KeyVaultSample.get_unique_name() secret_value = 'this is a secret value to be migrated from one vault to another' secret = self.keyvault_data_client.set_secret(first_vault.properties.vault_uri, secret_name, secret_value) print(secret) backup = self.keyvault_data_client.backup_secret(first_vault.properties.vault_uri, secret_name) print(backup) second_vault = self.create_vault() self.keyvault_data_client.restore_secret(second_vault.properties.vault_uri, backup.value) restored_secret = self.keyvault_data_client.get_secret(second_vault.properties.vault_uri, secret_name) print(restored_secret)
function
python
824
def wbar_adk(self, inten): p = self.at.adk_params() F = np.sqrt(inten / 355.0) + 1e-24 w_adk = p['abs_Cnl_sq'] * p['G_lm'] * p['Ip'] / 27.2 * (2 * p['F0'] / F) ** ( 2 * p['n_star'] - p['ang_m'] - 1) * np.exp(- 2 * p['F0'] / 3 / F) w_bar_adk = np.sqrt(3 * F / np.pi / p['F0']) * w_adk return w_bar_adk * 41.341 * 10 ** 15
function
python
825
@Test public void testDefragmentationCancelInProgress() throws Exception { IgniteEx ig = startGrid(0); ig.cluster().state(ClusterState.ACTIVE); IgniteCache<Object, Object> cache = ig.getOrCreateCache(DEFAULT_CACHE_NAME); for (int i = 0; i < 1024; i++) cache.put(i, i); forceCheckpoint(ig); DefragmentationMXBean mxBean = defragmentationMXBean(ig.name()); mxBean.schedule(""); stopGrid(0); blockCdl = new CountDownLatch(128); UnaryOperator<IgniteConfiguration> cfgOp = cfg -> { DataStorageConfiguration dsCfg = cfg.getDataStorageConfiguration(); FileIOFactory delegate = dsCfg.getFileIOFactory(); dsCfg.setFileIOFactory((file, modes) -> { if (file.getName().contains("dfrg")) { if (blockCdl.getCount() == 0) { try { Thread.sleep(100); } catch (InterruptedException ignore) { } } else blockCdl.countDown(); } return delegate.create(file, modes); }); return cfg; }; IgniteInternalFuture<?> fut = GridTestUtils.runAsync(() -> { try { startGrid(0, cfgOp); } catch (Exception e) { throw new RuntimeException(e); } }); blockCdl.await(); mxBean = defragmentationMXBean(ig.name()); assertTrue(mxBean.cancel()); fut.get(); assertTrue(mxBean.cancel()); }
function
java
826
func (in *CellSelector) DeepCopy() *CellSelector { if in == nil { return nil } out := new(CellSelector) in.DeepCopyInto(out) return out }
function
go
827
class Docset: """A docset configuration item, computing defaults based on the given name. Comparison and hashing is reduced to the name attribute only. This is important when building sets, as later additions are discarded. """ def __init__(self, name, namespace=None, selector=None): self.name = name self.namespace = namespace or name.lower().replace(" ", "-") self.selector = selector or "source.{}".format(self.namespace) def score(self, scope): return sublime.score_selector(scope, self.selector) def __repr__(self): return ( "{self.__class__.__name__}" "(name={self.name!r}" ", namespace={self.namespace!r}" ", selector={self.selector!r}" ")".format(self=self) ) def __gt__(self, other): return self.name > other.name def __eq__(self, other): return self.name == other.name def __hash__(self): return hash(self.name)
class
python
828
class isab::AudioCtrlLanguage* NewSyntaxLC(enum languageCode aNav2Lang, TInt aLangCodeRes) { TInt32 tmpcode = 0; class AudioCtrlLanguage* lang = NewSyntaxLC(tmpcode, aLangCodeRes); aNav2Lang = languageCode(tmpcode); return lang; }
function
c++
829
void wpi_assertEqual_common_impl(int valueA, int valueB, llvm::StringRef equalityType, llvm::StringRef message, llvm::StringRef fileName, int lineNumber, llvm::StringRef funcName) { std::stringstream error; if (message.size() > 0) { error << "Assertion failed: \"" << message << "\", \"" << valueA << "\" " << equalityType << " \"" << valueB << "\" in " << funcName << "() in " << fileName << " at line " << lineNumber << "\n"; } else { error << "Assertion failed: \"" << valueA << "\" " << equalityType << " \"" << valueB << "\" in " << funcName << "() in " << fileName << " at line " << lineNumber << "\n"; } std::cout << "\n\n>>>>" << error.str(); wpi_handleTracing(); }
function
c++
830
def serialize(self): comp_dict = super().serialize() comp_dict['commodity_rate_min'] = self.op_rate_min comp_dict['commodity_rate_max'] = self.op_rate_max comp_dict['commodity_rate_fix'] = self.op_rate_fix comp_dict['commodity_cost_time_series'] = \ self.commodity_cost_time_series comp_dict['commodity_revenues_time_series'] = \ self.commodity_revenues_time_series return comp_dict
function
python
831
removeGifFromFavorites(gif) { let { favorites, results } = this.state; const resultGif = results[gif.id]; if (resultGif) { resultGif.isFavorite = false; } delete favorites[gif.id]; this.setState({ favorites }); }
function
javascript
832
def apply_snapshot(self, argset): clone = self.Clone() args = [var for var in argset if not ( var.name.startswith('binWidth_obs_x_') or var.name.startswith('gamma_stat') or var.name.startswith('nom_'))] nargs = [] for var in args: is_norm = False name = var.name.replace('alpha_', '') for sample in clone.samples: if sample.GetNormFactor(name) is not None: log.info("applying snapshot of {0} on sample {1}".format( name, sample.name)) is_norm = True sample *= var.value osys = OverallSys(name, low=1. - var.error / var.value, high=1. + var.error / var.value) sample.AddOverallSys(osys) sample.RemoveNormFactor(name) if not is_norm: nargs.append(var) for sample in clone.samples: if sample.hist is None: raise RuntimeError( "sample {0} does not have a " "nominal histogram".format(sample.name)) nominal = sample.hist.Clone(shallow=True) for var in nargs: name = var.name.replace('alpha_', '') if not sample.has_sys(name): continue log.info("applying snapshot of {0} on sample {1}".format( name, sample.name)) low, high = sample.sys_hist(name) val = var.value if val > 0: sample.hist += (high - nominal) * val elif val < 0: sample.hist += (nominal - low) * val return clone
function
python
833
public static final DatagramPacket receive(DatagramSocket socket) throws IOException { byte[] receive = new byte[15000]; DatagramPacket packet = new DatagramPacket(receive, receive.length); socket.receive(packet); byte[] data = ByteArray.trim(receive); packet.setData(data); Logger.info(TAG, format("%s[%d]\tPacket received\tlength: %d", packet.getAddress().getHostAddress(), packet.getPort(), data.length)); return packet; }
function
java
834
public void insertIdentityStmts(SootClass declaringClass) { final Jimple jimple = Jimple.v(); final PatchingChain<Unit> unitChain = getUnits(); final Chain<Local> localChain = getLocals(); Unit lastUnit = null; if (!getMethod().isStatic()) { if (declaringClass == null) { throw new IllegalArgumentException( String.format("No declaring class given for method %s", method.getSubSignature())); } Local l = jimple.newLocal("this", RefType.v(declaringClass)); Stmt s = jimple.newIdentityStmt(l, jimple.newThisRef((RefType) l.getType())); localChain.add(l); unitChain.addFirst(s); lastUnit = s; } int i = 0; for (Type t : getMethod().getParameterTypes()) { Local l = jimple.newLocal("parameter" + i, t); Stmt s = jimple.newIdentityStmt(l, jimple.newParameterRef(l.getType(), i)); localChain.add(l); if (lastUnit == null) { unitChain.addFirst(s); } else { unitChain.insertAfter(s, lastUnit); } lastUnit = s; i++; } }
function
java
835
def shape_sanity_check(image: np.ndarray, ddf: np.ndarray): if len(image.shape) not in [3, 4]: raise ValueError( f"image shape must be (m_dim1, m_dim2, m_dim3) " f"or (m_dim1, m_dim2, m_dim3, ch)," f" got {image.shape}" ) if not (len(ddf.shape) == 4 and ddf.shape[-1] == 3): raise ValueError( f"ddf shape must be (f_dim1, f_dim2, f_dim3, 3), got {ddf.shape}" )
function
python
836
def select(self, model, filters, **kwargs): if 'paging' in kwargs: paging = kwargs['paging'] elif 'limit' in kwargs: paging = Paging(0, kwargs['limit']) else: paging = None try: filters = tuple(filters) except TypeError: filters = (filters,) q = queries.GetQuery(model.tableName(), filters=filters, properties=kwargs.get('properties', tuple()), order=kwargs.get('order', None), paging=paging) with self._slave() as client: res = client.do(q) if res.error is not None: raise RequestError(res.error) objs = res.load(model) if kwargs.get('withTotal'): return objs, res.total else: return objs
function
python
837
fn lex_start(l: &mut Lexer) -> Option<StateFn> { if let Some(ch) = l.next() { match ch { '(' => { l.emit(TokenType::OpenParen); Some(StateFn(lex_start)) } ')' => { l.emit(TokenType::CloseParen); Some(StateFn(lex_start)) } '"' => Some(StateFn(lex_string)), ' ' | '\t' | '\r' | '\n' => Some(StateFn(lex_separator)), ';' => Some(StateFn(lex_comment)), '#' => Some(StateFn(lex_hash)), '[' | ']' | '{' | '}' => errorf(l, "use of reserved character"), '\'' | '`' => Some(StateFn(lex_quote)), ',' => Some(StateFn(lex_unquote)), '0'..='9' => { l.rewind(); Some(StateFn(lex_number)) } '+' | '-' => Some(StateFn(lex_explicit_sign)), '.' => Some(StateFn(lex_dot)), '@' => errorf(l, "@ cannot be the start of a token"), '\\' => errorf(l, "\\ cannot be the start of a token"), '|' => Some(StateFn(lex_pipe_identifier)), _ => { // almost certainly an identifier l.rewind(); Some(StateFn(lex_identifier)) } } } else { l.emit(TokenType::EndOfFile); None } }
function
rust
838
/**
 * Converts a KNIME data table (sorted by case id) into an XES-style event
 * log: rows sharing a case id become one trace; configured trace columns are
 * accumulated as trace attributes, the remaining event columns become event
 * attributes. Rows are assumed grouped by case id — a new id closes the
 * previous trace. NOTE(review): a conflicting trace-attribute value sets the
 * shared {@code errorDetected} flag and stops checking further trace columns
 * for that row only — confirm this best-effort behavior is intended.
 *
 * @param csvData the input table.
 * @param exec    execution context used for cancellation checks.
 * @throws CanceledExecutionException if the node execution was cancelled.
 */
public void convertDataTable2Log(BufferedDataTable csvData, ExecutionContext exec) throws CanceledExecutionException {
    List<String> traceColumns= config.getMTraceAttrSet().getIncludeList();
    List<String> eventColumns= config.getMEventAttrSet().getIncludeList();
    int[] traceColIndices = csvData.getDataTableSpec().columnsToIndices(traceColumns.toArray(new String[0]));
    int[] eventColIndices = csvData.getDataTableSpec().columnsToIndices(eventColumns.toArray(new String[0]));
    // "visited" marks columns handled specially (case id, class, timestamp,
    // lifecycle) so the generic attribute loops skip them.
    boolean[] traceColVisited = new boolean[traceColIndices.length];
    boolean[] eventColVisited = new boolean[eventColIndices.length];
    int caseIDIdx = -1, eventClassIdx, tsIdx = -1;
    caseIDIdx = traceColumns.indexOf(config.getMCaseID().getStringValue());
    eventClassIdx = eventColumns.indexOf(config.getMEventClass().getStringValue());
    tsIdx = eventColumns.indexOf(config.getMTimeStamp().getStringValue());
    boolean withLifecycle = false;
    int lifecycleIdx = -1;
    if(!config.getMLifecycle().getStringValue().equals(SMTable2XLogConfig.CFG_NO_OPTION)) {
        withLifecycle = true;
        lifecycleIdx = eventColumns.indexOf(config.getMLifecycle().getStringValue());
        eventColVisited[lifecycleIdx] = true;
    }
    traceColVisited[caseIDIdx] = true;
    eventColVisited[eventClassIdx] =true;
    eventColVisited[tsIdx] =true;
    // "-1" is the sentinel for "no trace started yet".
    String currentCaseID = "-1", newCaseID="";
    String logName = csvData.getSpec().getName();
    startLog(logName + " event log");
    for(DataRow row : csvData) {
        exec.checkCanceled();
        DataCell traceIDData = row.getCell(traceColIndices[caseIDIdx]);
        newCaseID = traceIDData.toString();
        // A change of case id closes the running trace and opens a new one.
        if(!newCaseID.equals(currentCaseID)) {
            if(!currentCaseID.equals("-1"))
                endTrace(currentCaseID);
            currentCaseID = newCaseID;
            startTrace(currentCaseID);
        }
        // Collect trace attributes; a value conflicting with an earlier row
        // of the same trace flags an error.
        for(int tIdx = 0; tIdx< traceColIndices.length ; tIdx++) {
            if(traceColVisited[tIdx])
                continue;
            if(traceAttrMap.containsKey(traceColumns.get(tIdx))) {
                if(!traceAttrMap.get(traceColumns.get(tIdx)).equals(row.getCell(traceColIndices[tIdx]))) {
                    errorDetected = true;
                    break;
                }
            }else {
                traceAttrMap.put(traceColumns.get(tIdx), row.getCell(traceColIndices[tIdx]));
            }
        }
        String eventClass = null;
        DataCell eventClassData = row.getCell(eventColIndices[eventClassIdx]);
        eventClass = ((StringCell)eventClassData).getStringValue();
        try {
            Date timeStamp = convertString2Date( row.getCell(eventColIndices[tsIdx]));
            String lifecycle = null ;
            if(withLifecycle) {
                DataCell lifecycleData = row.getCell(eventColIndices[lifecycleIdx]);
                lifecycle = ((StringCell)lifecycleData).getStringValue();
            }
            startEvent(eventClass, timeStamp, lifecycle);
        } catch (ParseException e) {
            // An unparseable timestamp skips startEvent for this row.
            e.printStackTrace();
        }
        // Remaining event columns become generic event attributes.
        for(int eIdx =0; eIdx< eventColIndices.length; eIdx++) {
            if(eventColVisited[eIdx])
                continue;
            DataCell otherData = row.getCell(eventColIndices[eIdx]);
            String attrName = eventColumns.get(eIdx);
            assignAttributeWithDataCell(currentEvent, otherData, attrName);
        }
        endEvent();
    }
    endTrace(currentCaseID + "");
    endLog();
}
function
java
839
func (d *Datastore) deleteEntityByName(dbTable string, name string) error { deleteStmt := fmt.Sprintf("DELETE FROM %s WHERE name = ?", dbTable) result, err := d.db.Exec(deleteStmt, name) if err != nil { if isMySQLForeignKey(err) { return foreignKey(dbTable, name) } return errors.Wrapf(err, "delete %s", dbTable) } rows, _ := result.RowsAffected() if rows != 1 { return notFound(dbTable).WithName(name) } return nil }
function
go
840
/**
 * Creates and starts a Client for the given endpoint.
 *
 * @return false when the connection attempt fails with an IOException,
 *         true otherwise.
 */
public boolean startClient(final String host, final int port) {
    try {
        client = new Client(host, port);
        client.start();
        return true;
    } catch (final IOException ex) {
        return false;
    }
}
function
java
841
def dict_diff(dict0, dict1):
    """Return a dict mapping each key of ``dict0`` to ``dict1[key] - dict0[key]``.

    Only keys present in ``dict0`` are considered; ``dict1`` must contain
    every such key (a missing key raises ``KeyError``).
    """
    # Dict comprehension replaces the manual loop that iterated .items()
    # while discarding the values.
    return {key: dict1[key] - dict0[key] for key in dict0}
function
python
842
/// <summary>
/// COM IStream.Read implementation: copies up to <paramref name="cb"/> bytes
/// from the wrapped stream into <paramref name="pv"/>, and writes the actual
/// count to <paramref name="pcbRead"/> when the caller supplied a pointer.
/// </summary>
void IStream.Read(byte[] pv, int cb, IntPtr pcbRead)
{
    int count = _internalStream.Read(pv, 0, cb);
    if (pcbRead == IntPtr.Zero)
    {
        return;
    }
    Marshal.WriteInt32(pcbRead, count);
}
function
c#
843
def read_vcf_multi(dir_path: str,
                   variant_caller: str = 'manta',
                   as_breakpoint: bool = False,
                   exclude_empty_cases: bool = False,
                   file_extension: str = 'vcf',
                   escape_dot_files: bool = True):
    """Read every VCF file in ``dir_path`` into a :class:`MultiVcf`.

    :param dir_path: directory containing the VCF files.
    :param variant_caller: caller name forwarded to :func:`read_vcf`.
    :param as_breakpoint: convert breakends to breakpoints per file.
    :param exclude_empty_cases: skip files with zero structural variants.
    :param file_extension: only read files with this extension
        (``None`` disables the check).
    :param escape_dot_files: skip hidden (dot-prefixed) files.
    """
    ls_vcf = []
    ls_names = []
    for f in os.listdir(dir_path):
        if escape_dot_files and f.startswith('.'):
            continue
        if (file_extension is not None) and (f.split('.')[-1] != file_extension):
            continue
        abspath = os.path.abspath(os.path.join(dir_path, f))
        vcf = read_vcf(abspath, variant_caller=variant_caller)
        # Fixed: boolean 'and' instead of bitwise '&' — short-circuits and
        # avoids the operator-precedence trap around the comparison.
        if exclude_empty_cases and vcf.sv_count == 0:
            continue
        if as_breakpoint:
            vcf = vcf.breakend2breakpoint()
        ls_vcf.append(vcf)
        # NOTE(review): str.replace removes every '.vcf' occurrence, not only
        # the suffix; intent looks like suffix-stripping — confirm.
        patient_id = f.replace('.vcf', '')
        ls_names.append(patient_id)
    return MultiVcf(ls_vcf, ls_names)
function
python
844
/**
 * Parses a user-type string into a {@link UserType}: a (trimmed) value
 * starting with 'A' yields ADMIN, one starting with 'G' yields GENERAL.
 *
 * @throws ParseException if the value matches neither pattern.
 */
public static UserType parseUserType (String userType) throws ParseException {
    requireNonNull(userType);
    String trimmedUserType = userType.trim();
    // NOTE(review): "[A\\s].*" matches any string whose FIRST character is
    // 'A' or whitespace. After trim() a leading-whitespace match is all but
    // impossible, so the \\s in the character class looks unintentional —
    // confirm whether only a leading 'A'/'G' was meant.
    if (trimmedUserType.matches("[A\\s].*")) {
        return UserType.ADMIN;
    } else if (trimmedUserType.matches("[G\\s].*")) {
        return UserType.GENERAL;
    } else {
        throw new ParseException(UserType.MESSAGE_CONSTRAINTS);
    }
}
function
java
845
def __extract_property(self, params, lang):
    """Fetch revision metadata via ``__extract`` and return the first page's
    head revision as ``(revid, timestamp, user, comment, size)``.

    A missing ``comment`` field is normalised to the empty string.
    """
    data = self.__extract(params, lang)
    pages = data['query']['pages']
    # Single-page response: take the first (and only) page id.
    page_id = next(iter(pages))
    revision = pages[page_id]['revisions'][0]
    if 'comment' not in revision:
        revision['comment'] = ''
    return (revision['revid'], revision['timestamp'], revision['user'],
            revision['comment'], revision['size'])
function
python
846
/// <summary>
/// Resolves every Inherit-valued blending setting on this instance by
/// copying the corresponding value from <paramref name="parent"/>.
/// Settings that already hold a concrete value are left untouched.
/// </summary>
public void CopyFromParent(BlendingParameters parent)
{
    if (Source == BlendingType.Inherit)
        Source = parent.Source;

    if (Destination == BlendingType.Inherit)
        Destination = parent.Destination;

    if (SourceAlpha == BlendingType.Inherit)
        SourceAlpha = parent.SourceAlpha;

    if (DestinationAlpha == BlendingType.Inherit)
        DestinationAlpha = parent.DestinationAlpha;

    if (RGBEquation == BlendingEquation.Inherit)
        RGBEquation = parent.RGBEquation;

    if (AlphaEquation == BlendingEquation.Inherit)
        AlphaEquation = parent.AlphaEquation;
}
function
c#
847
/// <summary>
/// Model wrapping a BluetoothLEDevice: exposes its identity, tracks its
/// connection state, builds service models for its GATT services, and
/// (un)registers characteristic notifications.
/// NOTE(review): several comment markers were lost in extraction; the stray
/// prose below has been restored as comments — confirm against the original.
/// </summary>
public class BEDeviceModel : BEGattModelBase<BluetoothLEDevice>
{
    #region ------------------------------ Properties ------------------------------
    private DeviceInformation _deviceInfo;
    public List<BEServiceModel> ServiceModels { get; private set; }
    private BluetoothLEDevice _device { get; set; }
    public String Name
    {
        get { return _device.Name.Trim(); }
    }
    public UInt64 BluetoothAddress
    {
        get { return _device.BluetoothAddress; }
    }
    public String DeviceId
    {
        get { return _device.DeviceId; }
    }
    public bool Connected;
    #endregion

    #region ------------------------------ Constructor/Initialize ------------------------------
    public BEDeviceModel()
    {
        ServiceModels = new List<BEServiceModel>();
        this._viewModelInstances = new List<BEGattVMBase<BluetoothLEDevice>>();
    }

    public void Initialize(BluetoothLEDevice device, DeviceInformation deviceInfo)
    {
        if (device == null)
        {
            throw new ArgumentNullException("In BEDeviceVM, BluetoothLEDevice cannot be null.");
        }
        if (deviceInfo == null)
        {
            throw new ArgumentNullException("In BEDeviceVM, DeviceInformation cannot be null.");
        }
        _device = device;
        _deviceInfo = deviceInfo;
        if (_device.ConnectionStatus == BluetoothConnectionStatus.Connected)
        {
            Connected = true;
        }
        foreach (GattDeviceService service in _device.GattServices)
        {
            BEServiceModel serviceM = new BEServiceModel();
            serviceM.Initialize(service, this);
            ServiceModels.Add(serviceM);
        }
        _device.ConnectionStatusChanged += OnConnectionStatusChanged;
        _device.NameChanged += OnNameChanged;
        _device.GattServicesChanged += OnGattervicesChanged;
        // NOTE: This has the effect of telling the OS that we're interested in
        // these devices, and for it to automatically connect to them when they
        // are advertising.
        Utilities.RunFuncAsTask(RegisterNotificationsAsync);
    }
    #endregion

    #region ---------------------------- Event Handlers ----------------------------
    // NameChanged event handler
    // <param name="sender"></param>
    // <param name="obj"></param>
    private void OnNameChanged(BluetoothLEDevice sender, Object obj)
    {
        SignalChanged("Name");
    }

    // GattServicesChanged event handler
    // <param name="sender"></param>
    // <param name="obj"></param>
    private void OnGattervicesChanged(BluetoothLEDevice sender, Object obj)
    {
        Utilities.MakeAlertBox("Services on '" + Name + "' has changed! Please navigate back to the main page and refresh devices if you would like to update the device.");
        // Slightly hacky way of making sure that 1) nothing breaks if
        // services/characteristics of this device are currently being viewed
        // while 2) ensuring that everything gets refreshed properly upon
        // pressing the button on the main page.
        if (GlobalSettings.PairedDevices.Contains(this))
        {
            GlobalSettings.PairedDevices.Remove(this);
        }
    }

    // ConnectionStatusChanged event handler
    // <param name="sender"></param>
    // <param name="obj"></param>
    private void OnConnectionStatusChanged(BluetoothLEDevice sender, Object obj)
    {
        bool value = false;
        if (_device.ConnectionStatus == BluetoothConnectionStatus.Connected)
        {
            value = true;
        }
        if (value != Connected)
        {
            // Change internal boolean and signal UI
            Connected = value;
            SignalChanged("ConnectString");
            SignalChanged("ConnectColor");
        }
    }
    #endregion event handlers

    #region ---------------------------- Registering Notifications ----------------------------
    // Registers notifications for all characteristics in all services in this device
    private bool _notificationsRegistered;
    public async Task RegisterNotificationsAsync()
    {
        // Don't need to register notifications multiple times.
        if (_notificationsRegistered)
        {
            return;
        }
        foreach (var serviceM in ServiceModels)
        {
            await serviceM.RegisterNotificationsAsync();
        }
        // Notifications now registered.
        _notificationsRegistered = true;
    }

    // Unregisters notifications for all characteristics in all services in this devices
    // <returns></returns>
    public async Task UnregisterNotificationsAsync()
    {
        try
        {
            foreach (var serviceM in ServiceModels)
            {
                await serviceM.UnregisterNotificationsAsync();
            }
        }
        catch (Exception ex)
        {
            // There's a chance the unregister will fail, as the device has been removed.
            Utilities.OnExceptionWithMessage(ex, "This failure may be expected as we're trying to unregister a device upon removal.");
        }
        _notificationsRegistered = false;
    }
    #endregion registering notifications
}
class
c#
848
def FullyValidatePackage(package_folder, package_name):
    """Fully validate a built package.

    Checks, in order: every expected package part exists; the SHA256 sums
    file has exactly one entry, references the package archive, and its hash
    matches the archive's actual hash; and the extracted archive image passes
    ``CommonUtils.VerifyPackageImage``. Returns True only if all checks pass.
    """
    print(f" - Validating package: {package_name} in folder {package_folder}....")
    for part in CommonUtils.GetPackageParts(package_name):
        part_abspath = os.path.join(package_folder, part)
        if not os.path.exists(part_abspath):
            print(f" - FAILED! Expected package part is missing: {part_abspath}")
            return False

    archive_path = os.path.join(package_folder, package_name + CommonUtils.package_extension)
    archive_hash_path = os.path.join(package_folder, package_name + CommonUtils.package_hash_extension)
    hash_result = CommonUtils.ComputeHashOfFile(archive_path)
    package_full_name = package_name + CommonUtils.package_extension
    try:
        package_sums = CommonUtils.ParseSHA256SumsFile(archive_hash_path)
        if len(package_sums) != 1:
            print(f"Package sums file {archive_hash_path} is invalid - should only have one entry.")
            return False
        if package_full_name not in package_sums:
            print(f"Package sums file {archive_hash_path} is invalid - does not reference the actual package")
            print(f"Hash had: {package_sums} - filename is {package_full_name}")
            return False
        if hash_result != package_sums[package_full_name]:
            print(f"Package hash mismatch. {archive_hash_path} has a different hash than the actual package.")
            return False
    except InvalidHashFormatException as e:
        print(f"Hash file parse failed for package: {e}")
        return False

    # Extract into a throwaway directory and validate the image contents.
    # NOTE(review): extractall on an untrusted archive is path-traversal
    # prone; consider tarfile's filter= once the minimum Python allows it.
    with tempfile.TemporaryDirectory() as tmpdirname:
        with tarfile.open(archive_path) as archive_file:
            archive_file.extractall(tmpdirname)
        return CommonUtils.VerifyPackageImage(tmpdirname)
function
python
849
/**
 * Resolves the JAAS authentication mode from the PENTAHO_JAAS_AUTH_MODE
 * variable, falling back to KERBEROS_USER when the variable is unset or not
 * a valid enum constant.
 */
private static JaasAuthenticationMode lookupLoginAuthMode(VariableSpace varSpace) {
    try {
        return JaasAuthenticationMode.valueOf(varSpace.getVariable(PENTAHO_JAAS_AUTH_MODE));
    } catch (Exception ex) {
        // Missing variable (NPE) or unknown constant (IllegalArgumentException).
        return JaasAuthenticationMode.KERBEROS_USER;
    }
}
function
java
850
/// <summary>
/// Uploads the given image file to ImageShack and returns the hosted image's
/// load-balanced URL and pixel dimensions.
/// </summary>
/// <exception cref="ArgumentNullException">imageFile is null.</exception>
/// <exception cref="FileNotFoundException">imageFile does not exist.</exception>
/// <exception cref="ConfigurationErrorsException">the user key is invalid.</exception>
public ImageInfo Upload(FileInfo imageFile)
{
    if (imageFile == null)
    {
        throw new ArgumentNullException("imageFile");
    }
    if (!imageFile.Exists)
    {
        throw new FileNotFoundException("Unable to find selected image file for upload.", imageFile.FullName);
    }
    if (!this.RegistrationIsValid())
    {
        throw new ConfigurationErrorsException("Your ImageShack user key is invalid. Please visit the options page and verify you entered it correctly.");
    }

    var postData = new Dictionary<string, string>
    {
        { "rembar", "yes" },
        { "cookie", this.RegistrationCode },
        { "key", DeveloperKey },
    };

    XDocument doc = this.ExecuteWebRequest(UploadUrl, postData, imageFile);
    XNamespace ns = doc.Root.GetDefaultNamespace();
    var resolution = doc.Root.Element(ns + "resolution");
    string imageUrl = doc.Root.Element(ns + "links").Element(ns + "image_link").Value;

    return new ImageInfo()
    {
        Url = new Uri(imageUrl).ToLoadBalancedImageUri(),
        Width = Convert.ToInt32(resolution.Element(ns + "width").Value),
        Height = Convert.ToInt32(resolution.Element(ns + "height").Value)
    };
}
function
c#
851
/// <summary>
/// Creates a signer from the service-account credential file at
/// <paramref name="credentialFilePath"/>; the file stream is disposed after
/// the credential data has been consumed.
/// </summary>
public static UrlSigner FromServiceAccountPath(string credentialFilePath)
{
    GaxPreconditions.CheckNotNull(credentialFilePath, nameof(credentialFilePath));
    using (var credentialStream = File.OpenRead(credentialFilePath))
    {
        return FromServiceAccountData(credentialStream);
    }
}
function
c#
852
def to_array(self, normalize=True, batch_size=512):
    """Return the state's amplitudes over the whole Hilbert-space basis.

    Log-amplitudes are shifted by their maximum real part before
    exponentiating, for numerical stability; the result is optionally
    L2-normalised.

    :raises RuntimeError: if the Hilbert space cannot be enumerated.
    """
    if not self.hilbert.is_indexable:
        raise RuntimeError("The hilbert space is not indexable")
    log_psi = self.log_val(self.hilbert.all_states())
    # Subtract the max real log-amplitude to avoid overflow in exp().
    psi = _np.exp(log_psi - log_psi.real.max())
    if normalize:
        psi /= _np.linalg.norm(psi)
    return psi
function
python
853
def calc_p_values(X, y, y_hat, coefficients):
    """Two-sided p-values for regression coefficients.

    Uses the t-statistics from ``calc_t_values`` with a Student-t survival
    function at ``len(X) - len(coefficients) - 1`` degrees of freedom.

    :raises Exception: if the coefficient count does not match ``X``'s column
        count, or if there are fewer than 1 degrees of freedom.
    """
    if X.shape[1] != len(coefficients):
        raise Exception('X shape ' + repr(X.shape) + ' not match coefficients length ' + repr(len(coefficients)))
    DOF = len(X) - len(coefficients) - 1
    if DOF < 1:
        raise Exception('Degrees Of Freedom must be greater or equals to 1')
    t_statistics = calc_t_values(X, y, y_hat, coefficients)
    # List comprehension replaces map()+lambda; two-sided p-value doubles the
    # upper-tail probability of |t|.
    return [2 * stats.t.sf(t, DOF) for t in np.abs(t_statistics)]
function
python
854
// Submits a removeLiquidity extrinsic signed by traderSeed and marks the
// result as failed (bSucc = false) when no 'RemoveLiquidity' event appears
// in the transaction events.
async function removeLiquidity(traderSeed, assetId, assetAmount, minAssetWithdraw, minCoreWithdraw, nodeApi = bootNodeApi) {
    const spotX = await initSpotX(nodeApi)
    const extrinsic = spotX.removeLiquidity(assetId, assetAmount, minAssetWithdraw, minCoreWithdraw)
    const txResult = await node.signAndSendTx(extrinsic, traderSeed)
    const eventSeen = checkTxEvent(txResult, 'RemoveLiquidity')
    if (!eventSeen) {
        txResult.bSucc = false
    }
    return txResult
}
function
javascript
855
func validateInsertDimensionSuccessful(t *testing.T, datasetAPIMock *mocks.IClientMock, storerMock *storertest.StorerMock) { Convey("Then storerMock.InsertDimension is called 2 times with the expected parameters", func() { validateStorerInsertDimensionCalls(storerMock, 3, instance.DbModel().InstanceID, d1.DbModel(), d2.DbModel(), d3.DbModel()) }) Convey("Then store.CreateCodeRelationshipCalls is called 3 times with the expected parameters", func() { calls := storerMock.CreateCodeRelationshipCalls() So(calls, ShouldHaveLength, 3) d1Called := false d2Called := false d3Called := false for _, call := range calls { switch call.Code { case d1.DbModel().Option: So(d1Called, ShouldBeFalse) d1Called = true So(call.CodeListID, ShouldEqual, testCodeListID) So(call.InstanceID, ShouldEqual, testInstanceID) case d2.DbModel().Option: So(d2Called, ShouldBeFalse) d2Called = true So(call.CodeListID, ShouldEqual, testCodeListID) So(call.InstanceID, ShouldEqual, testInstanceID) case d3.DbModel().Option: So(d3Called, ShouldBeFalse) d3Called = true So(call.CodeListID, ShouldEqual, testCodeListID) So(call.InstanceID, ShouldEqual, testInstanceID) default: t.Fail() } } }) }
function
go
856
/**
 * Returns the NBI route whose UUID equals the static route's id, or null
 * when no such route exists.
 */
private static SbiNePolicyRoute findCorrespondNbiModel(TrafficPolicyList staticRoute, List<SbiNePolicyRoute> nbiRoutes) {
    for(SbiNePolicyRoute candidate : nbiRoutes) {
        if(candidate.getUuid().equals(staticRoute.getId())) {
            return candidate;
        }
    }
    return null;
}
function
java
857
/**
 * Servlet implementing the tus resumable-upload protocol: dispatches
 * OPTIONS/HEAD/PATCH (and, when the datastore supports the extensions,
 * POST/DELETE) to the matching protocol handler, translating TusExceptions
 * into their HTTP status/text and decorating every response with the tus and
 * CORS headers.
 */
@SuppressWarnings("serial")
public class Upload extends HttpServlet {
    private static final Logger log = LoggerFactory.getLogger(Upload.class.getName());
    private Config config;
    private Composer composer;
    // Whether the datastore advertises the optional tus extensions.
    private boolean doTermination;
    private boolean doCreation;

    @Override
    public void init() throws ServletException {
        log.debug("Initialize Upload servlet");
        try {
            config = new Config(getServletConfig());
            composer = new Composer(config);
            doTermination = composer.datastore.getExtensions().contains("termination");
            doCreation = composer.datastore.getExtensions().contains("creation");
        } catch(ServletException se) {
            log.error("", se);
            throw se;
        } catch(Exception e) {
            log.error("", e);
            throw new ServletException(e);
        }
    }

    @Override
    public void destroy() {
        try {
            composer.datastore.destroy();
            super.destroy();
        } catch(Exception e) {
            log.error("", e);
        }
    }

    @Override
    public void service(HttpServletRequest request, HttpServletResponse servletResponse) throws IOException, ServletException {
        Response response = new Response(servletResponse);
        try {
            log.debug("UPLOAD SERVLET " + request.getMethod() + " " + request.getRequestURL() + ". User = " + getAuthenticatedUser(request));
            checkVersion(request, response);
            String method = request.getMethod();
            if (method.equals("OPTIONS")) {
                new OptionsHandler(composer, request, response).go();
            } else if (method.equals("HEAD")) {
                new HeadHandler(composer, request, response).go();
            } else if (method.equals("PATCH")) {
                new PatchHandler(composer, request, response).go();
            } else if (method.equals("POST") && doCreation) {
                new PostHandler(composer, request, response).go();
            } else if (method.equals("DELETE") && doTermination) {
                new DeleteHandler(composer, request, response).go();
            } else {
                log.info("Method " + request.getMethod() + " not allowed.");
                throw new TusException.MethodNotAllowed();
            }
        } catch (TusException texc) {
            // Protocol errors map directly to a status/text pair.
            response.setStatus(texc.getStatus()).setText(texc.getText());
        } catch(Exception e) {
            log.error("", e);
            response.setStatus(500).setText((e.getMessage() == null) ? "Server Error" : "Server Error: " + e.getMessage());
        }
        send(request, response);
    }

    /*
    User authentication, if needed, is handled outside of the servlet and the
    information is passed to the servlet via the request.getUserPrincipal().
    If a user has been authenticated, request.getUserPrincipal will be non
    null will contain the user's name. Usually, when using authentication, a
    filter is configured to prevent the servlet from running if a user hasn't
    logged in..
    */
    public static String getAuthenticatedUser(HttpServletRequest request) {
        Principal principal = request.getUserPrincipal();
        if (principal != null) {
            return principal.getName();
        }
        return null;
    }

    // All non-OPTIONS requests must carry the supported Tus-Resumable version.
    private void checkVersion(HttpServletRequest request, Response response) throws Exception {
        String clientVersion = request.getHeader("tus-resumable");
        if (!request.getMethod().equals("OPTIONS") && (clientVersion == null || !clientVersion.equals(config.tusApiVersionSupported))) {
            throw new TusException.UnsupportedVersion();
        }
    }

    // Finalises the response: tus headers, CORS headers, body + length.
    private void send(HttpServletRequest request, Response response) throws IOException {
        response.setHeader("Tus-Resumable", config.tusApiVersionSupported);
        response.setHeader("X-Content-Type-Options", "nosniff");
        addAccessHeaders(request, response);
        if (request.getMethod().equals("HEAD")) {
            response.setText("");
        }
        String body = response.getText();
        if (body.length() > 0) {
            body += "\n";
            response.setHeader("Content-Type", "text/plain; charset=utf-8");
            response.setHeader("Content-Length", Long.toString(body.length()));
        }
        response.setText(body);
        response.write();
    }

    // CORS: echo the Origin and advertise allowed/exposed headers.
    private void addAccessHeaders(HttpServletRequest request, Response response) {
        String origin = request.getHeader("Origin");
        if (origin != null && origin.length() > 0) {
            response.setHeader("Access-Control-Allow-Origin", origin);
            if (request.getMethod().equals("OPTIONS")) {
                response.setHeader("Access-Control-Allow-Methods", "POST, GET, HEAD, PATCH, DELETE, OPTIONS");
                response.setHeader("Access-Control-Allow-Headers", "Origin, " + "X-Requested-With, " + "Content-Type, " + "Upload-Length, " + "Upload-Offset, " + "Tus-Resumable, " + "Upload-Metadata");
                response.setHeader("Access-Control-Max-Age", "86400");
            } else {
                response.setHeader("Access-Control-Expose-Headers", "Upload-Offset, " + "Location, " + "Upload-Length, " + "Tus-Version, " + "Tus-Resumable, " + "Tus-Max-Size, " + "Tus-Extension, " + "Upload-Metadata");
            }
        }
    }
}
class
java
858
/* Initialize all garbage-collector state: the old-generation heap, the
   nursery (new generation), the traverse stacks and the bookkeeping
   counters. maxHeap == 0 selects the default maximum heap size. Returns
   FALSE — after releasing whatever was already acquired — if any allocation
   fails. */
ABool AInitializeGarbageCollector(unsigned long maxHeap)
{
    if (maxHeap == 0)
        AMaxHeapSize = A_DEFAULT_MAX_HEAP_SIZE;
    else
        AMaxHeapSize = maxHeap;

    if (!AInitializeHeap(A_INITIAL_OLD_GEN_SIZE))
        return FALSE;

    ANurserySize = A_INITIAL_NURSERY_SIZE;
    ANurseryBegin = AGrowNursery(NULL, 0, ANurserySize);
    if (ANurseryBegin == NULL) {
        /* Roll back the heap allocation made above. */
        AFreeHeap();
        return FALSE;
    }

    NurseryPtr = ANurseryBegin;
    ANurseryEnd = NurseryPtr + AGetBitFieldIndex(ANurserySize);
    AGCStat.nurserySize = ANurseryEnd - ANurseryBegin;

    NewGenBigBlocks = NULL;
    ANewGenLargestBlockSize = 0;
    ALiveDataSize = A_MIN_LIVE_DATA_SIZE;
    AOldGenSize = 0;
    AFloatList = NULL;

    /* Empty circular doubly-linked list of static blocks. */
    StaticBlocks.next = &StaticBlocks;
    StaticBlocks.prev = &StaticBlocks;

    AAllocAmount = 0;
    AllocCounter = 0;
    AGCState = A_GC_NONE;

    NewGenStack = NULL;
    OldGenStack = NULL;
    if (!GrowTraverseStack(&NewGenStack, &NewGenStackLength,
                           A_NEW_GEN_STACK_INITIAL_LENGTH)
        || !GrowTraverseStack(&OldGenStack, &OldGenStackLength,
                              A_OLD_GEN_STACK_INITIAL_LENGTH)) {
        /* Partial initialization: tear everything down. */
        ADeinitializeGarbageCollector();
        return FALSE;
    }

    AOldGenFinalizeInst = NULL;
    ANewGenFinalizeInst = NULL;
    OldGenGCDisallowCount = 0;

    return TRUE;
}
function
c
859
def _best_local_candidate(local_candidates, git_repo): best_candidate = None for candidate in local_candidates: if best_candidate is None: best_candidate = candidate elif candidate.found_match and not best_candidate.found_match: best_candidate = candidate return best_candidate
function
python
860
def importBodyCSVDataset(testSplit: float, local_import: bool):
    """Load the body-pose dataset and split it per label into train/test.

    Classes are balanced: every label contributes the size of the smallest
    label group, shuffled, with each sample reshaped from interleaved
    ``(x0, y0, x1, y1, ...)`` coordinates to a ``(keypoints, 2)`` array.

    :param testSplit: fraction (0..1) of each label group held out for test.
    :param local_import: read the bundled CSV instead of fetching from GitHub.
    :return: ``(x_train, x_test, y_train, y_test, labels)``
    """
    assert 0.0 <= testSplit <= 1.0
    datasetPath = DATASETS_PATH / "BodyPose_Dataset.csv"
    datasetURL = "https://raw.githubusercontent.com/ArthurFDLR/pose-classification-kit/master/pose_classification_kit/datasets/BodyPose_Dataset.csv"
    dataset_df = pd.read_csv(datasetPath if local_import else datasetURL)

    grouped = dataset_df.groupby("label")
    labels = list(dataset_df.label.unique())

    # Balance classes: each label contributes as many samples as the rarest.
    total_per_label = grouped.size().min()
    test_per_label = int(total_per_label * testSplit)
    train_per_label = total_per_label - test_per_label

    x_train, x_test, y_train, y_test = [], [], [], []
    for label, group in grouped:
        samples = group.drop(["label", "accuracy"], axis=1).to_numpy()
        np.random.shuffle(samples)
        samples_2d = [np.array((row[::2], row[1::2])).T for row in samples]
        x_train.append(samples_2d[:train_per_label])
        y_train.append([label] * train_per_label)
        x_test.append(samples_2d[train_per_label : train_per_label + test_per_label])
        y_test.append([label] * test_per_label)

    x_train = np.concatenate(x_train, axis=0)
    x_test = np.concatenate(x_test, axis=0)
    y_train = np.concatenate(y_train, axis=0)
    y_test = np.concatenate(y_test, axis=0)
    return x_train, x_test, y_train, y_test, labels
function
python
861
/// <summary>
/// Appends a new log entry built from the given message details and
/// notifies listeners via <see cref="OnNewEntry"/>.
/// </summary>
public void Write(Log source, LogMessageType type, string msg, object context)
{
    var entry = new LogEntry(source, type, msg, context);
    data.Add(entry);
    this.OnNewEntry(entry);
}
function
c#
862
/**
 * AspectJ aspect that, after a Spring @MessageMapping handler runs, tags the
 * current trace span with the handler's class and method names. When no span
 * is active, a new one is started and named from the join point.
 */
@SuppressWarnings("ArgNamesWarningsInspection")
@Aspect
public class TraceMessagingAspect {

    private static final Log log = org.apache.commons.logging.LogFactory.getLog(TraceMessagingAspect.class);

    // Tag keys recorded on the span.
    static final String MESSAGING_CONTROLLER_CLASS_KEY = "messaging.controller.class";
    static final String MESSAGING_CONTROLLER_METHOD_KEY = "messaging.controller.method";

    private final Tracer tracer;
    private final SpanNamer spanNamer;

    public TraceMessagingAspect(Tracer tracer, SpanNamer spanNamer) {
        this.tracer = tracer;
        this.spanNamer = spanNamer;
    }

    @Pointcut("@annotation(org.springframework.messaging.handler.annotation.MessageMapping)")
    private void anyMessageMappingAnnotated() {
    } // NOSONAR

    /** Proceeds with the handler, then tags the (possibly new) span. */
    @Around("anyMessageMappingAnnotated()")
    @SuppressWarnings("unchecked")
    public Object addTags(ProceedingJoinPoint pjp) throws Throwable {
        Object object = pjp.proceed();
        String methodName = pjp.getSignature().getName();
        String className = pjp.getTarget().getClass().getName();
        Span currentSpan = currentSpan(pjp);
        currentSpan.tag(MESSAGING_CONTROLLER_CLASS_KEY, className);
        currentSpan.tag(MESSAGING_CONTROLLER_METHOD_KEY, methodName);
        return object;
    }

    /** Returns the active span, starting a named one when none exists. */
    private Span currentSpan(ProceedingJoinPoint pjp) {
        Span currentSpan = this.tracer.currentSpan();
        if (currentSpan == null) {
            if (log.isDebugEnabled()) {
                log.debug("No span found - will create a new one");
            }
            currentSpan = this.tracer.nextSpan().name(name(pjp)).start();
        }
        return currentSpan;
    }

    // Span name derived from the target method, lower-hyphenated.
    private String name(ProceedingJoinPoint pjp) {
        return this.spanNamer.name(getMethod(pjp, pjp.getTarget()), SpanNameUtil.toLowerHyphen(pjp.getSignature().getName()));
    }

    // Resolves the concrete method on the target class (not the interface).
    private Method getMethod(ProceedingJoinPoint pjp, Object object) {
        MethodSignature signature = (MethodSignature) pjp.getSignature();
        Method method = signature.getMethod();
        return ReflectionUtils.findMethod(object.getClass(), method.getName(), method.getParameterTypes());
    }
}
class
java
863
/**
 * Accumulates per-entry payoff statistics (running mean, variance and sample
 * count) for a symmetric game of {@code agents} agents choosing among
 * {@code actions} actions, keyed by population-share multicombinations, plus
 * running externality statistics with a confidence-interval helper.
 */
public class HeuristicSymmetricPayoffMatrix implements Iterable<Entry<PayoffEntry, Double[]>> {

    // Used as a floor on the CI half-width when the variance is zero.
    private static final double MINIMUM_DELTA = 0.0000001;
    private final int agents;
    private final int actions;
    private final Map<PayoffEntry, Mean[]> tableMean;
    private final Map<PayoffEntry, Variance[]> tableVariance;
    private final Map<PayoffEntry, Integer> tableCount;
    private final long numberOfCombinations;
    private final Mean externalityMean;
    private final Variance externalityVariance;
    private int externalitySamples;

    /**
     * Default constructor using the dimensions of the table. and having only
     * all multicombinations as entries.
     *
     * @param agents the amount of agents.
     * @param actions the amount of actions.
     */
    public HeuristicSymmetricPayoffMatrix(final int agents, final int actions) {
        this.agents = agents;
        this.actions = actions;
        this.tableMean = Maps.newLinkedHashMap();
        this.tableVariance = Maps.newLinkedHashMap();
        this.tableCount = Maps.newLinkedHashMap();
        this.numberOfCombinations = MathUtils.multiCombinationSize(actions, agents);
        this.externalityMean = new Mean();
        this.externalityVariance = new Variance();
        this.externalitySamples = 0;
    }

    /**
     * Returns true if every space in the tableMean is filled in with a value.
     *
     * @return true if every entry has a value.
     */
    public boolean isComplete() {
        return this.tableMean.size() == getNumberOfPossibilities();
    }

    private long getNumberOfPossibilities() {
        return numberOfCombinations;
    }

    /**
     * Adds a new entry to this payoff matrix.
     *
     * @param value [] The payoff values.
     * @param key The population shares as indices for the value
     */
    public void addEntry(final Double[] value, final int... key) {
        checkArgument(testKey(key));
        checkArgument(testValues(value));
        final PayoffEntry entry = PayoffEntry.from(key);
        if (getEntryCount(entry) == 0) {
            newEntry(entry, value);
        } else {
            updateEntry(entry, value);
        }
    }

    // A key is valid when it has one share per action and the shares sum to
    // the number of agents.
    private boolean testKey(final int[] key) {
        if (key.length != actions) {
            return false;
        }
        int count = 0;
        for (final int i : key) {
            count += i;
        }
        return count == agents;
    }

    // One payoff value per agent is required.
    private boolean testValues(final Double[] value) {
        return value.length == agents;
    }

    private void updateEntry(final PayoffEntry entry, final Double[] value) {
        for (int i = 0; i < value.length; i++) {
            tableMean.get(entry)[i].increment(value[i]);
            tableVariance.get(entry)[i].increment(value[i]);
        }
        int nplus1 = getEntryCount(entry) + 1;
        this.tableCount.put(entry, nplus1);
    }

    private void newEntry(final PayoffEntry entry, final Double[] value) {
        final Mean[] means = new Mean[value.length];
        final Variance[] vars = new Variance[value.length];
        for (int i = 0; i < value.length; i++) {
            means[i] = new Mean();
            means[i].increment(value[i]);
            vars[i] = new Variance();
            vars[i].increment(value[i]);
        }
        this.tableMean.put(entry, means);
        this.tableVariance.put(entry, vars);
        this.tableCount.put(entry, 1);
    }

    /** Number of samples recorded for the given entry (0 when unseen). */
    public int getEntryCount(final PayoffEntry entry) {
        if (this.tableCount.containsKey(entry)) {
            return this.tableCount.get(entry);
        }
        return 0;
    }

    /**
     * Returns an entry in the payoff matrix.
     *
     * @param key the index keys.
     * @return the value recorded in the matrix.
     */
    public Double[] getEntry(final int... key) {
        checkArgument(testKey(key));
        final PayoffEntry entry = PayoffEntry.from(key);
        checkArgument(tableCount.containsKey(entry));
        Mean[] meen = tableMean.get(entry);
        Double[] toRet = new Double[meen.length];
        for (int i = 0; i < meen.length; i++) {
            toRet[i] = meen[i].getResult();
        }
        return toRet;
    }

    /**
     * Returns an entry in the payoff matrix.
     *
     * @param key the index keys.
     * @return the value recorded in the matrix.
     */
    public Double[] getVariance(final int... key) {
        checkArgument(testKey(key));
        final PayoffEntry entry = PayoffEntry.from(key);
        checkArgument(tableCount.containsKey(entry));
        Variance[] vars = tableVariance.get(entry);
        Double[] toRet = new Double[vars.length];
        for (int i = 0; i < vars.length; i++) {
            toRet[i] = vars[i].getResult();
        }
        return toRet;
    }

    /**
     * Add externality value to accumulate basic statistics.
     *
     * @param value The externality value.
     */
    public void addExternalityValue(double value) {
        // NaN samples are silently ignored.
        if (!Double.isNaN(value)) {
            this.externalityMean.increment(value);
            this.externalityVariance.increment(value);
            this.externalitySamples++;
        }
    }

    double getExternalityMean() {
        return externalityMean.getResult();
    }

    int getExternalitySamples() {
        return externalitySamples;
    }

    double getExternalityVariance() {
        return externalityVariance.getResult();
    }

    /** Confidence interval for the externality mean at the given level. */
    public ConfidenceInterval getExternalityCI(ConfidenceLevel level) {
        double mean = getExternalityMean();
        double std = Math.sqrt(getExternalityVariance());
        int sampleSize = getExternalitySamples();
        double error = level.getConfideneCoeff() * std / sqrt((double) sampleSize);
        //hack to allow creating CI's
        if (error == 0) {
            error = MINIMUM_DELTA;
        }
        return new ConfidenceInterval(mean - error, mean + error, level.getConfidenceLevel());
    }

    @Override
    public String toString() {
        final StringBuilder b = new StringBuilder();
        for (final Entry<PayoffEntry, Double[]> e : this) {
            b.append("V:").append(e.getKey()).append("->")
                    .append(Arrays.toString(this.getEntry(e.getKey().getEntries()))).append("\n");
            b.append("C:").append(e.getKey()).append("->").append(tableCount.get(e.getKey()))
                    .append("\n");
        }
        return b.toString();
    }

    @Override
    public Spliterator<Entry<PayoffEntry, Double[]>> spliterator() {
        // Snapshot the current means so iteration sees stable values.
        Map<PayoffEntry, Double[]> toRet = Maps.newLinkedHashMap();
        tableMean.entrySet()
                .forEach((e) -> toRet.put(e.getKey(), getEntry(e.getKey().getEntries())));
        return toRet.entrySet().spliterator();
    }

    @Override
    public Iterator<Entry<PayoffEntry, Double[]>> iterator() {
        Map<PayoffEntry, Double[]> toRet = Maps.newLinkedHashMap();
        tableMean.entrySet()
                .forEach((e) -> toRet.put(e.getKey(), getEntry(e.getKey().getEntries())));
        return toRet.entrySet().iterator();
    }
}
java
864
def load_SRs_file(path):
    """Split a reports text file into its individual report sections.

    The file is a sequence of reports delimited by a line of 46 dashes.
    The first element of the result holds everything before the first
    separator (possibly the empty string); every later element starts
    with the separator line itself, followed by that report's text —
    matching the historical output format of this function.

    Parameters
    ----------
    path : str
        Path of the reports file to read.

    Returns
    -------
    list[str]
        One string per report section.
    """
    separator = "----------------------------------------------\n"
    # Collect each section as a list of lines and join once at the end;
    # the original repeated string concatenation was quadratic.
    sections = [[]]
    with open(path, "r") as reports_file:
        for line in reports_file:
            if line == separator:
                # A separator starts a new section and is kept as its
                # first line.
                sections.append([line])
            else:
                # (The original also tested `x >= 0`, which was always
                # true — the index never goes negative.)
                sections[-1].append(line)
    return ["".join(section) for section in sections]
function
python
865
/**
 * Base class for Swrve in-app/message-center campaigns. Parses the shared
 * campaign attributes (id, subject, triggers, throttle rules, start/end
 * dates) from JSON and tracks the campaign's saveable display state.
 */
public abstract class SwrveBaseCampaign {

    // Default campaign throttle limits, used when the JSON "rules" object
    // does not override them.
    protected static int DEFAULT_DELAY_FIRST_MESSAGE = 180;      // seconds after init before first show
    protected static int DEFAULT_MAX_IMPRESSIONS = 99999;        // effectively unlimited
    protected static int DEFAULT_MIN_DELAY_BETWEEN_MSGS = 60;    // seconds between messages

    protected ISwrveCampaignManager campaignManager;
    protected SwrveCampaignDisplayer campaignDisplayer;
    protected int id;
    protected SwrveCampaignState saveableState; // The state of the campaign that will be kept saved by the SDK
    protected Date startDate;
    protected Date endDate;
    protected List<Trigger> triggers;
    protected boolean messageCenter;
    protected String subject; // MessageCenter subject of the campaign
    protected int maxImpressions;
    protected int minDelayBetweenMessage;
    protected Date showMessagesAfterLaunch; // Time we can show the first message after launch

    /*
     * Parse a campaign from JSON data. Missing optional keys fall back to
     * the defaults above; throws JSONException when required keys ("id")
     * are absent or malformed.
     */
    public SwrveBaseCampaign(ISwrveCampaignManager campaignManager, SwrveCampaignDisplayer campaignDisplayer, JSONObject campaignData) throws JSONException {
        this.campaignManager = campaignManager;
        this.campaignDisplayer = campaignDisplayer;
        this.id = campaignData.getInt("id");
        SwrveLogger.i("Parsing campaign %s", id);
        this.messageCenter = campaignData.optBoolean("message_center", false);
        // JSON null subject is normalized to the empty string.
        this.subject = campaignData.isNull("subject") ? "" : campaignData.getString("subject");
        this.saveableState = new SwrveCampaignState(); // Start with an empty state

        // Campaign rule defaults
        this.maxImpressions = DEFAULT_MAX_IMPRESSIONS;
        this.minDelayBetweenMessage = DEFAULT_MIN_DELAY_BETWEEN_MSGS;
        this.showMessagesAfterLaunch = SwrveHelper.addTimeInterval(campaignManager.getInitialisedTime(), DEFAULT_DELAY_FIRST_MESSAGE, Calendar.SECOND);

        // Parse campaign triggers
        if (campaignData.has("triggers")) {
            String triggersJson = campaignData.getString("triggers");
            triggers = Trigger.fromJson(triggersJson, id);
        } else {
            triggers = new ArrayList<>();
        }

        // Parse campaign rules (impression cap and display throttling).
        if (campaignData.has("rules")) {
            JSONObject rules = campaignData.getJSONObject("rules");
            if (rules.has("dismiss_after_views")) {
                int totalImpressions = rules.getInt("dismiss_after_views");
                this.maxImpressions = totalImpressions;
            }
            if (rules.has("delay_first_message")) {
                int delayFirstMessage = rules.getInt("delay_first_message");
                this.showMessagesAfterLaunch = SwrveHelper.addTimeInterval(campaignManager.getInitialisedTime(), delayFirstMessage, Calendar.SECOND);
            }
            if (rules.has("min_delay_between_messages")) {
                this.minDelayBetweenMessage = rules.getInt("min_delay_between_messages");
            }
        }

        // Parse campaign dates (epoch milliseconds); either may be absent.
        if (campaignData.has("start_date")) {
            this.startDate = new Date(campaignData.getLong("start_date"));
        }
        if (campaignData.has("end_date")) {
            this.endDate = new Date(campaignData.getLong("end_date"));
        }
    }

    /**
     * @return the campaign id.
     */
    public int getId() {
        return id;
    }

    /**
     * Used internally to identify campaigns that have been marked as MessageCenter campaigns on the dashboard.
     *
     * @return true if the campaign is an MessageCenter campaign.
     */
    public boolean isMessageCenter() {
        return messageCenter;
    }

    /**
     * @return the name of the campaign.
     */
    public String getSubject() {
        return subject;
    }

    /**
     * @param date Current date
     * @return true if the campaign is active at the given time.
     */
    public boolean isActive(Date date) {
        return campaignDisplayer.isCampaignActive(this, date, null);
    }

    /**
     * @return the triggers for this campaign.
     */
    public List<Trigger> getTriggers() {
        return triggers;
    }

    /**
     * @return current impressions
     */
    public int getImpressions() {
        return saveableState.impressions;
    }

    /**
     * Overwrite the impression counter (used when restoring saved state).
     *
     * @param impressions the restored impression count
     */
    public void setImpressions(int impressions) {
        this.saveableState.impressions = impressions;
    }

    /**
     * @return maximum impressions
     */
    public int getMaxImpressions() {
        return maxImpressions;
    }

    /**
     * @return the campaign start date.
     */
    public Date getStartDate() {
        return startDate;
    }

    /**
     * @return the campaign end date.
     */
    public Date getEndDate() {
        return endDate;
    }

    /**
     * Increment impressions by one.
     */
    public void incrementImpressions() {
        this.saveableState.impressions++;
    }

    /**
     * Ensures a new message cannot be shown until now + minDelayBetweenMessage
     */
    protected void setMessageMinDelayThrottle() {
        this.saveableState.showMessagesAfterDelay = SwrveHelper.addTimeInterval(campaignManager.getNow(), this.minDelayBetweenMessage, Calendar.SECOND);
        // Also throttles globally across campaigns via the displayer.
        campaignDisplayer.setMessageMinDelayThrottle(campaignManager.getNow());
    }

    /**
     * Used internally to set the status of the campaign.
     *
     * @param status new status of the campaign
     */
    public void setStatus(SwrveCampaignState.Status status) {
        this.saveableState.status = status;
    }

    /**
     * Get the status of the campaign.
     *
     * @return status of the campaign
     */
    public SwrveCampaignState.Status getStatus() {
        return saveableState.status;
    }

    /**
     * Used by sublcasses to inform that the campaign was displayed:
     * marks it Seen, counts the impression and arms the delay throttle.
     */
    public void messageWasShownToUser() {
        setStatus(SwrveCampaignState.Status.Seen);
        incrementImpressions();
        setMessageMinDelayThrottle();
    }

    /**
     * @param orientation the device orientation to check
     * @return whether this campaign can be shown in the given orientation
     */
    public abstract boolean supportsOrientation(SwrveOrientation orientation);

    /**
     * Used by QAUser to determine what kind of campaign we are reporting
     *
     * @return CAMPAIGN_TYPE enum
     */
    public abstract CAMPAIGN_TYPE getCampaignType();

    /**
     * Determine if the assets for this campaign have been downloaded.
     *
     * @param assetsOnDisk All assets that are already downloaded.
     * @return if the assets are ready
     */
    @Deprecated
    public abstract boolean areAssetsReady(Set<String> assetsOnDisk);

    /**
     * Determine if the assets for this campaign have been downloaded.
     *
     * @param assetsOnDisk All assets that are already downloaded.
     * @param properties   String map of personalized properties.
     * @return if the assets are ready
     */
    public abstract boolean areAssetsReady(Set<String> assetsOnDisk, Map<String, String> properties);

    /**
     * Obtain the serializable state of the campaign.
     *
     * @return the serializable state of the campaign.
     */
    public SwrveCampaignState getSaveableState() {
        return saveableState;
    }

    /**
     * Set the previous state of this campaign.
     *
     * @param saveableState The state to save
     */
    public void setSaveableState(SwrveCampaignState saveableState) {
        this.saveableState = saveableState;
    }

    /**
     * @return the earliest time the first message may be shown after launch.
     */
    public Date getShowMessagesAfterLaunch() {
        return showMessagesAfterLaunch;
    }
}
class
java
866
def add_state(self, *args):
    """Append each given ``State`` to this machine's state list.

    Arguments are processed in order; states validated before a failing
    one are kept.

    Raises:
        ValueError: if an argument is not a ``State`` instance.
        AddStateError: if an argument is already registered.
    """
    for candidate in args:
        if isinstance(candidate, State):
            if self._exist_state(candidate):
                raise AddStateError(f"Cannot add an already existing state")
            self.states.append(candidate)
        else:
            raise ValueError(f"Param 'state' must be a State object. You passed {candidate.__class__.__name__}.")
function
python
867
/**
 * Firebase messaging service that validates incoming push messages,
 * broadcasts lifecycle intents (received/posted) and posts the resulting
 * {@link Notification} built by a {@link NotificationFactory}.
 */
public class NotificationListenerService extends FirebaseMessagingService {

    protected static final String NOTIFICATION_TAG = "com.deltadna.android.sdk.notifications";

    // Filter matching the open/dismiss actions broadcast back to the SDK.
    protected static final IntentFilter RECEIVER_FILTER = new IntentFilter();
    static {
        RECEIVER_FILTER.addAction(Actions.NOTIFICATION_OPENED);
        RECEIVER_FILTER.addAction(Actions.NOTIFICATION_DISMISSED);
    }

    private static final String TAG = BuildConfig.LOG_TAG + ' ' + NotificationListenerService.class.getSimpleName();

    protected Bundle metaData;
    protected NotificationManager manager;
    protected NotificationFactory factory;

    @Override
    public void onCreate() {
        super.onCreate();

        metaData = MetaData.get(this);
        manager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        factory = createFactory(this);
    }

    /**
     * Validates the message sender against the configured sender id, then
     * broadcasts {@code MESSAGE_RECEIVED}, builds the notification and —
     * if the factory produced one — posts it and broadcasts
     * {@code NOTIFICATION_POSTED}.
     */
    @Override
    public void onMessageReceived(RemoteMessage message) {
        final String from = message.getFrom();
        final Map<String, String> data = message.getData();

        Log.d(TAG, String.format(
                Locale.US,
                "Received message %s from %s",
                data,
                from));

        if (from == null) {
            Log.w(TAG, "Message sender is unknown");
        } else if (!from.equals(getString(metaData.getInt(MetaData.SENDER_ID)))) {
            // Message came from a different sender; ignore it.
            Log.d(TAG, "Not handling message due to sender ID mismatch");
        } else if (data == null || data.isEmpty()) {
            Log.w(TAG, "Message data is null or empty");
        } else {
            final PushMessage pushMessage = new PushMessage(
                    this,
                    message.getFrom(),
                    message.getData());
            sendBroadcast(Utils.wrapWithReceiver(
                    this,
                    new Intent(Actions.MESSAGE_RECEIVED)
                            .setPackage(getPackageName())
                            .putExtra(Actions.PUSH_MESSAGE, pushMessage)));

            // The push id (long) is narrowed to int for NotificationManager.
            final int id = (int) pushMessage.id;
            final NotificationInfo info = new NotificationInfo(id, pushMessage);

            final Notification notification = factory.create(
                    factory.configure(this, pushMessage),
                    info);
            // A null notification means the factory chose not to display it.
            if (notification != null) {
                notify(id, notification);

                sendBroadcast(Utils.wrapWithReceiver(
                        this,
                        new Intent(Actions.NOTIFICATION_POSTED)
                                .setPackage(getPackageName())
                                .putExtra(Actions.NOTIFICATION_INFO, info)));
            }
        }
    }

    /**
     * Creates the notification factory to be used for creating notifications
     * when a push message is received.
     *
     * @param context the context
     *
     * @return notification factory
     */
    protected NotificationFactory createFactory(Context context) {
        return new NotificationFactory(context);
    }

    /**
     * Posts a {@link Notification} on the {@link NotificationManager}.
     *
     * @param id           the id
     * @param notification the notification
     */
    protected void notify(long id, Notification notification) {
        manager.notify(NOTIFICATION_TAG, (int) id, notification);
    }
}
class
java
868
def Error(self, source, reason, exc_info=None, quiet=False):
    """Record an error originating from *source* and report it on the UI.

    The formatted message is always appended to ``self.errors``; console
    output is suppressed when *quiet* is true.

    Args:
        source: origin of the error (passed through to the formatter).
        reason: human-readable description of the failure.
        exc_info: optional exception info used to build a stacktrace.
        quiet: when true, skip printing the error to the console.
    """
    msg, stacktrace = self._FormatErrorMessage(source, reason, exc_info)
    self.errors.append(msg)
    if not quiet:
        self.ui.console.PrintError(msg)
    # NOTE(review): the stacktrace is written to the log regardless of
    # `quiet` — source formatting was flattened, so confirm this was not
    # meant to be nested under the `quiet` check.
    if stacktrace:
        self.ui.console.PrintLog(stacktrace)
function
python
869
/// <summary>
/// Event carrying a delivery confirmation for a previously sent message.
/// All properties are XML-serialized as attributes.
/// </summary>
public class EventMessageConfirmation : BaseEvent
{
    /// <summary>Time the original message was sent.</summary>
    [XmlAttribute("sendTime")]
    public uint SendTime { get; set; }

    /// <summary>Identifier of the confirmed message.</summary>
    [XmlAttribute("id")]
    public uint Id { get; set; }

    /// <summary>Sender address of the original message.</summary>
    [XmlAttribute("fromAddr")]
    public string FromAddr { get; set; }

    /// <summary>Recipient address of the original message.</summary>
    [XmlAttribute("toAddr")]
    public string ToAddr { get; set; }

    /// <summary>Time the confirmation was produced.</summary>
    [XmlAttribute("confTime")]
    public uint ConfTime { get; set; }

    /// <summary>Kind of confirmation (see <c>MessageConfirmType</c>).</summary>
    [XmlAttribute("confirmation")]
    public MessageConfirmType Confirmation { get; set; }
}
class
c#
870
def values(self, p_map, v_note):
    """Return the set of candidate Notes for *v_note* under this
    not-equal-pitch constraint.

    Three cases, based on which of the constraint's actors already have
    pitch assignments in *p_map*:
      * no actor assigned: any tonal pitch is admissible, so build a
        Note per available pitch (duration/dots copied from v_note);
      * v_note itself assigned: its current assignment is the only value;
      * v_note unassigned but others assigned: exclude the pitches the
        assigned actors already use.

    Raises:
        Exception: if *v_note* is not an actor of this constraint at all.
    """
    assigned = p_map.assigned_actors(self)
    unassigned = p_map.unassigned_actors(self)
    if len(assigned) == 0:
        # Nothing is fixed yet — every tonal pitch remains possible.
        pitches = p_map.all_tonal_pitches(v_note)
        return {Note(p, v_note.note.base_duration, v_note.note.num_dots) for p in pitches}
    if v_note in assigned:
        # Already pinned: the assignment is the sole candidate.
        return {p_map[v_note].note}
    if v_note not in unassigned:
        raise Exception('{0} is not in actor list of not equal pitch constraints.'.format(v_note.note))
    # Collect the notes already taken by assigned actors; candidates must
    # differ from all of them.
    e_set = set()
    for v in assigned:
        e_set.add(p_map[v].note)
    return NotEqualPitchConstraint.compute_full_result(p_map, v_note, e_set)
function
python
871
def deserialize_single_field(
    field,
    source_val,
    name="value",
    *,
    mapper=None,
    keep_undefined=True,
    camel_case_convert=False,
    ignore_none=False,
):
    """Deserialize *source_val* according to the declared *field* type.

    Dispatches on the concrete Field subclass (scalars, collections,
    structure references, maps, serializable/any fields) and delegates to
    the matching ``deserialize_*`` helper.  *name* is only used to build
    error messages.

    Args:
        field: the Field descriptor driving deserialization (may be None,
            which passes the value through untouched).
        source_val: the raw value to deserialize.
        name: label used in error messages.
        mapper: optional key mapper forwarded to nested deserializers.
        keep_undefined: forwarded to nested structure deserializers.
        camel_case_convert: forwarded to nested deserializers.
        ignore_none: when true, a None input is returned as-is regardless
            of the field type.

    Returns:
        The deserialized value.

    Raises:
        ValueError: on validation failures or a non-None value for a
            NoneField.
        NotImplementedError: when the field type is not recognized.
    """
    # None is passed through when explicitly allowed or the field is NoneField.
    if source_val is None and (ignore_none or isinstance(field, NoneField)):
        return source_val
    if isinstance(field, (Number, String, Boolean)):
        # Plain scalars: validate in place, no conversion needed.
        field._validate(source_val)
        value = source_val
    elif (
        isinstance(field, TypedField)
        and getattr(field, "_ty", "") in {str, int, float}
        and isinstance(source_val, getattr(field, "_ty", ""))
    ):
        # Typed primitive already of the right Python type: accept as-is.
        value = source_val
    elif isinstance(field, Array):
        value = deserialize_array(
            field,
            source_val,
            name,
            keep_undefined=keep_undefined,
            mapper=mapper,
            camel_case_convert=camel_case_convert,
        )
    elif isinstance(field, Deque):
        value = deserialize_deque(
            field,
            source_val,
            name,
            keep_undefined=keep_undefined,
            mapper=mapper,
            camel_case_convert=camel_case_convert,
        )
    elif isinstance(field, Tuple):
        value = deserialize_tuple(
            field,
            source_val,
            name,
            keep_undefined=keep_undefined,
            mapper=mapper,
            camel_case_convert=camel_case_convert,
        )
    elif isinstance(field, Set):
        value = deserialize_set(
            field,
            source_val,
            name,
            keep_undefined=keep_undefined,
            mapper=mapper,
            camel_case_convert=camel_case_convert,
        )
    elif isinstance(field, MultiFieldWrapper):
        # AnyOf/AllOf/etc. wrappers try their contained fields in turn.
        value = deserialize_multifield_wrapper(
            field,
            source_val,
            name,
            keep_undefined=keep_undefined,
            mapper=mapper,
            camel_case_convert=camel_case_convert,
        )
    elif isinstance(field, ClassReference):
        # A value that is already a Structure instance is used directly.
        value = (
            deserialize_structure_internal(
                getattr(field, "_ty", None),
                source_val,
                name,
                keep_undefined=keep_undefined,
                mapper=mapper,
                camel_case_convert=camel_case_convert,
            )
            if not isinstance(source_val, Structure)
            else source_val
        )
    elif isinstance(field, StructureReference):
        try:
            value = deserialize_structure_reference(
                getattr(field, "_newclass", None),
                source_val,
                keep_undefined=keep_undefined,
                mapper=mapper,
                camel_case_convert=camel_case_convert,
            )
        except Exception as e:
            # Re-raise with the field name prefixed for easier debugging.
            raise ValueError(f"{name}: Got {wrap_val(source_val)}; {str(e)}") from e
    elif isinstance(field, Map):
        value = deserialize_map(
            field, source_val, name, camel_case_convert=camel_case_convert
        )
    elif isinstance(field, SerializableField):
        # Field knows how to deserialize itself.
        value = field.deserialize(source_val)
    elif isinstance(field, Anything) or field is None:
        value = source_val
    elif isinstance(field, TypedField) and isinstance(source_val, (list, dict)):
        # Generic typed field: try constructing the target type from the
        # raw list (positional) or dict (keyword) payload.
        ty = getattr(field, "_ty")
        if isinstance(source_val, list):
            value = ty(*source_val)
        elif isinstance(source_val, dict):
            value = ty(**source_val)
    elif isinstance(field, NoneField):
        # Reached only when source_val is not None (handled at the top).
        raise ValueError(f"{name}: Got {wrap_val(source_val)}; Expected None")
    else:
        raise NotImplementedError(
            f"{name}: Got {wrap_val(source_val)}; Cannot deserialize value of type {field.__class__.__name__}. Are "
            "you using non-Typepy class? "
        )
    return value
function
python
872
def fetch(self, refspec: Union[str, List[str], None] = None,
          progress: Union[RemoteProgress, None, 'UpdateProgress'] = None,
          verbose: bool = True,
          kill_after_timeout: Union[None, float] = None,
          **kwargs: Any) -> IterableList[FetchInfo]:
    """Fetch from this remote and return per-ref fetch results.

    :param refspec: a single refspec, a list of refspecs, or None to use
        the remote's configured refspecs (validated by _assert_refspec).
    :param progress: optional progress handler receiving git's output.
    :param verbose: pass ``-v`` to git-fetch.
    :param kill_after_timeout: kill the git process after this many
        seconds of no output.
    :param kwargs: additional flags forwarded to git-fetch.
    :return: IterableList of FetchInfo, one per fetched ref.
    """
    if refspec is None:
        # No explicit refspec: make sure the remote config provides one.
        self._assert_refspec()
    kwargs = add_progress(kwargs, self.repo.git, progress)
    if isinstance(refspec, list):
        args: Sequence[Optional[str]] = refspec
    else:
        args = [refspec]

    proc = self.repo.git.fetch(self, *args, as_process=True, with_stdout=False,
                               universal_newlines=True, v=verbose, **kwargs)
    res = self._get_fetch_info_from_stderr(proc, progress,
                                           kill_after_timeout=kill_after_timeout)
    # Refresh the object database cache if the backend supports it, so
    # newly fetched objects are visible.
    if hasattr(self.repo.odb, 'update_cache'):
        self.repo.odb.update_cache()
    return res
function
python
873
def insert_into_tables(self, path):
    """Load a CSV of books into the 'booksdepo' table and seed categories.

    Reads *path* with pandas, appends the rows to ``booksdepo`` via a
    SQLAlchemy engine, and inserts three fixed category rows through the
    existing database cursor.  The connection is committed only when
    everything succeeded.

    Args:
        path: filesystem path of the CSV file to load.

    Raises:
        DatabaseError: if any step of the load fails; the original
            exception is attached as the cause.
    """
    try:
        cur = self.connect_database()
        df = pd.read_csv(path)
        # NOTE(review): placeholder credentials hard-coded in source —
        # these should come from configuration/environment, not code.
        host = "host"
        user = "user"
        port = "portr"
        database = "database"
        password = "password"
        postgres_str = f'postgresql://{user}:{password}@{host}:{port}/{database}'
        engine = create_engine(postgres_str, echo=False)
        df.to_sql('booksdepo', con=engine, index=False, if_exists='append')
        cur.execute('''
        INSERT INTO categories (category)
        VALUES('finace');
        INSERT INTO categories (category)
        VALUES('chidren');
        INSERT INTO categories (category)
        VALUES('economics');
        ''')
        print("Succesfully added values to tables")
    except (Exception, DatabaseError) as exc:
        # Chain the original exception so the real cause is not lost.
        raise DatabaseError(
            "Could not add value to tables in the specified database"
        ) from exc
    self.__connection.commit()
function
python
874
// Finish initialization of the mapped CDS archives: deserialize the shared
// tables from the static archive, then (if present) layer the dynamic
// archive's symbol/dictionary headers on top.  Optionally prints archive
// details and exits when -XX:+PrintSharedArchiveAndExit is set.
void MetaspaceShared::initialize_shared_spaces() {
  FileMapInfo *static_mapinfo = FileMapInfo::current_info();

  // Verify the static archive and deserialize the shared objects from its
  // serialized-data region.
  char* buffer = static_mapinfo->serialized_data();
  intptr_t* array = (intptr_t*)buffer;
  ReadClosure rc(&array);
  serialize(&rc);

  // Initialize the run-time symbol table (backed by the archived symbols).
  SymbolTable::create_table();

  // Fix up heap pointers embedded in the archived heap regions.
  static_mapinfo->patch_archived_heap_embedded_pointers();

  // Close the file descriptor and release the bitmap region; they are not
  // needed once mapping is complete.
  static_mapinfo->close();
  static_mapinfo->unmap_region(MetaspaceShared::bm);

  // If a dynamic (top-layer) archive is mapped, read in its table headers.
  FileMapInfo *dynamic_mapinfo = FileMapInfo::dynamic_info();
  if (dynamic_mapinfo != NULL) {
    intptr_t* buffer = (intptr_t*)dynamic_mapinfo->serialized_data();
    ReadClosure rc(&buffer);
    SymbolTable::serialize_shared_table_header(&rc, false);
    SystemDictionaryShared::serialize_dictionary_headers(&rc, false);
    dynamic_mapinfo->close();
    dynamic_mapinfo->unmap_region(MetaspaceShared::bm);
  }

  // When re-dumping dynamically, pre-load the archived lambda-form invokers.
  if (DynamicDumpSharedSpaces) {
    LambdaFormInvokers::read_static_archive_invokers();
  }

  // Diagnostic mode: dump archive contents and exit with a status that
  // reflects archive validity.
  if (PrintSharedArchiveAndExit) {
    if (dynamic_mapinfo != nullptr) {
      tty->print_cr("\n\nBase archive name: %s", Arguments::GetSharedArchivePath());
      tty->print_cr("Base archive version %d", static_mapinfo->version());
    } else {
      tty->print_cr("Static archive name: %s", static_mapinfo->full_path());
      tty->print_cr("Static archive version %d", static_mapinfo->version());
    }

    SystemDictionaryShared::print_shared_archive(tty);
    if (dynamic_mapinfo != nullptr) {
      tty->print_cr("\n\nDynamic archive name: %s", dynamic_mapinfo->full_path());
      tty->print_cr("Dynamic archive version %d", dynamic_mapinfo->version());
      SystemDictionaryShared::print_shared_archive(tty, false);
    }

    CountSharedSymbols cl;
    SymbolTable::shared_symbols_do(&cl);
    tty->print_cr("Number of shared symbols: %d", cl.total());
    tty->print_cr("Number of shared strings: %zu", StringTable::shared_entry_count());
    tty->print_cr("VM version: %s\r\n", static_mapinfo->vm_version());
    if (FileMapInfo::current_info() == NULL || _archive_loading_failed) {
      tty->print_cr("archive is invalid");
      vm_exit(1);
    } else {
      tty->print_cr("archive is valid");
      vm_exit(0);
    }
  }
}
function
c++
875
/**
 * Exception writer that renders errors as HTML through an MVC {@link View}.
 * Registered with a low priority (order 100) for "text/html" responses.
 */
public class EforceExceptionWriterMvc extends EforceExceptionWriterAbstract {

    private View view;

    /**
     * @param view the view used to render the error model
     */
    public EforceExceptionWriterMvc(View view) {
        this.produces = "text/html";
        this.order = 100;
        this.view = view;
    }

    /**
     * Sets the HTTP status from the error and renders the view with a model
     * containing the error (extensible via {@link #customizeModel(Map)}).
     *
     * @throws IOException if view rendering fails (original cause attached)
     */
    @Override
    public void write(ErrorTO error, HttpServletRequest req, HttpServletResponse res) throws IOException {
        res.setStatus(error.getCode());

        Map<String, Object> model = new HashMap<>();
        model.put("error", error);
        customizeModel(model);

        try {
            view.render(model, req, res);
        } catch (Exception e) {
            // Wrap any rendering failure in an IOException for the caller.
            throw new IOException("Cannot render exception. View rendering failed. " + e.getMessage(), e);
        }
    }

    /**
     * Hook for subclasses to add extra entries to the view model.
     *
     * @param model the mutable model passed to the view
     */
    protected void customizeModel(Map<String, Object> model) {
        // For extension/override via subclassing.
    }
}
class
java
876
/// <summary>
/// ASP.NET Core middleware that leases an <see cref="IRenderEngine"/> from
/// the factory for the duration of each request, exposes it via
/// <c>HttpContext.Items["RenderEngine"]</c>, and always returns it to the
/// factory afterwards.
/// </summary>
public class RenderEngineMiddleware
{
    private readonly RequestDelegate _next;
    private readonly IRenderEngineFactory _renderEngineFactory;

    public RenderEngineMiddleware(RequestDelegate next, IRenderEngineFactory renderEngineFactory)
    {
        _next = next;
        _renderEngineFactory = renderEngineFactory;
    }

    /// <summary>
    /// Acquires an engine, stores it on the request context, invokes the
    /// rest of the pipeline, and releases the engine in a finally block so
    /// it is returned even when downstream middleware throws.
    /// </summary>
    public async Task Invoke(HttpContext context)
    {
        IRenderEngine engine = null;
        try
        {
            engine = _renderEngineFactory.RequestEngine();
            context.Items["RenderEngine"] = engine;
            await _next(context);
        }
        finally
        {
            // engine may be null if RequestEngine itself threw.
            if (engine != null)
                _renderEngineFactory.ReturnEngine(engine);
        }
    }
}
class
c#
877
def _prepare_response(self, method, result): method_name = method if callable(method): method_name = method.__name__ if hasattr(method, "skip_secure_response"): return result routing = getattr(method, "routing", None) output_param = routing["output_param"] if not output_param: _logger.warning( "DEPRECATED: You must define an output schema for method %s " "in service %s", method_name, self._name, ) return result return output_param.to_response(self, result)
function
python
878
def _indexOfEndTag(istack): if len(istack) <= 0: return 0 if not istack[0].isOpeningTag(): return 0 cnt = 0 opener = istack[0] for index, el in enumerate(istack[1:]): if el.isOpeningTag() and \ el.getTagName().lower() == opener.getTagName().lower(): cnt += 1 elif el.isEndTagTo(opener): if cnt == 0: return index + 1 cnt -= 1 return 0
function
python
879
/**
 * Builds the JSON payload for a crowdsourcing task: the text and media URL
 * go into a nested "info" object, with answer/quorum/calibration/priority
 * settings alongside it at the top level.
 *
 * @param text       task text placed inside the "info" object
 * @param n_answers  required number of answers
 * @param quorum     quorum setting
 * @param calibration calibration flag/value
 * @param project_id id of the owning project
 * @param priority_0 task priority
 * @param media_url  media URL placed inside the "info" object
 * @return the assembled JSONObject, or null when construction fails
 *         (the error is logged).
 */
public static JSONObject BuildJsonTaskContent(String text, String n_answers, String quorum,
        String calibration, int project_id, String priority_0, String media_url) {
    try {
        JSONObject app = new JSONObject();
        app.put("text", text);
        app.put("media_url", media_url);
        JSONObject app2 = new JSONObject();
        app2.put("info", app);
        app2.put("n_answers", n_answers);
        app2.put("quorum", quorum);
        app2.put("calibration", calibration);
        app2.put("project_id", project_id);
        app2.put("priority_0", priority_0);
        return app2;
    } catch (Exception e) {
        // Swallow and signal failure via null; callers must check.
        logger.error("Error ", e);
        return null;
    }
}
function
java
880
def activate_cloud(self, username, cloudname):
    """Mark *cloudname* as registered and active in the user's defaults.

    Args:
        username: the cloudmesh user id.
        cloudname: the cloud to activate.

    Returns:
        int: 1 if the cloud exists and was activated, 0 if the cloud
        could not be found for this user.
    """
    try:
        # Debug-log the call with its arguments; best-effort only.
        _args = locals()
        del (_args['self'])
        log.debug("[{0}()] called with [{1}]".format(sys._getframe().f_code.co_name,
                                                     str(_args)))
    except:
        # NOTE(review): bare except silently swallows everything here —
        # acceptable for logging, but consider `except Exception`.
        pass
    self._connect_to_mongo()
    cloud = self.mongo.get_cloud(
        cm_user_id=username, cloud_name=cloudname, force=True)
    if not cloud:
        # Unknown cloud for this user: nothing to activate.
        return 0
    else:
        defaults = self.mongo.db_defaults.find_one(
            {'cm_user_id': username})
        # Add to both lists idempotently before writing back.
        if cloudname not in defaults['registered_clouds']:
            defaults['registered_clouds'].append(cloudname)
        if cloudname not in defaults['activeclouds']:
            defaults['activeclouds'].append(cloudname)
        self.mongo.db_defaults.update({'cm_user_id': username},
                                      defaults,
                                      upsert=True)
        return 1
function
python
881
def write(tag, tensor, step=None, metadata=None, name=None):
    """Write a generic summary to the default summary writer.

    Args:
        tag: string tag for the summary.
        tensor: the value to write, or a no-arg callable producing it
            (evaluated lazily only when recording is on).
        step: explicit step; falls back to the globally set step.
        metadata: SummaryMetadata proto, pre-serialized bytes, or None.
        name: optional name scope for the op.

    Returns:
        A boolean tensor: True if a summary was written, False otherwise
        (e.g. when no default writer is set).
    """
    with ops.name_scope(name, "write_summary") as scope:
        # No default writer configured: nothing to do.
        if _summary_state.writer is None:
            return constant_op.constant(False)
        if step is None:
            step = get_step()
        # Accept a proto, pre-serialized bytes, or None for metadata.
        if metadata is None:
            serialized_metadata = b""
        elif hasattr(metadata, "SerializeToString"):
            serialized_metadata = metadata.SerializeToString()
        else:
            serialized_metadata = metadata

        def record():
            # Deferred body: only evaluated when recording is enabled.
            if step is None:
                raise ValueError("No step set. Please specify one either through the "
                                 "`step` argument or through "
                                 "tf.summary.experimental.set_step()")

            # Summaries are serialized on the CPU regardless of the
            # surrounding device placement.
            with ops.device("cpu:0"):
                summary_tensor = tensor() if callable(tensor) else array_ops.identity(
                    tensor)
                write_summary_op = gen_summary_ops.write_summary(
                    _summary_state.writer._resource,
                    step,
                    summary_tensor,
                    tag,
                    serialized_metadata,
                    name=scope)
                with ops.control_dependencies([write_summary_op]):
                    return constant_op.constant(True)

        op = smart_cond.smart_cond(
            should_record_summaries(), record, _nothing, name="summary_cond")
        if not context.executing_eagerly():
            # In graph mode, collect the op so legacy summary collection
            # mechanisms can find and run it.
            ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op)
        return op
function
python
882
def add_trap_bias(train, test, bias_factor=50.0):
    """Add a 'TrapBias' column to *train* and *test* in place.

    For every trap appearing in either frame, the bias compares that
    trap's smoothed WNV-positive rate against the overall smoothed rate;
    the exponent is damped by the statistical significance of the
    deviation (two-sided hypergeometric tail probability) scaled by
    *bias_factor*.  Traps absent from *train* receive a neutral bias of
    1.0.

    Args:
        train: DataFrame with 'Trap' and 'WnvPresent' columns.
        test: DataFrame with a 'Trap' column.
        bias_factor: larger values damp the bias more aggressively.
    """
    logit = lambda p: np.log(p) - np.log(1 - p)
    traps = np.unique(np.concatenate([train['Trap'], test['Trap']]))
    num_total, num_wnv = len(train), np.sum(train['WnvPresent'])
    # 0.05 / 1.0 act as a weak smoothing prior so empty or all-negative
    # traps do not produce zero or undefined ratios.
    ratio_wnv = (num_wnv + 0.05) / (num_total + 1.0)
    traps_bias = {}
    for trap in traps:
        num, wnv = np.sum(train['Trap'] == trap), np.sum(train[train['Trap'] == trap]['WnvPresent'])
        if num == 0:
            # Trap only occurs in the test set: no evidence, neutral bias.
            bias = 1.0
        else:
            ratio = (wnv + 0.05) / (num + 1.0)
            prob = np.min([scipy.stats.hypergeom.sf(wnv - 1, num_total, num_wnv, num),
                           scipy.stats.hypergeom.cdf(wnv, num_total, num_wnv, num)])
            bias = (ratio / ratio_wnv) ** np.max([0.0, np.min([1.0, (logit(1.0 - prob) / bias_factor)])])
        traps_bias[trap] = bias
    # BUG FIX: the original used np.array(map(...)); on Python 3 map()
    # returns an iterator, so np.array built a useless 0-d object array.
    # A list comprehension is correct on both Python 2 and 3.
    train['TrapBias'] = np.array([traps_bias[trap] for trap in train['Trap']], dtype=np.float32)
    test['TrapBias'] = np.array([traps_bias[trap] for trap in test['Trap']], dtype=np.float32)
function
python
883
// UnmarshalJSON decodes a navitia journey Section. It delegates the plain
// fields to the standard decoder via jsonSection, then post-processes the
// fields needing conversion: departure/arrival timestamps, the duration
// (seconds -> time.Duration) and the optional GeoJSON path, which must be
// a LineString.
func (s *Section) UnmarshalJSON(b []byte) error {
	// Wire the directly-assignable fields to their destinations so a single
	// json.Unmarshal fills them in place.
	data := &jsonSection{
		Type:       &s.Type,
		ID:         &s.ID,
		From:       &s.From,
		To:         &s.To,
		Mode:       &s.Mode,
		Display:    &s.Display,
		Additional: &s.Additional,
		StopTimes:  &s.StopTimes,
		Path:       &s.Path,
	}

	err := json.Unmarshal(b, data)
	if err != nil {
		return fmt.Errorf("error while unmarshalling Section: %w", err)
	}

	// Helper that builds rich, contextual unmarshalling errors.
	gen := unmarshalErrorMaker{"Section", b}

	s.Departure, err = parseDateTime(data.Departure)
	if err != nil {
		return gen.err(err, "Departure", "departure_date_time", data.Departure, "parseDateTime failed")
	}

	s.Arrival, err = parseDateTime(data.Arrival)
	if err != nil {
		return gen.err(err, "Arrival", "arrival_date_time", data.Arrival, "parseDateTime failed")
	}

	// The API reports the duration in whole seconds.
	s.Duration = time.Duration(data.Duration) * time.Second

	if data.Geo != nil {
		// Guard against a nil coordinate slice: Decode would panic on it.
		if data.Geo.Coordinates == nil {
			return gen.err(nil, "Geo", "geojson", data.Geo, "Geo.Coordinates is nil, can't continue as that will cause a panic")
		}
		geot, err := data.Geo.Decode()
		if err != nil {
			return gen.err(err, "Geo", "geojson", data.Geo, "Geo.Decode() failed")
		}
		// The section path is expected to be a LineString geometry.
		geo, ok := geot.(*geom.LineString)
		if !ok {
			return gen.err(err, "Geo", "geojson", data.Geo, "Geo type assertion failed!")
		}
		s.Geo = geo
	}

	return nil
}
function
go
884
class Analyzer:
    """Runs pairwise comparisons for the defined renderers over each page of
    the specified document list or directory"""

    def __init__(self, files, renderers=get_sparclur_renderers(), metrics='sim', parser_args=dict(),
                 max_workers=1, timeout=None, overall_timeout=None, recurse=False, base_path=None,
                 progress_bar=True, save_path=None):
        """
        Parameters
        ----------
        files : str or List[str]
            Path to a directory or a text file of PDF paths or a List of paths
        renderers : List[str] or List[Renderer]
            The desired renderers to compare. Must select at least 2 renderers. Default is all SPARCLUR renderers.
        metrics: str or List[str]
            List of metrics to return in the final results. Default is just the SPARCLUR similarity score.
            Full list: whash, phash, size, sum_square, ccorr, ccoeff, entropy
            Use 'all' to do the full set.
        parser_args : Dict[str, Dict[str, Any]]
            A dictionary of dictionaries containing any optional parameters to pass into the renderers. See an
            each renderer for it's possible parameters.
        max_workers : int
            The desired number of workers for the mutli-processing.
        timeout : int
            The number of seconds each parser is given to render the document and the time for each page comparison.
        overall_timeout : int
            The number of seconds before the task is cancelled in the mutli-processing.
        recurse : bool
            Whether or not the directory passed into the files parameter should be recursively searched for PDF's
        base_path : str
            A base directory that should be appended to the list of paths passed into files
        progress_bar : bool
            Whether or not to display a progress bar
        save_path: str
            If specified, will save a csv of the run results to save_path
        """
        # NOTE(review): default `parser_args=dict()` is a mutable default
        # argument; it is never mutated here, but `None` would be safer.
        self._renderers = _parse_renderers(renderers)
        self._metrics = _set_metrics(metrics)
        self._parser_args = parser_args
        self._files = create_file_list(files, recurse=recurse, base_path=base_path)
        self._max_workers = max_workers
        self._timeout = timeout
        self._overall_timeout = overall_timeout
        self._progress_bar = progress_bar
        self._save_path = save_path

    @property
    def max_workers(self):
        """Return the set number of max workers"""
        return self._max_workers

    @max_workers.setter
    def max_workers(self, m):
        """Set a new number of max workers"""
        self._max_workers = m

    @property
    def overall_timeout(self):
        """Return the set timeout value"""
        return self._overall_timeout

    # NOTE(review): the setter below is named `compare_timeout`, not
    # `overall_timeout`, so it creates a *separate* property object.  The
    # deleter that follows is named `overall_timeout`, which rebinds
    # `overall_timeout` to a property carrying getter+setter+deleter — so
    # `overall_timeout` accidentally works end-to-end, but a stray
    # `compare_timeout` property (getter+setter only) is also exposed.
    # Renaming these would change the public surface, so only flagging.
    @overall_timeout.setter
    def compare_timeout(self, t):
        """Set a new timeout parameter"""
        self._overall_timeout = t

    @compare_timeout.deleter
    def overall_timeout(self):
        self._overall_timeout = None

    @property
    def timeout(self):
        """Return the set timeout value"""
        return self._timeout

    # NOTE(review): same accidental-but-working pattern as above — the
    # setter is named `parser_timeout` and the deleter rebinds `timeout`,
    # leaving a stray `parser_timeout` property on the class.
    @timeout.setter
    def parser_timeout(self, t):
        """Set a new timeout parameter"""
        self._timeout = t

    @parser_timeout.deleter
    def timeout(self):
        self._timeout = None

    @property
    def renderer_list(self):
        """List of the renderers to be compared"""
        return self._renderers

    @renderer_list.setter
    def renderer_list(self, rl):
        self._renderers = _parse_renderers(rl)

    @property
    def metrics(self):
        """List of the metrics to be returned"""
        return self._metrics

    @metrics.setter
    def metrics(self, m):
        self._metrics = _set_metrics(m)

    @property
    def progress_bar(self):
        """Return the progress bar setting"""
        return self._progress_bar

    @progress_bar.setter
    def progress_bar(self, p: bool):
        """Set whether or not to show progress bar"""
        self._progress_bar = p

    @progress_bar.deleter
    def progress_bar(self):
        self._progress_bar = False

    @property
    def save_path(self):
        """Optional CSV output path for run results."""
        return self._save_path

    @save_path.setter
    def save_path(self, sp):
        self._save_path = sp

    @save_path.deleter
    def save_path(self):
        self._save_path = None

    def run(self):
        """
        Return the comparisons for each page of each document from the file list

        Returns
        -------
        List[Dict[str, Any]]
        """
        # Bundle the per-file work items for the serial/parallel runners.
        transformed_data = [
            {'path': path,
             'renderers': self._renderers,
             'parser_args': self._parser_args,
             'timeout': self._timeout,
             'metrics': self._metrics} for path in self._files
        ]
        if self._max_workers == 1:
            results = _serial_prc(files=transformed_data,
                                  progress_bar=self._progress_bar,
                                  compare_timeout=self._overall_timeout,
                                  renderers=self._renderers,
                                  metrics=self._metrics
                                  )
        else:
            results = _parallel_prc(files=transformed_data,
                                    progress_bar=self._progress_bar,
                                    max_workers=self._max_workers,
                                    overall_timeout=self._overall_timeout,
                                    renderers=self._renderers,
                                    metrics=self._metrics
                                    )
        # NOTE(review): when save_path is set the results are written to CSV
        # and the method returns None — callers must not rely on the return
        # value in that mode.
        if self._save_path is not None:
            pd.DataFrame(results).to_csv(path_or_buf=self._save_path, index=False)
        else:
            return results
python
885
/**
 * 2-SAT solver: builds the implication graph for {@code formula}, finds its
 * strongly connected components, and assigns truth values per component.
 *
 * @param formula   clauses as pairs of literals
 * @param numOfVars number of variables; literals use indices 1..2*numOfVars
 * @return the per-literal component assignment, or null when the formula is
 *         unsatisfiable (a variable shares an SCC with its negation).
 *         A satisfying assignment (1/0 per variable) is also printed.
 */
public static int[] solve(ArrayList<ArrayList<Integer>> formula, int numOfVars) {
    // Index 0 is unused; literals occupy 1..2*numOfVars.
    varNum = numOfVars * 2 + 1;
    definedIndex = new int[varNum];
    prevIndex = new int[varNum];
    graph = new ArrayList[varNum];
    answer = new int[varNum];
    scc = new ArrayList[varNum];
    createGraph(formula);
    getAllSCC();
    /**
     * Once all SCCs have been found, check if each SCC can traverse to its negation.
     * If it does, the formula is not satisfiable.
     * Otherwise, the answer will indicate the truth value of each vertex.
     * Note: The answer might contain something like 5879 => 5233. The number above or below
     * the number of variables should indicate if it is true or false.
     * If a variable points to null, ignore it.
     */
    for (int l = 1; l < varNum; l++) {
        ArrayList<Integer> temp = scc[l];
        if (temp != null) {
            for (int u : temp) {
                int nu = negate(u);
                // A literal and its negation in the same SCC => UNSAT.
                if (answer[nu] == l) {
                    return null;
                }
                answer[u] = l;
            }
        }
    }
    // Print the derived assignment: component index <= numOfVars maps to true.
    for (int i = 1; i <= numOfVars; i++) {
        if (answer[i] <= numOfVars) {
            System.out.print("1 ");
        } else {
            System.out.print("0 ");
        }
    }
    System.out.print("\n");
    return answer;
}
function
java
886
/**
 * Test helper: stores {@code count} freshly created SignalDetections in the
 * repository, then asserts retrieveAll() returns exactly those detections.
 *
 * @param count number of detections to create and store
 */
private void storeAndRetrieveAll(int count) {
    int i = count;
    ArrayList<SignalDetection> signalDetections = new ArrayList<>(count);
    while (i > 0) {
        SignalDetection signalDetection = SignalDetection.create(
                monitoringOrganization, stationId, featureMeasurements, creationInfoId);
        signalDetections.add(signalDetection);
        signalDetectionRepositoryJpa.store(signalDetection);
        i--;
    }
    Collection<SignalDetection> dbSignalDetections = signalDetectionRepositoryJpa.retrieveAll();
    // Size check plus containment establishes set equality here.
    assertEquals(count, dbSignalDetections.size());
    assertTrue(signalDetections.containsAll(dbSignalDetections));
}
function
java
887
/**
 * Records one call's outcome and latency into the aggregate statistics.
 * Synchronized because the statistic objects and the error-code map are
 * shared across threads.
 *
 * @param start     call start time (epoch millis)
 * @param success   whether the call succeeded
 * @param errorCode error code bucket used for per-code stats on failure
 * @return the measured call duration in milliseconds
 */
final synchronized long recordCall(long start, boolean success, String errorCode) {
    long duration = System.currentTimeMillis() - start;
    if (success) {
        _successful.recordCall(start, duration);
    } else {
        _unsuccessful.recordCall(start, duration);
        // Lazily create the per-error-code statistic on first failure.
        Statistic errorCodeStat = _errorCodeStatistics.get(errorCode);
        if (errorCodeStat == null) {
            errorCodeStat = new Statistic();
            _errorCodeStatistics.put(errorCode, errorCodeStat);
        }
        errorCodeStat.recordCall(start, duration);
    }
    return duration;
}
function
java
888
def BSF(cpu, dest, src):
    """Symbolic x86 Bit Scan Forward.

    Scans *src* for its lowest set bit and writes that bit index to
    *dest*; when the source is zero, ZF is set and *dest* keeps its old
    value (modelled with an ITE).  Built as a chain of symbolic ITE
    expressions so it works on symbolic as well as concrete values.

    NOTE(review): Python 2 code (`xrange`).
    """
    value = src.read()
    # `flag` accumulates "a set bit was seen at a position below pos";
    # the ITE chain therefore keeps the first (lowest) set position.
    flag = EXTRACT(value, 0, 1) == 1
    res = 0
    for pos in xrange(0, src.size):
        res = ITE(dest.size, flag, res, pos)
        flag = flag | (EXTRACT(value, pos, 1) == 1)
    cpu.ZF = value == 0
    # dest is left unchanged when the source was zero (ZF set).
    dest.write(ITE(dest.size, cpu.ZF, dest.read(), res))
    res = value
    # Parity of the low byte of the *source* value.
    # NOTE(review): hardware leaves PF undefined after BSF — this models a
    # specific choice; confirm it matches the rest of the emulator.
    cpu.PF = (res ^ res>>1 ^ res>>2 ^ res>>3 ^ res>>4 ^ res>>5 ^ res>>6 ^ res>>7)&1 == 0
function
python
889
class Manager:
    """ Event manager

    Lets callers subscribe handlers to event types and dispatches fired
    events to every matching handler.  A module-level instance exposes
    :func:`.subscribe`, :func:`.handle` and the :func:`.subscriber`
    decorator as aliases of the corresponding methods.
    """

    def __init__(self):
        self.registry = Registry(("event_type", TypeAxis()))

    def subscribe(self, handler, event_type):
        """ Register ``handler`` for events of ``event_type``."""
        handlers = self.registry.get_registration(event_type)
        if handlers is None:
            handlers = self._register_handler_set(event_type)
        handlers.add(handler)

    def unsubscribe(self, handler, event_type):
        """ Remove ``handler`` from ``event_type``, if registered."""
        handlers = self.registry.get_registration(event_type)
        if handlers and handler in handlers:
            handlers.remove(handler)

    def handle(self, event):
        """ Dispatch ``event`` to all matching handlers.

        Handlers run in no particular order.
        """
        for handler_set in self.registry.query(event):
            # Iterate a snapshot so handlers may (un)subscribe during dispatch.
            for handler in set(handler_set):
                handler(event)

    def _register_handler_set(self, event_type):
        """ Create and register a fresh, empty handler set for ``event_type``."""
        handlers = set()
        self.registry.register(handlers, event_type)
        return handlers

    def subscriber(self, event_type):
        """ Decorator form of :meth:`subscribe`

        Works like this:

            >>> mymanager = Manager()
            >>> class MyEvent():
            ...     pass
            >>> @mymanager.subscriber(MyEvent)
            ... def mysubscriber(evt):
            ...     # handle event
            ...     return
            >>> mymanager.handle(MyEvent())
        """
        def decorator(func):
            self.subscribe(func, event_type)
            return func
        return decorator
class
python
890
class AIFProgressMonitor extends NullProgressMonitor { private static final SimpleDateFormat FORMAT = new SimpleDateFormat("HH:mm:ss.SSS"); private static final int LOGGING_THRESHOLD = 500; private final Logger logger = (Logger) (org.slf4j.LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME)); private boolean logging = false; private BufferedWriter out; private Queue<ImmutablePair<String, Long>> shapeList = new LinkedList<>(); private int shapeNum; private int numShapes; /** * Create a progress monitor that logs validation progress to StdOut. */ AIFProgressMonitor() { logging = true; out = null; } /** * Create a progress monitor that logs validation progress to the specified filename. * @param filename the filename to output validation progress * @throws IOException if the file cannot be created. */ AIFProgressMonitor(String filename) throws IOException { log("Creating file " + filename + " for logging validation progress."); out = Files.newBufferedWriter(Paths.get(filename)); } private void log(String text) { if (logging) { logger.info(text); } } @Override public void beginTask(String label, int numShapes) { log("Beginning task " + label + " (" + numShapes + ")"); this.numShapes = numShapes; this.shapeNum = 0; log("Logging to file number of shapes: " + this.numShapes); if (out != null) { try { out.write("Total: " + this.numShapes + "\n"); out.write("Shape#\tShape Name\tStart Time\tEnd Time\tDuration (ms)\n"); out.flush(); } catch (IOException ioe) { System.err.println("Could not write to progress monitor."); } } } @Override public void done() { // Currently, this doesn't actually get called by TopBraid. If it did, we could close the BufferedWriter here. log("DONE!"); } // Note that this depends on the format of the label that TopBraid's validator engine sends to progress monitors. 
// e.g., label = "Shape 3: sh:DerivedValuesConstraintComponent" @Override public void subTask(String label) { log("Validating " + label); shapeList.add(new ImmutablePair<>(label.split(" ")[2], System.currentTimeMillis())); } @Override public void worked(int amount) { final Date endTime = new Date(); shapeNum += amount; log("Completed shape " + shapeNum + " / " + numShapes); final ImmutablePair<String, Long> pair = shapeList.remove(); final String shapeName = pair.getKey(); final long startTimeMs = pair.getValue(); final long duration = endTime.getTime() - startTimeMs; if (duration > LOGGING_THRESHOLD) { log(" Duration: " + duration + "ms"); log(" Timestamp: " + FORMAT.format(endTime)); } // Write a row of the file. if (out != null) { try { final String row = shapeNum + "\t" + shapeName + "\t" + FORMAT.format(new Date(startTimeMs)) + "\t" + FORMAT.format(endTime) + "\t" + duration + "\n"; out.write(row); out.flush(); if (shapeNum >= numShapes) { out.close(); } } catch (IOException ioe) { logger.error("Could not write to progress monitor."); } } } // Simulate a validator invoking both variants of the AIFProgressMonitor public static void main(String[] args) { AIFProgressMonitor fileMonitor, stdoutMonitor; final java.util.Random r = new java.util.Random(); final String FILENAME = "test-progress.tab"; final String SHAPES[] = new String[]{"FOO", "BAR", "BAZ", "FEE", "FI", "FO", "FUM"}; final int numShapes = SHAPES.length; System.out.println("Test file- and stdout-based AIFProgressMonitors."); System.out.println("Check " + FILENAME + "for file-based results.\n"); try { fileMonitor = new AIFProgressMonitor(FILENAME); stdoutMonitor = new AIFProgressMonitor(); fileMonitor.beginTask("Test validation", numShapes); stdoutMonitor.beginTask("Test validation", numShapes); for (int i = 1; i <= numShapes; i++) { fileMonitor.subTask("Shape " + i + ": " + SHAPES[i - 1]); stdoutMonitor.subTask("Shape " + i + ": " + SHAPES[i - 1]); Thread.sleep(r.nextInt(1000)); fileMonitor.worked(1); 
stdoutMonitor.worked(1); } fileMonitor.done(); stdoutMonitor.done(); } catch (Exception e) { System.err.println("AIFProcessMonitor unit test error."); e.printStackTrace(); } } }
class
java
891
func (s *Stamina) Consumes(state move.State) { switch state { case move.Dash: s.consumes(2) case move.Walk, move.Ascending, move.Descending: s.consumes(1) default: } }
function
go
892
/// <summary>
/// Execute <paramref name="loopBody"/> for indexes [start, stop) in parallel
/// on the shared worker threads, blocking until all iterations complete.
/// </summary>
/// <param name="start">First loop index (inclusive).</param>
/// <param name="stop">End loop index (exclusive).</param>
/// <param name="loopBody">Delegate invoked once per index.</param>
public static void For( int start, int stop, ForLoopBody loopBody )
{
    // Serialize whole-loop executions: only one parallel For runs at a time.
    lock ( sync )
    {
        Parallel instance = Instance;

        // start - 1 because workers presumably pre-increment currentIndex to
        // claim the next index — TODO confirm against the worker loop.
        instance.currentIndex = start - 1;
        instance.stopIndex = stop;
        instance.loopBody = loopBody;

        // Wake every worker: mark it busy, then signal that a job is available.
        for ( int i = 0; i < threadsCount; i++ )
        {
            instance.threadIdle[i].Reset( );
            instance.jobAvailable[i].Set( );
        }

        // Block until each worker has signalled it is idle again, i.e. the
        // index range is exhausted.
        for ( int i = 0; i < threadsCount; i++ )
        {
            instance.threadIdle[i].WaitOne( );
        }
    }
}
function
c#
893
public synchronized void zeroCalibrate(double calPower) { final String funcName = "zeroCalibrate"; if (debugEnabled) { dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API, "calPower=%f", calPower); } Calibration power is always negative. Motor 1 always has a lower limit switch. If there is a motor 2, motor 2 has a lower limit switch only if it is independent of motor 1 and needs synchronizing with motor 1. this.calPower = -Math.abs(calPower); calibrating = true; motor1ZeroCalDone = false; motor2ZeroCalDone = motor2 == null || syncGain == 0.0; setTaskEnabled(true); if (debugEnabled) { dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API); } }
function
java
894
def _mod_init(self, low):
    """Run the state module's ``mod_init`` for ``low``, at most once per module.

    Bails out silently when the state function itself does not exist, when
    ``mod_init`` has already run for this module, when the module defines no
    ``mod_init``, or when ``mod_init`` returns a falsy value.
    """
    try:
        # Indexing the lazy loader forces the module to load; a missing
        # state function surfaces as KeyError.  The value is discarded —
        # only the existence check matters here.
        self.states[
            "{}.{}".format(low["state"], low["fun"])
        ]
    except KeyError:
        return
    minit = "{}.mod_init".format(low["state"])
    # self.mod_init tracks modules whose mod_init already ran this session.
    if low["state"] not in self.mod_init:
        # NOTE(review): peeks at the loader's private _dict to avoid
        # triggering a load just to test for mod_init's presence.
        if minit in self.states._dict:
            mret = self.states[minit](low)
            if not mret:
                # A falsy return means init failed; do not mark it done so
                # it will be retried on the next call.
                return
            self.mod_init.add(low["state"])
function
python
895
/// Bind a TCP listener at `addr` and wrap it as a `WebsocketListener`
/// whose inner stream yields websocket connection attempts, with up to
/// `max_pending_connections` handshakes in flight concurrently.
pub async fn websocket_bind(addr: Url2, config: Arc<WebsocketConfig>) -> Result<WebsocketListener> {
    let addr = url_to_addr(&addr, config.scheme).await?;
    // Build the raw socket with net2 so SO_REUSEADDR can be set before bind.
    let socket = match &addr {
        SocketAddr::V4(_) => net2::TcpBuilder::new_v4()?,
        SocketAddr::V6(_) => net2::TcpBuilder::new_v6()?,
    }
    .reuse_address(true)?
    .bind(addr)?
    .listen(config.max_pending_connections as i32)?;
    // Tokio requires the std listener to be non-blocking before conversion.
    socket.set_nonblocking(true)?;
    let socket = tokio::net::TcpListener::from_std(socket)?;
    let local_addr = addr_to_url(socket.local_addr()?, config.scheme);
    // Map each accepted socket through `connect` (the websocket handshake),
    // letting up to max_pending_connections handshakes proceed unordered.
    let socket = socket
        .map({
            let config = config.clone();
            move |socket_result| connect(config.clone(), socket_result)
        })
        .buffer_unordered(config.max_pending_connections)
        .boxed();
    tracing::info!(
        message = "bind",
        local_addr = %local_addr,
    );
    Ok(WebsocketListener {
        config,
        local_addr,
        socket,
    })
}
function
rust
896
/**
 * Handle a received PCPS data indication (sniffed frame) from the radio,
 * emitting it either as a human-readable hex dump or as a pcap record.
 *
 * @param params     received-frame parameters (PSDU bytes, length, CS, ED)
 * @param pDeviceRef device the indication came from
 * @return CA_ERROR_SUCCESS always
 */
static ca_error handlePcpsDataIndication(struct PCPS_DATA_indication_pset *params, struct ca821x_dev *pDeviceRef)
{
	if (out_mode == OUT_MODE_HEX)
	{
		// Hex mode: timestamp, frame metadata, then the raw PSDU bytes.
		printTime(NULL);
		ca_print("Rx len %d, CS: %d, ED: %d >", params->PsduLength, params->CS, params->ED);
		for (int i = 0; i < params->PsduLength; i++)
		{
			ca_print(" %02x", params->Psdu[i]);
		}
		ca_print("\n");
	}
	else if (out_mode == OUT_MODE_PCAP)
	{
		pcaprec_hdr_t hdr = {0, 0, 0, 0};
		ca_error      error = CA_ERROR_SUCCESS;
		fillTimestamp(&hdr);
		// Record length includes the fake ethernet header prepended below.
		hdr.incl_len = params->PsduLength + sizeof(ethernet_header);
		hdr.orig_len = params->PsduLength + sizeof(ethernet_header);
		// Accumulate write errors; any failure triggers the recovery below.
		error |= ca_write(&hdr, sizeof(hdr));
		error |= ca_write(&ethernet_header, sizeof(ethernet_header));
		error |= ca_write(params->Psdu, params->PsduLength);
		ca_flush();
		if (error && dPipeName)
		{
			// Output pipe broke (e.g. reader restarted): reset the radio,
			// reopen the pipe, re-emit the pcap header and re-init sniffing.
			MLME_RESET_request_sync(1, pDeviceRef);
			open_pipe(dPipeName);
			printPcapHeader();
			initialiseRadio(pDeviceRef);
		}
		if (debugMode)
		{
			// Mirror the hex dump to stderr so it doesn't corrupt the
			// pcap stream on stdout.
			printTime(stderr);
			fprintf(stderr, "Rx len %d, CS: %d, ED: %d >", params->PsduLength, params->CS, params->ED);
			for (int i = 0; i < params->PsduLength; i++)
			{
				fprintf(stderr, " %02x", params->Psdu[i]);
			}
			fprintf(stderr, "\n");
		}
	}
	return CA_ERROR_SUCCESS;
}
function
c
897
def create_agent(
    sess, environment, summary_writer=None):
    """Instantiate the agent selected by ``FLAGS.agent_name``.

    Args:
        sess: unused; kept for interface compatibility with callers.
        environment: environment exposing ``action_space.n``.
        summary_writer: optional summary writer handed to the agent.

    Returns:
        The constructed agent instance.

    Raises:
        ValueError: if ``FLAGS.agent_name`` is not a recognized agent.
    """
    name = FLAGS.agent_name
    if name == 'discounted_jax_dqn':
        agent_cls = discounted_dqn_agent.DiscountedJaxDQNAgentWithAuxiliaryMC
    elif name == 'cumulant_jax_dqn':
        agent_cls = dqn_agent.CumulantJaxDQNAgentWithAuxiliaryMC
    else:
        raise ValueError('{} is not a valid agent name'.format(FLAGS.agent_name))
    return agent_cls(num_actions=environment.action_space.n,
                     summary_writer=summary_writer)
function
python
898
/// <summary>
/// Parse a raw Xiph (Vorbis) comment block: a length-prefixed UTF-8 vendor
/// string followed by a count of length-prefixed "KEY=value" fields.
/// All lengths are 32-bit little-endian.
/// </summary>
/// <param name="data">Raw comment block bytes.</param>
/// <exception cref="ArgumentNullException">data is null.</exception>
protected void Parse (ByteVector data)
{
    if (data == null)
        throw new ArgumentNullException ("data");

    // Invalidate any cached picture state before repopulating fields.
    picture_fields_dirty = false;
    pictures = null;

    int pos = 0;

    // Vendor string: 4-byte little-endian length, then UTF-8 bytes.
    int vendor_length = (int) data.Mid (pos, 4)
        .ToUInt (false);
    pos += 4;

    vendor_id = data.ToString (StringType.UTF8, pos, vendor_length);
    pos += vendor_length;

    // Number of comment fields that follow.
    int comment_fields = (int) data.Mid (pos, 4)
        .ToUInt (false);
    pos += 4;

    for(int i = 0; i < comment_fields; i++) {
        // Each field: 4-byte length, then a UTF-8 "KEY=value" string.
        int comment_length = (int) data.Mid (pos, 4)
            .ToUInt (false);
        pos += 4;

        string comment = data.ToString (StringType.UTF8, pos, comment_length);
        pos += comment_length;

        int comment_separator_position = comment
            .IndexOf ('=');

        // Malformed entries without '=' are skipped silently.
        if (comment_separator_position < 0)
            continue;

        // Keys are case-insensitive; normalize to uppercase invariant.
        string key = comment.Substring (0,
            comment_separator_position)
            .ToUpper (
                CultureInfo.InvariantCulture);

        string value = comment.Substring (
            comment_separator_position + 1);
        string [] values;

        // Repeated keys accumulate: append to the existing value array
        // rather than overwrite.
        if (field_list.TryGetValue (key, out values)) {
            Array.Resize <string> (ref values, values.Length + 1);
            values [values.Length - 1] = value;
            field_list [key] = values;
        } else {
            SetField (key, value);
        }
    }
}
function
c#
899