language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
python
|
def checkInstrumentsValidity(self):
    """Warn if any analysis relies on an invalid instrument.

    An instrument is invalid when it is out-of-date or its calibration
    tests failed. For each offending analysis a unique
    "<analysis> (<instrument>)" entry is collected, and a single warning
    portal message listing all of them is displayed.
    """
    offenders = []
    for analysis in self.context.getAnalyses():
        if analysis.isInstrumentValid():
            continue
        entry = "%s (%s)" % (
            safe_unicode(analysis.Title()),
            safe_unicode(analysis.getInstrument().Title()))
        if entry not in offenders:
            offenders.append(entry)
    if offenders:
        message = _("Some analyses use out-of-date or uncalibrated "
                    "instruments. Results edition not allowed")
        message = "%s: %s" % (message, ", ".join(offenders))
        self.context.plone_utils.addPortalMessage(message, "warning")
|
java
|
/**
 * Drops any fractional-second suffix from a timestamp string.
 *
 * A null input becomes the empty string. Strings shorter than 20
 * characters are assumed to carry no fraction and are returned unchanged;
 * longer strings are truncated to their first 19 characters.
 *
 * @param date the timestamp string, may be null
 * @return the timestamp without fractional seconds, never null
 */
public static String parseDateWithoutFraction(String date) {
    if (date == null) {
        return "";
    }
    return date.length() < 20 ? date : date.substring(0, 19);
}
|
python
|
def expand_actions(self, actions):
    """Expand aliased actions into the full list of matching actions.

    Each action is kept as-is; when it is an alias, the actions it maps to
    are recursively expanded and appended after it. Call this before
    "matches?" and other checking methods, which expect expanded actions.
    """
    expanded = []
    for action in actions:
        expanded.append(action)
        if action in self.aliased_actions:
            expanded.extend(self.expand_actions(self.aliased_actions[action]))
    return expanded
|
java
|
/**
 * Factory method wrapping the DateRange constructor.
 *
 * @param start the start date
 * @param end the end date
 * @param unit the date field granularity of the range
 * @return a new DateRange over [start, end] with the given unit
 */
public static DateRange range(Date start, Date end, final DateField unit) {
    final DateRange result = new DateRange(start, end, unit);
    return result;
}
|
python
|
def enabled(self):
    """bool: ``True`` if BGP is enabled; ``False`` if BGP is disabled.

    Issues a NETCONF ``get-config`` against the running datastore with an
    xpath filter on ``rbridge-id/router/bgp`` and reports whether a ``bgp``
    element is present in the reply.
    """
    base_ns = 'urn:ietf:params:xml:ns:netconf:base:1.0'
    request = ET.Element('get-config', xmlns="%s" % base_ns)
    ET.SubElement(ET.SubElement(request, 'source'), 'running')
    ET.SubElement(request, 'filter',
                  type="xpath", select="%s" % 'rbridge-id/router/bgp')
    reply = self._callback(request, handler='get')
    bgp_ns = 'urn:brocade.com:mgmt:brocade-bgp'
    return reply.find('.//*{%s}bgp' % bgp_ns) is not None
|
java
|
/**
 * Drains the saved plain-command list: snapshots its contents, clears it,
 * logs how many entries were removed, and returns the snapshot.
 *
 * @return the commands that were saved, or an empty list when none were
 */
@Override
public List<String> removeSavedPlainCommands() {
    if (saved.isEmpty()) {
        return Collections.emptyList();
    }
    final List<String> drained = new ArrayList<>(saved);
    saved.clear();
    if (!drained.isEmpty()) {
        log.info("{} commands are removed from saved list", drained.size());
    }
    return drained;
}
|
java
|
/**
 * Ensures that an index satisfying {@code proposedIndex} exists, creating it when necessary.
 *
 * Validation performed up front:
 * - DESCENDING sort directions are rejected (indexes store ASCENDING only).
 * - TEXT indexes require SQLite FTS support to be available.
 * - Every field name must be valid, and field names must not repeat.
 *
 * "_id" and "_rev" are implicitly prepended to the field list when absent.
 * If an index with the same name and definition already exists — or one with
 * the same definition under a different name — that index is refreshed via
 * IndexUpdater and returned instead of creating a new one. A name clash with
 * a different definition is an error, as is a second TEXT index (limit of one
 * text index per database).
 *
 * @param proposedIndex the requested index; a generated name is assigned when it has none
 * @return the existing or newly created index
 * @throws QueryException on validation, listing, or creation failure
 */
private Index ensureIndexed(Index proposedIndex) throws QueryException {
    // Reject DESCENDING fields before doing any work.
    for (FieldSort fs : proposedIndex.fieldNames) {
        if (fs.sort == FieldSort.Direction.DESCENDING) {
            throw new UnsupportedOperationException("Indexes with Direction.DESCENDING are " +
                "not supported. To return data in descending order, create an index with " +
                "Direction.ASCENDING fields and execute the subsequent query with " +
                "Direction.DESCENDING fields as required.");
        }
    }
    if (proposedIndex.indexType == IndexType.TEXT) {
        if (!SQLDatabaseFactory.FTS_AVAILABLE) {
            String message = "Text search not supported. To add support for text " +
                "search, enable FTS compile options in SQLite.";
            logger.log(Level.SEVERE, message);
            throw new QueryException(message);
        }
    }
    final List<FieldSort> fieldNamesList = proposedIndex.fieldNames;
    Set<String> uniqueNames = new HashSet<String>();
    for (FieldSort fieldName: fieldNamesList) {
        uniqueNames.add(fieldName.field);
        Misc.checkArgument(validFieldName(fieldName.field), "Field "+fieldName.field+" is not valid");
    }
    // Check there are no duplicate field names in the array
    Misc.checkArgument(uniqueNames.size() == fieldNamesList.size(), String.format("Cannot create index with duplicated field names %s"
        , proposedIndex.fieldNames));
    // Prepend _id and _rev if it's not in the array
    if (!uniqueNames.contains("_rev")) {
        fieldNamesList.add(0, new FieldSort("_rev"));
    }
    if (!uniqueNames.contains("_id")) {
        fieldNamesList.add(0, new FieldSort("_id"));
    }
    // get existing indexes
    List<Index> existingIndexes;
    try {
        existingIndexes = DatabaseImpl.get(this.queue.submit(new ListIndexesCallable()));
    } catch (ExecutionException e) {
        String msg = "Failed to list indexes";
        logger.log(Level.SEVERE, msg, e);
        throw new QueryException(msg, e);
    }
    if(proposedIndex.indexName == null){
        // generate a name for the index.
        String indexName = GENERATED_INDEX_NAME_PREFIX + proposedIndex.toString();
        // copy over definition of existing proposed index and create it with this name
        proposedIndex = new Index(proposedIndex.fieldNames,
            indexName,
            proposedIndex.indexType,
            proposedIndex.tokenizer);
    }
    for (Index existingIndex : existingIndexes) {
        // Check the index limit. Limit is 1 for "text" indexes and unlimited for "json" indexes.
        // If there are any existing "text" indexes, throw an exception
        if (proposedIndex.indexType == IndexType.TEXT &&
            existingIndex.indexType == IndexType.TEXT) {
            String msg = String.format("Text index limit reached. There is a limit of one " +
                "text index per database. There is an existing text index in this " +
                "database called \"%s\".",
                existingIndex.indexName);
            logger.log(Level.SEVERE, msg, existingIndex.indexName);
            throw new QueryException(msg);
        }
        //
        // check if an index of this name already exists
        //
        if (existingIndex.indexName.equals(proposedIndex.indexName)) {
            if (existingIndex.equals(proposedIndex)) {
                // we already have an index with this name and the same definition, just update
                // it and return it
                logger.fine(String.format("Index with name \"%s\" already exists with same " +
                    "definition", proposedIndex.indexName));
                IndexUpdater.updateIndex(existingIndex.indexName,
                    existingIndex.fieldNames,
                    database,
                    queue);
                return existingIndex;
            } else {
                throw new QueryException(String.format("Index with name \"%s\" already exists" +
                    " but has different definition to requested index", proposedIndex
                    .indexName));
            }
        }
        //
        // check if an index already exists that matches the request index definition, ignoring name
        //
        // construct an index for comparison which has the same values as the proposed index
        // but the name of the one we're comparing to
        Index compare = new Index(proposedIndex.fieldNames, existingIndex.indexName, proposedIndex
            .indexType, proposedIndex.tokenizer);
        if (compare.equals(existingIndex)) {
            // we already have an index with the same definition but a different name, just
            // update it and return it
            logger.fine(String.format("Index with name \"%s\" exists which has same " +
                "definition of requested index \"%s\"",
                existingIndex.indexName, proposedIndex.indexName));
            IndexUpdater.updateIndex(existingIndex.indexName,
                existingIndex.fieldNames,
                database,
                queue);
            return existingIndex;
        }
    }
    // No matching index: create it inside a transaction, then populate it.
    final Index index = proposedIndex;
    Future<Void> result = queue.submitTransaction(new CreateIndexCallable(fieldNamesList, index));
    // Update the new index if it's been created
    try {
        result.get();
    } catch (ExecutionException e) {
        String message = "Execution error encountered whilst inserting index metadata";
        logger.log(Level.SEVERE, message, e);
        throw new QueryException(message, e);
    } catch (InterruptedException e) {
        String message = "Execution interrupted error encountered whilst inserting index metadata";
        logger.log(Level.SEVERE, message, e);
        throw new QueryException(message, e);
    }
    IndexUpdater.updateIndex(index.indexName,
        fieldNamesList,
        database,
        queue);
    return index;
}
|
java
|
/**
 * Puts the widget into dragging mode in response to a mouse-down event.
 *
 * Caches the window width and the body's client offsets at drag start,
 * captures subsequent mouse events on this widget's element, records the
 * starting mouse position, and applies the "dragging" CSS style.
 *
 * @param event the mouse-down event that initiated the drag
 */
protected void beginDragging(MouseDownEvent event) {
    m_dragging = true;
    // Snapshot layout metrics once at the start of the drag.
    m_windowWidth = Window.getClientWidth();
    m_clientLeft = Document.get().getBodyOffsetLeft();
    m_clientTop = Document.get().getBodyOffsetTop();
    // Route all further mouse events to this element until capture is released.
    DOM.setCapture(getElement());
    m_dragStartX = event.getX();
    m_dragStartY = event.getY();
    addStyleName(I_CmsLayoutBundle.INSTANCE.dialogCss().dragging());
}
|
java
|
/**
 * Resolves the workspace project owning the given resource.
 *
 * A ProjectAdapter is looked up on the resource set's adapters; on the
 * first call none exists, so the project is resolved from the resource's
 * platform URI and a new adapter is created and registered for reuse.
 *
 * @param resource the EMF resource to resolve
 * @return the project the resource belongs to
 */
public static IProject getProject(Resource resource) {
    ProjectAdapter projectAdapter =
        (ProjectAdapter) EcoreUtil.getAdapter(resource.getResourceSet().eAdapters(), ProjectAdapter.class);
    if (projectAdapter == null) {
        final String platformString = resource.getURI().toPlatformString(true);
        final IProject owningProject =
            ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(platformString)).getProject();
        projectAdapter = new ProjectAdapter(owningProject);
        resource.getResourceSet().eAdapters().add(projectAdapter);
    }
    return projectAdapter.getProject();
}
|
java
|
/**
 * Asynchronously borrows up to {@code maxSize} elements and applies
 * {@code function} to them.
 *
 * If elements are immediately available they are handed to the function
 * right away; if the pool is finishing and nothing is borrowed the call
 * completes with an empty list; otherwise the call waits for the
 * not-empty signal and retries the batch grab. The borrower count is
 * incremented only for non-empty batches, and is always decremented in a
 * finally block even if the caller-supplied function throws.
 *
 * @param maxSize maximum number of elements to borrow; must be >= 0
 * @param function callback receiving the borrowed elements; its
 *        BorrowResult names elements to re-insert and the value to return
 * @return a future completing with the function's result
 */
public <O> ListenableFuture<O> borrowBatchAsync(int maxSize, Function<List<T>, BorrowResult<T, O>> function)
{
    checkArgument(maxSize >= 0, "maxSize must be at least 0");
    ListenableFuture<List<T>> borrowedListFuture;
    synchronized (this) {
        List<T> list = getBatch(maxSize);
        if (!list.isEmpty()) {
            borrowedListFuture = immediateFuture(list);
            borrowerCount++;
        }
        else if (finishing && borrowerCount == 0) {
            // Pool is draining and no one holds elements: finish immediately, empty-handed.
            borrowedListFuture = immediateFuture(ImmutableList.of());
        }
        else {
            // Nothing available yet: wait for the not-empty signal, then retry the grab.
            borrowedListFuture = Futures.transform(
                notEmptySignal,
                ignored -> {
                    synchronized (this) {
                        List<T> batch = getBatch(maxSize);
                        if (!batch.isEmpty()) {
                            borrowerCount++;
                        }
                        return batch;
                    }
                },
                executor);
        }
    }
    return Futures.transform(
        borrowedListFuture,
        elements -> {
            // The borrowerCount field was only incremented for non-empty lists.
            // Decrements should only happen for non-empty lists.
            // When it should, it must always happen even if the caller-supplied function throws.
            try {
                BorrowResult<T, O> borrowResult = function.apply(elements);
                if (elements.isEmpty()) {
                    checkArgument(borrowResult.getElementsToInsert().isEmpty(), "Function must not insert anything when no element is borrowed");
                    return borrowResult.getResult();
                }
                for (T element : borrowResult.getElementsToInsert()) {
                    offer(element);
                }
                return borrowResult.getResult();
            }
            finally {
                if (!elements.isEmpty()) {
                    synchronized (this) {
                        borrowerCount--;
                        signalIfFinishing();
                    }
                }
            }
        }, directExecutor());
}
|
java
|
/**
 * Blocking wrapper around {@code exportTemplateWithServiceResponseAsync}:
 * waits for the single service response and returns its body.
 *
 * @param resourceGroupName the name of the resource group to export
 * @param parameters the export-template request parameters
 * @return the export result from the service response body
 */
public ResourceGroupExportResultInner exportTemplate(String resourceGroupName, ExportTemplateRequest parameters) {
    return exportTemplateWithServiceResponseAsync(resourceGroupName, parameters).toBlocking().single().body();
}
|
python
|
def permutation_arbitrary(qubit_inds, n_qubits):
    """
    Generate the permutation matrix that permutes an arbitrary number of
    single-particle Hilbert spaces into adjacent positions.
    Transposes the qubit indices in the order they are passed to a
    contiguous region in the complete Hilbert space, in increasing
    qubit index order (preserving the order they are passed in).
    Gates are usually defined as `GATE 0 1 2`, with such an argument ordering
    dictating the layout of the matrix corresponding to GATE. If such an
    instruction is given, actual qubits (0, 1, 2) need to be swapped into the
    positions (2, 1, 0), because the lifting operation taking the 8 x 8 matrix
    of GATE is done in the little-endian (reverse) addressed qubit space.
    For example, suppose I have a Quil command CCNOT 20 15 10.
    The median of the qubit indices is 15 - hence, we permute qubits
    [20, 15, 10] into the final map [16, 15, 14] to minimize the number of
    swaps needed, and so we can directly operate with the final CCNOT, when
    lifted from indices [16, 15, 14] to the complete Hilbert space.
    Notes: assumes qubit indices are unique (assured in parent call).
    See documentation for further details and explanation.
    Done in preparation for arbitrary gate application on
    adjacent qubits.
    :param qubit_inds: (int) Qubit indices in the order the gate is
        applied to.
    :param int n_qubits: Number of qubits in system
    :return:
        perm - permutation matrix providing the desired qubit reordering
        qubit_arr - new indexing of qubits presented in left to right
            decreasing index order. Should be identical to passed 'args'.
        start_i - starting index to lift gate from
    :rtype: tuple (sparse_array, np.array, int)
    """
    # Begin construction of permutation
    # NOTE: this is a dense 2**n x 2**n matrix; memory grows exponentially
    # with n_qubits.
    perm = np.eye(2 ** n_qubits, dtype=np.complex128)
    # First, sort the list and find the median.
    sorted_inds = np.sort(qubit_inds)
    med_i = len(qubit_inds) // 2
    med = sorted_inds[med_i]
    # The starting position of all specified Hilbert spaces begins at
    # the qubit at (median - med_i)
    start = med - med_i
    # Array of final indices the arguments are mapped to, from
    # high index to low index, left to right ordering
    final_map = np.arange(start, start + len(qubit_inds))[::-1]
    start_i = final_map[-1]
    # Note that the lifting operation takes a k-qubit gate operating
    # on the qubits i+k-1, i+k-2, ... i (left to right).
    # two_swap_helper can be used to build the
    # permutation matrix by filling out the final map by sweeping over
    # the qubit_inds from left to right and back again, swapping qubits into
    # position. we loop over the qubit_inds until the final mapping matches
    # the argument.
    qubit_arr = np.arange(n_qubits)  # current qubit indexing
    made_it = False
    right = True
    while not made_it:
        # Alternate the sweep direction on each pass until converged.
        array = range(len(qubit_inds)) if right else range(len(qubit_inds))[::-1]
        for i in array:
            pmod, qubit_arr = two_swap_helper(np.where(qubit_arr == qubit_inds[i])[0][0],
                                              final_map[i], n_qubits,
                                              qubit_arr)
            # update permutation matrix
            perm = pmod.dot(perm)
            if np.allclose(qubit_arr[final_map[-1]:final_map[0] + 1][::-1], qubit_inds):
                made_it = True
                break
        # for next iteration, go in opposite direction
        right = not right
    # Sanity check: the targeted slice of the qubit array must now equal
    # the requested qubit ordering.
    assert np.allclose(qubit_arr[final_map[-1]:final_map[0] + 1][::-1], qubit_inds)
    return perm, qubit_arr[::-1], start_i
|
java
|
/**
 * Serializes a ModelBase section into the given XML element.
 *
 * When {@code value} is null, the element named {@code xmlTag} is removed
 * (shouldExist == false); otherwise each sub-section (modules,
 * repositories, plugin repositories, dependencies, reports, reporting,
 * dependency/distribution management, properties) is written under it at
 * one extra counter depth.
 *
 * @param value   the model section to serialize, or null to remove the element
 * @param xmlTag  the tag name of the wrapper element
 * @param counter depth counter for the current nesting level
 * @param element the parent XML element
 */
protected void updateModelBase(ModelBase value, String xmlTag, Counter counter, Element element)
{
    boolean shouldExist = value != null;
    Element root = updateElement(counter, element, xmlTag, shouldExist);
    if (shouldExist)
    {
        Counter innerCount = new Counter(counter.getDepth() + 1);
        findAndReplaceSimpleLists(innerCount, root, value.getModules(), "modules", "module");
        iterateRepository(innerCount, root, value.getRepositories(), "repositories", "repository");
        iterateRepository(innerCount, root, value.getPluginRepositories(), "pluginRepositories",
                          "pluginRepository");
        iterateDependency(innerCount, root, value.getDependencies(), "dependencies", "dependency");
        findAndReplaceXpp3DOM(innerCount, root, "reports", (Xpp3Dom) value.getReports());
        updateReporting(value.getReporting(), "reporting", innerCount, root);
        updateDependencyManagement(value.getDependencyManagement(), "dependencyManagement", innerCount, root);
        updateDistributionManagement(value.getDistributionManagement(), "distributionManagement", innerCount, root);
        findAndReplaceProperties(innerCount, root, "properties", value.getProperties());
    }
}
|
java
|
/**
 * Collects the ids of all currently unfinished flows.
 *
 * On loader failure the error is logged and whatever was gathered so far
 * (an empty list) is returned rather than propagating the exception.
 *
 * @return the ids of running flows; empty when none or on failure
 */
public List<Integer> getRunningFlowIds() {
    final List<Integer> runningIds = new ArrayList<>();
    try {
        getExecutionIdsHelper(runningIds, this.executorLoader.fetchUnfinishedFlows().values());
    } catch (final ExecutorManagerException e) {
        this.logger.error("Failed to get running flow ids.", e);
    }
    return runningIds;
}
|
java
|
/**
 * Orders two indices by the values they reference in {@code dataArray}.
 *
 * The comparison is based on dataArray[o2] - dataArray[o1]; equal values
 * compare as 0, and the sign of the result is flipped depending on
 * {@code sortType}.
 */
@Override
public int compare(Integer o1, Integer o2) {
    final double delta = dataArray[o2] - dataArray[o1];
    if (delta == 0) {
        return 0;
    }
    final boolean secondLarger = delta > 0;
    if (sortType == ASCENDING) {
        return secondLarger ? -1 : 1;
    }
    return secondLarger ? 1 : -1;
}
|
python
|
def get_url_shortener():
    """
    Return the selected URL shortener backend.

    Falls back to the default backend — emitting a RuntimeWarning — when
    the configured backend module cannot be imported, lacks a ``backend``
    attribute, or raises ImproperlyConfigured.
    """
    try:
        module = import_module(URL_SHORTENER_BACKEND)
        return getattr(module, 'backend')
    except (ImportError, AttributeError):
        warnings.warn('%s backend cannot be imported' % URL_SHORTENER_BACKEND,
                      RuntimeWarning)
    except ImproperlyConfigured as e:
        warnings.warn(str(e), RuntimeWarning)
    return default_backend
|
python
|
def update_ips(self):
    """Refresh ``self.ips`` from the cloud provider and return a copy.

    Public ip assignment can lag behind instance creation; this call is
    non-blocking, so callers waiting for a public ip should poll it
    repeatedly within a timeout.
    """
    ips = self._cloud_provider.get_ips(self.instance_id)
    self.ips = ips
    return ips[:]
|
java
|
/**
 * Starts the import-image operation asynchronously, mapping each raw
 * service response to its (Void) body.
 *
 * @param resourceGroupName the resource group containing the registry
 * @param registryName the name of the container registry
 * @param parameters the import-image request parameters
 * @return an observable completing when the operation has been accepted
 */
public Observable<Void> beginImportImageAsync(String resourceGroupName, String registryName, ImportImageParameters parameters) {
    return beginImportImageWithServiceResponseAsync(resourceGroupName, registryName, parameters).map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    });
}
|
java
|
/**
 * Installs the trace router and hands it any traces buffered before
 * installation.
 *
 * NOTE(review): earlierTraces is re-checked under the lock before hand-off,
 * but it is not cleared here — presumably it is nulled elsewhere once the
 * router takes ownership; confirm against the rest of the class.
 */
@Override
public void setTraceRouter(WsTraceRouter traceRouter) {
    internalTraceRouter.set(traceRouter);
    // Pass the earlierMessages queue to the router.
    // Now that the internalMessageRouter is non-null, this class will
    // NOT add any more messages to the earlierMessages queue.
    // The MessageRouter basically owns the earlierMessages queue
    // from now on.
    if (earlierTraces != null) {
        // Re-check under the lock to coordinate with concurrent callers.
        synchronized (this) {
            if (earlierTraces != null) {
                traceRouter.setEarlierTraces(earlierTraces);
            }
        }
    } else {
        traceRouter.setEarlierTraces(null);
    }
}
|
java
|
/**
 * Strips fitness information from a list of evaluated candidates,
 * returning just the underlying candidates in the same order.
 *
 * @param evaluatedCandidates the evaluated candidates to unwrap
 * @param <T> the candidate type
 * @return a new list of the bare candidates
 */
private static <T> List<T> toCandidateList(List<EvaluatedCandidate<T>> evaluatedCandidates)
{
    final List<T> unwrapped = new ArrayList<T>(evaluatedCandidates.size());
    for (EvaluatedCandidate<T> entry : evaluatedCandidates)
    {
        unwrapped.add(entry.getCandidate());
    }
    return unwrapped;
}
|
java
|
/**
 * Consumes the end of the current JSON object.
 *
 * @throws IOException if reading from the underlying source fails
 * @throws IllegalStateException if the next token is not END_OBJECT
 */
public void endObject() throws IOException {
    int p = peeked;
    if (p == PEEKED_NONE) {
        p = doPeek();
    }
    if (p == PEEKED_END_OBJECT) {
        // Pop the object off the scope stack and advance the parent's path index.
        stackSize--;
        pathNames[stackSize] = null; // Free the last path name so that it can be garbage collected!
        pathIndices[stackSize - 1]++;
        peeked = PEEKED_NONE;
    } else {
        throw new IllegalStateException("Expected END_OBJECT but was " + peek() + locationString());
    }
}
|
java
|
/**
 * Reads a single clustered cache metric and sets it as the operation result.
 *
 * The cache container name and cache name are extracted from the operation
 * address, and the requested metric from the NAME attribute. An unknown
 * metric or an unavailable cache produces a failure description instead of
 * a result; otherwise the corresponding ClusterCacheStats value is returned.
 */
@Override
protected void executeRuntimeStep(OperationContext context, ModelNode operation) {
    // Address layout: ... /cache-container=<name>/<cache-type>=<cache-name>
    final PathAddress address = PathAddress.pathAddress(operation.require(OP_ADDR));
    final String cacheContainerName = address.getElement(address.size() - 2).getValue();
    final String cacheName = address.getLastElement().getValue();
    final String attrName = operation.require(NAME).asString();
    final ServiceController<?> controller = context.getServiceRegistry(false).getService(
        CacheServiceName.CACHE.getServiceName(cacheContainerName, cacheName));
    Cache<?, ?> cache = (Cache<?, ?>) controller.getValue();
    ClusteredCacheMetrics metric = ClusteredCacheMetrics.getStat(attrName);
    ModelNode result = new ModelNode();
    if (metric == null) {
        context.getFailureDescription().set(String.format("Unknown metric %s", attrName));
    } else if (cache == null) {
        context.getFailureDescription().set(String.format("Unavailable cache %s", attrName));
    } else {
        // Dispatch on the metric enum; every branch reads one value from
        // the cluster-wide stats component.
        AdvancedCache<?, ?> aCache = cache.getAdvancedCache();
        ClusterCacheStats clusterCacheStats = aCache.getComponentRegistry().getComponent(ClusterCacheStats.class);
        switch (metric) {
            case NUMBER_OF_LOCKS_AVAILABLE: {
                result.set(clusterCacheStats.getNumberOfLocksAvailable());
                break;
            }
            case NUMBER_OF_LOCKS_HELD: {
                result.set(clusterCacheStats.getNumberOfLocksHeld());
                break;
            }
            case TIME_SINCE_START: {
                result.set(clusterCacheStats.getTimeSinceStart());
                break;
            }
            case AVERAGE_READ_TIME: {
                result.set(clusterCacheStats.getAverageReadTime());
                break;
            }
            case AVERAGE_WRITE_TIME: {
                result.set(clusterCacheStats.getAverageWriteTime());
                break;
            }
            case AVERAGE_REMOVE_TIME: {
                result.set(clusterCacheStats.getAverageRemoveTime());
                break;
            }
            case AVERAGE_READ_TIME_NANOS: {
                result.set(clusterCacheStats.getAverageReadTimeNanos());
                break;
            }
            case AVERAGE_WRITE_TIME_NANOS: {
                result.set(clusterCacheStats.getAverageWriteTimeNanos());
                break;
            }
            case AVERAGE_REMOVE_TIME_NANOS: {
                result.set(clusterCacheStats.getAverageRemoveTimeNanos());
                break;
            }
            case EVICTIONS: {
                result.set(clusterCacheStats.getEvictions());
                break;
            }
            case HIT_RATIO: {
                result.set(clusterCacheStats.getHitRatio());
                break;
            }
            case HITS: {
                result.set(clusterCacheStats.getHits());
                break;
            }
            case MISSES: {
                result.set(clusterCacheStats.getMisses());
                break;
            }
            case NUMBER_OF_ENTRIES: {
                result.set(clusterCacheStats.getCurrentNumberOfEntries());
                break;
            }
            case NUMBER_OF_ENTRIES_IN_MEMORY: {
                result.set(clusterCacheStats.getCurrentNumberOfEntriesInMemory());
                break;
            }
            case DATA_MEMORY_USED:
                result.set(clusterCacheStats.getDataMemoryUsed());
                break;
            case OFF_HEAP_MEMORY_USED:
                result.set(clusterCacheStats.getOffHeapMemoryUsed());
                break;
            case MINIMUM_REQUIRED_NODES:
                result.set(clusterCacheStats.getRequiredMinimumNumberOfNodes());
                break;
            case READ_WRITE_RATIO: {
                result.set(clusterCacheStats.getReadWriteRatio());
                break;
            }
            case REMOVE_HITS: {
                result.set(clusterCacheStats.getRemoveHits());
                break;
            }
            case REMOVE_MISSES: {
                result.set(clusterCacheStats.getRemoveMisses());
                break;
            }
            case STORES: {
                result.set(clusterCacheStats.getStores());
                break;
            }
            case TIME_SINCE_RESET: {
                result.set(clusterCacheStats.getTimeSinceReset());
                break;
            }
            case INVALIDATIONS: {
                result.set(clusterCacheStats.getInvalidations());
                break;
            }
            case PASSIVATIONS: {
                result.set(clusterCacheStats.getPassivations());
                break;
            }
            case ACTIVATIONS: {
                result.set(clusterCacheStats.getActivations());
                break;
            }
            case CACHE_LOADER_LOADS: {
                result.set(clusterCacheStats.getCacheLoaderLoads());
                break;
            }
            case CACHE_LOADER_MISSES: {
                result.set(clusterCacheStats.getCacheLoaderMisses());
                break;
            }
            case CACHE_LOADER_STORES: {
                result.set(clusterCacheStats.getStoreWrites());
                break;
            }
            case STALE_STATS_THRESHOLD:
                result.set(clusterCacheStats.getStaleStatsThreshold());
                break;
            default: {
                context.getFailureDescription().set(String.format("Unknown metric %s", metric));
                break;
            }
        }
        context.getResult().set(result);
    }
    context.stepCompleted();
}
|
java
|
/**
 * Returns the named attribute's value parsed as an int.
 *
 * @param _key the attribute key to look up
 * @return the parsed value, or 0 when the attribute is missing
 * @throws EFapsException propagated from the attribute lookup
 */
public int getAttributeValueAsInteger(final String _key)
    throws EFapsException
{
    final String raw = getAttributeValue(_key);
    if (raw == null) {
        return 0;
    }
    return Integer.parseInt(raw);
}
|
java
|
/**
 * Returns a writeable symbol table that is an effective copy of {@code syms}.
 *
 * Immutable tables are wrapped in a new UnionSymbolTable; union tables are
 * copied via UnionSymbolTable.copyFrom; frozen tables are returned as-is
 * (cast, not copied); anything else is copied into a MutableSymbolTable.
 *
 * NOTE(review): FrozenSymbolTable is returned without a copy — mutations of
 * the result would alias the original; confirm this is intentional.
 */
public static WriteableSymbolTable symbolTableEffectiveCopy(SymbolTable syms) {
    if (syms instanceof ImmutableSymbolTable) {
        return new UnionSymbolTable(syms);
    }
    if (syms instanceof UnionSymbolTable) {
        return UnionSymbolTable.copyFrom((UnionSymbolTable) syms);
    }
    if (syms instanceof FrozenSymbolTable) {
        return (FrozenSymbolTable) syms;
    }
    // maybe consider the size and if its "big" return a union of the mutable version?
    return new MutableSymbolTable(syms);
}
|
java
|
/**
 * Lazily resolves and caches the local host name.
 *
 * Falls back to "localhost" when resolution fails for any reason, so the
 * cached value is never empty after the first call.
 *
 * @return the cached system host name
 */
private String getSystemHostName()
{
    if (!EventUtils.isEmptyOrNull(systemHostName)) {
        return systemHostName;
    }
    try {
        systemHostName = InetAddress.getLocalHost().getHostName();
    } catch (Throwable t) {
        systemHostName = "localhost";
    }
    return systemHostName;
}
|
python
|
def check(self, **kwargs):  # pragma: no cover
    """Run the parent field checks plus TimeZoneField's custom validations."""
    errors = super(TimeZoneField, self).check(**kwargs)
    errors += self._check_timezone_max_length_attribute()
    errors += self._check_choices_attribute()
    return errors
|
java
|
/**
 * Returns the member names of this object as a JsonArray.
 *
 * @return the names, or null (not an empty array) when the object is empty
 * @throws JsonException declared for API compatibility
 */
public JsonArray names() throws JsonException {
    if (nameValuePairs.isEmpty()) {
        return null;
    }
    return new JsonArray(new ArrayList<String>(nameValuePairs.keySet()));
}
|
python
|
def generate_defect_structure(self, supercell=(1, 1, 1)):
    """
    Return the defective interstitial structure, decorated with charge.

    Args:
        supercell (int, [3x1], or [[]] (3x3)): supercell integer, vector,
            or scaling matrix applied to the bulk structure.

    Returns:
        the bulk supercell with the interstitial site appended and the
        structure charge set to ``self.charge``.

    Raises:
        ValueError: if the bulk carries per-site velocities that are not
            homogeneous and the defect site specifies none of its own.
    """
    defect_structure = self.bulk_structure.copy()
    defect_structure.make_supercell(supercell)
    # consider modifying velocity property to make sure defect site is decorated
    # consistently with bulk structure for final defect_structure
    defect_properties = self.site.properties.copy()
    if ('velocities' in self.bulk_structure.site_properties) and \
            'velocities' not in defect_properties:
        # Only inherit a velocity when every bulk site shares the same one;
        # otherwise there is no unambiguous value to assign.
        if all( vel == self.bulk_structure.site_properties['velocities'][0]
                for vel in self.bulk_structure.site_properties['velocities']):
            defect_properties['velocities'] = self.bulk_structure.site_properties['velocities'][0]
        else:
            raise ValueError("No velocity property specified for defect site and "
                             "bulk_structure velocities are not homogeneous. Please specify this "
                             "property within the initialized defect_site object.")
    #create a trivial defect structure to find where supercell transformation moves the defect site
    site_properties_for_fake_struct = {prop: [val] for prop,val in defect_properties.items()}
    struct_for_defect_site = Structure( self.bulk_structure.copy().lattice,
                                        [self.site.specie],
                                        [self.site.frac_coords],
                                        to_unit_cell=True,
                                        site_properties = site_properties_for_fake_struct)
    struct_for_defect_site.make_supercell(supercell)
    defect_site = struct_for_defect_site[0]
    # Append the (transformed) interstitial site to the bulk supercell.
    defect_structure.append(self.site.specie.symbol, defect_site.coords, coords_are_cartesian=True,
                            properties = defect_site.properties)
    defect_structure.set_charge(self.charge)
    return defect_structure
|
java
|
/**
 * Combines two DFAs into {@code out} using the EQUIV acceptance combiner.
 *
 * Delegates to {@code combine} with {@code AcceptanceCombiner.EQUIV}.
 *
 * @param dfa1 the first input DFA
 * @param dfa2 the second input DFA
 * @param inputs the input symbols to consider during combination
 * @param out the mutable DFA receiving the result
 * @return {@code out}, populated with the combined automaton
 */
public static <I, S, A extends MutableDFA<S, I>> A equiv(DFA<?, I> dfa1,
                                                         DFA<?, I> dfa2,
                                                         Collection<? extends I> inputs,
                                                         A out) {
    return combine(dfa1, dfa2, inputs, out, AcceptanceCombiner.EQUIV);
}
|
python
|
def fix_pix_borders(image2d, nreplace, sought_value, replacement_value):
    """Replace a few pixels at the borders of each spectrum.

    For every image row, set to 'replacement_value' the 'nreplace' pixels
    just inside each border of the valid region, where the valid region is
    delimited (via find_pix_borders) by the transition from/to
    'sought_value' as seen from the image borders.

    Parameters
    ----------
    image2d : numpy array
        Initial 2D image (modified in place).
    nreplace : int
        Number of pixels to be replaced at each border.
    sought_value : int, float, bool
        Pixel value that indicates missing data in the spectrum.
    replacement_value : int, float, bool
        Pixel value to be employed in the 'nreplace' pixels.

    Returns
    -------
    image2d : numpy array
        Final 2D image (same array as the input).
    """
    naxis2, naxis1 = image2d.shape
    for row in range(naxis2):
        # only spectra with values different from 'sought_value'
        jborder_min, jborder_max = find_pix_borders(
            image2d[row, :],
            sought_value=sought_value
        )
        # left border: overwrite up to nreplace pixels, clipped to the image
        if jborder_min != -1:
            jstart = jborder_min
            jstop = min(jstart + nreplace, naxis1)
            image2d[row, jstart:jstop] = replacement_value
        # right border: overwrite up to nreplace pixels, clipped to the image
        if jborder_max != naxis1:
            jstop = jborder_max + 1
            jstart = max(jstop - nreplace, 0)
            image2d[row, jstart:jstop] = replacement_value
    return image2d
|
java
|
/**
 * Serializes the enum literal via toString(); null stays null.
 *
 * @param eDataType the data type descriptor (unused)
 * @param instanceValue the enum value to serialize, may be null
 * @return the string form of the value, or null
 */
public String convertIfcStructuralSurfaceMemberTypeEnumToString(EDataType eDataType, Object instanceValue) {
    if (instanceValue == null) {
        return null;
    }
    return instanceValue.toString();
}
|
python
|
def get_clusters(self, variant_id):
    """Search what clusters a variant belongs to.

    Args:
        variant_id(str): From ID column in vcf

    Returns:
        the result of querying the identity collection for documents
        whose 'variant_id' matches (a database cursor)
    """
    return self.db.identity.find({'variant_id': variant_id})
|
java
|
/**
 * Rewrites a DEFINE expression for pattern-variable navigation.
 *
 * The expression is first visited by a PatternValidator (its returned
 * prefix set is discarded — presumably the visit is run for its
 * validation side effects), then navigation operators are expanded and
 * finally replaced relative to the pattern variable {@code alpha}.
 *
 * @param node the DEFINE expression to rewrite
 * @param alpha the pattern variable being defined
 * @return the rewritten expression
 */
private SqlNode navigationInDefine(SqlNode node, String alpha) {
    Set<String> prefix = node.accept(new PatternValidator(false));
    Util.discard(prefix);
    node = new NavigationExpander().go(node);
    node = new NavigationReplacer(alpha).go(node);
    return node;
}
|
python
|
def as_bel(self) -> str:
    """Return this node as a BEL string."""
    variants = self.get(VARIANTS)
    if not variants:
        # No variants: the parent class already renders the plain form.
        return super(CentralDogma, self).as_bel()
    variant_str = ', '.join(sorted(map(str, variants)))
    return "{}({}:{}, {})".format(
        self._func,
        self.namespace,
        ensure_quotes(self._priority_id),
        variant_str,
    )
|
python
|
def try_disk(self, path, gpg=True):
    """
    Try to load JSON configuration from ``path``.

    Args:
        path: filesystem path of the JSON file.
        gpg: when True, the file's GPG signature is validated via
            ``self.validate_gpg_sig`` before the file is read.

    Returns:
        The parsed JSON value on success; ``False`` when the file contains
        invalid JSON; ``None`` when the file is missing, the signature
        check fails, or the file is empty.
    """
    if not os.path.isfile(path):
        return None
    if gpg and not self.validate_gpg_sig(path):
        return None
    # Context manager guarantees the handle is closed even on error
    # (the previous implementation leaked the open file).
    with open(path, 'r') as stream:
        json_stream = stream.read()
    if not json_stream:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning("WARNING: %s was an empty file", path)
        return None
    try:
        return json.loads(json_stream)
    except ValueError:
        logger.error("ERROR: Invalid JSON in %s", path)
        return False
|
java
|
/**
 * Null-safe string equality: two nulls are equal, and null never equals
 * a non-null string.
 *
 * @param s1 the first string, may be null
 * @param s2 the second string, may be null
 * @return true when both are null or both hold equal content
 */
public static boolean isEquals( String s1, String s2)
{
    if (s1 == null)
    {
        return s2 == null;
    }
    return s1.equals(s2);
}
|
python
|
def CacheQueryResults(
        self, sql_results, attribute_name, key_name, column_names):
    """Build a dictionary attribute from the rows of a SQL query.

    Iterates every row of ``sql_results`` and stores, per row, a mapping
    from the value of the ``key_name`` column to the list of values of
    the ``column_names`` columns. The mapping is attached to ``self``
    under ``attribute_name``. Nothing is set when there are no rows.

    An example::
        sql_results = cursor for 'SELECT foo, bla, bar FROM my_table'
        attribute_name = 'all_the_things'
        key_name = 'foo'
        column_names = ['bla', 'bar']
    yields::
        self.all_the_things = {
            'first': ['stuff', 'things'],
            'second': ['another stuff', 'another thing']}

    Args:
        sql_results (sqlite3.Cursor): result after executing a SQL command
            on a database.
        attribute_name (str): attribute name in the cache to store results
            to. This will be the name of the dictionary attribute.
        key_name (str): name of the result field that should be used as a
            key in the resulting dictionary that is created.
        column_names (list[str]): column names whose values are stored in
            the dictionary. The stored value is always a list, even when
            only one column name is given.
    """
    row = sql_results.fetchone()
    if not row:
        return
    # Note that pysqlite does not accept a Unicode string in row['string']
    # and will raise "IndexError: Index must be int or string", so column
    # names are resolved to integer indices. Doing it once up front also
    # avoids repeating the lookups for every row.
    name_to_index = {name: index for index, name in enumerate(row.keys())}
    key_index = name_to_index.get(key_name)
    value_indices = [name_to_index.get(name) for name in column_names]
    attribute_value = {}
    while row:
        attribute_value[row[key_index]] = [
            row[index] for index in value_indices]
        row = sql_results.fetchone()
    setattr(self, attribute_name, attribute_value)
|
python
|
def connect(self):
    """
    Open a connection to the mail host.

    Returns:
        Connection: a connection built from the app's Flask-Mail state.

    Raises:
        RuntimeError: if the current application was not configured with
            Flask-Mail (no 'mail' entry in ``app.extensions``).
    """
    app = getattr(self, "app", None) or current_app
    try:
        return Connection(app.extensions['mail'])
    except KeyError:
        # Fixed typo in the error message: "curent" -> "current".
        raise RuntimeError("The current application was"
                           " not configured with Flask-Mail")
|
python
|
def _parse_patterns(self, pattern):
    """Split raw glob patterns into include splits and an exclude matcher.

    Populates ``self.pattern`` with the path-split form of every
    brace-expanded include pattern, and ``self.npatterns`` with a single
    compiled matcher covering all inverse ("!"-prefixed) patterns, or
    None when there are none. When only inverse patterns were supplied,
    '**' is appended as the implicit include so that everything not
    excluded can match.
    """
    self.pattern = []
    self.npatterns = None
    npattern = []
    for p in pattern:
        if _wcparse.is_negative(p, self.flags):
            # Treat the inverse pattern as a normal pattern if it matches, we will exclude.
            # This is faster as compiled patterns usually compare the include patterns first,
            # and then the exclude, but glob will already know it wants to include the file.
            npattern.append(p[1:])
        else:
            self.pattern.extend(
                [_wcparse.WcPathSplit(x, self.flags).split() for x in _wcparse.expand_braces(p, self.flags)]
            )
    if npattern:
        # Compile all inverse patterns into one matcher; NEGATE/REALPATH
        # bits are toggled relative to self.flags.
        self.npatterns = _wcparse.compile(npattern, self.flags ^ (_wcparse.NEGATE | _wcparse.REALPATH))
    if not self.pattern and self.npatterns is not None:
        # Only exclusions were given: include everything by default.
        self.pattern.append(_wcparse.WcPathSplit((b'**' if self.is_bytes else '**'), self.flags).split())
|
java
|
/**
 * Releases one reference to this virtual connection factory, tearing the
 * underlying chain down when the last reference is released.
 *
 * The chain is stopped first when it is still STARTED (outbound chains
 * are never QUIESCED), then destroyed. Calling destroy on an already
 * destroyed factory (refcount 0) throws.
 *
 * @throws ChainException if already destroyed, or from chain teardown
 * @throws ChannelException propagated from chain teardown
 */
public void destroy() throws ChainException, ChannelException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.entry(tc, "destroy; " + this);
    }
    synchronized (this.cf) {
        // Verify that the VCF hasn't already been destroyed.
        if (0 == getRefCount()) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Virtual connection factory already destroyed");
            }
            throw new ChainException("Virtual connection factory already destroyed");
        }
        decrementRefCount();
        // Check if there are no more users of the chain.
        if (0 == getRefCount()) {
            // Verify the chain was ever used beyond init. That is a call was
            // made to get a VCF, but never a VC. Note, outbound chains can
            // never be in QUIESCED state.
            if (getChain().getState() == RuntimeState.STARTED) {
                this.cf.stopChainInternal(getChain(), 0);
            }
            this.cf.destroyChainInternal(getChain());
        }
    } // end sync block
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.exit(tc, "destroy");
    }
}
|
python
|
def recover(self, data, redis=None):
    """Retrieve this field's value from ``data`` loaded from the database.

    Returns None when the stored value is missing, None, or the literal
    string 'None'; otherwise the value coerced to str.
    """
    raw = data.get(self.name)
    if raw is None or raw == 'None':
        return None
    return str(raw)
|
java
|
/**
 * Records a server-push response for the given stream and notifies the
 * response future.
 *
 * @param streamId the stream id the push response belongs to
 * @param pushResponse the received push response
 */
public void addPushResponse(int streamId, HttpCarbonResponse pushResponse) {
    pushResponsesMap.put(streamId, pushResponse);
    responseFuture.notifyPushResponse(streamId, pushResponse);
}
|
java
|
/**
 * Deep-clones this walker and, recursively, its next/previous walkers.
 *
 * The (original, clone) pairs accumulated in {@code cloneList} break
 * cycles: if this walker was already cloned, the existing clone is
 * returned immediately. The clone is re-homed onto {@code cloneOwner},
 * and the owner's last-used-walker pointer is redirected when it pointed
 * at this walker.
 *
 * @param cloneOwner the iterator that will own the cloned walkers
 * @param cloneList  visited (original, clone) pairs; may be null
 * @return the cloned walker (or the previously created clone)
 * @throws CloneNotSupportedException if the underlying clone fails
 */
AxesWalker cloneDeep(WalkingIterator cloneOwner, Vector cloneList)
    throws CloneNotSupportedException
{
    AxesWalker clone = findClone(this, cloneList);
    if(null != clone)
        return clone;
    clone = (AxesWalker)this.clone();
    clone.setLocPathIterator(cloneOwner);
    if(null != cloneList)
    {
        // Register the pair BEFORE recursing so cycles resolve to this clone.
        cloneList.addElement(this);
        cloneList.addElement(clone);
    }
    if(wi().m_lastUsedWalker == this)
        cloneOwner.m_lastUsedWalker = clone;
    if(null != m_nextWalker)
        clone.m_nextWalker = m_nextWalker.cloneDeep(cloneOwner, cloneList);
    // If you don't check for the cloneList here, you'll go into an
    // recursive infinate loop.
    if(null != cloneList)
    {
        if(null != m_prevWalker)
            clone.m_prevWalker = m_prevWalker.cloneDeep(cloneOwner, cloneList);
    }
    else
    {
        // Without a clone list, just relink the cloned chain backwards.
        if(null != m_nextWalker)
            clone.m_nextWalker.m_prevWalker = clone;
    }
    return clone;
}
|
python
|
def close(self):
    """Shut down the application: close every installed plugin that
    supports it, then flag the application as stopped."""
    for installed in self.plugins:
        if hasattr(installed, 'close'):
            installed.close()
    self.stopped = True
|
java
|
/**
 * Wipes the server workarea when a clean start is required, either because
 * service has been applied to the install since the fingerprint was taken
 * or because the bootstrap properties explicitly request a clean start.
 * Also clears the now-consumed "clean" bootstrap flags.
 */
protected void cleanStart() {
    File workareaFile = bootProps.getWorkareaFile(null);
    // If we're clean starting, remove all files from the working directory;
    // Note: do not reverse the checks in the following if(); we need to call hasServiceBeenApplied each time
    if (ServiceFingerprint.hasServiceBeenApplied(bootProps.getInstallRoot(), workareaFile) || bootProps.checkCleanStart()) {
        // Must clean the static data from the ServiceFinterprint class; otherwise the stale data will be persisted.
        ServiceFingerprint.clear();
        KernelUtils.cleanStart(workareaFile);
        // clean up / remove various "clean" parameters
        // storage area has already been wiped..
        bootProps.remove(BootstrapConstants.INITPROP_OSGI_CLEAN);
        bootProps.remove(BootstrapConstants.OSGI_CLEAN);
    }
}
|
python
|
def get_levels(self, arcs):
    """Calculate available arc height "levels".

    Used to calculate arrow heights dynamically and without wasting space.

    arcs (list): Individual arcs and their start, end, direction and label.
    RETURNS (list): Arc levels sorted from lowest to highest.
    """
    # Each distinct arc span (end - start) becomes one height level.
    spans = {arc["end"] - arc["start"] for arc in arcs}
    return sorted(spans)
|
java
|
/**
 * Reports a BASE_CLASS_ERROR compiler diagnostic at the given node for an
 * invalid use of a goog.base-style method.
 *
 * @param n            node at which to anchor the error
 * @param className    name of the class involved, inserted into the message
 * @param extraMessage extra detail appended to the error message
 */
private void reportBadBaseMethodUse(Node n, String className, String extraMessage) {
    compiler.report(JSError.make(n, BASE_CLASS_ERROR, className, extraMessage));
}
|
java
|
/**
 * Loads the text-file-converter-adapter resource bundle and resolves the
 * error message strings used by the adapter. When the bundle is missing,
 * the stack trace is printed and the message keys themselves are used as
 * fallback messages (the third argument to CollectionTools.getString).
 */
private static void initTextFileConverterAdapterResource()
{
    try
    {
        theTextFileConverterAdapterResource = ResourceBundle.getBundle(RB_PREXIX + RB_TEXT_FILE_CONVERTER_ADAPTER);
    } // try
    catch (MissingResourceException _exception)
    {
        // Bundle lookup failed; fall through with a null bundle so the
        // key-based fallbacks below are used.
        _exception.printStackTrace();
    } // catch (MissingResourceException _exception)
    theMessageUnsupportedEncoding = CollectionTools.getString(theTextFileConverterAdapterResource, KEY_UNSUPPORTED_ENCODING,
        KEY_UNSUPPORTED_ENCODING);
    theMessageErrorTranslatingEncoding = CollectionTools.getString(theTextFileConverterAdapterResource,
        KEY_ERROR_TRANSLATING_ENCODING, KEY_ERROR_TRANSLATING_ENCODING);
}
|
java
|
/**
 * Updates a Route 53 resolver endpoint, running the standard pre-execution
 * request hooks first.
 *
 * @param request the update request
 * @return the service response for the update operation
 */
@Override
public UpdateResolverEndpointResult updateResolverEndpoint(UpdateResolverEndpointRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateResolverEndpoint(request);
}
|
python
|
def queuedb_create(path):
    """
    Create a sqlite3 db at the given path.
    Create all the tables and indexes we need.
    Raises if the table already exists

    :param path: filesystem path at which to create the database
    :return: an open sqlite3 connection with ``queuedb_row_factory`` installed
    """
    # Splitting on ';' leaves an empty trailing fragment; skip blank
    # fragments so we don't execute a bare ';' statement.
    lines = [stmt + ";" for stmt in QUEUE_SQL.split(";") if stmt.strip()]
    con = sqlite3.connect(path, isolation_level=None)

    # Memory-map up to 512MB of the database for faster reads (a no-op on
    # sqlite builds compiled without mmap support).
    db_query_execute(con, 'pragma mmap_size=536870912', ())

    for line in lines:
        db_query_execute(con, line, ())

    con.commit()
    con.row_factory = queuedb_row_factory
    return con
|
python
|
def generate(self, **kwargs):
    """Generate the methods section.

    Builds one protocol description per subject matching the given BIDS
    entity filters, then tallies identical descriptions.

    Parameters
    ----------
    **kwargs
        BIDS entity filters (e.g. ``session='01'``) forwarded to the
        layout. Any ``subject`` filter is applied when selecting subjects
        but stripped before per-subject reporting.

    Returns
    -------
    counter : :obj:`collections.Counter`
        Unique descriptions across subjects mapped to how often each
        occurred. The most common pattern is usually the most complete;
        datasets with multiple protocols need manual inspection.
    """
    subject_ids = self.layout.get_subjects(**kwargs)
    # 'subject' is fixed per iteration below, so drop it from the filters.
    filters = {key: value for key, value in kwargs.items() if key != 'subject'}
    descriptions = [self._report_subject(subject=sid, **filters)
                    for sid in subject_ids]
    counter = Counter(descriptions)
    print('Number of patterns detected: {0}'.format(len(counter.keys())))
    print(utils.reminder())
    return counter
|
java
|
/**
 * Selects the next unmarked node along the current grid line. Candidates are
 * scored by angular deviation from the previous line direction times their
 * distance from the current seed; the lowest score wins and is marked.
 *
 * @param prevSeed    previous seed node on the line
 * @param prevNext    node selected after the previous seed
 * @param currentSeed node whose edges are being searched
 * @param ccw         true to measure angular deviation counter-clockwise
 * @return the best next node, or null if no acceptable candidate exists
 */
static protected NodeInfo selectSeedNext( NodeInfo prevSeed , NodeInfo prevNext ,
                                          NodeInfo currentSeed, boolean ccw ) {
    double referenceAngle = direction(prevNext, prevSeed);

    double bestScore = Double.MAX_VALUE;
    NodeInfo best = null;

    // cut down on verbosity by saving the reference here
    Point2D_F64 c = currentSeed.ellipse.center;

    for (int i = 0; i < currentSeed.edges.size(); i++) {
        Edge edge = currentSeed.edges.get(i);
        if( edge.target.marked )
            continue;

        double angle = edge.angle;

        double angleDist = ccw ? UtilAngle.distanceCCW(referenceAngle,angle) : UtilAngle.distanceCW(referenceAngle,angle);

        // Reject edges that bend back too sharply relative to the line.
        if( angleDist > Math.PI+MAX_LINE_ANGLE_CHANGE )
            continue;

        Point2D_F64 p = edge.target.ellipse.center;

        double score = angleDist*c.distance(p);

        if( score < bestScore ) {
            bestScore = score;
            best = edge.target;
        }
    }

    // Mark the winner so it cannot be selected again.
    if( best != null )
        best.marked = true;

    return best;
}
|
java
|
/**
 * Acquires (or confirms) a static screen-bright wake lock for the app.
 * Idempotent: if the lock is already held it returns true immediately.
 *
 * NOTE(review): SCREEN_BRIGHT_WAKE_LOCK is deprecated in modern Android;
 * confirm whether a window flag (FLAG_KEEP_SCREEN_ON) should be used
 * instead. The lock is acquired without a timeout — callers must release it.
 *
 * @param context context used to look up the PowerManager service
 * @return true if the wake lock is held after this call
 */
public static boolean acquireLock(Context context) {
    if (wl != null && wl.isHeld()) {
        return true;
    }
    powerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
    wl = powerManager.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, TAG);
    if (wl == null) {
        return false;
    }
    wl.acquire();
    return wl.isHeld();
}
|
python
|
def load_conll(cls, fname):
    """
    The CONLL file must have a tab delimited header, for example::

        # description tags
        Alice
        Hello t1
        my t2
        name t3
        is t4
        alice t5

        Bob
        I'm t1
        bob t2

    Here, the fields are `description` and `tags`. The first instance has the label `Alice` and the
    description `['Hello', 'my', 'name', 'is', 'alice']` and the tags `['t1', 't2', 't3', 't4', 't5']`.
    The second instance has the label `Bob` and the description `["I'm", 'bob']` and the tags `['t1', 't2']`.

    :param fname: The CONLL formatted file from which to load the dataset

    :return: loaded Dataset instance
    """
    def process_cache(cache, fields):
        # Convert one instance's buffered lines into per-field entries.
        rows = [l.split() for l in cache if l]
        if not rows:
            return
        # The first line of an instance carries only the label.
        fields['label'].append(rows[0][0])
        instance = {k: [] for k in fields if k != 'label'}
        for row in rows[1:]:
            for i, k in enumerate(fields):
                if k != 'label':
                    # '-' denotes a missing value.
                    instance[k].append(None if row[i] == '-' else row[i])
        for k, v in instance.items():
            fields[k].append(v)

    # Buffer of lines for the instance currently being read. (Previously
    # this was never initialized before the first append, and a stray
    # `cache = []` inside the helper only rebound a local.)
    cache = []
    with open(fname) as f:
        # Python 3 compatible: file objects no longer have a .next() method.
        header = next(f).strip().split('\t')
        header[0] = header[0].lstrip('# ')
        fields = OrderedDict([(head, []) for head in header])
        fields['label'] = []
        for line in f:
            line = line.strip()
            if line:
                cache.append(line)
            else:
                # met empty line, process cache
                process_cache(cache, fields)
                cache = []
        if cache:
            process_cache(cache, fields)
    return cls(fields)
|
java
|
/**
 * Returns a copy of the data context that is safe to print/log, with the
 * configured secure option masked using the class-level key constants.
 *
 * @param dataContext the raw data context to sanitize
 * @return the data context with secure values replaced by a placeholder
 */
protected Map<String, Map<String, String>> createPrintableDataContext(Map<String, Map<String, String>>
        dataContext) {
    return createPrintableDataContext(OPTION_KEY, SECURE_OPTION_KEY, SECURE_OPTION_VALUE, dataContext);
}
|
java
|
/**
 * Returns true when segment (a,b) intersects segment (c,d) — either by a
 * proper crossing, or because an endpoint of one segment lies on the other.
 */
static boolean intersect(int[] verts, int a, int b, int c, int d) {
    return intersectProp(verts, a, b, c, d)
            || between(verts, a, b, c) || between(verts, a, b, d)
            || between(verts, c, d, a) || between(verts, c, d, b);
}
|
java
|
/**
 * Checks whether (x,y) is a valid, unblocked target for the mover starting
 * at (sx,sy). Side effect: records the mover and source coordinates in
 * instance fields before consulting the map's blocked() test.
 *
 * @param mover the moving entity (made available to map.blocked via this)
 * @param sx    source x tile
 * @param sy    source y tile
 * @param x     candidate x tile
 * @param y     candidate y tile
 * @return true if the location is on the map and not blocked
 */
protected boolean isValidLocation(Mover mover, int sx, int sy, int x, int y) {
    boolean invalid = (x < 0) || (y < 0) || (x >= map.getWidthInTiles()) || (y >= map.getHeightInTiles());

    // Only consult the map when on-bounds and actually moving somewhere.
    if ((!invalid) && ((sx != x) || (sy != y))) {
        this.mover = mover;
        this.sourceX = sx;
        this.sourceY = sy;
        invalid = map.blocked(this, x, y);
    }

    return !invalid;
}
|
python
|
def connect_async(self, connection_id, connection_string, callback):
    """Connect to a device by its connection_string

    This function looks for the device on AWS IOT using the preconfigured
    topic prefix and looking for:
    <prefix>/devices/connection_string

    It then attempts to lock that device for exclusive access and
    returns a callback if successful.

    Args:
        connection_id (int): A unique integer set by the caller for referring to this connection
            once created
        connection_string (string): A device id of the form d--XXXX-YYYY-ZZZZ-WWWW
        callback (callable): A callback function called when the connection has succeeded or
            failed
    """
    topics = MQTTTopicValidator(self.prefix + 'devices/{}'.format(connection_string))
    # The generated key identifies this client as the exclusive owner.
    key = self._generate_key()
    name = self.name
    conn_message = {'type': 'command', 'operation': 'connect', 'key': key, 'client': name}
    context = {'key': key, 'slug': connection_string, 'topics': topics}
    # Register the pending connection (with timeout) and subscribe to the
    # device's topics BEFORE publishing, so the response cannot be missed.
    self.conns.begin_connection(connection_id, connection_string, callback, context, self.get_config('default_timeout'))
    self._bind_topics(topics)
    try:
        self.client.publish(topics.connect, conn_message)
    except IOTileException:
        # Publish failed: undo the subscription and fail the connection.
        self._unbind_topics(topics)
        self.conns.finish_connection(connection_id, False, 'Failed to send connection message')
|
python
|
def get_info(sld, tld, nameserver):
    '''
    Retrieves information about a registered nameserver. Returns the following
    information:

    - IP Address set for the nameserver
    - Domain name which was queried
    - A list of nameservers and their statuses

    sld
        SLD of the domain name

    tld
        TLD of the domain name

    nameserver
        Nameserver to retrieve

    CLI Example:

    .. code-block:: bash

        salt '*' namecheap_domains_ns.get_info sld tld nameserver
    '''
    # Bug fix: this previously built options for the 'delete' API command,
    # which would remove the nameserver instead of querying it.
    opts = salt.utils.namecheap.get_opts('namecheap.domains.ns.getInfo')
    opts['SLD'] = sld
    opts['TLD'] = tld
    opts['Nameserver'] = nameserver

    response_xml = salt.utils.namecheap.post_request(opts)
    if response_xml is None:
        # Request failed or the API returned an error payload.
        return {}

    domainnsinforesult = response_xml.getElementsByTagName('DomainNSInfoResult')[0]
    return salt.utils.namecheap.xml_to_dict(domainnsinforesult)
|
java
|
/**
 * Acknowledges that a worker received a task and reports whether the ack
 * was accepted.
 *
 * @param taskId   id of the task being acknowledged
 * @param workerId id of the worker that received it (logged only)
 * @return "true"/"false" string form of the underlying ack result
 */
@Service
public String ackTaskReceived(String taskId, String workerId) {
    LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId);
    return String.valueOf(ackTaskReceived(taskId));
}
|
java
|
/**
 * Walks up the class hierarchy starting at {@code clazz} and returns the
 * first class that locally declares the given annotation, or {@code null}
 * if none does before reaching {@code Object}.
 *
 * @param annotationType the annotation to look for (must not be null)
 * @param clazz          the class to start from; may be null
 * @return the declaring class, or null if not found
 */
public static Class<?> findAnnotationDeclaringClass(Class<? extends Annotation> annotationType, Class<?> clazz) {
    Assert.notNull(annotationType, "Annotation type must not be null");
    // Iterative form of the hierarchy walk; stops at null or Object.
    Class<?> current = clazz;
    while (current != null && !current.equals(Object.class)) {
        if (isAnnotationDeclaredLocally(annotationType, current)) {
            return current;
        }
        current = current.getSuperclass();
    }
    return null;
}
|
java
|
/**
 * Builds an operation that fetches episode video info for the given video id
 * from the database at the given path.
 *
 * @param databasePath path of the database to query
 * @param videoId      id of the video whose episode info is requested
 * @return a ready-to-run GetVideoInfoEpisodeOperation
 */
public GetVideoInfoEpisodeOperation buildGetVideoInfoEpisodeOperation(String databasePath, String videoId){
    return new GetVideoInfoEpisodeOperation(getOperationFactory(), databasePath, videoId);
}
|
python
|
def apply(self, stream=False):
    """
    Run a 'terraform apply'

    Sets up terraform, best-effort taints the deployment so it is
    recreated, runs the apply, and finally shows the stack outputs.

    :param stream: whether or not to stream TF output in realtime
    :type stream: bool
    """
    self._setup_tf(stream=stream)
    try:
        self._taint_deployment(stream=stream)
    except Exception:
        # Tainting is best-effort (e.g. nothing to taint on first run);
        # log the failure instead of silently swallowing it, then continue.
        logger.debug('Ignoring error while tainting deployment', exc_info=True)
    args = ['-input=false', '-refresh=true', '.']
    logger.warning('Running terraform apply: %s', ' '.join(args))
    out = self._run_tf('apply', cmd_args=args, stream=stream)
    if stream:
        logger.warning('Terraform apply finished successfully.')
    else:
        logger.warning("Terraform apply finished successfully:\n%s", out)
    self._show_outputs()
|
java
|
/**
 * Drains this consumer: unsubscribes, flushes the connection, then waits
 * (on the connection's executor) until pending messages are processed or
 * the timeout elapses before cleaning up. Idempotent while a drain is in
 * progress — subsequent calls return the same future.
 *
 * @param timeout maximum time to wait; null or ZERO means wait indefinitely
 * @return a future completed with true once the consumer is fully drained
 * @throws InterruptedException if interrupted while initiating the drain
 * @throws IllegalStateException if the consumer is closed or disconnected
 */
public CompletableFuture<Boolean> drain(Duration timeout) throws InterruptedException {
    if (!this.isActive() || this.connection==null) {
        throw new IllegalStateException("Consumer is closed");
    }

    if (isDraining()) {
        return this.getDrainingFuture();
    }

    Instant start = Instant.now();
    final CompletableFuture<Boolean> tracker = new CompletableFuture<>();
    this.markDraining(tracker);
    this.sendUnsubForDrain();

    try {
        this.connection.flush(timeout); // Flush and wait up to the timeout
    } catch (TimeoutException e) {
        this.connection.processException(e);
    }

    this.markUnsubedForDrain();

    // Wait for the timeout or the pending count to go to 0, skipped if conn is
    // draining
    connection.getExecutor().submit(() -> {
        try {
            Instant now = Instant.now();

            while (timeout == null || timeout.equals(Duration.ZERO)
                    || Duration.between(start, now).compareTo(timeout) < 0) {
                if (this.isDrained()) {
                    break;
                }

                Thread.sleep(1); // Sleep 1 milli

                now = Instant.now();
            }

            this.cleanUpAfterDrain();
        } catch (InterruptedException e) {
            this.connection.processException(e);
        } finally {
            // Complete with whether the drain actually finished in time.
            tracker.complete(this.isDrained());
        }
    });

    return getDrainingFuture();
}
|
java
|
/**
 * Returns an IP address to the pool by removing it from the in-use set,
 * then persists the updated state.
 *
 * @param ip the IP address to release
 */
protected void releaseIpAddress( String ip ) {
    this.logger.fine( "Releasing IP address: " + ip );
    this.usedIps.remove( ip );
    save( this );
}
|
java
|
/**
 * Returns the effective upper bounds of a wildcard type. A wildcard with no
 * declared upper bound implicitly has {@code Object} as its only bound;
 * otherwise the declared bounds are normalized.
 *
 * @param wildcardType the wildcard to inspect (must not be null)
 * @return the implicit upper bounds, never empty
 */
private static Type[] getImplicitUpperBounds(final WildcardType wildcardType) {
    Assert.requireNonNull(wildcardType, "wildcardType");
    final Type[] declared = wildcardType.getUpperBounds();
    if (declared.length == 0) {
        return new Type[]{Object.class};
    }
    return normalizeUpperBounds(declared);
}
|
java
|
@SuppressWarnings("unchecked")
// NetBeans GUI-builder generated code: lays out three color choosers
// (border / fill / text) each paired with a reset button in a 3x2 GridBagLayout.
// Do not hand-edit the structure — regenerate via the form editor instead.
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    java.awt.GridBagConstraints gridBagConstraints;
    colorChooserBorder = new com.igormaznitsa.sciareto.ui.misc.ColorChooserButton();
    colorChooserFill = new com.igormaznitsa.sciareto.ui.misc.ColorChooserButton();
    colorChooserText = new com.igormaznitsa.sciareto.ui.misc.ColorChooserButton();
    buttonResetBorder = new javax.swing.JButton();
    buttonResetFill = new javax.swing.JButton();
    buttonResetText = new javax.swing.JButton();
    setLayout(new java.awt.GridBagLayout());
    colorChooserBorder.setText("Border color"); // NOI18N
    colorChooserBorder.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.ipadx = 64;
    gridBagConstraints.insets = new java.awt.Insets(8, 16, 8, 16);
    add(colorChooserBorder, gridBagConstraints);
    colorChooserFill.setText("Fill color"); // NOI18N
    colorChooserFill.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.ipadx = 64;
    gridBagConstraints.insets = new java.awt.Insets(0, 16, 8, 16);
    add(colorChooserFill, gridBagConstraints);
    colorChooserText.setText("Text color"); // NOI18N
    colorChooserText.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.ipadx = 64;
    gridBagConstraints.insets = new java.awt.Insets(0, 16, 8, 16);
    add(colorChooserText, gridBagConstraints);
    buttonResetBorder.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/cross16.png"))); // NOI18N
    buttonResetBorder.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            buttonResetBorderActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.insets = new java.awt.Insets(8, 0, 8, 0);
    add(buttonResetBorder, gridBagConstraints);
    buttonResetFill.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/cross16.png"))); // NOI18N
    buttonResetFill.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            buttonResetFillActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 8, 0);
    add(buttonResetFill, gridBagConstraints);
    buttonResetText.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/cross16.png"))); // NOI18N
    buttonResetText.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            buttonResetTextActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 8, 0);
    add(buttonResetText, gridBagConstraints);
}
|
python
|
def exists_secondary_combined_list(curriculum_abbr,
                                   course_number,
                                   primary_section_id,
                                   quarter,
                                   year):
    """
    Return True if a combined mailman list exists for all
    the secondary course sections in the given quarter and year
    """
    # Resolve the canonical combined-list name first, then check existence.
    list_name = get_secondary_combined_list_name(curriculum_abbr,
                                                 course_number,
                                                 primary_section_id,
                                                 quarter,
                                                 year)
    return exists(list_name)
|
java
|
/**
 * Copies all bytes from one stream to another without progress reporting.
 * Convenience overload that delegates with a null progress listener.
 *
 * @param copyFrom stream to read from
 * @param copyTo   stream to write to
 * @throws IOException if reading or writing fails
 */
public static void copyStream(InputStream copyFrom, OutputStream copyTo)
        throws IOException {
    copyStream(copyFrom, copyTo, null);
}
|
java
|
/**
 * Requests temporary session credentials from the STS endpoint. The
 * request's optional ACL document is sent as the JSON request body.
 *
 * @param request the session-token request; must be non-null with a
 *                positive durationSeconds
 * @return the parsed session-token response
 */
public GetSessionTokenResponse getSessionToken(GetSessionTokenRequest request) {
    checkNotNull(request, "The parameter request should NOT be null.");
    checkIsTrue(request.getDurationSeconds() > 0, "the durationSeconds parameter should be greater than zero");
    InternalRequest internalRequest = new InternalRequest(HttpMethodName.POST,
            HttpUtils.appendUri(this.getEndpoint(), URL_PREFIX, GET_SESSION_TOKEN_PATH));
    if (request.getDurationSeconds() != null) {
        internalRequest.addParameter("durationSeconds", String.valueOf(request.getDurationSeconds()));
    }
    internalRequest.setCredentials(request.getRequestCredentials());
    // Content-Length reflects the optional ACL body (0 when absent).
    internalRequest.addHeader(Headers.CONTENT_LENGTH,
            String.valueOf(request.getAcl() == null ? 0 : request.getAcl().length()));
    internalRequest.addHeader(Headers.CONTENT_TYPE, "application/json");
    if (request.getAcl() != null) {
        internalRequest.setContent(RestartableInputStream.wrap(request.getAcl().getBytes()));
    }
    return this.invokeHttpClient(internalRequest, GetSessionTokenResponse.class);
}
|
java
|
/**
 * Checks whether the given class is registered as an embeddable whose
 * metadata is marked with {@link PersistenceType#EMBEDDABLE}.
 *
 * @param embeddableClazz candidate class
 * @return true when the class is a known EMBEDDABLE type
 */
public boolean isEmbeddable(Class embeddableClazz)
{
    if (embeddables == null || !embeddables.containsKey(embeddableClazz))
    {
        return false;
    }
    return embeddables.get(embeddableClazz).getPersistenceType().equals(PersistenceType.EMBEDDABLE);
}
|
python
|
def base(self):
    """
    Return the base object if the memory of the underlying data is shared.

    .. deprecated:: 0.23.0
    """
    # stacklevel=2 points the FutureWarning at the caller's line, not here.
    warnings.warn("{obj}.base is deprecated and will be removed "
                  "in a future version".format(obj=type(self).__name__),
                  FutureWarning, stacklevel=2)
    # Delegates to numpy: ndarray.base is the array this view shares memory with.
    return self.values.base
|
python
|
def _calculate_data_point_zscalars(self, x, y, type_='array'):
    """Determines the Z-scalar values at the specified coordinates
    for use when setting up the kriging matrix. Uses bilinear
    interpolation.

    Currently, the Z scalar values are extracted from the input Z grid
    exactly at the specified coordinates. This means that if the Z grid
    resolution is finer than the resolution of the desired kriged grid,
    there is no averaging of the scalar values to return an average
    Z value for that cell in the kriged grid. Rather, the exact Z value
    right at the coordinate is used.

    Parameters: x, y are scalars when type_ == 'scalar', otherwise 1-D or
    2-D arrays of matching shape. Returns a scalar or an array shaped
    like x, respectively. Raises ValueError when a point falls outside
    the external drift grid.
    """
    if type_ == 'scalar':
        nx = 1
        ny = 1
        z_scalars = None
    else:
        # Treat 1-D input as a single row; otherwise iterate rows x cols.
        if x.ndim == 1:
            nx = x.shape[0]
            ny = 1
        else:
            ny = x.shape[0]
            nx = x.shape[1]
        z_scalars = np.zeros(x.shape)
    for m in range(ny):
        for n in range(nx):
            # Pick the (xn, yn) coordinate for this iteration.
            if type_ == 'scalar':
                xn = x
                yn = y
            else:
                if x.ndim == 1:
                    xn = x[n]
                    yn = y[n]
                else:
                    xn = x[m, n]
                    yn = y[m, n]
            if xn > np.amax(self.external_Z_array_x) or \
               xn < np.amin(self.external_Z_array_x) or \
               yn > np.amax(self.external_Z_array_y) or \
               yn < np.amin(self.external_Z_array_y):
                raise ValueError("External drift array does not cover "
                                 "specified kriging domain.")

            # bilinear interpolation
            # Bracket (xn, yn) with the nearest grid coordinates on each side.
            external_x2_index = \
                np.amin(np.where(self.external_Z_array_x >= xn)[0])
            external_x1_index = \
                np.amax(np.where(self.external_Z_array_x <= xn)[0])
            external_y2_index = \
                np.amin(np.where(self.external_Z_array_y >= yn)[0])
            external_y1_index = \
                np.amax(np.where(self.external_Z_array_y <= yn)[0])
            if external_y1_index == external_y2_index:
                # Point sits exactly on a grid row: interpolate along x only
                # (or take the exact node when also on a grid column).
                if external_x1_index == external_x2_index:
                    z = self.external_Z_array[external_y1_index, external_x1_index]
                else:
                    z = (self.external_Z_array[external_y1_index, external_x1_index] *
                         (self.external_Z_array_x[external_x2_index] - xn) +
                         self.external_Z_array[external_y2_index, external_x2_index] *
                         (xn - self.external_Z_array_x[external_x1_index])) / \
                        (self.external_Z_array_x[external_x2_index] -
                         self.external_Z_array_x[external_x1_index])
            elif external_x1_index == external_x2_index:
                # Point sits exactly on a grid column: interpolate along y.
                # NOTE(review): the inner y1==y2 branch below is unreachable
                # (the elif already implies y1 != y2) — confirm and simplify.
                if external_y1_index == external_y2_index:
                    z = self.external_Z_array[external_y1_index, external_x1_index]
                else:
                    z = (self.external_Z_array[external_y1_index, external_x1_index] *
                         (self.external_Z_array_y[external_y2_index] - yn) +
                         self.external_Z_array[external_y2_index, external_x2_index] *
                         (yn - self.external_Z_array_y[external_y1_index])) / \
                        (self.external_Z_array_y[external_y2_index] -
                         self.external_Z_array_y[external_y1_index])
            else:
                # General case: full bilinear combination of the four corners.
                z = (self.external_Z_array[external_y1_index, external_x1_index] *
                     (self.external_Z_array_x[external_x2_index] - xn) *
                     (self.external_Z_array_y[external_y2_index] - yn) +
                     self.external_Z_array[external_y1_index, external_x2_index] *
                     (xn - self.external_Z_array_x[external_x1_index]) *
                     (self.external_Z_array_y[external_y2_index] - yn) +
                     self.external_Z_array[external_y2_index, external_x1_index] *
                     (self.external_Z_array_x[external_x2_index] - xn) *
                     (yn - self.external_Z_array_y[external_y1_index]) +
                     self.external_Z_array[external_y2_index, external_x2_index] *
                     (xn - self.external_Z_array_x[external_x1_index]) *
                     (yn - self.external_Z_array_y[external_y1_index])) / \
                    ((self.external_Z_array_x[external_x2_index] -
                      self.external_Z_array_x[external_x1_index]) *
                     (self.external_Z_array_y[external_y2_index] -
                      self.external_Z_array_y[external_y1_index]))
            if type_ == 'scalar':
                z_scalars = z
            else:
                if z_scalars.ndim == 1:
                    z_scalars[n] = z
                else:
                    z_scalars[m, n] = z
    return z_scalars
|
python
|
def cleanup(self):
    """Run cleanup script of pipeline when hook is configured.

    Yields the cleanup shell's output lines one at a time. The cleanup
    hook runs with the pipeline's first environment block, augmented with
    PIPELINE_RESULT/PIPELINE_SHELL_EXIT_CODE so the script can inspect
    the (successful) outcome.
    """
    if self.data.hooks and len(self.data.hooks.cleanup) > 0:
        # Copy so the pipeline's own env list is not mutated.
        env = self.data.env_list[0].copy()
        env.update({'PIPELINE_RESULT': 'SUCCESS', 'PIPELINE_SHELL_EXIT_CODE': '0'})
        config = ShellConfig(script=self.data.hooks.cleanup, model=self.model,
                             env=env, dry_run=self.options.dry_run,
                             debug=self.options.debug, strict=self.options.strict,
                             temporary_scripts_path=self.options.temporary_scripts_path)
        cleanup_shell = Bash(config)
        for line in cleanup_shell.process():
            yield line
|
java
|
/**
 * Returns the previous, current, and next commerce discounts (relative to
 * the given discount id) matching group and coupon code, filtered by the
 * caller's permissions. Delegates to the persistence layer.
 *
 * @param commerceDiscountId pivot discount id
 * @param groupId            group to match
 * @param couponCode         coupon code to match
 * @param orderByComparator  ordering used to determine prev/next
 * @return array of [previous, current, next] discounts
 * @throws com.liferay.commerce.discount.exception.NoSuchDiscountException
 *         if no discount with the given id exists
 */
public static CommerceDiscount[] filterFindByG_C_PrevAndNext(
    long commerceDiscountId, long groupId, String couponCode,
    OrderByComparator<CommerceDiscount> orderByComparator)
    throws com.liferay.commerce.discount.exception.NoSuchDiscountException {
    return getPersistence()
               .filterFindByG_C_PrevAndNext(commerceDiscountId, groupId,
        couponCode, orderByComparator);
}
|
python
|
def tileServers(self):
    """
    Returns the objects to manage site's tile hosted services/servers. It returns
    AGSAdministration object if the site is Portal and it returns a
    hostedservice.Services object if it is AGOL.
    """
    services = []
    ishttps = False
    if self.urls == {}:
        return {}
    urls = self.urls["urls"]['tiles']
    # Prefer the https host list when available.
    if 'https' in urls:
        res = urls['https']
        ishttps = True
    else:
        res = urls['http']
    for https in res:
        if ishttps:
            scheme = "https"
        else:
            scheme = "http"
        if self.isPortal == False:
            # AGOL: build the hosted-tiles admin endpoint for this host.
            url = "%s://%s/tiles/%s/arcgis/admin/services" % (scheme, https, self.portalId)
            services.append(Services(url=url,
                                     securityHandler=self._securityHandler,
                                     proxy_url=self._proxy_url,
                                     proxy_port=self._proxy_port))
        else:
            # Portal: enumerate the federated servers and wrap each admin API.
            # (A dead `url = "%s/admin" % https` assignment was removed here;
            # the loop below always overwrote it before use.)
            servers = self.servers
            for server in servers.servers:
                url = server.adminUrl
                sh = PortalServerSecurityHandler(tokenHandler=self._securityHandler,
                                                 serverUrl=url,
                                                 referer=server.name.split(":")[0]
                                                 )
                services.append(
                    AGSAdministration(url=url,
                                      securityHandler=sh,
                                      proxy_url=self._proxy_url,
                                      proxy_port=self._proxy_port,
                                      initialize=True)
                )
    return services
|
python
|
def key_event_to_name(event):
    """Convert a Qt keystroke event into its key-name string
    (e.g. ``'Ctrl-Shift-A'``).
    """
    code = event.key()
    mods = event.modifiers()

    # Keypad keys use their own name table; everything else (or a keypad
    # code with no entry) falls back to the general key map.
    key = keypad_map.get(code) if mods & QtCore.Qt.KeypadModifier else None
    if key is None:
        key = key_map.get(code)

    parts = []
    if mods & QtCore.Qt.ControlModifier:
        parts.append('Ctrl')
    if mods & QtCore.Qt.AltModifier:
        parts.append('Alt')
    if mods & QtCore.Qt.MetaModifier:
        parts.append('Meta')
    # Shift only matters alongside another modifier or a named
    # (multi-character) key.
    if mods & QtCore.Qt.ShiftModifier and (parts or (key is not None and len(key) > 1)):
        parts.append('Shift')
    if key:
        parts.append(key)
    return '-'.join(parts)
|
java
|
/**
 * Protobuf-generated accessor: returns the client device locality, or the
 * default (empty) DeviceLocality instance when the field is unset.
 */
public org.tensorflow.framework.DeviceLocality getClientLocality() {
    return clientLocality_ == null ? org.tensorflow.framework.DeviceLocality.getDefaultInstance() : clientLocality_;
}
|
java
|
/**
 * Validates the given entity and updates it by primary key through the DAO.
 *
 * @param pData entity to validate and persist
 * @throws APPErrorException if validation or the update fails
 */
@Override
public void update(Object pData) throws APPErrorException {
    valid(pData);
    mDao.updateByPrimaryKey(pData);
}
|
java
|
/**
 * Fetches a page of recipient groups via the RGUTRPC FILGET broker call and
 * appends them (converted to Recipient objects) to the result collection.
 *
 * @param startFrom entry to start the page from
 * @param forward   true to page forward, false to page backward
 * @param result    collection the fetched recipients are added to
 */
public void getGroups(String startFrom, boolean forward, Collection<Recipient> result) {
    List<String> lst = broker.callRPCList("RGUTRPC FILGET", null, 3.8, startFrom, forward ? 1 : -1, MG_SCREEN, 40);
    toRecipients(lst, true, startFrom, result);
}
|
java
|
/**
 * Fetches the CP definition option rel matching the UUID within the given
 * group, or returns {@code null} if none exists. Delegates to the service.
 *
 * @param uuid    the option rel's UUID
 * @param groupId the group to search in
 * @return the matching option rel, or null
 */
public static com.liferay.commerce.product.model.CPDefinitionOptionRel fetchCPDefinitionOptionRelByUuidAndGroupId(
    String uuid, long groupId) {
    return getService()
               .fetchCPDefinitionOptionRelByUuidAndGroupId(uuid, groupId);
}
|
java
|
/**
 * Resolves the effective property value using a fallback cascade: the fixed
 * value wins, then the current value, then the constraint's initial value,
 * and finally the empty string. The result is post-processed by the
 * configuration before being returned.
 *
 * @return the processed property value, never null
 */
public String getValue() {
    String resolved = fixedValue;
    if (resolved == null) {
        resolved = currentValue;
    }
    if (resolved == null && constraint != null) {
        resolved = constraint.initValues(null);
    }
    if (resolved == null) {
        resolved = "";
    }
    return configuration.processPropertyValue(resolved);
}
|
java
|
/**
 * Initializes a Paige-Tarjan partition-refinement structure from a (possibly
 * partial) deterministic automaton. Reachable states are discovered by BFS
 * from the initial state; missing transitions are redirected to a synthetic
 * sink state (id == automaton.size()) classified by sinkClassification.
 * Initial blocks are formed by the given state classification. All data
 * (block membership, state positions, predecessor offsets and predecessor
 * lists) is packed into one int array handed to the PaigeTarjan instance.
 *
 * @param pt                    the partition-refinement structure to populate
 * @param automaton             the input automaton
 * @param inputs                the input alphabet
 * @param initialClassification maps each state to its initial block key
 * @param sinkClassification    block key used for the synthetic sink state
 * @return the state-id mapping used for the automaton's states
 */
public static <S, I> StateIDs<S> initDeterministic(PaigeTarjan pt,
                                                   SimpleDeterministicAutomaton<S, I> automaton,
                                                   Alphabet<I> inputs,
                                                   Function<? super S, ?> initialClassification,
                                                   Object sinkClassification) {
    // Layout of the packed int array:
    // [0, numStatesWithSink)            block-sorted state ids
    // [posDataLow, predOfsDataLow)      position of each state in the above
    // [predOfsDataLow, predDataLow]     per-(input,state) predecessor offsets
    // [predDataLow, dataSize)           predecessor state ids
    int numStates = automaton.size();
    int numInputs = inputs.size();

    int sinkId = numStates;
    int numStatesWithSink = numStates + 1;
    int posDataLow = numStatesWithSink;
    int predOfsDataLow = posDataLow + numStatesWithSink;
    int numTransitionsFull = numStatesWithSink * numInputs;
    int predDataLow = predOfsDataLow + numTransitionsFull + 1;
    int dataSize = predDataLow + numTransitionsFull;

    int[] data = new int[dataSize];
    Block[] blockForState = new Block[numStatesWithSink];

    StateIDs<S> ids = automaton.stateIDs();

    Map<Object, Block> blockMap = new HashMap<>();

    S init = automaton.getInitialState();
    int initId = ids.getStateId(init);

    Object initClass = initialClassification.apply(init);

    Block initBlock = pt.createBlock();
    initBlock.high = 1;
    blockForState[initId] = initBlock;
    blockMap.put(initClass, initBlock);

    // BFS over reachable states; during the pass, count predecessors per
    // (input, successor) pair and size each classification block (in .high).
    int[] statesBuff = new int[numStatesWithSink];
    statesBuff[0] = initId;

    int statesPtr = 0;
    int reachableStates = 1;

    boolean partial = false;
    while (statesPtr < reachableStates) {
        int currId = statesBuff[statesPtr++];
        if (currId == sinkId) {
            continue;
        }
        S curr = ids.getState(currId);

        int predCountBase = predOfsDataLow;

        for (int i = 0; i < numInputs; i++) {
            I sym = inputs.getSymbol(i);
            S succ = automaton.getSuccessor(curr, sym);
            int succId;
            if (succ != null) {
                succId = ids.getStateId(succ);
            } else {
                // Missing transition: route it to the synthetic sink.
                succId = sinkId;
                partial = true;
            }
            Block succBlock = blockForState[succId];
            if (succBlock == null) {
                Object succClass;
                if (succ != null) {
                    succClass = initialClassification.apply(succ);
                } else {
                    succClass = sinkClassification;
                }
                succBlock = blockMap.get(succClass);
                if (succBlock == null) {
                    succBlock = pt.createBlock();
                    succBlock.high = 0;
                    blockMap.put(succClass, succBlock);
                }
                succBlock.high++;
                blockForState[succId] = succBlock;
                statesBuff[reachableStates++] = succId;
            }
            data[predCountBase + succId]++;
            predCountBase += numStatesWithSink;
        }
    }

    if (partial) {
        // The sink loops to itself on every input symbol.
        int predCountIdx = predOfsDataLow + sinkId;
        for (int i = 0; i < numInputs; i++) {
            data[predCountIdx]++;
            predCountIdx += numStatesWithSink;
        }
    }

    // Turn block sizes into [low, high) ranges within the state array.
    int curr = 0;
    for (Block b : pt.blockList()) {
        curr += b.high;
        b.high = curr;
        b.low = curr;
    }

    // Turn predecessor counts into end offsets (prefix sums).
    data[predOfsDataLow] += predDataLow;
    prefixSum(data, predOfsDataLow, predDataLow);

    // Second pass: place states into their blocks and fill the
    // predecessor lists (both filled back-to-front by decrementing).
    for (int i = 0; i < reachableStates; i++) {
        int stateId = statesBuff[i];
        Block b = blockForState[stateId];
        int pos = --b.low;
        data[pos] = stateId;
        data[posDataLow + stateId] = pos;

        S state = ids.getState(stateId);

        int predOfsBase = predOfsDataLow;

        for (int j = 0; j < numInputs; j++) {
            I sym = inputs.getSymbol(j);
            S succ = automaton.getSuccessor(state, sym);
            int succId;
            if (succ == null) {
                succId = sinkId;
            } else {
                succId = ids.getStateId(succ);
            }
            data[--data[predOfsBase + succId]] = stateId;
            predOfsBase += numStatesWithSink;
        }
    }

    pt.setBlockData(data);
    pt.setPosData(data, posDataLow);
    pt.setPredOfsData(data, predOfsDataLow);
    pt.setPredData(data);
    pt.setSize(numStatesWithSink, numInputs);
    pt.setBlockForState(blockForState);

    pt.removeEmptyBlocks();

    return ids;
}
|
java
|
/**
 * Clears all values and entries from the list box, resets the status text,
 * restores the empty placeholder (if one is configured), reloads the widget,
 * and re-adds the blank item when blank selection is allowed.
 */
@Override
public void clear() {
    values.clear();
    listBox.clear();
    clearStatusText();
    if (emptyPlaceHolder != null) {
        insertEmptyPlaceHolder(emptyPlaceHolder);
    }
    reload();
    if (isAllowBlank()) {
        addBlankItemIfNeeded();
    }
}
|
python
|
def pop(self, n=None):
    """Call from main thread. Returns the list of newly-available (handle, env) pairs.

    With ``n is None`` this is non-blocking and drains whatever is ready.
    With an integer ``n`` it blocks (with exponential backoff up to 30s per
    wait) until at least ``n`` envs are available or a 10-minute overall
    timeout elapses, raising FatalError in the latter case. Worker errors
    are surfaced via ``self.error_buffer.check()``.
    """
    self.error_buffer.check()
    envs = []
    if n is None:
        # Non-blocking drain of everything currently queued.
        while True:
            try:
                envs += self.ready.get(block=False)
            except queue.Empty:
                break
    else:
        sync_timeout = 10 * 60

        start = time.time()
        wait_time = 1
        while len(envs) < n:
            try:
                extra_logger.info('[%s] Waiting for %d envs, currently at %d, sleeping for %d', self.label, n, len(envs), wait_time)
                envs += self.ready.get(timeout=wait_time)
            except queue.Empty:
                # Re-raise any worker errors before waiting again.
                self.error_buffer.check()
                wait_time = min(wait_time * 2, 30)

                delta = time.time() - start
                if delta > sync_timeout:
                    raise FatalError("Waited %.0fs to obtain envs, timeout was %.0fs. (Obtained %d/%d envs.)" % (delta, sync_timeout, len(envs), n))
    return envs
|
python
|
def clear(self) -> None:
    """
    Clears the board.

    Resets move stack and move counters. The side to move is white. There
    are no rooks or kings, so castling rights are removed.

    In order to be in a valid :func:`~chess.Board.status()` at least kings
    need to be put on the board.
    """
    self.turn = WHITE
    self.castling_rights = BB_EMPTY
    self.ep_square = None
    self.halfmove_clock = 0
    self.fullmove_number = 1

    # Remove all pieces (and reset the move stack) via the base helper.
    self.clear_board()
|
python
|
def all(*validators):
    """Validation only succeeds if all passed in validators return no errors.

    :param validators: validator callables, each taking the fields mapping
        and returning a falsy value on success or an error collection on
        failure.
    :return: a combined validator that short-circuits on the first failure
        (returning that validator's errors) and returns ``None`` on success.
    """
    def validate_all(fields):
        for validator in validators:
            errors = validator(fields)
            if errors:
                return errors
    # Robustness fix: skip validators without a docstring — joining a None
    # docstring previously raised TypeError.
    validate_all.__doc__ = " and ".join(
        validator.__doc__ for validator in validators if validator.__doc__)
    return validate_all
|
java
|
/**
 * Converts the address portion of a reverse-DNS IPv4 string (the part before
 * the in-addr.arpa suffix) back into dotted-decimal order by reversing its
 * segments, e.g. "4.3.2.1" becomes "1.2.3.4".
 *
 * @param str              the full reverse-DNS string
 * @param suffixStartIndex index where the in-addr.arpa suffix begins
 * @return the segments in forward order
 * @throws AddressStringException on an empty segment or wrong segment count
 */
private static CharSequence convertReverseDNSIPv4(String str, int suffixStartIndex) throws AddressStringException {
    StringBuilder builder = new StringBuilder(suffixStartIndex);
    int segCount = 0;
    int j = suffixStartIndex;
    // Scan right-to-left; each separator found emits the segment to its right.
    for(int i = suffixStartIndex - 1; i > 0; i--) {
        char c1 = str.charAt(i);
        if(c1 == IPv4Address.SEGMENT_SEPARATOR) {
            if(j - i <= 1) {
                // Empty segment (two adjacent separators).
                throw new AddressStringException(str, i);
            }
            for(int k = i + 1; k < j; k++) {
                builder.append(str.charAt(k));
            }
            builder.append(c1);
            j = i;
            segCount++;
        }
    }
    // Emit the leftmost segment (no separator precedes it).
    for(int k = 0; k < j; k++) {
        builder.append(str.charAt(k));
    }
    if(segCount + 1 != IPv4Address.SEGMENT_COUNT) {
        throw new AddressStringException(str, 0);
    }
    return builder;
}
|
java
|
/**
 * Assigns a value to a trie node. If the node was already terminal, its old
 * value is returned; otherwise the node becomes terminal, the trie size is
 * incremented, and null is returned.
 *
 * @param node     the node to assign the value to
 * @param newValue the value to store
 * @return the previous value, or null if the node had none
 */
private V replaceValue(Node<V> node, V newValue) {
    // Note: a node is terminal if it already has a value
    if (node.isTerminal()) {
        V old = node.value;
        node.value = newValue;
        return old;
    }
    // the node wasn't already a terminal node (i.e. this char sequence is a
    // substring of an existing sequence), assign it a value
    else {
        node.value = newValue;
        size++;
        return null; // no old value
    }
}
|
python
|
def space_acl(args):
    ''' Retrieve access control list for a workspace'''
    r = fapi.get_workspace_acl(args.project, args.workspace)
    fapi._check_response_code(r, 200)
    # Map each user to their access level, sorted by user for stable output.
    acl = r.json()['acl']
    return {user: entry['accessLevel'] for user, entry in sorted(acl.items())}
|
java
|
/**
 * Returns the raw (undecoded) bytes of a STRING column at the given index.
 * Sets the was-null flag and returns null when the stored length equals the
 * null-string indicator. The buffer's cursor is restored before returning.
 *
 * @param columnIndex zero-based column index; must be a STRING column
 * @return the string's bytes, or null if the value is SQL NULL
 */
public final byte[] getStringAsBytes(int columnIndex) {
    validateColumnType(columnIndex, VoltType.STRING);
    int pos = m_buffer.position(); // save cursor so it can be restored on every exit path
    m_buffer.position(getOffset(columnIndex));
    int len = m_buffer.getInt(); // a 4-byte length prefix precedes the string bytes
    if (len == VoltTable.NULL_STRING_INDICATOR) {
        m_wasNull = true;
        m_buffer.position(pos);
        return null;
    }
    m_wasNull = false;
    byte[] data = new byte[len];
    m_buffer.get(data);
    m_buffer.position(pos);
    return data;
}
|
python
|
def chartItems(self):
    """
    Returns the chart items that are found within this scene.

    :return [<XChartWidgetItem>, ..]
    """
    from projexui.widgets.xchartwidget import XChartWidgetItem
    # Use a list comprehension so a real list is returned on both Python 2
    # and Python 3 (filter() yields a lazy iterator on Python 3, which
    # contradicts the documented list return type).
    return [item for item in self.items()
            if isinstance(item, XChartWidgetItem)]
|
python
|
def add_serverconnection_methods(cls):
    """Add a bunch of methods to an :class:`irc.client.SimpleIRCClient`
    to send commands and messages.

    Basically it wraps a bunch of methods from
    :class:`irc.client.ServerConnection` to be
    :meth:`irc.schedule.IScheduler.execute_after`.
    That way, you can easily send, even if the IRCClient is running in
    :class:`IRCClient.process_forever` in another thread.

    On the plus side you can use positional and keyword arguments instead of just positional ones.

    :param cls: The class to add the methods do.
    :type cls: :class:`irc.client.SimpleIRCClient`
    :returns: cls, the same class with the wrapped methods attached
    """
    # NOTE: 'part' was previously listed twice; the duplicate only caused a
    # redundant wrap-and-setattr, so the list is now de-duplicated.
    methods = ['action', 'admin', 'cap', 'ctcp', 'ctcp_reply',
               'globops', 'info', 'invite', 'ison', 'join',
               'kick', 'links', 'list', 'lusers', 'mode',
               'motd', 'names', 'nick', 'notice', 'oper', 'part',
               'pass_', 'ping', 'pong', 'privmsg',
               'privmsg_many', 'quit', 'send_raw', 'squit',
               'stats', 'time', 'topic', 'trace', 'user', 'userhost',
               'users', 'version', 'wallops', 'who', 'whois', 'whowas']
    for m in methods:
        # Wrap so the underlying call is scheduled on the reactor rather
        # than executed directly, then carry over the original docstring.
        method = _wrap_execute_after(m)
        f = getattr(irc.client.ServerConnection, m)
        method.__doc__ = f.__doc__
        setattr(cls, method.__name__, method)
    return cls
|
java
|
/**
 * Lazily builds and caches the Retrofit service implementation for
 * {@code serviceType}, configured with the base URL and a Gson converter.
 *
 * NOTE(review): the lazy initialization is not synchronized — presumably
 * instances are used from a single thread; confirm before sharing.
 *
 * @return the (possibly newly created) service instance
 */
protected S getService() {
    // No need to recreate it
    if (service != null) {
        return service;
    }
    Retrofit.Builder retrofitBuilder = new Retrofit.Builder()
            .baseUrl(baseUrl())
            .addConverterFactory(GsonConverterFactory.create(getGsonBuilder().create()));
    // A custom Call.Factory, when provided, takes precedence over the
    // default OkHttp client.
    if (getCallFactory() != null) {
        retrofitBuilder.callFactory(getCallFactory());
    } else {
        retrofitBuilder.client(getOkHttpClient());
    }
    retrofit = retrofitBuilder.build();
    service = (S) retrofit.create(serviceType);
    return service;
}
|
python
|
def render_blocks(self, template_name, **context):
    """
    To render all the blocks
    :param template_name: The template file name
    :param context: **kwargs context to render
    :retuns dict: of all the blocks with block_name as key
    """
    template = self._get_template(template_name)
    # Render every named block of the template against the same context.
    return {name: self._render_context(template, body, **context)
            for name, body in template.blocks.items()}
|
java
|
/**
 * Sets the DM Name attribute and, when notification is required, emits an
 * EMF SET notification carrying the old and new values.
 * (EMF-generated-style setter — exact form preserved.)
 *
 * @param newDMName the new value of the DM Name attribute
 */
public void setDMName(String newDMName) {
    String oldDMName = dmName;
    dmName = newDMName;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.EDM__DM_NAME, oldDMName, dmName));
}
|
python
|
def _drawIndentMarkersAndEdge(self, paintEventRect):
    """Draw indentation markers

    Paints three kinds of decorations over the visible blocks:
      * whitespace dots (spaces) / lines (tabs) via ``drawWhiteSpace``,
      * vertical indent-level guide lines via ``drawIndentMarker``,
      * the line-length edge marker via ``drawEdgeLine``.

    :param paintEventRect: rectangle of the current paint event; blocks
        outside of it are skipped.
    """
    painter = QPainter(self.viewport())

    def drawWhiteSpace(block, column, char):
        # Draw a small dot for a space, or a short horizontal line for a
        # tab, vertically centered between the two column cursor rects.
        leftCursorRect = self.__cursorRect(block, column, 0)
        rightCursorRect = self.__cursorRect(block, column + 1, 0)
        if leftCursorRect.top() == rightCursorRect.top():  # if on the same visual line
            middleHeight = (leftCursorRect.top() + leftCursorRect.bottom()) / 2
            if char == ' ':
                painter.setPen(Qt.transparent)
                painter.setBrush(QBrush(Qt.gray))
                xPos = (leftCursorRect.x() + rightCursorRect.x()) / 2
                painter.drawRect(QRect(xPos, middleHeight, 2, 2))
            else:
                painter.setPen(QColor(Qt.gray).lighter(factor=120))
                painter.drawLine(leftCursorRect.x() + 3, middleHeight,
                                 rightCursorRect.x() - 3, middleHeight)

    def effectiveEdgePos(text):
        """Position of edge in a block.
        Defined by self._lineLengthEdge, but visible width of \t is more than 1,
        therefore effective position depends on count and position of \t symbols
        Return -1 if line is too short to have edge
        """
        if self._lineLengthEdge is None:
            return -1
        tabExtraWidth = self.indentWidth - 1
        fullWidth = len(text) + (text.count('\t') * tabExtraWidth)
        if fullWidth <= self._lineLengthEdge:
            return -1
        currentWidth = 0
        for pos, char in enumerate(text):
            if char == '\t':
                # Qt indents up to indentation level, so visible \t width depends on position
                currentWidth += (self.indentWidth - (currentWidth % self.indentWidth))
            else:
                currentWidth += 1
            if currentWidth > self._lineLengthEdge:
                return pos
        else:  # line too narrow, probably visible \t width is small
            # for-else: runs only when the loop never hit the edge width.
            return -1

    def drawEdgeLine(block, edgePos):
        # Vertical line marking the configured maximum line length.
        painter.setPen(QPen(QBrush(self._lineLengthEdgeColor), 0))
        rect = self.__cursorRect(block, edgePos, 0)
        painter.drawLine(rect.topLeft(), rect.bottomLeft())

    def drawIndentMarker(block, column):
        # Vertical guide line at one indentation level.
        painter.setPen(QColor(Qt.blue).lighter())
        rect = self.__cursorRect(block, column, offset=0)
        painter.drawLine(rect.topLeft(), rect.bottomLeft())

    indentWidthChars = len(self._indenter.text())
    cursorPos = self.cursorPosition  # presumably (blockNumber, column) — indexed with [1] below

    for block in iterateBlocksFrom(self.firstVisibleBlock()):
        blockGeometry = self.blockBoundingGeometry(block).translated(self.contentOffset())
        if blockGeometry.top() > paintEventRect.bottom():
            # Everything further down is outside the repaint area.
            break

        if block.isVisible() and blockGeometry.toRect().intersects(paintEventRect):

            # Draw indent markers, if good indentation is not drawn
            if self._drawIndentations:
                text = block.text()
                if not self.drawAnyWhitespace:
                    column = indentWidthChars
                    # Walk one indent unit at a time while the line keeps
                    # starting with full indent units followed by whitespace.
                    while text.startswith(self._indenter.text()) and \
                            len(text) > indentWidthChars and \
                            text[indentWidthChars].isspace():
                        if column != self._lineLengthEdge and \
                                (block.blockNumber(), column) != cursorPos:  # looks ugly, if both drawn
                            """on some fonts line is drawn below the cursor, if offset is 1
                            Looks like Qt bug"""
                            drawIndentMarker(block, column)
                        text = text[indentWidthChars:]
                        column += indentWidthChars

            # Draw edge, but not over a cursor
            if not self._drawSolidEdge:
                edgePos = effectiveEdgePos(block.text())
                if edgePos != -1 and edgePos != cursorPos[1]:
                    drawEdgeLine(block, edgePos)

            if self.drawAnyWhitespace or \
                    self.drawIncorrectIndentation:
                text = block.text()
                for column, draw in enumerate(self._chooseVisibleWhitespace(text)):
                    if draw:
                        drawWhiteSpace(block, column, text[column])
|
java
|
/**
 * Finds the first contained item stream accepted by the given filter, with
 * entry/exit tracing around the lookup.
 *
 * @param filter the selection criterion applied to each contained item stream
 * @return the first matching item stream (presumably null when nothing
 *         matches — semantics of findFirstMatching not visible here)
 * @throws MessageStoreException if the underlying search fails
 */
public final ItemStream findFirstMatchingItemStream(Filter filter) throws MessageStoreException
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "findFirstMatchingItemStream", filter);

    // Delegate the actual matching to the item-stream collection.
    ItemStream item = (ItemStream) _itemStreams.findFirstMatching(filter);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "findFirstMatchingItemStream", item);
    return item;
}
|
java
|
/**
 * Queues a NEWLINE token plus the INDENT/DEDENT tokens needed to move from
 * the current indentation level to {@code targetIndent}, then returns the
 * first queued token (the remainder stays in {@code dentsBuffer}).
 *
 * @param targetIndent the indentation column to unwind (or indent) to
 * @param copyFrom token whose attributes are copied onto the synthesized tokens
 * @return the first queued token (the newline)
 */
private Token unwindTo(int targetIndent, Token copyFrom) {
    assert dentsBuffer.isEmpty() : dentsBuffer;
    dentsBuffer.add(createToken(nlToken, copyFrom));
    // To make things easier, we'll queue up ALL of the dedents, and then pop off the first one.
    // For example, here's how some text is analyzed:
    //
    //  Text          :  Indentation  :  Action         : Indents Deque
    //  [ baseline ]  :  0            :  nothing        : [0]
    //  [   foo    ]  :  2            :  INDENT         : [0, 2]
    //  [    bar   ]  :  3            :  INDENT         : [0, 2, 3]
    //  [ baz      ]  :  0            :  DEDENT x2      : [0]
    while (true) {
        int prevIndent = indentations.pop();
        if (prevIndent == targetIndent) {
            break;
        }
        if (targetIndent > prevIndent) {
            // "weird" condition above: target is deeper than any recorded
            // level, so treat it as a fresh indent rather than a dedent.
            indentations.push(prevIndent); // restore previous indentation, since we've indented from it
            dentsBuffer.add(createToken(indentToken, copyFrom));
            break;
        }
        dentsBuffer.add(createToken(dedentToken, copyFrom));
    }
    indentations.push(targetIndent);
    return dentsBuffer.remove();
}
|
java
|
/**
 * Validates that the given type can be marshalled into a BigQuery schema.
 * Rejects non-collection interfaces, abstract classes, parameterized types
 * other than {@code Collection<T>}, maps, and explicitly unsupported types.
 *
 * @param type the class to validate
 * @throws IllegalArgumentException if the type cannot be marshalled
 */
static void validateTypeForSchemaMarshalling(Class<?> type) {
    if (isNonCollectionInterface(type) || isAbstract(type)) {
        // Message fixed: previously read "abstract class cannot be cannot be marshalled".
        throw new IllegalArgumentException("Cannot marshal " + type.getSimpleName()
            + ". Interfaces and abstract classes cannot be marshalled into consistent BigQuery data.");
    }
    if (!isCollection(type) && isGenericType(type)) {
        throw new IllegalArgumentException("Cannot marshal " + type.getSimpleName()
            + ". Parameterized type other than Collection<T> cannot be marshalled into consistent BigQuery data.");
    }
    if (Map.class.isAssignableFrom(type)) {
        throw new IllegalArgumentException(
            "Cannot marshal a map into BigQuery data " + type.getSimpleName());
    }
    if (UNSUPPORTED_TYPES.contains(type)) {
        throw new IllegalArgumentException(
            "Type cannot be marshalled into bigquery schema. " + type.getSimpleName());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.