Dataset schema (one record per source file):

  code        string   lengths 3 to 1.05M
  repo_name   string   lengths 4 to 116
  path        string   lengths 4 to 991
  language    string   9 distinct values
  license     string   15 distinct values
  size        int32    values 3 to 1.05M
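The records below follow in the column order above. As a minimal sketch of how a dataset with this schema might be loaded and inspected with the Hugging Face `datasets` library (the repository id "user/github-code-sample" is a placeholder, not the actual dataset name):

from datasets import load_dataset

# Stream the split instead of downloading it: the "code" column can hold
# blobs of up to 1.05 MB, so materializing the whole split locally is costly.
ds = load_dataset("user/github-code-sample", split="train", streaming=True)

# Inspect the metadata columns of the first few records.
for row in ds.take(3):
    print(row["repo_name"], row["path"], row["language"], row["license"], row["size"])

Streaming keeps memory bounded; records can also be narrowed by the "language" or "license" columns with ds.filter before iterating.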
package com.github.thiagogarbazza.examples.xmlwithsax;

import java.io.File;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;

class ResourceGetter {

  static File resourceFile(final String filePath) {
    final URL url = Thread.currentThread().getContextClassLoader().getResource(filePath);
    try {
      return new File(url.toURI());
    } catch (URISyntaxException e) {
      throw new RuntimeException(e);
    }
  }

  static InputStream resourceInputStream(final String filePath) {
    return Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
  }
}
thiagogarbazza/exemplos
example-xml-with-sax/src/test/java/com/github/thiagogarbazza/examples/xmlwithsax/ResourceGetter.java
Java
apache-2.0
621
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hubblemon.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
naver/hubblemon
manage.py
Python
apache-2.0
252
/*
 * Copyright 2014-2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.store.cluster.messaging.impl;

import com.google.common.base.Throwables;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.util.Tools;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.NodeId;
import org.onosproject.store.cluster.messaging.ClusterCommunicationService;
import org.onosproject.store.cluster.messaging.ClusterMessage;
import org.onosproject.store.cluster.messaging.ClusterMessageHandler;
import org.onosproject.store.cluster.messaging.Endpoint;
import org.onosproject.store.cluster.messaging.MessageSubject;
import org.onosproject.store.cluster.messaging.MessagingService;
import org.onosproject.utils.MeteringAgent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Objects;

import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

@Component(immediate = true)
@Service
public class ClusterCommunicationManager
        implements ClusterCommunicationService {

    private final Logger log = LoggerFactory.getLogger(getClass());

    private final MeteringAgent subjectMeteringAgent = new MeteringAgent(PRIMITIVE_NAME, SUBJECT_PREFIX, true);
    private final MeteringAgent endpointMeteringAgent = new MeteringAgent(PRIMITIVE_NAME, ENDPOINT_PREFIX, true);

    private static final String PRIMITIVE_NAME = "clusterCommunication";
    private static final String SUBJECT_PREFIX = "subject";
    private static final String ENDPOINT_PREFIX = "endpoint";

    private static final String SERIALIZING = "serialization";
    private static final String DESERIALIZING = "deserialization";
    private static final String NODE_PREFIX = "node:";
    private static final String ROUND_TRIP_SUFFIX = ".rtt";
    private static final String ONE_WAY_SUFFIX = ".oneway";

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    private ClusterService clusterService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MessagingService messagingService;

    private NodeId localNodeId;

    @Activate
    public void activate() {
        localNodeId = clusterService.getLocalNode().id();
        log.info("Started");
    }

    @Deactivate
    public void deactivate() {
        log.info("Stopped");
    }

    @Override
    public <M> void broadcast(M message,
                              MessageSubject subject,
                              Function<M, byte[]> encoder) {
        multicast(message, subject, encoder, clusterService.getNodes()
                .stream()
                .filter(node -> !Objects.equal(node, clusterService.getLocalNode()))
                .map(ControllerNode::id)
                .collect(Collectors.toSet()));
    }

    @Override
    public <M> void broadcastIncludeSelf(M message,
                                         MessageSubject subject,
                                         Function<M, byte[]> encoder) {
        multicast(message, subject, encoder, clusterService.getNodes()
                .stream()
                .map(ControllerNode::id)
                .collect(Collectors.toSet()));
    }

    @Override
    public <M> CompletableFuture<Void> unicast(M message,
                                               MessageSubject subject,
                                               Function<M, byte[]> encoder,
                                               NodeId toNodeId) {
        try {
            byte[] payload = new ClusterMessage(
                    localNodeId,
                    subject,
                    timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(message)
            ).getBytes();
            return doUnicast(subject, payload, toNodeId);
        } catch (Exception e) {
            return Tools.exceptionalFuture(e);
        }
    }

    @Override
    public <M> void multicast(M message,
                              MessageSubject subject,
                              Function<M, byte[]> encoder,
                              Set<NodeId> nodes) {
        byte[] payload = new ClusterMessage(
                localNodeId,
                subject,
                timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(message))
                .getBytes();
        nodes.forEach(nodeId -> doUnicast(subject, payload, nodeId));
    }

    @Override
    public <M, R> CompletableFuture<R> sendAndReceive(M message,
                                                      MessageSubject subject,
                                                      Function<M, byte[]> encoder,
                                                      Function<byte[], R> decoder,
                                                      NodeId toNodeId) {
        try {
            ClusterMessage envelope = new ClusterMessage(
                    clusterService.getLocalNode().id(),
                    subject,
                    timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(message));
            return sendAndReceive(subject, envelope.getBytes(), toNodeId)
                    .thenApply(bytes -> timeFunction(decoder, subjectMeteringAgent, DESERIALIZING).apply(bytes));
        } catch (Exception e) {
            return Tools.exceptionalFuture(e);
        }
    }

    private CompletableFuture<Void> doUnicast(MessageSubject subject, byte[] payload, NodeId toNodeId) {
        ControllerNode node = clusterService.getNode(toNodeId);
        checkArgument(node != null, "Unknown nodeId: %s", toNodeId);
        Endpoint nodeEp = new Endpoint(node.ip(), node.tcpPort());
        MeteringAgent.Context context = subjectMeteringAgent.startTimer(subject.toString() + ONE_WAY_SUFFIX);
        return messagingService.sendAsync(nodeEp, subject.value(), payload)
                .whenComplete((r, e) -> context.stop(e));
    }

    private CompletableFuture<byte[]> sendAndReceive(MessageSubject subject, byte[] payload, NodeId toNodeId) {
        ControllerNode node = clusterService.getNode(toNodeId);
        checkArgument(node != null, "Unknown nodeId: %s", toNodeId);
        Endpoint nodeEp = new Endpoint(node.ip(), node.tcpPort());
        MeteringAgent.Context epContext = endpointMeteringAgent
                .startTimer(NODE_PREFIX + toNodeId.toString() + ROUND_TRIP_SUFFIX);
        MeteringAgent.Context subjectContext = subjectMeteringAgent
                .startTimer(subject.toString() + ROUND_TRIP_SUFFIX);
        return messagingService.sendAndReceive(nodeEp, subject.value(), payload)
                .whenComplete((bytes, throwable) -> {
                    subjectContext.stop(throwable);
                    epContext.stop(throwable);
                });
    }

    @Override
    public void addSubscriber(MessageSubject subject,
                              ClusterMessageHandler subscriber,
                              ExecutorService executor) {
        messagingService.registerHandler(subject.value(),
                new InternalClusterMessageHandler(subscriber),
                executor);
    }

    @Override
    public void removeSubscriber(MessageSubject subject) {
        messagingService.unregisterHandler(subject.value());
    }

    @Override
    public <M, R> void addSubscriber(MessageSubject subject,
                                     Function<byte[], M> decoder,
                                     Function<M, R> handler,
                                     Function<R, byte[]> encoder,
                                     Executor executor) {
        messagingService.registerHandler(subject.value(),
                new InternalMessageResponder<M, R>(decoder, encoder, m -> {
                    CompletableFuture<R> responseFuture = new CompletableFuture<>();
                    executor.execute(() -> {
                        try {
                            responseFuture.complete(handler.apply(m));
                        } catch (Exception e) {
                            responseFuture.completeExceptionally(e);
                        }
                    });
                    return responseFuture;
                }));
    }

    @Override
    public <M, R> void addSubscriber(MessageSubject subject,
                                     Function<byte[], M> decoder,
                                     Function<M, CompletableFuture<R>> handler,
                                     Function<R, byte[]> encoder) {
        messagingService.registerHandler(subject.value(),
                new InternalMessageResponder<>(decoder, encoder, handler));
    }

    @Override
    public <M> void addSubscriber(MessageSubject subject,
                                  Function<byte[], M> decoder,
                                  Consumer<M> handler,
                                  Executor executor) {
        messagingService.registerHandler(subject.value(),
                new InternalMessageConsumer<>(decoder, handler),
                executor);
    }

    /**
     * Performs the timed function, returning the value it would while timing the operation.
     *
     * @param timedFunction the function to be timed
     * @param meter the metering agent to be used to time the function
     * @param opName the opname to be used when starting the meter
     * @param <A> The param type of the function
     * @param <B> The return type of the function
     * @return the value returned by the timed function
     */
    private <A, B> Function<A, B> timeFunction(Function<A, B> timedFunction,
                                               MeteringAgent meter, String opName) {
        checkNotNull(timedFunction);
        checkNotNull(meter);
        checkNotNull(opName);
        return new Function<A, B>() {
            @Override
            public B apply(A a) {
                final MeteringAgent.Context context = meter.startTimer(opName);
                B result = null;
                try {
                    result = timedFunction.apply(a);
                    context.stop(null);
                    return result;
                } catch (Exception e) {
                    context.stop(e);
                    Throwables.propagate(e);
                    return null;
                }
            }
        };
    }

    private class InternalClusterMessageHandler implements BiFunction<Endpoint, byte[], byte[]> {
        private ClusterMessageHandler handler;

        public InternalClusterMessageHandler(ClusterMessageHandler handler) {
            this.handler = handler;
        }

        @Override
        public byte[] apply(Endpoint sender, byte[] bytes) {
            ClusterMessage message = ClusterMessage.fromBytes(bytes);
            handler.handle(message);
            return message.response();
        }
    }

    private class InternalMessageResponder<M, R> implements BiFunction<Endpoint, byte[], CompletableFuture<byte[]>> {
        private final Function<byte[], M> decoder;
        private final Function<R, byte[]> encoder;
        private final Function<M, CompletableFuture<R>> handler;

        public InternalMessageResponder(Function<byte[], M> decoder,
                                        Function<R, byte[]> encoder,
                                        Function<M, CompletableFuture<R>> handler) {
            this.decoder = decoder;
            this.encoder = encoder;
            this.handler = handler;
        }

        @Override
        public CompletableFuture<byte[]> apply(Endpoint sender, byte[] bytes) {
            return handler.apply(timeFunction(decoder, subjectMeteringAgent, DESERIALIZING)
                    .apply(ClusterMessage.fromBytes(bytes).payload()))
                    .thenApply(m -> timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(m));
        }
    }

    private class InternalMessageConsumer<M> implements BiConsumer<Endpoint, byte[]> {
        private final Function<byte[], M> decoder;
        private final Consumer<M> consumer;

        public InternalMessageConsumer(Function<byte[], M> decoder, Consumer<M> consumer) {
            this.decoder = decoder;
            this.consumer = consumer;
        }

        @Override
        public void accept(Endpoint sender, byte[] bytes) {
            consumer.accept(timeFunction(decoder, subjectMeteringAgent, DESERIALIZING)
                    .apply(ClusterMessage.fromBytes(bytes).payload()));
        }
    }
}
sonu283304/onos
core/store/dist/src/main/java/org/onosproject/store/cluster/messaging/impl/ClusterCommunicationManager.java
Java
apache-2.0
13,581
/*
 * Copyright 2016 Crown Copyright
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gaffer.gafferpop;

import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;

import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * A <code>GafferPopEdge</code> is a {@link GafferPopElement} and {@link Edge}.
 * <p>
 * inVertex() and outVertex() methods are not supported as it is possible for an
 * edge to have multiple in vertices and multiple out vertices (due to the mapping
 * of a TinkerPop vertex to a Gaffer {@link gaffer.data.element.Entity}).
 * Use vertices(Direction) instead.
 * </p>
 * <p>
 * An ID is required to be an {@link EdgeId} which contains the source and
 * destination of an edge.
 * </p>
 */
public final class GafferPopEdge extends GafferPopElement implements Edge {

    private Map<String, Property> properties;

    public GafferPopEdge(final String label, final Object source, final Object dest,
                         final GafferPopGraph graph) {
        super(label, new EdgeId(source, dest), graph);
    }

    @Override
    public <V> Property<V> property(final String key, final V value) {
        if (isReadOnly()) {
            throw new UnsupportedOperationException("Updates are not supported");
        }

        ElementHelper.validateProperty(key, value);
        final Property<V> newProperty = new GafferPopProperty<>(this, key, value);
        if (null == this.properties) {
            this.properties = new HashMap<>();
        }
        this.properties.put(key, newProperty);
        return newProperty;
    }

    @Override
    public <V> Property<V> property(final String key) {
        return null == this.properties ? Property.<V>empty() : this.properties.getOrDefault(key, Property.<V>empty());
    }

    @Override
    public <V> Iterator<Property<V>> properties(final String... propertyKeys) {
        if (null == this.properties) {
            return Collections.emptyIterator();
        }

        if (propertyKeys.length == 1) {
            final Property<V> property = this.properties.get(propertyKeys[0]);
            return null == property ? Collections.emptyIterator() : IteratorUtils.of(property);
        } else {
            return (Iterator) this.properties.entrySet()
                    .stream()
                    .filter(entry -> ElementHelper.keyExists(entry.getKey(), propertyKeys))
                    .map(entry -> entry.getValue())
                    .collect(Collectors.toList())
                    .iterator();
        }
    }

    @Override
    public Iterator<Vertex> vertices(final Direction direction) {
        if (Direction.IN.equals(direction)) {
            return graph().vertices(id().getDest());
        }

        if (Direction.OUT.equals(direction)) {
            return graph().vertices(id().getSource());
        }

        return graph().vertices(id().getSource(), id().getDest());
    }

    @Override
    public Set<String> keys() {
        return null == this.properties ? Collections.emptySet() : this.properties.keySet();
    }

    @Override
    public EdgeId id() {
        return (EdgeId) super.id();
    }

    @Override
    public String toString() {
        return "e[" + id().getSource() + "-" + label + "->" + id().getDest() + "]";
    }

    @Override
    public Vertex outVertex() {
        throw new UnsupportedOperationException("Use 'vertices(Direction)' instead - it may return multiple vertices");
    }

    @Override
    public Vertex inVertex() {
        throw new UnsupportedOperationException("Use 'vertices(Direction)' instead - it may return multiple vertices");
    }
}
flitte/Gaffer
tinkerpop/src/main/java/gaffer/gafferpop/GafferPopEdge.java
Java
apache-2.0
4,463
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.spi;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.Objects;

/**
 * A Range of values across the continuous space defined by the types of the Markers
 */
public final class Range
{
    private final Marker low;
    private final Marker high;

    @JsonCreator
    public Range(
            @JsonProperty("low") Marker low,
            @JsonProperty("high") Marker high)
    {
        Objects.requireNonNull(low, "value is null");
        Objects.requireNonNull(high, "value is null");
        if (!low.getType().equals(high.getType())) {
            throw new IllegalArgumentException(String.format("Marker types do not match: %s vs %s", low.getType(), high.getType()));
        }
        if (low.isUpperUnbounded()) {
            throw new IllegalArgumentException("low cannot be upper unbounded");
        }
        if (high.isLowerUnbounded()) {
            throw new IllegalArgumentException("high cannot be lower unbounded");
        }
        if (low.compareTo(high) > 0) {
            throw new IllegalArgumentException("low must be less than or equal to high");
        }
        this.low = low;
        this.high = high;
    }

    public static Range all(Class<?> type)
    {
        return new Range(Marker.lowerUnbounded(type), Marker.upperUnbounded(type));
    }

    public static Range greaterThan(Comparable<?> low)
    {
        return new Range(Marker.above(low), Marker.upperUnbounded(low.getClass()));
    }

    public static Range greaterThanOrEqual(Comparable<?> low)
    {
        return new Range(Marker.exactly(low), Marker.upperUnbounded(low.getClass()));
    }

    public static Range lessThan(Comparable<?> high)
    {
        return new Range(Marker.lowerUnbounded(high.getClass()), Marker.below(high));
    }

    public static Range lessThanOrEqual(Comparable<?> high)
    {
        return new Range(Marker.lowerUnbounded(high.getClass()), Marker.exactly(high));
    }

    public static Range equal(Comparable<?> value)
    {
        return new Range(Marker.exactly(value), Marker.exactly(value));
    }

    public static Range range(Comparable<?> low, boolean lowInclusive, Comparable<?> high, boolean highInclusive)
    {
        Marker lowMarker = lowInclusive ? Marker.exactly(low) : Marker.above(low);
        Marker highMarker = highInclusive ? Marker.exactly(high) : Marker.below(high);
        return new Range(lowMarker, highMarker);
    }

    @JsonIgnore
    public Class<?> getType()
    {
        return low.getType();
    }

    @JsonProperty
    public Marker getLow()
    {
        return low;
    }

    @JsonProperty
    public Marker getHigh()
    {
        return high;
    }

    @JsonIgnore
    public boolean isSingleValue()
    {
        return !low.isLowerUnbounded() &&
                !high.isUpperUnbounded() &&
                low.getBound() == Marker.Bound.EXACTLY &&
                high.getBound() == Marker.Bound.EXACTLY &&
                low.getValue() == high.getValue();
    }

    @JsonIgnore
    public Comparable<?> getSingleValue()
    {
        if (!isSingleValue()) {
            throw new IllegalStateException("Range does not have just a single value");
        }
        return low.getValue();
    }

    @JsonIgnore
    public boolean isAll()
    {
        return low.isLowerUnbounded() && high.isUpperUnbounded();
    }

    public boolean includes(Marker marker)
    {
        Objects.requireNonNull(marker, "marker is null");
        checkTypeCompatibility(marker);
        return low.compareTo(marker) <= 0 && high.compareTo(marker) >= 0;
    }

    public boolean contains(Range other)
    {
        checkTypeCompatibility(other);
        return this.getLow().compareTo(other.getLow()) <= 0 &&
                this.getHigh().compareTo(other.getHigh()) >= 0;
    }

    public Range span(Range other)
    {
        checkTypeCompatibility(other);
        Marker lowMarker = Marker.min(low, other.getLow());
        Marker highMarker = Marker.max(high, other.getHigh());
        return new Range(lowMarker, highMarker);
    }

    public boolean overlaps(Range other)
    {
        checkTypeCompatibility(other);
        return this.getLow().compareTo(other.getHigh()) <= 0 &&
                other.getLow().compareTo(this.getHigh()) <= 0;
    }

    public Range intersect(Range other)
    {
        checkTypeCompatibility(other);
        if (!this.overlaps(other)) {
            throw new IllegalArgumentException("Cannot intersect non-overlapping ranges");
        }
        Marker lowMarker = Marker.max(low, other.getLow());
        Marker highMarker = Marker.min(high, other.getHigh());
        return new Range(lowMarker, highMarker);
    }

    private void checkTypeCompatibility(Range range)
    {
        if (!getType().equals(range.getType())) {
            throw new IllegalArgumentException(String.format("Mismatched Range types: %s vs %s", getType(), range.getType()));
        }
    }

    private void checkTypeCompatibility(Marker marker)
    {
        if (!getType().equals(marker.getType())) {
            throw new IllegalArgumentException(String.format("Marker of %s does not match Range of %s", marker.getType(), getType()));
        }
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(low, high);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final Range other = (Range) obj;
        return Objects.equals(this.low, other.low) &&
                Objects.equals(this.high, other.high);
    }

    @Override
    public String toString()
    {
        final StringBuilder sb = new StringBuilder();
        if (isSingleValue()) {
            sb.append('[').append(low.getValue()).append(']');
        }
        else {
            sb.append((low.getBound() == Marker.Bound.EXACTLY) ? '[' : '(');
            sb.append(low.isLowerUnbounded() ? "<min>" : low.getValue());
            sb.append(", ");
            sb.append(high.isUpperUnbounded() ? "<max>" : high.getValue());
            sb.append((high.getBound() == Marker.Bound.EXACTLY) ? ']' : ')');
        }
        return sb.toString();
    }
}
sdgdsffdsfff/presto
presto-spi/src/main/java/com/facebook/presto/spi/Range.java
Java
apache-2.0
6,940
/*******************************************************************************
 * Copyright 2013-2015 alladin-IT GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package at.alladin.rmbt.android.util;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.os.AsyncTask;
import android.util.Log;
import at.alladin.rmbt.android.adapter.result.OnCompleteListener;
import at.alladin.rmbt.android.main.RMBTMainActivity;
import at.alladin.rmbt.android.util.net.NetworkInfoCollector;

public class CheckIpTask extends AsyncTask<Void, Void, JSONArray> {

    private final RMBTMainActivity activity;
    private JSONArray newsList;

    String lastIp;
    InetAddress privateIpv6;
    InetAddress privateIpv4;
    String publicIpv4;
    String publicIpv6;
    boolean needsRetry = false;

    ControlServerConnection serverConn;

    private OnCompleteListener onCompleteListener;

    /**
     *
     */
    private static final String DEBUG_TAG = "CheckIpTask";

    public CheckIpTask(final RMBTMainActivity activity) {
        this.activity = activity;
    }

    /**
     *
     * @param listener
     */
    public void setOnCompleteListener(OnCompleteListener listener) {
        this.onCompleteListener = listener;
    }

    @Override
    protected JSONArray doInBackground(final Void... params) {
        needsRetry = false;
        serverConn = new ControlServerConnection(activity);

        try {
            Socket s = new Socket();
            InetSocketAddress addr = new InetSocketAddress(
                    ConfigHelper.getCachedControlServerNameIpv4(activity.getApplicationContext()),
                    ConfigHelper.getControlServerPort(activity.getApplicationContext()));
            s.connect(addr, 5000);
            privateIpv4 = s.getLocalAddress();
            s.close();
        } catch (Exception e) {
            e.printStackTrace();
        }

        try {
            Socket s = new Socket();
            InetSocketAddress addr = new InetSocketAddress(
                    ConfigHelper.getCachedControlServerNameIpv6(activity.getApplicationContext()),
                    ConfigHelper.getControlServerPort(activity.getApplicationContext()));
            s.connect(addr, 5000);
            privateIpv6 = s.getLocalAddress();
            s.close();
        } catch (SocketTimeoutException e) {
            e.printStackTrace();
            needsRetry = ConfigHelper.isRetryRequiredOnIpv6SocketTimeout(activity);
        } catch (Exception e) {
            needsRetry = false;
            e.printStackTrace();
        }

        newsList = new JSONArray();

        if (privateIpv4 != null) {
            JSONArray response = serverConn.requestIp(false);
            if (response != null && response.length() >= 1) {
                newsList.put(response.opt(0));
            }
        } else {
            Log.d(DEBUG_TAG, "no private ipv4 found");
        }

        if (privateIpv6 != null) {
            JSONArray response = serverConn.requestIp(true);
            if (response != null && response.length() >= 1) {
                newsList.put(response.opt(0));
            }
        } else {
            Log.d(DEBUG_TAG, "no private ipv6 found");
        }

        return newsList;
    }

    @Override
    protected void onCancelled() {
        if (serverConn != null) {
            serverConn.unload();
            serverConn = null;
        }
    }

    @Override
    protected void onPostExecute(final JSONArray newsList) {
        try {
            Log.d(DEBUG_TAG, "News: " + newsList);
            int ipv = 4;
            if (newsList != null && newsList.length() > 0 && !serverConn.hasError()) {
                for (int i = 0; i < newsList.length(); i++) {
                    if (!isCancelled() && !Thread.interrupted()) {
                        try {
                            final JSONObject newsItem = newsList.getJSONObject(i);
                            if (newsItem.has("v")) {
                                ipv = newsItem.getInt("v");
                                if (ipv == 6) {
                                    try {
                                        publicIpv6 = newsItem.getString("ip");
                                    } catch (Exception e) {
                                        e.printStackTrace();
                                    }
                                } else {
                                    try {
                                        publicIpv4 = newsItem.getString("ip");
                                    } catch (Exception e) {
                                        e.printStackTrace();
                                    }
                                }
                            }
                        } catch (final JSONException e) {
                            e.printStackTrace();
                        }
                    }
                }

                if (onCompleteListener != null && !needsRetry) {
                    onCompleteListener.onComplete(NetworkInfoCollector.FLAG_PRIVATE_IPV6, privateIpv6);
                    onCompleteListener.onComplete(NetworkInfoCollector.FLAG_PRIVATE_IPV4, privateIpv4);
                    onCompleteListener.onComplete(NetworkInfoCollector.FLAG_IPV4, publicIpv4);
                    onCompleteListener.onComplete(NetworkInfoCollector.FLAG_IPV6, publicIpv6);
                    onCompleteListener.onComplete(NetworkInfoCollector.FLAG_IP_TASK_COMPLETED, null);
                } else if (onCompleteListener != null) {
                    onCompleteListener.onComplete(OnCompleteListener.ERROR, NetworkInfoCollector.FLAG_IP_TASK_NEEDS_RETRY);
                }
            } else {
                ConfigHelper.setLastIp(activity.getApplicationContext(), null);
                if (onCompleteListener != null) {
                    onCompleteListener.onComplete(OnCompleteListener.ERROR, null);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // finally {
        //     if (onCompleteListener != null) {
        //         onCompleteListener.onComplete(NetworkInfoCollector.FLAG_IP_TASK_COMPLETED, null);
        //     }
        // }
    }
}
SPECURE/ratel-nettest
RMBTAndroid/src/at/alladin/rmbt/android/util/CheckIpTask.java
Java
apache-2.0
7,069
thDebug = true;
thAllowNullText = false;
thymol/thymol.js
src/main/webapp/WEB-INF/templates/thymol/debug/debug-data.js
JavaScript
apache-2.0
41
/*
 * Copyright 2006 Webmedia Group Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.araneaframework.uilib.form.constraint;

import org.araneaframework.Environment;
import org.araneaframework.uilib.form.Constraint;

/**
 * Optional <code>Constraint</code> only applies when constrained field has some real value. This class is a wrapper
 * around the <code>Constraint</code>, and the wrapped <code>Constraint</code> will be applied only if the constrained
 * field has been read from the request.
 *
 * @author Jevgeni Kabanov (ekabanov@araneaframework.org)
 */
public class OptionalConstraint extends BaseGenericFieldConstraint {

  private Constraint constraint;

  /**
   * Creates a new <code>OptionalConstraint</code> wrapper for given <code>constraint</code>. The latter will be
   * validated only if the field has a value.
   *
   * @param constraint The constraint to use for validation.
   */
  public OptionalConstraint(Constraint constraint) {
    this.constraint = constraint;
  }

  @Override
  protected void validateConstraint() throws Exception {
    if (getValue() != null) {
      this.constraint.validate();
      addErrors(this.constraint.getErrors());
      this.constraint.clearErrors();
    }
  }

  @Override
  public void setCustomErrorMessage(String customErrorMessage) {
    this.constraint.setCustomErrorMessage(customErrorMessage);
  }

  @Override
  public void setEnvironment(Environment environment) {
    super.setEnvironment(environment);
    this.constraint.setEnvironment(environment);
  }
}
nortal/araneaframework
src/org/araneaframework/uilib/form/constraint/OptionalConstraint.java
Java
apache-2.0
2,059
contacts = "Contacts"; // avoid typos, this string occurs many times. Contacts = new Mongo.Collection(contacts); Meteor.methods({ /** * Invoked by AutoForm to add a new Contacts record. * @param doc The Contacts document. */ addContacts: function(doc) { check(doc, Contacts.simpleSchema()); Contacts.insert(doc); }, /** * * Invoked by AutoForm to update a Contacts record. * @param doc The Contacts document. * @param docID It's ID. */ editContacts: function(doc, docID) { check(doc, Contacts.simpleSchema()); Contacts.update({_id: docID}, doc); }, deleteContacts: function(docID) { Contacts.remove(docID); } }); // Publish the entire Collection. Subscription performed in the router. if (Meteor.isServer) { Meteor.publish(contacts, function () { return Contacts.find(); }); } /** * Create the schema for Contacts * See: https://github.com/aldeed/meteor-autoform#common-questions * See: https://github.com/aldeed/meteor-autoform#affieldinput */ Contacts.attachSchema(new SimpleSchema({ firstName: { label: "First Name", type: String, optional: false, max: 20, autoform: { group: contacts, placeholder: "John" } }, lastName: { label: "Last Name", type: String, optional: false, max: 20, autoform: { group: contacts, placeholder: "Doe" } }, address: { label: "Address", type: String, optional: false, max: 50, autoform: { group: contacts, placeholder: "1234 Imaginary Ln." } }, phone: { label: "Phone Number", type: String, optional: false, max: 15, autoform: { group: contacts, placeholder: "(555) 555-5555" } }, email: { label: "Email Address", type: String, optional: false, max: 20, autoform: { group: contacts, placeholder: "johndoe@example.com" } } }));
mkshimod/digits
app/lib/collections/Contacts.js
JavaScript
apache-2.0
1,938
/// Copyright (C) 2012-2015 Soomla Inc.
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.

using System;

namespace SoomlaWpCore.events
{
    public class AppToForegroundEvent : SoomlaEvent
    {
        public AppToForegroundEvent()
            : this(null)
        {
        }

        public AppToForegroundEvent(Object sender)
            : base(sender)
        {
        }
    }
}
dfalcone/soomla-wp-core
soomla-wp-core/soomla-wp-core-interface/events/AppToForegroundEvent.cs
C#
apache-2.0
877
package ru.stqa.pft.addressbook.appmanager;

import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import ru.stqa.pft.addressbook.model.GroupData;

/**
 * Created by Artur on 17.04.16.
 */
public class GroupHelper extends HelperBase {

  public GroupHelper(WebDriver wd) {
    super(wd);
  }

  public void returnToGroupPage() {
    click(By.linkText("group page"));
  }

  public void submitGroupCreation() {
    click(By.name("submit"));
  }

  public void fillGroupForm(GroupData groupData) {
    type(By.name("group_name"), groupData.getName());
    type(By.name("group_header"), groupData.getHeader());
    type(By.name("group_footer"), groupData.getFooter());
  }

  public void initGroupCreation() {
    click(By.name("new"));
  }

  public void deletSelectedGroups() {
    click(By.xpath("//div[@id='content']/form/input[5]"));
  }

  public void selectGroup() {
    if (!wd.findElement(By.name("selected[]")).isSelected()) {
      click(By.name("selected[]"));
    }
  }

  public void initGroupModification() {
    click(By.name("edit"));
  }

  public void submitGroupModification() {
    click(By.name("update"));
  }
}
arturkirakosian/java
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/GroupHelper.java
Java
apache-2.0
1,203
/* * Copyright 2017 Jacopo Urbani * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. **/ #include <trident/kb/permsorter.h> #include <kognac/utils.h> #include <kognac/compressor.h> #include <boost/chrono.hpp> #include <boost/log/trivial.hpp> #include <boost/thread.hpp> #include <tbb/parallel_sort.h> #include <tbb/task_scheduler_init.h> #include <thread> #include <functional> namespace timens = boost::chrono; struct __PermSorter_sorter { char *rawinput; const int o1; const int o2; const int o3; __PermSorter_sorter(char *rawinput, const int o1, const int o2, const int o3) : rawinput(rawinput), o1(o1), o2(o2), o3(o3) { } bool operator()(long a, long b) const { char *start_a = rawinput + a * 15; char *start_b = rawinput + b * 15; int ret = memcmp(start_a + o1, start_b + o1, 5); if (ret < 0) { return true; } else if (ret == 0) { ret = memcmp(start_a + o2, start_b + o2, 5); if (ret < 0) { return true; } else if (ret == 0) { ret = memcmp(start_a + o3, start_b + o3, 5); return ret < 0; } } return false; } }; void PermSorter::sortPermutation_old(char *rawinput, std::vector<long> *idx, int sorter) { int o1 = 0, o2 = 0, o3 = 0; switch (sorter) { case IDX_SPO: o1 = 0; o2 = 5; o3 = 10; break; case IDX_SOP: o1 = 0; o2 = 10; o3 = 5; break; case IDX_OSP: o1 = 10; o2 = 0; o3 = 5; break; case IDX_OPS: o1 = 10; o2 = 5; o3 = 0; break; case IDX_POS: o1 = 5; o2 = 10; o3 = 0; break; case IDX_PSO: o1 = 5; o2 = 0; o3 = 10; break; default: throw 10; } tbb::parallel_sort(idx->begin(), idx->end(), __PermSorter_sorter(rawinput, o1, o2, o3)); } typedef std::array<unsigned char, 15> __PermSorter_triple; bool __PermSorter_triple_sorter(const __PermSorter_triple &a, const __PermSorter_triple &b) { return a < b; } void PermSorter::sortPermutation(char *start, char *end) { __PermSorter_triple *sstart = (__PermSorter_triple*) start; __PermSorter_triple *send = (__PermSorter_triple*) end; timens::system_clock::time_point starttime = timens::system_clock::now(); //std::sort(sstart, send, &__PermSorter_triple_sorter); tbb::parallel_sort(sstart, send, &__PermSorter_triple_sorter); timens::duration<double> duration = timens::system_clock::now() - starttime; BOOST_LOG_TRIVIAL(debug) << "Time sorting: " << duration.count() << "s."; } void PermSorter::writeTermInBuffer(char *buffer, const long n) { buffer[0] = (n >> 32) & 0xFF; buffer[1] = (n >> 24) & 0xFF; buffer[2] = (n >> 16) & 0xFF; buffer[3] = (n >> 8) & 0xFF; buffer[4] = n & 0xFF; } long PermSorter::readTermFromBuffer(char *buffer) { long n = 0; n += (long) (buffer[0] & 0xFF) << 32; n += (long) (buffer[1] & 0xFF) << 24; n += (buffer[2] & 0xFF) << 16; n += (buffer[3] & 0xFF) << 8; n += buffer[4] & 0xFF; return n; } void PermSorter::dumpPermutation_seq_old( char *rawinput, long *start, long *end, MultiDiskLZ4Writer *writer, int currentPart, int sorter) { if 
(sorter == IDX_SPO) { while (start < end) { Triple t; char *bt = rawinput + 15 * *start; t.s = PermSorter::readTermFromBuffer(bt); t.p = PermSorter::readTermFromBuffer(bt + 5); t.o = PermSorter::readTermFromBuffer(bt + 10); t.writeTo(currentPart, writer); start++; } } else { long box1, box2; while (start < end) { Triple t; char *bt = rawinput + 15 * *start; t.s = PermSorter::readTermFromBuffer(bt); t.p = PermSorter::readTermFromBuffer(bt + 5); t.o = PermSorter::readTermFromBuffer(bt + 10); switch (sorter) { case IDX_SOP: box1 = t.o; t.o = t.p; t.p = box1; break; case IDX_OSP: box1 = t.s; box2 = t.p; t.s = t.o; t.p = box1; t.o = box2; break; case IDX_OPS: box1 = t.s; t.s = t.o; t.o = box1; break; case IDX_POS: box1 = t.s; box2 = t.p; t.s = box2; t.p = t.o; t.o = box1; break; case IDX_PSO: box1 = t.p; t.p = t.s; t.s = box1; break; } t.writeTo(currentPart, writer); start++; } } writer->setTerminated(currentPart); } void PermSorter::dumpPermutation_seq( char *start, char *end, MultiDiskLZ4Writer *writer, int currentPart) { while (start < end) { Triple t; t.s = PermSorter::readTermFromBuffer(start); t.p = PermSorter::readTermFromBuffer(start + 5); t.o = PermSorter::readTermFromBuffer(start + 10); t.writeTo(currentPart, writer); start+=15; } writer->setTerminated(currentPart); } bool PermSorter::isMax(char *input, long idx) { char *start = input + idx * 15; for(int i = 0; i < 15; ++i) { if (~(start[i])) return false; } return true; } void PermSorter::dumpPermutation_old(char *input, long end, int parallelProcesses, int maxReadingThreads, string out, std::vector<long> &idx, int sorter) { //Set up the multidiskwriters... MultiDiskLZ4Writer **writers = new MultiDiskLZ4Writer*[maxReadingThreads]; int partsPerWriter = parallelProcesses / maxReadingThreads; int currentPart = 0; for(int i = 0; i < maxReadingThreads; ++i) { std::vector<string> files; for(int j = 0; j < partsPerWriter; ++j) { string o = out + string(".") + to_string(currentPart++); files.push_back(o); } writers[i] = new MultiDiskLZ4Writer(files, 3, 4); } //Find out the total size of the written elements long realSize = idx.size(); while (realSize > 0) { if (PermSorter::isMax(input, idx[realSize - 1])) { realSize--; } else { break; } } BOOST_LOG_TRIVIAL(debug) << "MaxSize=" << idx.size() << " realSize=" << realSize; long *rawidx = NULL; if (idx.size() > 0) rawidx = &(idx[0]); boost::thread *threads = new boost::thread[parallelProcesses]; long chunkSize = max((long)1, (long)(realSize / parallelProcesses)); long currentEnd = 0; for(int i = 0; i < parallelProcesses; ++i) { long nextEnd; if (i == parallelProcesses - 1) { nextEnd = realSize; } else { nextEnd = currentEnd + chunkSize; if (nextEnd > realSize) nextEnd = realSize; } MultiDiskLZ4Writer *currentWriter = writers[i / partsPerWriter]; int currentPart = i % partsPerWriter; if (nextEnd > currentEnd) { threads[i] = boost::thread(PermSorter::dumpPermutation_seq_old, input, rawidx + currentEnd, rawidx + nextEnd, currentWriter, currentPart, sorter); } else { currentWriter->setTerminated(currentPart); } currentEnd = nextEnd; } for(int i = 0; i < parallelProcesses; ++i) { threads[i].join(); } for(int i = 0; i < maxReadingThreads; ++i) { delete writers[i]; } delete[] writers; delete[] threads; } void PermSorter::dumpPermutation(char *input, long end, int parallelProcesses, int maxReadingThreads, string out) { //Set up the multidiskwriters... 
MultiDiskLZ4Writer **writers = new MultiDiskLZ4Writer*[maxReadingThreads]; int partsPerWriter = parallelProcesses / maxReadingThreads; int currentPart = 0; for(int i = 0; i < maxReadingThreads; ++i) { std::vector<string> files; for(int j = 0; j < partsPerWriter; ++j) { string o = out + string(".") + to_string(currentPart++); files.push_back(o); } writers[i] = new MultiDiskLZ4Writer(files, 3, 4); } //Find out the total size of the written elements long realSize = end; while (realSize > 0) { if (PermSorter::isMax(input, realSize - 1)) { realSize--; } else { break; } } boost::thread *threads = new boost::thread[parallelProcesses]; long chunkSize = max((long)1, (long)(realSize / parallelProcesses)); long currentEnd = 0; for(int i = 0; i < parallelProcesses; ++i) { long nextEnd; if (i == parallelProcesses - 1) { nextEnd = realSize; } else { nextEnd = currentEnd + chunkSize; if (nextEnd > realSize) nextEnd = realSize; } MultiDiskLZ4Writer *currentWriter = writers[i / partsPerWriter]; int currentPart = i % partsPerWriter; if (nextEnd > currentEnd) { threads[i] = boost::thread(PermSorter::dumpPermutation_seq, input + currentEnd * 15, input + nextEnd * 15, currentWriter, currentPart); } else { currentWriter->setTerminated(currentPart); } currentEnd = nextEnd; } for(int i = 0; i < parallelProcesses; ++i) { threads[i].join(); } for(int i = 0; i < maxReadingThreads; ++i) { delete writers[i]; } delete[] writers; delete[] threads; } void PermSorter::sortChunks_seq_old(const int idReader, MultiDiskLZ4Reader *reader, char *start, char *end, long *count) { long i = 0; while (!reader->isEOF(idReader) && start < end) { long first = reader->readLong(idReader); long second = reader->readLong(idReader); long third = reader->readLong(idReader); PermSorter::writeTermInBuffer(start, first); PermSorter::writeTermInBuffer(start + 5, second); PermSorter::writeTermInBuffer(start + 10, third); start += 15; i++; if (i % 1000000000 == 0) { BOOST_LOG_TRIVIAL(debug) << "Processed " << i << " triples"; } } *count = i; BOOST_LOG_TRIVIAL(debug) << "Loaded " << i << " triples"; memset(start, 0xFF, end - start); } void PermSorter::sortChunks_seq(const int idReader, MultiDiskLZ4Reader *reader, std::vector<std::unique_ptr<char[]>> *rawTriples, long startIdx, long endIdx, long *count, std::vector<std::pair<string, char>> additionalPermutations, bool outputSPO) { char *start = rawTriples->at(0).get() + startIdx; char *end = rawTriples->at(0).get() + endIdx; char *current = start; long i = 0; if (outputSPO) { while (!reader->isEOF(idReader) && current < end) { long first = reader->readLong(idReader); long second = reader->readLong(idReader); long third = reader->readLong(idReader); PermSorter::writeTermInBuffer(current, first); PermSorter::writeTermInBuffer(current + 5, second); PermSorter::writeTermInBuffer(current + 10, third); current += 15; i++; if (i % 1000000000 == 0) { BOOST_LOG_TRIVIAL(debug) << "Processed " << i << " triples"; } } } else { while (!reader->isEOF(idReader) && current < end) { long first = reader->readLong(idReader); long second = reader->readLong(idReader); long third = reader->readLong(idReader); PermSorter::writeTermInBuffer(current, first); PermSorter::writeTermInBuffer(current + 5, third); PermSorter::writeTermInBuffer(current + 10, second); current += 15; i++; if (i % 1000000000 == 0) { BOOST_LOG_TRIVIAL(debug) << "Processed " << i << " triples"; } } } *count = i; memset(current, 0xFF, end - current); BOOST_LOG_TRIVIAL(debug) << "Loaded " << i << " triples. 
Now creating other permutations"; /************* SOP ************/ int idxSOP = 0; for(auto p : additionalPermutations) { if (p.second == IDX_SOP) { break; } idxSOP++; } if (idxSOP < additionalPermutations.size()) { //Found SOP //Copy all array, and swap the second and third element char *startSOP = rawTriples->at(idxSOP + 1).get() + startIdx; memcpy(startSOP, start, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap p and o char tmp[5]; tmp[0] = startSOP[5]; //copy p in tmp tmp[1] = startSOP[6]; tmp[2] = startSOP[7]; tmp[3] = startSOP[8]; tmp[4] = startSOP[9]; startSOP[5] = startSOP[10]; //copy o in p startSOP[6] = startSOP[11]; startSOP[7] = startSOP[12]; startSOP[8] = startSOP[13]; startSOP[9] = startSOP[14]; startSOP[10] = tmp[0]; //copy p in o startSOP[11] = tmp[1]; startSOP[12] = tmp[2]; startSOP[13] = tmp[3]; startSOP[14] = tmp[4]; startSOP += 15; } } /************* PSO ************/ int idxPSO = 0; for(auto p : additionalPermutations) { if (p.second == IDX_PSO) { break; } idxPSO++; } if (idxPSO < additionalPermutations.size()) { //Found PSO char *startPSO = rawTriples->at(idxPSO + 1).get() + startIdx; memcpy(startPSO, start, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap s and p char tmp[5]; tmp[0] = startPSO[0]; //copy s in tmp tmp[1] = startPSO[1]; tmp[2] = startPSO[2]; tmp[3] = startPSO[3]; tmp[4] = startPSO[4]; startPSO[0] = startPSO[5]; //copy p in s startPSO[1] = startPSO[6]; startPSO[2] = startPSO[7]; startPSO[3] = startPSO[8]; startPSO[4] = startPSO[9]; startPSO[5] = tmp[0]; //copy s in p startPSO[6] = tmp[1]; startPSO[7] = tmp[2]; startPSO[8] = tmp[3]; startPSO[9] = tmp[4]; startPSO += 15; } } /************* POS ************/ int idxPOS = 0; for(auto p : additionalPermutations) { if (p.second == IDX_POS) { break; } idxPOS++; } if (idxPOS < additionalPermutations.size()) { //Found POS if (idxPSO >= additionalPermutations.size()) { BOOST_LOG_TRIVIAL(error) << "I require also PSO"; throw 10; } char *startPSO = rawTriples->at(idxPSO + 1).get() + startIdx; char *startPOS = rawTriples->at(idxPOS + 1).get() + startIdx; memcpy(startPOS, startPSO, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap s and o char tmp[5]; tmp[0] = startPOS[5]; //copy s in tmp tmp[1] = startPOS[6]; tmp[2] = startPOS[7]; tmp[3] = startPOS[8]; tmp[4] = startPOS[9]; startPOS[5] = startPOS[10]; //copy o in s startPOS[6] = startPOS[11]; startPOS[7] = startPOS[12]; startPOS[8] = startPOS[13]; startPOS[9] = startPOS[14]; startPOS[10] = tmp[0]; //copy s in o startPOS[11] = tmp[1]; startPOS[12] = tmp[2]; startPOS[13] = tmp[3]; startPOS[14] = tmp[4]; startPOS += 15; } } /************* OPS ************/ int idxOPS = 0; for(auto p : additionalPermutations) { if (p.second == IDX_OPS) { break; } idxOPS++; } if (idxOPS < additionalPermutations.size()) { //Found OPS char *startOPS = rawTriples->at(idxOPS + 1).get() + startIdx; memcpy(startOPS, start, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap s and o char tmp[5]; tmp[0] = startOPS[0]; //copy s in tmp tmp[1] = startOPS[1]; tmp[2] = startOPS[2]; tmp[3] = startOPS[3]; tmp[4] = startOPS[4]; startOPS[0] = startOPS[10]; //copy o in s startOPS[1] = startOPS[11]; startOPS[2] = startOPS[12]; startOPS[3] = startOPS[13]; startOPS[4] = startOPS[14]; startOPS[10] = tmp[0]; //copy s in o startOPS[11] = tmp[1]; startOPS[12] = tmp[2]; startOPS[13] = tmp[3]; startOPS[14] = tmp[4]; startOPS += 15; } } /************* OSP (labelled graph) ************/ if (outputSPO) { int idxOSP = 0; for(auto p : additionalPermutations) { if (p.second == IDX_OSP) { 
break; } idxOSP++; } if (idxOSP < additionalPermutations.size()) { //Found OSP if (idxOPS >= additionalPermutations.size()) { BOOST_LOG_TRIVIAL(error) << "I require also OPS"; throw 10; } char *startOPS = rawTriples->at(idxOPS + 1).get() + startIdx; char *startOSP = rawTriples->at(idxOSP + 1).get() + startIdx; memcpy(startOSP, startOPS, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap p and s char tmp[5]; tmp[0] = startOSP[5]; //copy p in tmp tmp[1] = startOSP[6]; tmp[2] = startOSP[7]; tmp[3] = startOSP[8]; tmp[4] = startOSP[9]; startOSP[5] = startOSP[10]; //copy s in p startOSP[6] = startOSP[11]; startOSP[7] = startOSP[12]; startOSP[8] = startOSP[13]; startOSP[9] = startOSP[14]; startOSP[10] = tmp[0]; //copy s in o startOSP[11] = tmp[1]; startOSP[12] = tmp[2]; startOSP[13] = tmp[3]; startOSP[14] = tmp[4]; startOSP += 15; } } } else { /************* OSP (unlabelled graph) ************/ int idxOSP = 0; for(auto p : additionalPermutations) { if (p.second == IDX_OSP) { break; } idxOSP++; } if (idxOSP < additionalPermutations.size()) { //Found OSP char *startOSP = rawTriples->at(idxOSP + 1).get() + startIdx; memcpy(startOSP, start, endIdx - startIdx); for(long j = 0; j < i; ++j) { //swap o and s char tmp[5]; tmp[0] = startOSP[5]; //copy o in tmp tmp[1] = startOSP[6]; tmp[2] = startOSP[7]; tmp[3] = startOSP[8]; tmp[4] = startOSP[9]; startOSP[5] = startOSP[0]; //copy s in o startOSP[6] = startOSP[1]; startOSP[7] = startOSP[2]; startOSP[8] = startOSP[3]; startOSP[9] = startOSP[4]; startOSP[0] = tmp[0]; //copy s in o startOSP[1] = tmp[1]; startOSP[2] = tmp[2]; startOSP[3] = tmp[3]; startOSP[4] = tmp[4]; startOSP += 15; } } } } void PermSorter::sortChunks_Old(string inputdir, int maxReadingThreads, int parallelProcesses, long estimatedSize, std::vector<std::pair<string, char>> &additionalPermutations) { BOOST_LOG_TRIVIAL(debug) << "Start sortChunks"; //calculate the number of elements long mem = Utils::getSystemMemory() * 0.8; long nelements = mem / (15 + 8 * (additionalPermutations.size() + 1)); // 5 bytes per 3 triple elements + 8 * additionalPermutations.size() long elementsMainMem = max((long)parallelProcesses, min(nelements, (long)(estimatedSize * 1.2))); //Make sure elementsMainMem is a multiple of parallelProcesses elementsMainMem += parallelProcesses - (elementsMainMem % parallelProcesses); /*** SORT THE ORIGINAL FILES IN BLOCKS OF N RECORDS ***/ vector<string> unsortedFiles = Utils::getFiles(inputdir); MultiDiskLZ4Reader **readers = new MultiDiskLZ4Reader*[maxReadingThreads]; std::vector<std::vector<string>> inputsReaders(parallelProcesses); int currentPart = 0; for(int i = 0; i < unsortedFiles.size(); ++i) { if (fs::exists(fs::path(unsortedFiles[i]))) { inputsReaders[currentPart].push_back(unsortedFiles[i]); currentPart = (currentPart + 1) % parallelProcesses; } } auto itr = inputsReaders.begin(); int filesPerReader = parallelProcesses / maxReadingThreads; for(int i = 0; i < maxReadingThreads; ++i) { readers[i] = new MultiDiskLZ4Reader(filesPerReader, 3, 4); readers[i]->start(); for(int j = 0; j < filesPerReader; ++j) { if (itr->empty()) { BOOST_LOG_TRIVIAL(debug) << "Part " << j << " is empty"; } else { BOOST_LOG_TRIVIAL(debug) << "Part " << i << " " << j << " " << itr->at(0); } if (itr != inputsReaders.end()) { readers[i]->addInput(j, *itr); itr++; } else { std::vector<string> emptyset; readers[i]->addInput(j, emptyset); } } } boost::thread *threads = new boost::thread[parallelProcesses]; BOOST_LOG_TRIVIAL(debug) << "Creating vectors of " << elementsMainMem << " elements. 
Each el is 15 bytes"; char *rawTriples = new char[elementsMainMem * 15]; std::vector<long> idx0(elementsMainMem); for(long i = 0; i < elementsMainMem; ++i) { idx0[i] = i; } std::vector<std::vector<long>> additionalIdxs(additionalPermutations.size()); for(int i = 0; i < additionalPermutations.size(); ++i) { additionalIdxs[i] = idx0; } BOOST_LOG_TRIVIAL(debug) << "Creating vectors of " << elementsMainMem << ". done"; long maxInserts = max((long)1, (long)(elementsMainMem / parallelProcesses)); bool isFinished = false; int iter = 0; while (!isFinished) { BOOST_LOG_TRIVIAL(debug) << "Load in parallel all the triples from disk to the main memory"; std::vector<long> counts(parallelProcesses); for (int i = 0; i < parallelProcesses; ++i) { MultiDiskLZ4Reader *reader = readers[i % maxReadingThreads]; int idReader = i / maxReadingThreads; threads[i] = boost::thread( boost::bind(&sortChunks_seq_old, idReader, reader, rawTriples + (i * 15 * maxInserts), rawTriples + ((i+1) * 15 * maxInserts), &(counts[i]))); } for (int i = 0; i < parallelProcesses; ++i) { threads[i].join(); } BOOST_LOG_TRIVIAL(debug) << "Fill the empty holes with new data"; int curPart = 0; std::vector<std::pair<int, int>> openedStreams; for(int i = 0; i < parallelProcesses; ++i) { int idxReader = i % maxReadingThreads; int idxPart = i / maxReadingThreads; if (!readers[idxReader]->isEOF(idxPart)) { openedStreams.push_back(make_pair(idxReader, idxPart)); } } while (curPart < parallelProcesses && !openedStreams.empty()) { if (counts[curPart] < maxInserts) { int idxCurPair = 0; char *start = rawTriples + curPart * maxInserts * 15; while (idxCurPair < openedStreams.size() && counts[curPart] < maxInserts) { auto pair = openedStreams[idxCurPair]; long first = readers[pair.first]->readLong(pair.second); long second = readers[pair.first]->readLong(pair.second); long third = readers[pair.first]->readLong(pair.second); PermSorter::writeTermInBuffer(start + counts[curPart]*15, first); PermSorter::writeTermInBuffer(start + counts[curPart]*15+5, second); PermSorter::writeTermInBuffer(start + counts[curPart]*15+10, third); if (readers[pair.first]->isEOF(pair.second)) { openedStreams.erase(openedStreams.begin() + idxCurPair); if (idxCurPair == openedStreams.size()) { idxCurPair = 0; } } else { idxCurPair = (idxCurPair + 1) % openedStreams.size(); } counts[curPart]++; } } else { curPart++; } } BOOST_LOG_TRIVIAL(debug) << "Finished filling holes"; BOOST_LOG_TRIVIAL(debug) << "Start sorting"; tbb::task_scheduler_init init(max(1, (int)(parallelProcesses / 6))); std::thread *threads = new std::thread[additionalPermutations.size()]; for(int i = 0; i < additionalPermutations.size(); ++i) { threads[i] = std::thread( std::bind(&PermSorter::sortPermutation_old, rawTriples, &additionalIdxs[i], additionalPermutations[i].second)); } PermSorter::sortPermutation_old(rawTriples, &idx0, IDX_SPO); for(int i = 0; i < additionalPermutations.size(); ++i) { threads[i].join(); } delete[] threads; BOOST_LOG_TRIVIAL(debug) << "End sorting"; BOOST_LOG_TRIVIAL(debug) << "Start dumping"; long maxValue = maxInserts * 15 * parallelProcesses; int addPermIdx = 0; for(auto perm : additionalPermutations) { string outputFile = perm.first + string("/sorted-") + to_string(iter++); PermSorter::dumpPermutation_old(rawTriples, maxValue, parallelProcesses, maxReadingThreads, outputFile, additionalIdxs[addPermIdx++], perm.second); } string outputFile = inputdir + string("/sorted-") + to_string(iter++); PermSorter::dumpPermutation_old(rawTriples, maxValue, parallelProcesses, 
            maxReadingThreads, outputFile, idx0, IDX_SPO);
    BOOST_LOG_TRIVIAL(debug) << "End dumping";

    //Are all files read?
    int i = 0;
    for(; i < parallelProcesses; ++i) {
        if (!readers[i % maxReadingThreads]->isEOF(i / maxReadingThreads)) {
            break;
        }
    }
    isFinished = i == parallelProcesses;
    if (!isFinished) {
        BOOST_LOG_TRIVIAL(debug) << "One round is not enough";
    }
    }
    delete[] rawTriples;
    for(int i = 0; i < maxReadingThreads; ++i) {
        delete readers[i];
    }
    for(auto inputFile : unsortedFiles)
        fs::remove(fs::path(inputFile));
    delete[] readers;
    delete[] threads;
}

struct _Offset {
    char first;
    char second;
    char third;
};

void PermSorter::sortChunks(string inputdir,
        int maxReadingThreads,
        int parallelProcesses,
        long estimatedSize,
        bool outputSPO,
        std::vector<std::pair<string, char>> &additionalPermutations) {
    BOOST_LOG_TRIVIAL(debug) << "Start sortChunks";

    //calculate the number of elements
    long mem = Utils::getSystemMemory() * 0.8;
    int nperms = additionalPermutations.size() + 1;
    long nelements = mem / (15 * nperms);
    long elementsMainMem = max((long)parallelProcesses,
            min(nelements, (long)(estimatedSize * 1.2)));
    //Make sure elementsMainMem is a multiple of parallelProcesses
    elementsMainMem += parallelProcesses - (elementsMainMem % parallelProcesses);

    /*** SORT THE ORIGINAL FILES IN BLOCKS OF N RECORDS ***/
    vector<string> unsortedFiles = Utils::getFiles(inputdir);
    MultiDiskLZ4Reader **readers = new MultiDiskLZ4Reader*[maxReadingThreads];
    std::vector<std::vector<string>> inputsReaders(parallelProcesses);
    int currentPart = 0;
    for(int i = 0; i < unsortedFiles.size(); ++i) {
        if (fs::exists(fs::path(unsortedFiles[i]))) {
            inputsReaders[currentPart].push_back(unsortedFiles[i]);
            currentPart = (currentPart + 1) % parallelProcesses;
        }
    }
    auto itr = inputsReaders.begin();
    int filesPerReader = parallelProcesses / maxReadingThreads;
    for(int i = 0; i < maxReadingThreads; ++i) {
        readers[i] = new MultiDiskLZ4Reader(filesPerReader, 3, 4);
        readers[i]->start();
        for(int j = 0; j < filesPerReader; ++j) {
            //Check the iterator is still valid before dereferencing it
            if (itr != inputsReaders.end()) {
                if (itr->empty()) {
                    BOOST_LOG_TRIVIAL(debug) << "Part " << j << " is empty";
                } else {
                    BOOST_LOG_TRIVIAL(debug) << "Part " << i << " " << j << " " << itr->at(0);
                }
                readers[i]->addInput(j, *itr);
                itr++;
            } else {
                std::vector<string> emptyset;
                readers[i]->addInput(j, emptyset);
            }
        }
    }

    boost::thread *threads = new boost::thread[parallelProcesses];
    BOOST_LOG_TRIVIAL(debug) << "Creating vectors of " << elementsMainMem
        << " elements. Each el is 15 bytes";
    std::vector<std::unique_ptr<char[]>> rawTriples;
    rawTriples.push_back(std::unique_ptr<char[]>(new char[elementsMainMem * 15]));
    for(int i = 0; i < additionalPermutations.size(); ++i) {
        rawTriples.push_back(std::unique_ptr<char[]>(new char[elementsMainMem * 15]));
    }
    BOOST_LOG_TRIVIAL(debug) << "Creating vectors of " << elementsMainMem << ". done";

    long maxInserts = max((long)1, (long)(elementsMainMem / parallelProcesses));
    bool isFinished = false;
    int iter = 0;
    while (!isFinished) {
        BOOST_LOG_TRIVIAL(debug) << "Load in parallel all the triples from disk to the main memory";
        std::vector<long> counts(parallelProcesses);
        for (int i = 0; i < parallelProcesses; ++i) {
            MultiDiskLZ4Reader *reader = readers[i % maxReadingThreads];
            int idReader = i / maxReadingThreads;
            threads[i] = boost::thread(
                    boost::bind(&sortChunks_seq, idReader, reader,
                        &rawTriples,
                        (i * 15 * maxInserts),
                        ((i+1) * 15 * maxInserts),
                        &(counts[i]),
                        additionalPermutations,
                        outputSPO));
        }
        for (int i = 0; i < parallelProcesses; ++i) {
            threads[i].join();
        }

        BOOST_LOG_TRIVIAL(debug) << "Fill the empty holes with new data";
        int curPart = 0;
        std::vector<std::pair<int, int>> openedStreams;
        for(int i = 0; i < parallelProcesses; ++i) {
            int idxReader = i % maxReadingThreads;
            int idxPart = i / maxReadingThreads;
            if (!readers[idxReader]->isEOF(idxPart)) {
                openedStreams.push_back(make_pair(idxReader, idxPart));
            }
        }

        std::vector<_Offset> offsets;
        for(auto p : additionalPermutations) {
            _Offset o;
            switch (p.second) {
                case IDX_SOP:
                    o.first = 0; o.second = 10; o.third = 5;
                    break;
                case IDX_OPS:
                    o.first = 10; o.second = 5; o.third = 0;
                    break;
                case IDX_OSP:
                    o.first = 5; o.second = 10; o.third = 0;
                    break;
                case IDX_POS:
                    o.first = 10; o.second = 0; o.third = 5;
                    break;
                case IDX_PSO:
                    o.first = 5; o.second = 0; o.third = 10;
                    break;
                default:
                    throw 10;
            }
            offsets.push_back(o);
        }

        while (curPart < parallelProcesses && !openedStreams.empty()) {
            if (counts[curPart] < maxInserts) {
                int idxCurPair = 0;
                const long startp = curPart * maxInserts * 15;
                char *start = rawTriples[0].get() + startp;
                while (idxCurPair < openedStreams.size() && counts[curPart] < maxInserts) {
                    auto pair = openedStreams[idxCurPair];
                    const long first = readers[pair.first]->readLong(pair.second);
                    const long second = readers[pair.first]->readLong(pair.second);
                    const long third = readers[pair.first]->readLong(pair.second);
                    const long starto = counts[curPart] * 15;
                    if (outputSPO) {
                        PermSorter::writeTermInBuffer(start + starto, first);
                        PermSorter::writeTermInBuffer(start + starto + 5, second);
                        PermSorter::writeTermInBuffer(start + starto + 10, third);
                    } else {
                        PermSorter::writeTermInBuffer(start + starto, first);
                        PermSorter::writeTermInBuffer(start + starto + 5, third);
                        PermSorter::writeTermInBuffer(start + starto + 10, second);
                    }
                    //Copy the triples also in the other permutations
                    for(int i = 0; i < additionalPermutations.size(); ++i) {
                        char *startperm = rawTriples[i + 1].get() + startp;
                        const _Offset o = offsets[i];
                        PermSorter::writeTermInBuffer(startperm + starto + o.first, first);
                        PermSorter::writeTermInBuffer(startperm + starto + o.second, second);
                        PermSorter::writeTermInBuffer(startperm + starto + o.third, third);
                    }
                    if (readers[pair.first]->isEOF(pair.second)) {
                        openedStreams.erase(openedStreams.begin() + idxCurPair);
                        if (idxCurPair == openedStreams.size()) {
                            idxCurPair = 0;
                        }
                    } else {
                        idxCurPair = (idxCurPair + 1) % openedStreams.size();
                    }
                    counts[curPart]++;
                }
            } else {
                curPart++;
            }
        }
        BOOST_LOG_TRIVIAL(debug) << "Finished filling holes";

        BOOST_LOG_TRIVIAL(debug) << "Start sorting. Processes per permutation="
            << max(1, (int)(parallelProcesses / nperms));
        tbb::task_scheduler_init init(max(1, (int)(parallelProcesses / nperms)));
        //Use a separate name for the sorting threads to avoid shadowing the
        //boost::thread array declared above
        std::thread *sortThreads = new std::thread[additionalPermutations.size()];
        for(int i = 0; i < additionalPermutations.size(); ++i) {
            sortThreads[i] = std::thread(
                    std::bind(&PermSorter::sortPermutation,
                        rawTriples[i+1].get(),
                        rawTriples[i+1].get() + 15 * maxInserts * parallelProcesses));
        }
        PermSorter::sortPermutation(rawTriples[0].get(),
                rawTriples[0].get() + 15 * maxInserts * parallelProcesses);
        for(int i = 0; i < additionalPermutations.size(); ++i) {
            sortThreads[i].join();
        }
        delete[] sortThreads;
        BOOST_LOG_TRIVIAL(debug) << "End sorting";

        BOOST_LOG_TRIVIAL(debug) << "Start dumping";
        long maxValue = maxInserts * parallelProcesses;
        int j = 1;
        for(auto perm : additionalPermutations) {
            string outputFile = perm.first + string("/sorted-") + to_string(iter++);
            PermSorter::dumpPermutation(rawTriples[j++].get(),
                    maxValue,
                    parallelProcesses,
                    maxReadingThreads,
                    outputFile);
        }
        string outputFile = inputdir + string("/sorted-") + to_string(iter++);
        PermSorter::dumpPermutation(rawTriples[0].get(),
                maxValue,
                parallelProcesses,
                maxReadingThreads,
                outputFile);
        BOOST_LOG_TRIVIAL(debug) << "End dumping";

        //Are all files read?
        int i = 0;
        for(; i < parallelProcesses; ++i) {
            if (!readers[i % maxReadingThreads]->isEOF(i / maxReadingThreads)) {
                break;
            }
        }
        isFinished = i == parallelProcesses;
        if (!isFinished) {
            BOOST_LOG_TRIVIAL(debug) << "One round is not enough";
        }
    }
    for(int i = 0; i < maxReadingThreads; ++i) {
        delete readers[i];
    }
    for(auto inputFile : unsortedFiles)
        fs::remove(fs::path(inputFile));
    delete[] readers;
    delete[] threads;
}
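//----------------------------------------------------------------------------
//Illustrative sketch (not part of the original file): sortChunks() above
//stores each triple as three 5-byte terms at offsets 0/5/10 of a 15-byte
//record (see the writeTermInBuffer calls). Below is a minimal pack/unpack
//pair under the assumption of a little-endian host, term IDs below 2^40, and
//<cstdint>/<cstring>/<cassert> being available; packTerm and unpackTerm are
//hypothetical helpers, not part of PermSorter's API.
//----------------------------------------------------------------------------
static void packTerm(char *dest, uint64_t id) {
    assert(id < (uint64_t(1) << 40)); //must fit in 5 bytes
    memcpy(dest, &id, 5);             //low 5 bytes on little-endian hosts
}

static uint64_t unpackTerm(const char *src) {
    uint64_t id = 0;
    memcpy(&id, src, 5);
    return id;
}

//Round-trip of one 15-byte SPO record:
//  char record[15];
//  packTerm(record, s); packTerm(record + 5, p); packTerm(record + 10, o);
//  assert(unpackTerm(record + 5) == p);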
jrbn/trident
src/trident/kb/permsorter.cpp
C++
apache-2.0
39,131
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Dr. Frank Celler
////////////////////////////////////////////////////////////////////////////////

#include "V8LineEditor.h"

#include "Logger/Logger.h"
#include "Basics/tri-strings.h"
#include "Utilities/Completer.h"
#include "Utilities/ShellBase.h"
#include "V8/v8-utils.h"

using namespace arangodb;

////////////////////////////////////////////////////////////////////////////////
/// @brief the active instance of the editor
////////////////////////////////////////////////////////////////////////////////

static std::atomic<V8LineEditor*> SINGLETON(nullptr);

////////////////////////////////////////////////////////////////////////////////
/// @brief signal handler for CTRL-C
////////////////////////////////////////////////////////////////////////////////

#ifdef _WIN32

static bool SignalHandler(DWORD eventType) {
  switch (eventType) {
    case CTRL_BREAK_EVENT:
    case CTRL_C_EVENT:
    case CTRL_CLOSE_EVENT:
    case CTRL_LOGOFF_EVENT:
    case CTRL_SHUTDOWN_EVENT: {
      // get the instance of the console
      auto instance = SINGLETON.load();

      if (instance != nullptr) {
        if (instance->isExecutingCommand()) {
          v8::Isolate* isolate = instance->isolate();

          if (!v8::V8::IsExecutionTerminating(isolate)) {
            v8::V8::TerminateExecution(isolate);
          }
        }

        instance->signal();
      }

      return true;
    }
    default: {
      return true;
    }
  }
}

#else

static void SignalHandler(int signal) {
  // get the instance of the console
  auto instance = SINGLETON.load();

  if (instance != nullptr) {
    if (instance->isExecutingCommand()) {
      v8::Isolate* isolate = instance->isolate();

      if (!v8::V8::IsExecutionTerminating(isolate)) {
        v8::V8::TerminateExecution(isolate);
      }
    }

    instance->signal();
  }
}

#endif

////////////////////////////////////////////////////////////////////////////////
/// @brief V8Completer
////////////////////////////////////////////////////////////////////////////////

namespace {
class V8Completer : public Completer {
 public:
  V8Completer() {}

  ~V8Completer() {}

 public:
  bool isComplete(std::string const& source, size_t lineno) override final {
    int openParen = 0;
    int openBrackets = 0;
    int openBraces = 0;
    int openStrings = 0;  // only used for template strings, which can be multi-line
    int openComments = 0;

    enum line_parse_state_e {
      NORMAL,            // start
      NORMAL_1,          // from NORMAL: seen a single /
      DOUBLE_QUOTE,      // from NORMAL: seen a single "
      DOUBLE_QUOTE_ESC,  // from DOUBLE_QUOTE: seen a backslash
      SINGLE_QUOTE,      // from NORMAL: seen a single '
      SINGLE_QUOTE_ESC,  // from SINGLE_QUOTE: seen a backslash
      BACKTICK,          // from NORMAL: seen a single `
      BACKTICK_ESC,      // from BACKTICK: seen a backslash
      MULTI_COMMENT,     // from NORMAL_1: seen a *
      MULTI_COMMENT_1,   // from MULTI_COMMENT: seen a *
      SINGLE_COMMENT     // from NORMAL_1: seen a /
    };

    char const* ptr = source.c_str();
    char const* end = ptr + source.length();
    line_parse_state_e state = NORMAL;

    while (ptr < end) {
      if (state == DOUBLE_QUOTE) {
        if (*ptr == '\\') {
          state = DOUBLE_QUOTE_ESC;
        } else if (*ptr == '"') {
          state = NORMAL;
        }

        ++ptr;
      } else if (state == DOUBLE_QUOTE_ESC) {
        state = DOUBLE_QUOTE;
        ptr++;
      } else if (state == SINGLE_QUOTE) {
        if (*ptr == '\\') {
          state = SINGLE_QUOTE_ESC;
        } else if (*ptr == '\'') {
          state = NORMAL;
        }

        ++ptr;
      } else if (state == SINGLE_QUOTE_ESC) {
        state = SINGLE_QUOTE;
        ptr++;
      } else if (state == BACKTICK) {
        if (*ptr == '\\') {
          state = BACKTICK_ESC;
        } else if (*ptr == '`') {
          state = NORMAL;
          --openStrings;
        }

        ++ptr;
      } else if (state == BACKTICK_ESC) {
        state = BACKTICK;
        ptr++;
      } else if (state == MULTI_COMMENT) {
        if (*ptr == '*') {
          state = MULTI_COMMENT_1;
        }

        ++ptr;
      } else if (state == MULTI_COMMENT_1) {
        if (*ptr == '/') {
          state = NORMAL;
          --openComments;
        }

        ++ptr;
      } else if (state == SINGLE_COMMENT) {
        ++ptr;

        if (ptr == end || *ptr == '\n') {
          state = NORMAL;
          --openComments;
        }
      } else if (state == NORMAL_1) {
        switch (*ptr) {
          case '/':
            state = SINGLE_COMMENT;
            ++openComments;
            ++ptr;
            break;

          case '*':
            state = MULTI_COMMENT;
            ++openComments;
            ++ptr;
            break;

          default:
            state = NORMAL;  // try again, do not change ptr
            break;
        }
      } else {
        switch (*ptr) {
          case '"':
            state = DOUBLE_QUOTE;
            break;

          case '\'':
            state = SINGLE_QUOTE;
            break;

          case '`':
            state = BACKTICK;
            ++openStrings;
            break;

          case '/':
            state = NORMAL_1;
            break;

          case '(':
            ++openParen;
            break;

          case ')':
            --openParen;
            break;

          case '[':
            ++openBrackets;
            break;

          case ']':
            --openBrackets;
            break;

          case '{':
            ++openBraces;
            break;

          case '}':
            --openBraces;
            break;

          case '\\':
            ++ptr;
            break;
        }

        ++ptr;
      }
    }

    return (openParen <= 0 && openBrackets <= 0 && openBraces <= 0 &&
            openStrings <= 0 && openComments <= 0);
  }

  std::vector<std::string> alternatives(char const* text) override final {
    std::vector<std::string> result;

    // locate global object or sub-object
    v8::Isolate* isolate = v8::Isolate::GetCurrent();
    v8::Local<v8::Context> context = isolate->GetCurrentContext();
    v8::Handle<v8::Object> current = context->Global();
    std::string path;
    char* prefix;

    if (*text != '\0') {
      TRI_vector_string_t splitted = TRI_SplitString(text, '.');

      if (1 < splitted._length) {
        for (size_t i = 0; i < splitted._length - 1; ++i) {
          v8::Handle<v8::String> name = TRI_V8_STRING(splitted._buffer[i]);

          if (!current->Has(name)) {
            TRI_DestroyVectorString(&splitted);
            return result;
          }

          v8::Handle<v8::Value> val = current->Get(name);

          if (!val->IsObject()) {
            TRI_DestroyVectorString(&splitted);
            return result;
          }

          current = val->ToObject();
          path = path + splitted._buffer[i] + ".";
        }

        prefix = TRI_DuplicateString(splitted._buffer[splitted._length - 1]);
      } else {
        prefix = TRI_DuplicateString(text);
      }

      TRI_DestroyVectorString(&splitted);
    } else {
      prefix = TRI_DuplicateString(text);
    }

    v8::HandleScope scope(isolate);

    // compute all possible completions
    v8::Handle<v8::Array> properties;
    v8::Handle<v8::String> cpl = TRI_V8_ASCII_STRING("_COMPLETIONS");

    if (current->HasOwnProperty(cpl)) {
      v8::Handle<v8::Value> funcVal = current->Get(cpl);

      if (funcVal->IsFunction()) {
        v8::Handle<v8::Function> func = v8::Handle<v8::Function>::Cast(funcVal);
        // assign a dummy entry to the args array even if we don't need it.
        // this prevents "error C2466: cannot allocate an array of constant size 0" in MSVC
        v8::Handle<v8::Value> args[] = { v8::Null(isolate) };

        try {
          v8::Handle<v8::Value> cpls = func->Call(current, 0, args);

          if (cpls->IsArray()) {
            properties = v8::Handle<v8::Array>::Cast(cpls);
          }
        } catch (...) {
          // silently ignore errors here
        }
      }
    } else {
      properties = current->GetPropertyNames();
    }

    // locate all completions matching the prefix
    try {
      if (!properties.IsEmpty()) {
        uint32_t const n = properties->Length();
        result.reserve(static_cast<size_t>(n));

        for (uint32_t i = 0; i < n; ++i) {
          v8::Handle<v8::Value> v = properties->Get(i);

          TRI_Utf8ValueNFC str(TRI_UNKNOWN_MEM_ZONE, v);
          char const* s = *str;

          if (s != nullptr && *s) {
            std::string suffix = (current->Get(v)->IsFunction()) ? "()" : "";
            std::string name = path + s + suffix;

            if (*prefix == '\0' || TRI_IsPrefixString(s, prefix)) {
              result.emplace_back(name);
            }
          }
        }
      }
    } catch (...) {
      // ignore errors in case of OOM
    }

    TRI_FreeString(TRI_CORE_MEM_ZONE, prefix);

    return result;
  }
};
}

////////////////////////////////////////////////////////////////////////////////
/// @brief constructs a new editor
////////////////////////////////////////////////////////////////////////////////

V8LineEditor::V8LineEditor(v8::Isolate* isolate, v8::Handle<v8::Context> context,
                           std::string const& history)
    : LineEditor(),
      _isolate(isolate),
      _context(context),
      _executingCommand(false) {
  // register global instance
  TRI_ASSERT(SINGLETON.load() == nullptr);
  SINGLETON.store(this);

  // create shell
  _shell = ShellBase::buildShell(history, new V8Completer());

// handle control-c
#ifdef _WIN32
  int res = SetConsoleCtrlHandler((PHANDLER_ROUTINE)SignalHandler, true);

  if (res == 0) {
    LOG_TOPIC(ERR, arangodb::Logger::FIXME) << "unable to install signal handler";
  }
#else
  struct sigaction sa;
  sa.sa_flags = 0;
  sigemptyset(&sa.sa_mask);
  sa.sa_handler = &SignalHandler;

  int res = sigaction(SIGINT, &sa, 0);

  if (res != 0) {
    LOG_TOPIC(ERR, arangodb::Logger::FIXME) << "unable to install signal handler";
  }
#endif
}

////////////////////////////////////////////////////////////////////////////////
/// @brief destroys the editor
////////////////////////////////////////////////////////////////////////////////

V8LineEditor::~V8LineEditor() {
  // unregister global instance
  TRI_ASSERT(SINGLETON.load() != nullptr);
  SINGLETON.store(nullptr);
}
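////////////////////////////////////////////////////////////////////////////////
/// Illustrative sketch (not part of the original file): the isComplete() state
/// machine above suppresses bracket counting inside strings and comments. The
/// same technique, reduced to double-quoted strings and braces only; this
/// standalone helper is hypothetical (V8Completer itself sits in an anonymous
/// namespace and is not reachable from here).
////////////////////////////////////////////////////////////////////////////////

static bool ExampleBracesBalanced(std::string const& src) {
  int open = 0;
  bool inString = false;

  for (size_t i = 0; i < src.size(); ++i) {
    char c = src[i];
    if (inString) {
      if (c == '\\') {
        ++i;  // skip the escaped character
      } else if (c == '"') {
        inString = false;
      }
    } else if (c == '"') {
      inString = true;
    } else if (c == '{') {
      ++open;
    } else if (c == '}') {
      --open;
    }
  }

  return open <= 0;
}

// ExampleBracesBalanced("function f() { return \"}\"; }") is true: the '}'
// inside the string literal is not counted, mirroring isComplete() above.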
joerg84/arangodb
lib/V8/V8LineEditor.cpp
C++
apache-2.0
11,292
using Newtonsoft.Json;
using Sendwithus.Net;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Threading.Tasks;

namespace Sendwithus
{
    /// <summary>
    /// sendwithus Template class
    /// </summary>
    public class Template
    {
        public const string DEFAULT_LOCALE = "en-US";

        // all lowercase to match expected JSON format (case-sensitive on server side)
        public string id { get; set; }
        public string name { get; set; }
        public string locale { get; set; }
        public string created { get; set; }
        public Collection<TemplateVersion> versions { get; set; }
        public Collection<string> tags { get; set; }

        /// <summary>
        /// Create an empty Template
        /// </summary>
        public Template()
        {
            locale = DEFAULT_LOCALE;
        }

        /// <summary>
        /// Get all the templates associated with the account.
        /// GET /templates
        /// </summary>
        /// <returns>A list of all the templates associated with the account</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<List<Template>> GetTemplatesAsync()
        {
            // Send the GET request
            var resource = "templates";
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<List<Template>>(jsonResponse);
        }

        /// <summary>
        /// Get a template by ID.
        /// GET /templates/(:template_id)
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <returns>The template with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<Template> GetTemplateAsync(string templateId)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}", templateId);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<Template>(jsonResponse);
        }

        /// <summary>
        /// Get a template by ID and locale.
        /// GET /templates/(:template_id)/locales/(:locale)
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="locale">The locale of the template</param>
        /// <returns>The template with the given ID and locale</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<Template> GetTemplateAsync(string templateId, string locale)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}/locales/{1}", templateId, locale);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<Template>(jsonResponse);
        }

        /// <summary>
        /// Get a list of template versions (with HTML/text).
        /// GET /templates/(:template_id)/versions
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <returns>The template versions associated with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<List<TemplateVersion>> GetTemplateVersionsAsync(string templateId)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}/versions", templateId);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<List<TemplateVersion>>(jsonResponse);
        }

        /// <summary>
        /// Get a list of template versions (with HTML/text).
        /// GET /templates/(:template_id)/locales/(:locale)/versions
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="locale">The locale of the template</param>
        /// <returns>The template versions associated with the given ID and locale</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<List<TemplateVersion>> GetTemplateVersionsAsync(string templateId, string locale)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}/locales/{1}/versions", templateId, locale);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<List<TemplateVersion>>(jsonResponse);
        }

        /// <summary>
        /// Get a specific version (with HTML/text).
        /// GET /templates/(:template_id)/versions/(:version_id)
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="versionId">The ID of the version</param>
        /// <returns>The template version associated with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> GetTemplateVersionAsync(string templateId, string versionId)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}/versions/{1}", templateId, versionId);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Get a specific version (with HTML/text).
        /// GET /templates/(:template_id)/locales/(:locale)/versions/(:version_id)
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="locale">The locale of the template</param>
        /// <param name="versionId">The ID of the version</param>
        /// <returns>The template version associated with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> GetTemplateVersionAsync(string templateId, string locale, string versionId)
        {
            // Send the GET request
            var resource = String.Format("templates/{0}/locales/{1}/versions/{2}", templateId, locale, versionId);
            var jsonResponse = await RequestManager.SendGetRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Update a Template Version.
        /// PUT /templates/(:template_id)/versions/(:version_id)
        /// NOTE - At least one of html or text must be specified in the TemplateVersion
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="versionId">The ID of the version</param>
        /// <param name="updatedTemplateVersion">The updated template version</param>
        /// <returns>The template version associated with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> UpdateTemplateVersionAsync(string templateId, string versionId, TemplateVersion updatedTemplateVersion)
        {
            // Send the PUT request
            var resource = String.Format("templates/{0}/versions/{1}", templateId, versionId);
            var jsonResponse = await RequestManager.SendPutRequestAsync(resource, updatedTemplateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Update a Template Version.
        /// PUT /templates/(:template_id)/locales/(:locale)/versions/(:version_id)
        /// </summary>
        /// <param name="templateId">The ID of the template</param>
        /// <param name="locale">The locale of the template</param>
        /// <param name="versionId">The ID of the version</param>
        /// <param name="updatedTemplateVersion">The updated template version</param>
        /// <returns>The template version associated with the given ID</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> UpdateTemplateVersionAsync(string templateId, string locale, string versionId, TemplateVersion updatedTemplateVersion)
        {
            // Send the PUT request
            var resource = String.Format("templates/{0}/locales/{1}/versions/{2}", templateId, locale, versionId);
            var jsonResponse = await RequestManager.SendPutRequestAsync(resource, updatedTemplateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Creates a new template.
        /// POST /templates
        /// </summary>
        /// <param name="newTemplateVersion">The new version for the template</param>
        /// <returns>The new template</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<Template> CreateTemplateAsync(TemplateVersion newTemplateVersion)
        {
            // Send the POST request
            var resource = "templates";
            var jsonResponse = await RequestManager.SendPostRequestAsync(resource, newTemplateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<Template>(jsonResponse);
        }

        /// <summary>
        /// Add Locale to Existing Template.
        /// POST /templates/(:template_id)/locales
        /// </summary>
        /// <param name="templateId">The ID of the template to add the locale to</param>
        /// <param name="locale">The locale to add</param>
        /// <param name="templateVersion">The template version</param>
        /// <returns>The template with the updated locale</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<Template> AddLocaleToTemplate(string templateId, string locale, TemplateVersion templateVersion)
        {
            templateVersion.locale = locale;

            // Send the POST request
            var resource = String.Format("templates/{0}/locales", templateId);
            var jsonResponse = await RequestManager.SendPostRequestAsync(resource, templateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<Template>(jsonResponse);
        }

        /// <summary>
        /// Create a New Template Version.
        /// POST /templates/(:template_id)/versions
        /// </summary>
        /// <param name="templateId">The ID of the template to add the version to</param>
        /// <param name="templateVersion">The new template version to add</param>
        /// <returns>The newly created template version</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> CreateTemplateVersion(string templateId, TemplateVersion templateVersion)
        {
            // Send the POST request
            var resource = String.Format("templates/{0}/versions", templateId);
            var jsonResponse = await RequestManager.SendPostRequestAsync(resource, templateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Create a New Template Version.
        /// POST /templates/(:template_id)/locales/(:locale)/versions
        /// </summary>
        /// <param name="templateId">The ID of the template to add the version to</param>
        /// <param name="locale">The locale of the template to add the version to</param>
        /// <param name="templateVersion">The new version to add to the template</param>
        /// <returns>The newly created template version</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<TemplateVersion> CreateTemplateVersion(string templateId, string locale, TemplateVersion templateVersion)
        {
            // Send the POST request
            var resource = String.Format("templates/{0}/locales/{1}/versions", templateId, locale);
            var jsonResponse = await RequestManager.SendPostRequestAsync(resource, templateVersion);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<TemplateVersion>(jsonResponse);
        }

        /// <summary>
        /// Delete a specific template.
        /// DELETE /templates/(:template_id)
        /// </summary>
        /// <param name="templateId">The ID of the template to delete</param>
        /// <returns>The status of the api call</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<GenericApiCallStatus> DeleteTemplate(string templateId)
        {
            // Send the DELETE request
            var resource = String.Format("templates/{0}", templateId);
            var jsonResponse = await RequestManager.SendDeleteRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<GenericApiCallStatus>(jsonResponse);
        }

        /// <summary>
        /// Delete a specific template with a given locale.
        /// DELETE /templates/(:template_id)/locales/(:locale)
        /// </summary>
        /// <param name="templateId">The ID of the template to delete</param>
        /// <param name="locale">The locale of the template to delete</param>
        /// <returns>The status of the api call</returns>
        /// <exception cref="AggregateException">Thrown when the API response status code is not success or when the API call times out</exception>
        /// <exception cref="InvalidOperationException">Thrown when making a Batch API Request that has already reached the maximum API calls per batch request</exception>
        public static async Task<GenericApiCallStatus> DeleteTemplate(string templateId, string locale)
        {
            // Send the DELETE request
            var resource = String.Format("templates/{0}/locales/{1}", templateId, locale);
            var jsonResponse = await RequestManager.SendDeleteRequestAsync(resource);

            // Convert the JSON result into an object
            return JsonConvert.DeserializeObject<GenericApiCallStatus>(jsonResponse);
        }
    }
}
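// ---------------------------------------------------------------------------
// Usage sketch (illustrative; not part of the library). Fetches a template,
// then updates the HTML of its first version. "tem_123" is a placeholder
// template ID; the html and id members of TemplateVersion are assumed from
// the endpoint notes above (a version has an ID and carries HTML and/or text).
// ---------------------------------------------------------------------------
namespace SendwithusExamples
{
    internal static class TemplateUsageExample
    {
        public static async System.Threading.Tasks.Task RunAsync()
        {
            // Fetch the template and its versions
            var template = await Sendwithus.Template.GetTemplateAsync("tem_123");
            var versions = await Sendwithus.Template.GetTemplateVersionsAsync(template.id);

            // Update the first version's HTML (at least one of html/text is required)
            var version = versions[0];
            version.html = "<html><body>Hello!</body></html>"; // assumed member, see note above
            await Sendwithus.Template.UpdateTemplateVersionAsync(template.id, version.id, version);
        }
    }
}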
sendwithus/sendwithus_csharp
Sendwithus/SendwithusClient/API/Template.cs
C#
apache-2.0
18,534
import React from 'react'; import PropTypes from 'prop-types'; import style from 'HPCCloudStyle/ItemEditor.mcss'; export default class SchedulerConfigSGE extends React.Component { constructor(props) { super(props); this.updateConfig = this.updateConfig.bind(this); } updateConfig(event) { if (this.props.onChange) { this.props.onChange(event); } } render() { return ( <div> <section className={style.group}> <label className={style.label}>Number of slots</label> <input className={style.input} type="number" min="1" max={ this.props.runtime && this.props.max && this.props.max.sge ? this.props.max.sge.numberOfSlots : null } value={this.props.config.sge.numberOfSlots} data-key="sge.numberOfSlots" onChange={this.updateConfig} /> </section> <section className={style.group}> <label className={style.label}>GPUs/Node</label> <input className={style.input} type="number" min="0" max={ this.props.runtime && this.props.max && this.props.max.sge ? this.props.max.sge.numberOfGpusPerNode : null } value={this.props.config.sge.numberOfGpusPerNode} data-key="sge.numberOfGpusPerNode" onChange={this.updateConfig} /> </section> <section className={style.group}> <label className={style.label}>Parallel Environment</label> <input className={style.input} type="text" value={this.props.config.sge.parallelEnvironment} data-key="sge.parallelEnvironment" onChange={this.updateConfig} required /> </section> </div> ); } } SchedulerConfigSGE.propTypes = { config: PropTypes.object, max: PropTypes.object, onChange: PropTypes.func, runtime: PropTypes.bool, }; SchedulerConfigSGE.defaultProps = { config: undefined, max: undefined, onChange: undefined, runtime: undefined, };
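// ---------------------------------------------------------------------------
// Usage sketch (illustrative; not part of this file): wiring the component
// into a parent form. The config shape mirrors the data-key paths used above;
// 'smp' is a placeholder parallel-environment name.
// ---------------------------------------------------------------------------
export function SchedulerConfigSGEExample() {
  const config = {
    sge: { numberOfSlots: 8, numberOfGpusPerNode: 0, parallelEnvironment: 'smp' },
  };
  const onChange = (event) => {
    // event.target.dataset.key is e.g. 'sge.numberOfSlots'
    console.log(event.target.dataset.key, event.target.value);
  };
  return <SchedulerConfigSGE config={config} onChange={onChange} runtime={false} />;
}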
Kitware/HPCCloud
src/panels/SchedulerConfig/SGE.js
JavaScript
apache-2.0
2,229
package com.google.ratel.service.error;

import com.google.ratel.core.Mode;
import com.google.ratel.*;
import com.google.ratel.deps.io.*;
import com.google.ratel.service.classdata.*;
import com.google.ratel.service.log.LogService;
import com.google.ratel.service.resolver.*;
import com.google.ratel.util.*;
import java.io.*;
import java.lang.reflect.Method;
import java.nio.charset.*;
import javax.servlet.*;
import javax.servlet.http.*;

/**
 * Default {@link ErrorHandlerService} implementation. It always logs the error and,
 * outside of PROFILE and PRODUCTION modes, renders a detailed HTML error report
 * based on the "error.htm" template.
 */
public class DefaultErrorHandlerService implements ErrorHandlerService {

    private String errorTemplate;

    private ServletContext servletContext;

    @Override
    public void onInit(ServletContext servletContext) {
        this.servletContext = servletContext;

        errorTemplate = loadTemplate("error.htm");
        if (errorTemplate == null) {
            errorTemplate = "<html><body><h1>ErrorReport</h1>@TOKEN</body></html>";
        }
    }

    @Override
    public void onDestroy(ServletContext servletContext) {
        this.servletContext = null;
    }

    @Override
    public void handleRuntimeException(Throwable throwable, Mode mode, HttpServletRequest request, HttpServletResponse response,
        RatelConfig ratelConfig) {

        LogService logService = ratelConfig.getLogService();
        logService.error("Could not handle request", throwable);

        if (mode == Mode.PROFILE || mode == Mode.PRODUCTION) {
            return;
        }

        // In development modes, render a detailed error report to the client
        ServiceResolver serviceResolver = ratelConfig.getServiceResolver();

        Class serviceClass = null;
        Method method = null;

        try {
            RequestTargetData targetData = serviceResolver.resolveTarget(request);
            if (targetData != null) {
                ClassData serviceData = targetData.getClassData();
                serviceClass = serviceData.getServiceClass();
                MethodData methodData = targetData.getMethodData();
                method = methodData.getMethod();
            }
        } catch (RuntimeException ignore) {
            // If we cannot lookup the service and method we continue printing the information we have
        }

        ErrorReport errorReport = new ErrorReport(throwable, serviceClass, method, mode, request, ratelConfig);

        String content = errorTemplate.replace("@TOKEN", errorReport.toString());

        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        RatelUtils.writeContent(response, content, Constants.HTML);
    }

    @Override
    public void handleInitializationException(Throwable throwable, Mode mode, ServletContext servletContext, LogService logService) {
        logService.error("Could not initialize Ratel", throwable);

        if (mode != Mode.PROFILE && mode != Mode.PRODUCTION) {
            throw new RuntimeException(throwable);
        }
    }

    protected String loadTemplate(String name) {
        InputStream is = servletContext.getResourceAsStream("/" + name);
        if (is == null) {
            String resourcePath = "/META-INF/resources/" + name;
            is = RatelUtils.getClasspathResourceAsStream(resourcePath);
        }

        String template = null;
        try {
            if (is != null) {
                template = IOUtils.toString(is, (Charset) null);
            }
            return template;

        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }
}
sabob/ratel
ratel/src/com/google/ratel/service/error/DefaultErrorHandlerService.java
Java
apache-2.0
3,778
/** * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * This file is part of the Smart Developer Hub Project: * http://www.smartdeveloperhub.org/ * * Center for Open Middleware * http://www.centeropenmiddleware.com/ * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Copyright (C) 2015-2016 Center for Open Middleware. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Artifact : org.smartdeveloperhub.curator:sdh-curator-connector:0.2.0 * Bundle : sdh-curator-connector-0.2.0.jar * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# */ package org.smartdeveloperhub.curator.connector; import java.io.IOException; import org.smartdeveloperhub.curator.connector.io.ConversionContext; import org.smartdeveloperhub.curator.protocol.Agent; import org.smartdeveloperhub.curator.protocol.RequestMessage; import org.smartdeveloperhub.curator.protocol.ResponseMessage; final class ServerCuratorController extends CuratorController { ServerCuratorController(final CuratorConfiguration configuration, final String name, final ConversionContext context) { super(configuration,name,context); } private String routingKey(final RequestMessage request) { return curatorConfiguration().responseRoutingKey()+"."+request.submittedBy().agentId(); } void publishResponse(final RequestMessage request, final ResponseMessage response) throws IOException { publishMessage(response,routingKey(request)); } void publishResponse(final RequestMessage request, final String response) throws IOException { publishMessage(response,routingKey(request)); } /** * {@inheritDoc} */ @Override protected String routingKey(final CuratorConfiguration configuration, final Agent agent) throws ControllerException { return configuration.requestRoutingKey()+".*"; } }
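// ---------------------------------------------------------------------------
// Illustration (hypothetical helper, not part of the connector): how the two
// routingKey(...) methods above relate. The concrete values shown in the
// comments ("curator.response", "curator.request", "agent-42") are placeholders.
// ---------------------------------------------------------------------------
final class RoutingKeyExample {

	private RoutingKeyExample() {
	}

	// Mirrors routingKey(RequestMessage): responses go to an agent-specific key,
	// e.g. responseKeyFor("curator.response", "agent-42") -> "curator.response.agent-42"
	static String responseKeyFor(final String responseRoutingKey, final String agentId) {
		return responseRoutingKey + "." + agentId;
	}

	// Mirrors routingKey(CuratorConfiguration, Agent): the server binds with a
	// wildcard so one queue receives every agent's requests, e.g. "curator.request.*"
	static String serverBinding(final String requestRoutingKey) {
		return requestRoutingKey + ".*";
	}

}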
SmartDeveloperHub/sdh-curator-connector
src/test/java/org/smartdeveloperhub/curator/connector/ServerCuratorController.java
Java
apache-2.0
2,636
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for UpdateIssue # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-contact-center-insights # [START contactcenterinsights_v1_generated_ContactCenterInsights_UpdateIssue_sync] from google.cloud import contact_center_insights_v1 def sample_update_issue(): # Create a client client = contact_center_insights_v1.ContactCenterInsightsClient() # Initialize request argument(s) request = contact_center_insights_v1.UpdateIssueRequest( ) # Make the request response = client.update_issue(request=request) # Handle the response print(response) # [END contactcenterinsights_v1_generated_ContactCenterInsights_UpdateIssue_sync]
googleapis/python-contact-center-insights
samples/generated_samples/contactcenterinsights_v1_generated_contact_center_insights_update_issue_sync.py
Python
apache-2.0
1,517
#ifndef RIAKPP_CONNECTION_POOL_HPP_
#define RIAKPP_CONNECTION_POOL_HPP_

#include <boost/asio/io_service.hpp>
#include <boost/system/error_code.hpp>

#include <cstddef>
#include <cstdint>
#include <functional>
#include <memory>
#include <string>
#include <vector>

#include "async_queue.hpp"
#include "check.hpp"
#include "endpoint_vector.hpp"
#include "transient.hpp"

namespace riak {

template <class Connection>
class connection_pool {
 public:
  using connection_type = Connection;
  using error_type = typename connection_type::error_type;
  using handler_type = typename connection_type::handler_type;
  using request_type = typename connection_type::request_type;
  using response_type = typename connection_type::response_type;

  connection_pool(boost::asio::io_service& io_service, std::string hostname,
                  uint16_t port, size_t max_connections, size_t highwatermark,
                  uint64_t connection_timeout_ms);

  ~connection_pool();

  void async_send(request_type request, handler_type handler);

 private:
  struct packaged_request {
    packaged_request(request_type request, handler_type handler)
        : request{std::move(request)}, handler{std::move(handler)} {}

    request_type request;
    handler_type handler;
  };

  void resolve(size_t max_connections, std::string hostname, uint16_t port);
  void report_resolution_error(boost::system::error_code asio_error);
  void create_connections(size_t max_connections);
  void notify_connection_ready(connection_type& connection);
  void send_request(connection_type& connection, packaged_request packaged);

  boost::asio::io_service& io_service_;
  std::vector<std::unique_ptr<connection_type>> connections_;
  async_queue<packaged_request> request_queue_;
  uint64_t connection_timeout_ms_;
  endpoint_vector endpoints_;
  transient<connection_pool> transient_;
};

template <class Connection>
connection_pool<Connection>::connection_pool(
    boost::asio::io_service& io_service, std::string hostname, uint16_t port,
    size_t max_connections, size_t highwatermark,
    uint64_t connection_timeout_ms)
    : io_service_(io_service),
      request_queue_{highwatermark, max_connections},
      connection_timeout_ms_{connection_timeout_ms},
      transient_{*this} {
  RIAKPP_CHECK_GE(max_connections, 1)
      << "Number of connections must be non-zero.";
  connections_.reserve(max_connections);
  resolve(max_connections, std::move(hostname), port);
}

template <class Connection>
connection_pool<Connection>::~connection_pool() {
  request_queue_.close();
  transient_.reset();
  connections_.clear();
}

template <class Connection>
void connection_pool<Connection>::async_send(request_type request,
                                             handler_type handler) {
  request_queue_.emplace(std::move(request), std::move(handler));
}

template <class Connection>
void connection_pool<Connection>::resolve(size_t max_connections,
                                          std::string hostname,
                                          uint16_t port) {
  using resolver = boost::asio::ip::tcp::resolver;
  auto* resolver_raw = new resolver{io_service_};
  auto query = resolver::query{std::move(hostname), std::to_string(port)};
  auto self_ref = transient_.ref();

  resolver_raw->async_resolve(
      std::move(query),
      transient_.wrap([this, resolver_raw, max_connections](
          boost::system::error_code ec,
          boost::asio::ip::tcp::resolver::iterator endpoint_begin) {
        std::unique_ptr<resolver> resolver_destroyer{resolver_raw};
        if (ec) {
          report_resolution_error(ec);
        } else {
          endpoints_.assign(endpoint_begin, decltype(endpoint_begin){});
          create_connections(max_connections);
        }
      }));
}

template <class Connection>
void connection_pool<Connection>::report_resolution_error(
    boost::system::error_code asio_error) {
  request_queue_.async_pop(
      transient_.wrap([this, asio_error](packaged_request packaged) {
        io_service_.post(
            std::bind(std::move(packaged.handler),
                      error_type{asio_error.value(), std::generic_category()},
                      response_type{}));
        report_resolution_error(asio_error);
      }));
}

template <class Connection>
void connection_pool<Connection>::create_connections(size_t max_connections) {
  // Resolution must have produced at least one endpoint to connect to.
  RIAKPP_CHECK_GE(endpoints_.size(), 1u);
  for (size_t i_conn = 0; i_conn < max_connections; ++i_conn) {
    connections_.emplace_back(
        new connection_type{io_service_, endpoints_.begin(), endpoints_.end(),
                            connection_timeout_ms_});
  }
  for (auto& connection_ptr : connections_) {
    notify_connection_ready(*connection_ptr);
  }
}

template <class Connection>
void connection_pool<Connection>::notify_connection_ready(
    connection_type& connection) {
  request_queue_.async_pop(
      transient_.wrap([this, &connection](packaged_request packaged) {
        send_request(connection, std::move(packaged));
      }));
}

template <class Connection>
void connection_pool<Connection>::send_request(connection_type& connection,
                                               packaged_request packaged) {
  using namespace std::placeholders;
  auto call_and_notify = [this, &connection](handler_type& original_handler,
                                             error_type error,
                                             response_type& response) {
    notify_connection_ready(connection);
    io_service_.post(
        std::bind(std::move(original_handler), error, std::move(response)));
  };
  auto wrapped = transient_.wrap(std::bind(
      std::move(call_and_notify), std::move(packaged.handler), _1, _2));
  connection.async_send(std::move(packaged.request), std::move(wrapped));
}

}  // namespace riak

#endif  // #ifndef RIAKPP_CONNECTION_POOL_HPP_
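// ---------------------------------------------------------------------------
// Usage sketch (illustrative; not part of the library). "echo_connection" is a
// made-up Connection type showing the interface connection_pool expects: the
// four nested types plus the (io_service, endpoint range, timeout) constructor
// and async_send(). Port 8087 below is just a placeholder. Kept as a comment
// since it sits past the include guard.
//
// namespace riak_example {
// struct echo_connection {
//   using error_type = std::error_code;
//   using request_type = std::string;
//   using response_type = std::string;
//   using handler_type = std::function<void(error_type, response_type)>;
//
//   template <class EndpointIterator>
//   echo_connection(boost::asio::io_service& io, EndpointIterator,
//                   EndpointIterator, uint64_t /*timeout_ms*/)
//       : io_(io) {}
//
//   void async_send(request_type request, handler_type handler) {
//     // Echo the request back as the response, as if a server replied.
//     io_.post([request, handler] { handler(error_type{}, request); });
//   }
//
//   boost::asio::io_service& io_;
// };
// }  // namespace riak_example
//
// boost::asio::io_service io_service;
// riak::connection_pool<riak_example::echo_connection> pool{
//     io_service, "localhost", 8087, /*max_connections=*/2,
//     /*highwatermark=*/128, /*connection_timeout_ms=*/3000};
// pool.async_send("ping", [](std::error_code ec, std::string r) { /*...*/ });
// io_service.run();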
reinfer/riakpp
src/connection_pool.hpp
C++
apache-2.0
5,709
#!/usr/bin/python # Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Reusable utility functions. This file is generic and can be reused by other models without modification. """ from apache_beam.transforms import core import tensorflow as tf def int64_feature(value): """Create a multi-valued int64 feature from a single value.""" return tf.train.Feature(int64_list=tf.train.Int64List(value=[value])) def bytes_feature(value): """Create a multi-valued bytes feature from a single value.""" return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value])) def float_feature(value): """Create a multi-valued float feature from a single value.""" return tf.train.Feature(float_list=tf.train.FloatList(value=[value])) class DefaultToKeyDict(dict): """Custom dictionary to use the key as the value for any missing entries.""" def __missing__(self, key): return str(key) class TableToDictCombineFn(core.CombineFn): """Beam transform to create a python dictionary from a BigQuery table. This CombineFn reshapes rows from a BigQuery table using the specified key column to a Python dictionary. """ def __init__(self, key_column): self.key_column = key_column def create_accumulator(self): return dict() def add_input(self, accumulator, element): accumulator[element[self.key_column]] = element return accumulator def add_inputs(self, accumulator, elements): for element in elements: self.add_input(accumulator, element) return accumulator def merge_accumulators(self, accumulators): final_accumulator = {} for accumulator in accumulators: final_accumulator.update(accumulator) return final_accumulator def extract_output(self, accumulator): return accumulator
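# ---------------------------------------------------------------------------
# Usage sketch (illustrative; not part of this module). Shows the feature
# helpers above inside a tf.train.Example, and TableToDictCombineFn reshaping
# rows keyed by a placeholder 'sample_id' column in a Beam pipeline.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
  import apache_beam as beam

  # Three single-valued features wrapped into one Example proto.
  example = tf.train.Example(features=tf.train.Features(feature={
      'age': int64_feature(42),
      'name': bytes_feature(b'sample-1'),
      'score': float_feature(0.75),
  }))
  print(example)

  # The CombineFn turns the rows into {'s1': {...}, 's2': {...}}.
  with beam.Pipeline() as p:
    _ = (p
         | beam.Create([{'sample_id': 's1', 'value': 1},
                        {'sample_id': 's2', 'value': 2}])
         | beam.CombineGlobally(TableToDictCombineFn(key_column='sample_id')))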
googlegenomics/cloudml-examples
trainer/util.py
Python
apache-2.0
2,296
package org.ovirt.engine.ui.uicommonweb.models.storage;

import java.util.Collections;

import org.ovirt.engine.core.compat.*;
import org.ovirt.engine.ui.uicompat.*;
import org.ovirt.engine.core.common.businessentities.*;
import org.ovirt.engine.core.common.vdscommands.*;
import org.ovirt.engine.core.common.queries.*;
import org.ovirt.engine.core.common.action.*;
import org.ovirt.engine.ui.frontend.*;
import org.ovirt.engine.ui.uicommonweb.*;
import org.ovirt.engine.ui.uicommonweb.models.*;
import org.ovirt.engine.core.common.*;

@SuppressWarnings("unused")
public class IscsiImportStorageModel extends ImportSanStorageModel {

    @Override
    public StorageType getType() {
        return StorageType.ISCSI;
    }

    @Override
    protected String getListName() {
        return "IscsiImportStorageModel";
    }
}
anjalshireesh/gluster-ovirt-poc
frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/storage/IscsiImportStorageModel.java
Java
apache-2.0
1,042
#Copyright 2008 Orbitz WorldWide # #Licensed under the Apache License, Version 2.0 (the "License"); #you may not use this file except in compliance with the License. #You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. from datetime import date, datetime, timedelta from functools import partial from itertools import izip, imap import math import re import random import time from graphite.logger import log from graphite.render.attime import parseTimeOffset from graphite.events import models #XXX format_units() should go somewhere else from os import environ if environ.get('READTHEDOCS'): format_units = lambda *args, **kwargs: (0,'') else: from graphite.render.glyph import format_units from graphite.render.datalib import TimeSeries from graphite.util import timestamp NAN = float('NaN') INF = float('inf') DAY = 86400 HOUR = 3600 MINUTE = 60 #Utility functions def safeSum(values): safeValues = [v for v in values if v is not None] if safeValues: return sum(safeValues) def safeDiff(values): safeValues = [v for v in values if v is not None] if safeValues: values = map(lambda x: x*-1, safeValues[1:]) values.insert(0, safeValues[0]) return sum(values) def safeLen(values): return len([v for v in values if v is not None]) def safeDiv(a, b): if a is None: return None if b in (0,None): return None return float(a) / float(b) def safeMul(*factors): if None in factors: return None factors = [float(x) for x in factors] product = reduce(lambda x,y: x*y, factors) return product def safeSubtract(a,b): if a is None or b is None: return None return float(a) - float(b) def safeAvg(a): return safeDiv(safeSum(a),safeLen(a)) def safeStdDev(a): sm = safeSum(a) ln = safeLen(a) avg = safeDiv(sm,ln) sum = 0 safeValues = [v for v in a if v is not None] for val in safeValues: sum = sum + (val - avg) * (val - avg) return math.sqrt(sum/ln) def safeLast(values): for v in reversed(values): if v is not None: return v def safeMin(values): safeValues = [v for v in values if v is not None] if safeValues: return min(safeValues) def safeMax(values): safeValues = [v for v in values if v is not None] if safeValues: return max(safeValues) def safeMap(function, values): safeValues = [v for v in values if v is not None] if safeValues: return [function(x) for x in values] def safeAbs(value): if value is None: return None return abs(value) # Greatest common divisor def gcd(a, b): if b == 0: return a return gcd(b, a%b) # Least common multiple def lcm(a, b): if a == b: return a if a < b: (a, b) = (b, a) #ensure a > b return a / gcd(a,b) * b def normalize(seriesLists): seriesList = reduce(lambda L1,L2: L1+L2,seriesLists) step = reduce(lcm,[s.step for s in seriesList]) for s in seriesList: s.consolidate( step / s.step ) start = min([s.start for s in seriesList]) end = max([s.end for s in seriesList]) end -= (end - start) % step return (seriesList,start,end,step) def formatPathExpressions(seriesList): # remove duplicates pathExpressions = [] [pathExpressions.append(s.pathExpression) for s in seriesList if not pathExpressions.count(s.pathExpression)] return ','.join(pathExpressions) # Series Functions #NOTE: Some of the functions below use izip, which may be problematic. 
#izip stops when it hits the end of the shortest series #in practice this *shouldn't* matter because all series will cover #the same interval, despite having possibly different steps... def sumSeries(requestContext, *seriesLists): """ Short form: sum() This will add metrics together and return the sum at each datapoint. (See integral for a sum over time) Example: .. code-block:: none &target=sum(company.server.application*.requestsHandled) This would show the sum of all requests handled per minute (provided requestsHandled are collected once a minute). If metrics with different retention rates are combined, the coarsest metric is graphed, and the sum of the other metrics is averaged for the metrics with finer retention rates. """ try: (seriesList,start,end,step) = normalize(seriesLists) except: return [] name = "sumSeries(%s)" % formatPathExpressions(seriesList) values = ( safeSum(row) for row in izip(*seriesList) ) series = TimeSeries(name,start,end,step,values) series.pathExpression = name return [series] def sumSeriesWithWildcards(requestContext, seriesList, *position): #XXX """ Call sumSeries after inserting wildcards at the given position(s). Example: .. code-block:: none &target=sumSeriesWithWildcards(host.cpu-[0-7].cpu-{user,system}.value, 1) This would be the equivalent of ``target=sumSeries(host.*.cpu-user.value)&target=sumSeries(host.*.cpu-system.value)`` """ if isinstance(position, int): positions = [position] else: positions = position newSeries = {} newNames = list() for series in seriesList: newname = '.'.join(map(lambda x: x[1], filter(lambda i: i[0] not in positions, enumerate(series.name.split('.'))))) if newname in newSeries.keys(): newSeries[newname] = sumSeries(requestContext, (series, newSeries[newname]))[0] else: newSeries[newname] = series newNames.append(newname) newSeries[newname].name = newname return [newSeries[name] for name in newNames] def averageSeriesWithWildcards(requestContext, seriesList, *position): #XXX """ Call averageSeries after inserting wildcards at the given position(s). Example: .. code-block:: none &target=averageSeriesWithWildcards(host.cpu-[0-7].cpu-{user,system}.value, 1) This would be the equivalent of ``target=averageSeries(host.*.cpu-user.value)&target=averageSeries(host.*.cpu-system.value)`` """ if isinstance(position, int): positions = [position] else: positions = position result = [] matchedList = {} for series in seriesList: newname = '.'.join(map(lambda x: x[1], filter(lambda i: i[0] not in positions, enumerate(series.name.split('.'))))) if newname not in matchedList: matchedList[newname] = [] matchedList[newname].append(series) for name in matchedList.keys(): result.append( averageSeries(requestContext, (matchedList[name]))[0] ) result[-1].name = name return result def diffSeries(requestContext, *seriesLists): """ Can take two or more metrics, or a single metric and a constant. Subtracts parameters 2 through n from parameter 1. Example: .. code-block:: none &target=diffSeries(service.connections.total,service.connections.failed) &target=diffSeries(service.connections.total,5) """ (seriesList,start,end,step) = normalize(seriesLists) name = "diffSeries(%s)" % formatPathExpressions(seriesList) values = ( safeDiff(row) for row in izip(*seriesList) ) series = TimeSeries(name,start,end,step,values) series.pathExpression = name return [series] def averageSeries(requestContext, *seriesLists): """ Short Alias: avg() Takes one metric or a wildcard seriesList. Draws the average value of all metrics passed at each time. Example: .. 
code-block:: none &target=averageSeries(company.server.*.threads.busy) """ (seriesList,start,end,step) = normalize(seriesLists) name = "averageSeries(%s)" % formatPathExpressions(seriesList) values = ( safeDiv(safeSum(row),safeLen(row)) for row in izip(*seriesList) ) series = TimeSeries(name,start,end,step,values) series.pathExpression = name return [series] def stddevSeries(requestContext, *seriesLists): """ Takes one metric or a wildcard seriesList. Draws the standard deviation of all metrics passed at each time. Example: .. code-block:: none &target=stddevSeries(company.server.*.threads.busy) """ (seriesList,start,end,step) = normalize(seriesLists) name = "stddevSeries(%s)" % formatPathExpressions(seriesList) values = ( safeStdDev(row) for row in izip(*seriesList) ) series = TimeSeries(name,start,end,step,values) series.pathExpression = name return [series] def minSeries(requestContext, *seriesLists): """ Takes one metric or a wildcard seriesList. For each datapoint from each metric passed in, pick the minimum value and graph it. Example: .. code-block:: none &target=minSeries(Server*.connections.total) """ (seriesList, start, end, step) = normalize(seriesLists) name = "minSeries(%s)" % formatPathExpressions(seriesList) values = ( safeMin(row) for row in izip(*seriesList) ) series = TimeSeries(name, start, end, step, values) series.pathExpression = name return [series] def maxSeries(requestContext, *seriesLists): """ Takes one metric or a wildcard seriesList. For each datapoint from each metric passed in, pick the maximum value and graph it. Example: .. code-block:: none &target=maxSeries(Server*.connections.total) """ (seriesList, start, end, step) = normalize(seriesLists) name = "maxSeries(%s)" % formatPathExpressions(seriesList) values = ( safeMax(row) for row in izip(*seriesList) ) series = TimeSeries(name, start, end, step, values) series.pathExpression = name return [series] def rangeOfSeries(requestContext, *seriesLists): """ Takes a wildcard seriesList. Distills down a set of inputs into the range of the series Example: .. code-block:: none &target=rangeOfSeries(Server*.connections.total) """ (seriesList,start,end,step) = normalize(seriesLists) name = "rangeOfSeries(%s)" % formatPathExpressions(seriesList) values = ( safeSubtract(max(row), min(row)) for row in izip(*seriesList) ) series = TimeSeries(name,start,end,step,values) series.pathExpression = name return [series] def percentileOfSeries(requestContext, seriesList, n, interpolate=False): """ percentileOfSeries returns a single series which is composed of the n-percentile values taken across a wildcard series at each point. Unless `interpolate` is set to True, percentile values are actual values contained in one of the supplied series. """ if n <= 0: raise ValueError('The requested percent is required to be greater than 0') name = 'percentilesOfSeries(%s,%g)' % (seriesList[0].pathExpression, n) (start, end, step) = normalize([seriesList])[1:] values = [ _getPercentile(row, n, interpolate) for row in izip(*seriesList) ] resultSeries = TimeSeries(name, start, end, step, values) resultSeries.pathExpression = name return [resultSeries] def keepLastValue(requestContext, seriesList, limit = INF): """ Takes one metric or a wildcard seriesList, and optionally a limit to the number of 'None' values to skip over. Continues the line with the last received value when gaps ('None' values) appear in your data, rather than breaking your line. Example: .. 
code-block:: none &target=keepLastValue(Server01.connections.handled) &target=keepLastValue(Server01.connections.handled, 10) """ for series in seriesList: series.name = "keepLastValue(%s)" % (series.name) series.pathExpression = series.name consecutiveNones = 0 for i,value in enumerate(series): series[i] = value # No 'keeping' can be done on the first value because we have no idea # what came before it. if i == 0: continue if value is None: consecutiveNones += 1 else: if 0 < consecutiveNones <= limit: # If a non-None value is seen before the limit of Nones is hit, # backfill all the missing datapoints with the last known value. for index in xrange(i - consecutiveNones, i): series[index] = series[i - consecutiveNones - 1] consecutiveNones = 0 # If the series ends with some None values, try to backfill a bit to cover it. if 0 < consecutiveNones < limit: for index in xrange(len(series) - consecutiveNones, len(series)): series[index] = series[len(series) - consecutiveNones - 1] return seriesList def asPercent(requestContext, seriesList, total=None): """ Calculates a percentage of the total of a wildcard series. If `total` is specified, each series will be calculated as a percentage of that total. If `total` is not specified, the sum of all points in the wildcard series will be used instead. The `total` parameter may be a single series or a numeric value. Example: .. code-block:: none &target=asPercent(Server01.connections.{failed,succeeded}, Server01.connections.attempted) &target=asPercent(apache01.threads.busy,1500) &target=asPercent(Server01.cpu.*.jiffies) """ normalize([seriesList]) if total is None: totalValues = [ safeSum(row) for row in izip(*seriesList) ] totalText = None # series.pathExpression elif isinstance(total, list): if len(total) != 1: raise ValueError("asPercent second argument must reference exactly 1 series") normalize([seriesList, total]) totalValues = total[0] totalText = totalValues.name else: totalValues = [total] * len(seriesList[0]) totalText = str(total) resultList = [] for series in seriesList: resultValues = [ safeMul(safeDiv(val, totalVal), 100.0) for val,totalVal in izip(series,totalValues) ] name = "asPercent(%s, %s)" % (series.name, totalText or series.pathExpression) resultSeries = TimeSeries(name,series.start,series.end,series.step,resultValues) resultSeries.pathExpression = name resultList.append(resultSeries) return resultList def divideSeries(requestContext, dividendSeriesList, divisorSeriesList): """ Takes a dividend metric and a divisor metric and draws the division result. A constant may *not* be passed. To divide by a constant, use the scale() function (which is essentially a multiplication operation) and use the inverse of the dividend. (Division by 8 = multiplication by 1/8 or 0.125) Example: .. 

def asPercent(requestContext, seriesList, total=None):
  """
  Calculates a percentage of the total of a wildcard series. If `total` is
  specified, each series will be calculated as a percentage of that total.
  If `total` is not specified, the sum of all points in the wildcard series
  will be used instead.

  The `total` parameter may be a single series or a numeric value.

  Example:

  .. code-block:: none

    &target=asPercent(Server01.connections.{failed,succeeded}, Server01.connections.attempted)
    &target=asPercent(apache01.threads.busy,1500)
    &target=asPercent(Server01.cpu.*.jiffies)

  """
  normalize([seriesList])

  if total is None:
    totalValues = [ safeSum(row) for row in izip(*seriesList) ]
    totalText = None # series.pathExpression
  elif isinstance(total, list):
    if len(total) != 1:
      raise ValueError("asPercent second argument must reference exactly 1 series")
    normalize([seriesList, total])
    totalValues = total[0]
    totalText = totalValues.name
  else:
    totalValues = [total] * len(seriesList[0])
    totalText = str(total)

  resultList = []
  for series in seriesList:
    resultValues = [ safeMul(safeDiv(val, totalVal), 100.0) for val,totalVal in izip(series,totalValues) ]

    name = "asPercent(%s, %s)" % (series.name, totalText or series.pathExpression)
    resultSeries = TimeSeries(name,series.start,series.end,series.step,resultValues)
    resultSeries.pathExpression = name
    resultList.append(resultSeries)

  return resultList

def divideSeries(requestContext, dividendSeriesList, divisorSeriesList):
  """
  Takes a dividend metric and a divisor metric and draws the division result.
  A constant may *not* be passed. To divide by a constant, use the scale()
  function (which is essentially a multiplication operation) and use the
  inverse of the dividend. (Division by 8 = multiplication by 1/8 or 0.125)

  Example:

  .. code-block:: none

    &target=divideSeries(Series.dividends,Series.divisors)

  """
  if len(divisorSeriesList) != 1:
    raise ValueError("divideSeries second argument must reference exactly 1 series")

  divisorSeries = divisorSeriesList[0]
  results = []

  for dividendSeries in dividendSeriesList:
    name = "divideSeries(%s,%s)" % (dividendSeries.name, divisorSeries.name)
    bothSeries = (dividendSeries, divisorSeries)
    step = reduce(lcm,[s.step for s in bothSeries])

    for s in bothSeries:
      s.consolidate( step / s.step )

    start = min([s.start for s in bothSeries])
    end = max([s.end for s in bothSeries])
    end -= (end - start) % step

    values = ( safeDiv(v1,v2) for v1,v2 in izip(*bothSeries) )

    quotientSeries = TimeSeries(name, start, end, step, values)
    quotientSeries.pathExpression = name
    results.append(quotientSeries)

  return results
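
# Hedged illustration of how divideSeries() aligns two series with different
# resolutions before dividing: both are consolidated to the least common
# multiple of their steps.
#
#   dividend.step = 10s, divisor.step = 15s
#   step = lcm(10, 15) = 30s
#   dividend is consolidated 3 points -> 1, divisor 2 points -> 1,
#   and the division is then done pointwise at the 30s resolution.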

def multiplySeries(requestContext, *seriesLists):
  """
  Takes two or more series and multiplies their points. A constant may not be
  used. To multiply by a constant, use the scale() function.

  Example:

  .. code-block:: none

    &target=multiplySeries(Series.dividends,Series.divisors)

  """
  (seriesList,start,end,step) = normalize(seriesLists)

  if len(seriesList) == 1:
    return seriesList

  name = "multiplySeries(%s)" % ','.join([s.name for s in seriesList])
  product = imap(lambda x: safeMul(*x), izip(*seriesList))
  resultSeries = TimeSeries(name, start, end, step, product)
  resultSeries.pathExpression = name
  return [ resultSeries ]

def weightedAverage(requestContext, seriesListAvg, seriesListWeight, node):
  """
  Takes a series of average values and a series of weights and
  produces a weighted average for all values.

  The corresponding values should share a node as defined
  by the node parameter, 0-indexed.

  Example:

  .. code-block:: none

    &target=weightedAverage(*.transactions.mean,*.transactions.count,0)

  """
  sortedSeries={}

  for seriesAvg, seriesWeight in izip(seriesListAvg , seriesListWeight):
    key = seriesAvg.name.split(".")[node]
    if key not in sortedSeries:
      sortedSeries[key]={}
    sortedSeries[key]['avg']=seriesAvg

    key = seriesWeight.name.split(".")[node]
    if key not in sortedSeries:
      sortedSeries[key]={}
    sortedSeries[key]['weight']=seriesWeight

  productList = []

  for key in sortedSeries.keys():
    if 'weight' not in sortedSeries[key]:
      continue
    if 'avg' not in sortedSeries[key]:
      continue

    seriesWeight = sortedSeries[key]['weight']
    seriesAvg = sortedSeries[key]['avg']

    productValues = [ safeMul(val1, val2) for val1,val2 in izip(seriesAvg,seriesWeight) ]
    name='product(%s,%s)' % (seriesWeight.name, seriesAvg.name)
    productSeries = TimeSeries(name,seriesAvg.start,seriesAvg.end,seriesAvg.step,productValues)
    productSeries.pathExpression=name
    productList.append(productSeries)

  sumProducts=sumSeries(requestContext, productList)[0]
  sumWeights=sumSeries(requestContext, seriesListWeight)[0]

  resultValues = [ safeDiv(val1, val2) for val1,val2 in izip(sumProducts,sumWeights) ]
  name = "weightedAverage(%s, %s)" % (','.join(set(s.pathExpression for s in seriesListAvg)) ,','.join(set(s.pathExpression for s in seriesListWeight)))
  resultSeries = TimeSeries(name,sumProducts.start,sumProducts.end,sumProducts.step,resultValues)
  resultSeries.pathExpression = name
  return resultSeries

def movingMedian(requestContext, seriesList, windowSize):
  """
  Graphs the moving median of a metric (or metrics) over a fixed number of
  past points, or a time interval.

  Takes one metric or a wildcard seriesList followed by a number N of datapoints
  or a quoted string with a length of time like '1hour' or '5min' (See ``from /
  until`` in the render\_api_ for examples of time formats). Graphs the
  median of the preceding datapoints for each point on the graph. All
  previous datapoints are set to None at the beginning of the graph.

  Example:

  .. code-block:: none

    &target=movingMedian(Server.instance01.threads.busy,10)
    &target=movingMedian(Server.instance*.threads.idle,'5min')

  """
  windowInterval = None
  if isinstance(windowSize, basestring):
    delta = parseTimeOffset(windowSize)
    windowInterval = abs(delta.seconds + (delta.days * 86400))

  if windowInterval:
    bootstrapSeconds = windowInterval
  else:
    bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)

  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
  result = []

  for bootstrap, series in zip(bootstrapList, seriesList):
    if windowInterval:
      windowPoints = windowInterval / series.step
    else:
      windowPoints = int(windowSize)

    if isinstance(windowSize, basestring):
      newName = 'movingMedian(%s,"%s")' % (series.name, windowSize)
    else:
      newName = "movingMedian(%s,%d)" % (series.name, windowPoints)
    newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
    newSeries.pathExpression = newName

    offset = len(bootstrap) - len(series)
    for i in range(len(series)):
      window = bootstrap[i + offset - windowPoints:i + offset]
      nonNull = [v for v in window if v is not None]
      if nonNull:
        m_index = len(nonNull) / 2
        newSeries.append(sorted(nonNull)[m_index])
      else:
        newSeries.append(None)
    result.append(newSeries)

  return result
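
# Hedged example of the window-size arithmetic shared by movingMedian() and
# movingAverage(): a quoted interval is converted to a point count using the
# series step.
#
#   windowSize='5min' -> windowInterval = 300 seconds
#   series.step = 60s -> windowPoints = 300 / 60 = 5 datapoints per window
#
# A plain number is used as the point count directly.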

def scale(requestContext, seriesList, factor):
  """
  Takes one metric or a wildcard seriesList followed by a constant, and
  multiplies the datapoint by the constant provided at each point.

  Example:

  .. code-block:: none

    &target=scale(Server.instance01.threads.busy,10)
    &target=scale(Server.instance*.threads.busy,10)

  """
  for series in seriesList:
    series.name = "scale(%s,%g)" % (series.name,float(factor))
    series.pathExpression = series.name
    for i,value in enumerate(series):
      series[i] = safeMul(value,factor)
  return seriesList

def invert(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList, and inverts each datapoint (i.e. 1/x).

  Example:

  .. code-block:: none

    &target=invert(Server.instance01.threads.busy)

  """
  for series in seriesList:
    series.name = "invert(%s)" % (series.name)
    for i,value in enumerate(series):
      series[i] = safeDiv(1,value)
  return seriesList

def scaleToSeconds(requestContext, seriesList, seconds):
  """
  Takes one metric or a wildcard seriesList and returns "value per seconds",
  where seconds is the last argument to this function.

  Useful in conjunction with derivative or integral functions if you want
  to normalize their results to a known resolution for arbitrary retentions.
  """
  for series in seriesList:
    series.name = "scaleToSeconds(%s,%d)" % (series.name,seconds)
    series.pathExpression = series.name
    for i,value in enumerate(series):
      factor = seconds * 1.0 / series.step
      series[i] = safeMul(value,factor)
  return seriesList

def absolute(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList and applies the mathematical abs
  function to each datapoint transforming it to its absolute value.

  Example:

  .. code-block:: none

    &target=absolute(Server.instance01.threads.busy)
    &target=absolute(Server.instance*.threads.busy)

  """
  for series in seriesList:
    series.name = "absolute(%s)" % (series.name)
    series.pathExpression = series.name
    for i,value in enumerate(series):
      series[i] = safeAbs(value)
  return seriesList

def offset(requestContext, seriesList, factor):
  """
  Takes one metric or a wildcard seriesList followed by a constant or a single
  time series, and adds the value to each datapoint.

  Example:

  .. code-block:: none

    &target=offset(Server.instance01.threads.busy,10)
    &target=scale(offset(Server.instance01.threads.*.last_change, scale(Server.instance01.uptime, -1)),-1)

  """
  for series in seriesList:
    if isinstance(factor, list):
      if len(factor) != 1:
        raise ValueError("offset second argument must reference exactly 1 series")
      factor_serie = factor[0]
      series.name = "offset(%s,%s)" % (series.name,factor_serie.name)
      series.pathExpression = series.name
      for i,value in enumerate(series):
        if value is not None:
          series[i] = value + factor_serie[i]
    else:
      series.name = "offset(%s,%g)" % (series.name,float(factor))
      series.pathExpression = series.name
      for i,value in enumerate(series):
        if value is not None:
          series[i] = value + factor
  return seriesList

def offsetToZero(requestContext, seriesList):
  """
  Offsets a metric or wildcard seriesList by subtracting the minimum
  value in the series from each datapoint.

  Useful to compare different series where the values in each series
  may be higher or lower on average but you're only interested in the
  relative difference.

  An example use case is for comparing different round trip time
  results. When measuring RTT (like pinging a server), different
  devices may come back with consistently different results due to
  network latency which will be different depending on how many
  network hops between the probe and the device. To compare different
  devices in the same graph, the network latency to each has to be
  factored out of the results. This is a shortcut that takes the
  fastest response (lowest number in the series) and sets that to zero
  and then offsets all of the other datapoints in that series by that
  amount. This makes the assumption that the lowest response is the
  fastest the device can respond, of course the more datapoints that
  are in the series the more accurate this assumption is.

  Example:

  .. code-block:: none

    &target=offsetToZero(Server.instance01.responseTime)
    &target=offsetToZero(Server.instance*.responseTime)

  """
  for series in seriesList:
    series.name = "offsetToZero(%s)" % (series.name)
    minimum = safeMin(series)
    for i,value in enumerate(series):
      if value is not None:
        series[i] = value - minimum
  return seriesList
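
# Hedged sketch of offsetToZero() on raw values (the real function works on
# TimeSeries objects):
#
#   input values: [12.4, None, 10.0, 11.5]
#   minimum = 10.0
#   result:       [2.4,  None, 0.0,  1.5]   -- the fastest response becomes 0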

def movingAverage(requestContext, seriesList, windowSize):
  """
  Graphs the moving average of a metric (or metrics) over a fixed number of
  past points, or a time interval.

  Takes one metric or a wildcard seriesList followed by a number N of datapoints
  or a quoted string with a length of time like '1hour' or '5min' (See ``from /
  until`` in the render\_api_ for examples of time formats). Graphs the
  average of the preceding datapoints for each point on the graph. All
  previous datapoints are set to None at the beginning of the graph.

  Example:

  .. code-block:: none

    &target=movingAverage(Server.instance01.threads.busy,10)
    &target=movingAverage(Server.instance*.threads.idle,'5min')

  """
  windowInterval = None
  if isinstance(windowSize, basestring):
    delta = parseTimeOffset(windowSize)
    windowInterval = abs(delta.seconds + (delta.days * 86400))

  if windowInterval:
    bootstrapSeconds = windowInterval
  else:
    bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)

  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
  result = []

  for bootstrap, series in zip(bootstrapList, seriesList):
    if windowInterval:
      windowPoints = windowInterval / series.step
    else:
      windowPoints = int(windowSize)

    if isinstance(windowSize, basestring):
      newName = 'movingAverage(%s,"%s")' % (series.name, windowSize)
    else:
      newName = "movingAverage(%s,%s)" % (series.name, windowSize)
    newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
    newSeries.pathExpression = newName

    offset = len(bootstrap) - len(series)
    for i in range(len(series)):
      window = bootstrap[i + offset - windowPoints:i + offset]
      newSeries.append(safeAvg(window))

    result.append(newSeries)

  return result
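
# Hedged note on the bootstrap used by the moving-window functions above:
# _fetchWithBootstrap() (defined later in this module) re-requests the same
# targets with the start time moved back by one window, so the first rendered
# point already has a full window of history behind it.
#
#   render range:    10:00 - 11:00, window = 5min
#   bootstrap fetch: 09:55 - 10:00
#   point at 10:00 averages the 09:55-10:00 data instead of None-padding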
""" results = [] for series in seriesList: newValues = [] prev = None for val in series: if None in (prev,val): newValues.append(None) prev = val continue newValues.append(val - prev) prev = val newName = "derivative(%s)" % series.name newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues) newSeries.pathExpression = newName results.append(newSeries) return results def perSecond(requestContext, seriesList, maxValue=None): """ Derivative adjusted for the series time interval This is useful for taking a running total metric and showing how many requests per second were handled. Example: .. code-block:: none &target=perSecond(company.server.application01.ifconfig.TXPackets) Each time you run ifconfig, the RX and TXPackets are higher (assuming there is network traffic.) By applying the derivative function, you can get an idea of the packets per minute sent or received, even though you're only recording the total. """ results = [] for series in seriesList: newValues = [] prev = None for val in series: step = series.step if None in (prev,val): newValues.append(None) prev = val continue diff = val - prev if diff >= 0: newValues.append(diff / step) elif maxValue is not None and maxValue >= val: newValues.append( ((maxValue - prev) + val + 1) / step ) else: newValues.append(None) prev = val newName = "perSecond(%s)" % series.name newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues) newSeries.pathExpression = newName results.append(newSeries) return results def integral(requestContext, seriesList): """ This will show the sum over time, sort of like a continuous addition function. Useful for finding totals or trends in metrics that are collected per minute. Example: .. code-block:: none &target=integral(company.sales.perMinute) This would start at zero on the left side of the graph, adding the sales each minute, and show the total sales for the time period selected at the right side, (time now, or the time specified by '&until='). """ results = [] for series in seriesList: newValues = [] current = 0.0 for val in series: if val is None: newValues.append(None) else: current += val newValues.append(current) newName = "integral(%s)" % series.name newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues) newSeries.pathExpression = newName results.append(newSeries) return results def nonNegativeDerivative(requestContext, seriesList, maxValue=None): """ Same as the derivative function above, but ignores datapoints that trend down. Useful for counters that increase for a long time, then wrap or reset. (Such as if a network interface is destroyed and recreated by unloading and re-loading a kernel module, common with USB / WiFi cards. Example: .. code-block:: none &target=nonNegativederivative(company.server.application01.ifconfig.TXPackets) """ results = [] for series in seriesList: newValues = [] prev = None for val in series: if None in (prev, val): newValues.append(None) prev = val continue diff = val - prev if diff >= 0: newValues.append(diff) elif maxValue is not None and maxValue >= val: newValues.append( (maxValue - prev) + val + 1 ) else: newValues.append(None) prev = val newName = "nonNegativeDerivative(%s)" % series.name newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues) newSeries.pathExpression = newName results.append(newSeries) return results def stacked(requestContext,seriesLists,stackName='__DEFAULT__'): """ Takes one metric or a wildcard seriesList and change them so they are stacked. 

def stacked(requestContext,seriesLists,stackName='__DEFAULT__'):
  """
  Takes one metric or a wildcard seriesList and changes them so they are
  stacked. This is a way of stacking just a couple of metrics without having
  to use the stacked area mode (that stacks everything). By means of this, a
  mixed stacked and non-stacked graph can be made.

  It can also take an optional argument with a name of the stack, in case there
  is more than one, e.g. for input and output metrics.

  Example:

  .. code-block:: none

    &target=stacked(company.server.application01.ifconfig.TXPackets, 'tx')

  """
  if 'totalStack' in requestContext:
    totalStack = requestContext['totalStack'].get(stackName, [])
  else:
    requestContext['totalStack'] = {}
    totalStack = []
  results = []
  for series in seriesLists:
    newValues = []
    for i in range(len(series)):
      if len(totalStack) <= i:
        totalStack.append(0)

      if series[i] is not None:
        totalStack[i] += series[i]
        newValues.append(totalStack[i])
      else:
        newValues.append(None)

    # Work-around for the case when legend is set
    if stackName=='__DEFAULT__':
      newName = "stacked(%s)" % series.name
    else:
      newName = series.name

    newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues)
    newSeries.options['stacked'] = True
    newSeries.pathExpression = newName
    results.append(newSeries)
  requestContext['totalStack'][stackName] = totalStack
  return results

def areaBetween(requestContext, seriesList):
  """
  Draws the area in between the two series in seriesList
  """
  assert len(seriesList) == 2, "areaBetween series argument must reference *exactly* 2 series"
  lower = seriesList[0]
  upper = seriesList[1]

  lower.options['stacked'] = True
  lower.options['invisible'] = True

  upper.options['stacked'] = True
  lower.name = upper.name = "areaBetween(%s)" % upper.pathExpression
  return seriesList

def aliasSub(requestContext, seriesList, search, replace):
  """
  Runs series names through a regex search/replace.

  .. code-block:: none

    &target=aliasSub(ip.*TCP*,"^.*TCP(\d+)","\\1")
  """
  try:
    seriesList.name = re.sub(search, replace, seriesList.name)
  except AttributeError:
    for series in seriesList:
      series.name = re.sub(search, replace, series.name)
  return seriesList

def alias(requestContext, seriesList, newName):
  """
  Takes one metric or a wildcard seriesList and a string in quotes.
  Prints the string instead of the metric name in the legend.

  .. code-block:: none

    &target=alias(Sales.widgets.largeBlue,"Large Blue Widgets")

  """
  try:
    seriesList.name = newName
  except AttributeError:
    for series in seriesList:
      series.name = newName
  return seriesList

def cactiStyle(requestContext, seriesList, system=None):
  """
  Takes a series list and modifies the aliases to provide column aligned
  output with Current, Max, and Min values in the style of cacti. Optionally
  takes a "system" value to apply unit formatting in the same style as the
  Y-axis.
  NOTE: column alignment only works with monospace fonts such as terminus.

  .. code-block:: none

    &target=cactiStyle(ganglia.*.net.bytes_out,"si")

  """
  if 0 == len(seriesList):
    return seriesList
  if system:
    fmt = lambda x:"%.2f%s" % format_units(x,system=system)
  else:
    fmt = lambda x:"%.2f"%x
  nameLen = max([0] + [len(getattr(series,"name")) for series in seriesList])
  lastLen = max([0] + [len(fmt(int(safeLast(series) or 3))) for series in seriesList]) + 3
  maxLen = max([0] + [len(fmt(int(safeMax(series) or 3))) for series in seriesList]) + 3
  minLen = max([0] + [len(fmt(int(safeMin(series) or 3))) for series in seriesList]) + 3
  for series in seriesList:
    name = series.name
    last = safeLast(series)
    maximum = safeMax(series)
    minimum = safeMin(series)
    if last is None:
      last = NAN
    else:
      last = fmt(float(last))

    if maximum is None:
      maximum = NAN
    else:
      maximum = fmt(float(maximum))
    if minimum is None:
      minimum = NAN
    else:
      minimum = fmt(float(minimum))

    series.name = "%*s Current:%*s Max:%*s Min:%*s " % \
        (-nameLen, series.name, -lastLen, last, -maxLen, maximum, -minLen, minimum)
  return seriesList

def aliasByNode(requestContext, seriesList, *nodes):
  """
  Takes a seriesList and applies an alias derived from one or more "node"
  portion/s of the target name. Node indices are 0 indexed.

  .. code-block:: none

    &target=aliasByNode(ganglia.*.cpu.load5,1)

  """
  if isinstance(nodes, int):
    nodes=[nodes]
  for series in seriesList:
    metric_pieces = re.search('(?:.*\()?(?P<name>[-\w*\.]+)(?:,|\)?.*)?',series.name).groups()[0].split('.')
    series.name = '.'.join(metric_pieces[n] for n in nodes)
  return seriesList

def aliasByMetric(requestContext, seriesList):
  """
  Takes a seriesList and applies an alias derived from the base metric name.

  .. code-block:: none

    &target=aliasByMetric(carbon.agents.graphite.creates)

  """
  for series in seriesList:
    series.name = series.name.split('.')[-1]
  return seriesList

def legendValue(requestContext, seriesList, *valueTypes):
  """
  Takes one metric or a wildcard seriesList and a string in quotes.
  Appends a value to the metric name in the legend. Currently one or several of:
  `last`, `avg`, `total`, `min`, `max`.
  The last argument can be `si` (default) or `binary`, in that case values will
  be formatted in the corresponding system.

  .. code-block:: none

    &target=legendValue(Sales.widgets.largeBlue, 'avg', 'max', 'si')

  """
  def last(s):
    "Work-around for the missing last point"
    v = s[-1]
    if v is None:
      return s[-2]
    return v

  valueFuncs = {
    'avg':   lambda s: safeDiv(safeSum(s), safeLen(s)),
    'total': safeSum,
    'min':   safeMin,
    'max':   safeMax,
    'last':  last
  }
  system = None
  if valueTypes[-1] in ('si', 'binary'):
    system = valueTypes[-1]
    valueTypes = valueTypes[:-1]
  for valueType in valueTypes:
    valueFunc = valueFuncs.get(valueType, lambda s: '(?)')
    if system is None:
      for series in seriesList:
        series.name += " (%s: %s)" % (valueType, valueFunc(series))
    else:
      for series in seriesList:
        value = valueFunc(series)
        formatted = None
        if value is not None:
          formatted = "%.2f%s" % format_units(abs(value), system=system)
        series.name = "%-20s%-5s%-10s" % (series.name, valueType, formatted)
  return seriesList

def alpha(requestContext, seriesList, alpha):
  """
  Assigns the given alpha transparency setting to the series.
  Takes a float value between 0 and 1.
  """
  for series in seriesList:
    series.options['alpha'] = alpha
  return seriesList

def color(requestContext, seriesList, theColor):
  """
  Assigns the given color to the seriesList

  Example:

  .. code-block:: none

    &target=color(collectd.hostname.cpu.0.user, 'green')
    &target=color(collectd.hostname.cpu.0.system, 'ff0000')
    &target=color(collectd.hostname.cpu.0.idle, 'gray')
    &target=color(collectd.hostname.cpu.0.idle, '6464ffaa')

  """
  for series in seriesList:
    series.color = theColor
  return seriesList

def substr(requestContext, seriesList, start=0, stop=0):
  """
  Takes one metric or a wildcard seriesList followed by 1 or 2 integers.
  Assume that the metric name is a list or array, with each element separated
  by dots. Prints n - length elements of the array (if only one integer n is
  passed) or n - m elements of the array (if two integers n and m are passed).
  The list starts with element 0 and ends with element (length - 1).

  Example:

  .. code-block:: none

    &target=substr(carbon.agents.hostname.avgUpdateTime,2,4)

  The label would be printed as "hostname.avgUpdateTime".

  """
  for series in seriesList:
    left = series.name.rfind('(') + 1
    right = series.name.find(')')
    if right < 0:
      right = len(series.name)+1
    cleanName = series.name[left:right:]
    if int(stop) == 0:
      series.name = '.'.join(cleanName.split('.')[int(start)::])
    else:
      series.name = '.'.join(cleanName.split('.')[int(start):int(stop):])

    # substr(func(a.b,'c'),1) becomes b instead of b,'c'
    series.name = re.sub(',.*$', '', series.name)

  return seriesList

def logarithm(requestContext, seriesList, base=10):
  """
  Takes one metric or a wildcard seriesList, a base, and draws the y-axis in
  logarithmic format. If base is omitted, the function defaults to base 10.

  Example:

  .. code-block:: none

    &target=log(carbon.agents.hostname.avgUpdateTime,2)

  """
  results = []
  for series in seriesList:
    newValues = []
    for val in series:
      if val is None:
        newValues.append(None)
      elif val <= 0:
        newValues.append(None)
      else:
        newValues.append(math.log(val, base))
    newName = "log(%s, %s)" % (series.name, base)
    newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues)
    newSeries.pathExpression = newName
    results.append(newSeries)
  return results
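
# Hedged sketch of logarithm()'s value handling, shown on raw values:
#
#   input values: [100, 10, 0, -5, None]   with base=10
#   result:       [2.0, 1.0, None, None, None]
#
# Zero and negative values have no real logarithm, so they are mapped to None
# rather than raising, matching how gaps are represented elsewhere.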

def maximumAbove(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by a constant n.
  Draws only the metrics with a maximum value above n.

  Example:

  .. code-block:: none

    &target=maximumAbove(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent more than 1000 packets/min.
  """
  results = []
  for series in seriesList:
    if max(series) > n:
      results.append(series)
  return results

def minimumAbove(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by a constant n.
  Draws only the metrics with a minimum value above n.

  Example:

  .. code-block:: none

    &target=minimumAbove(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent more than 1000 packets/min.
  """
  results = []
  for series in seriesList:
    if min(series) > n:
      results.append(series)
  return results

def maximumBelow(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by a constant n.
  Draws only the metrics with a maximum value below n.

  Example:

  .. code-block:: none

    &target=maximumBelow(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent less than 1000 packets/min.
  """
  result = []
  for series in seriesList:
    if max(series) <= n:
      result.append(series)
  return result

def highestCurrent(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the N metrics with the highest value
  at the end of the time period specified.

  Example:

  .. code-block:: none

    &target=highestCurrent(server*.instance*.threads.busy,5)

  Draws the 5 servers with the highest busy threads.

  """
  return sorted( seriesList, key=safeLast )[-n:]

def highestMax(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.

  Out of all metrics passed, draws only the N metrics with the highest maximum
  value in the time period specified.

  Example:

  .. code-block:: none

    &target=highestMax(server*.instance*.threads.busy,5)

  Draws the top 5 servers who have had the most busy threads during the time
  period specified.

  """
  result_list = sorted( seriesList, key=lambda s: max(s) )[-n:]

  return sorted(result_list, key=lambda s: max(s), reverse=True)

def lowestCurrent(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the N metrics with the lowest value at
  the end of the time period specified.

  Example:

  .. code-block:: none

    &target=lowestCurrent(server*.instance*.threads.busy,5)

  Draws the 5 servers with the least busy threads right now.

  """
  return sorted( seriesList, key=safeLast )[:n]

def currentAbove(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the metrics whose value is above N
  at the end of the time period specified.

  Example:

  .. code-block:: none

    &target=currentAbove(server*.instance*.threads.busy,50)

  Draws the servers with more than 50 busy threads.

  """
  return [ series for series in seriesList if safeLast(series) >= n ]

def currentBelow(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the metrics whose value is below N
  at the end of the time period specified.

  Example:

  .. code-block:: none

    &target=currentBelow(server*.instance*.threads.busy,3)

  Draws the servers with less than 3 busy threads.

  """
  return [ series for series in seriesList if safeLast(series) <= n ]

def highestAverage(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the top N metrics with the highest
  average value for the time period specified.

  Example:

  .. code-block:: none

    &target=highestAverage(server*.instance*.threads.busy,5)

  Draws the top 5 servers with the highest average value.

  """
  return sorted( seriesList, key=lambda s: safeDiv(safeSum(s),safeLen(s)) )[-n:]

def lowestAverage(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the bottom N metrics with the lowest
  average value for the time period specified.

  Example:

  .. code-block:: none

    &target=lowestAverage(server*.instance*.threads.busy,5)

  Draws the bottom 5 servers with the lowest average value.

  """
  return sorted( seriesList, key=lambda s: safeDiv(safeSum(s),safeLen(s)) )[:n]

def averageAbove(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Out of all metrics passed, draws only the metrics with an average value
  above N for the time period specified.

  Example:

  .. code-block:: none

    &target=averageAbove(server*.instance*.threads.busy,25)

  Draws the servers with average values above 25.
""" return [ series for series in seriesList if safeDiv(safeSum(series),safeLen(series)) >= n ] def averageBelow(requestContext, seriesList, n): """ Takes one metric or a wildcard seriesList followed by an integer N. Out of all metrics passed, draws only the metrics with an average value below N for the time period specified. Example: .. code-block:: none &target=averageBelow(server*.instance*.threads.busy,25) Draws the servers with average values below 25. """ return [ series for series in seriesList if safeDiv(safeSum(series),safeLen(series)) <= n ] def _getPercentile(points, n, interpolate=False): """ Percentile is calculated using the method outlined in the NIST Engineering Statistics Handbook: http://www.itl.nist.gov/div898/handbook/prc/section2/prc252.htm """ sortedPoints = sorted([ p for p in points if p is not None]) if len(sortedPoints) == 0: return None fractionalRank = (n/100.0) * (len(sortedPoints) + 1) rank = int(fractionalRank) rankFraction = fractionalRank - rank if not interpolate: rank += int(math.ceil(rankFraction)) if rank == 0: percentile = sortedPoints[0] elif rank - 1 == len(sortedPoints): percentile = sortedPoints[-1] else: percentile = sortedPoints[rank - 1] # Adjust for 0-index if interpolate: if rank != len(sortedPoints): # if a next value exists nextValue = sortedPoints[rank] percentile = percentile + rankFraction * (nextValue - percentile) return percentile def nPercentile(requestContext, seriesList, n): """Returns n-percent of each series in the seriesList.""" assert n, 'The requested percent is required to be greater than 0' results = [] for s in seriesList: # Create a sorted copy of the TimeSeries excluding None values in the values list. s_copy = TimeSeries( s.name, s.start, s.end, s.step, sorted( [item for item in s if item is not None] ) ) if not s_copy: continue # Skip this series because it is empty. perc_val = _getPercentile(s_copy, n) if perc_val is not None: name = 'nPercentile(%s, %g)' % (s_copy.name, n) point_count = int((s.end - s.start)/s.step) perc_series = TimeSeries(name, s_copy.start, s_copy.end, s_copy.step, [perc_val] * point_count ) perc_series.pathExpression = name results.append(perc_series) return results def averageOutsidePercentile(requestContext, seriesList, n): """ Removes functions lying inside an average percentile interval """ averages = [] for s in seriesList: averages.append(safeDiv(safeSum(s), safeLen(s))) if n < 50: n = 100 - n; lowPercentile = _getPercentile(averages, 100 - n) highPercentile = _getPercentile(averages, n) return [s for s in seriesList if not lowPercentile < safeDiv(safeSum(s), safeLen(s)) < highPercentile] def removeBetweenPercentile(requestContext, seriesList, n): """ Removes lines who do not have an value lying in the x-percentile of all the values at a moment """ if n < 50: n = 100 - n transposed = zip(*seriesList) lowPercentiles = [_getPercentile(col, 100-n) for col in transposed] highPercentiles = [_getPercentile(col, n) for col in transposed] return [l for l in seriesList if sum([not lowPercentiles[val_i] < val < highPercentiles[val_i] for (val_i, val) in enumerate(l)]) > 0] def removeAbovePercentile(requestContext, seriesList, n): """ Removes data above the nth percentile from the series or list of series provided. Values above this percentile are assigned a value of None. 
""" for s in seriesList: s.name = 'removeAbovePercentile(%s, %d)' % (s.name, n) s.pathExpression = s.name percentile = nPercentile(requestContext, [s], n)[0][0] for (index, val) in enumerate(s): if val > percentile: s[index] = None return seriesList def removeAboveValue(requestContext, seriesList, n): """ Removes data above the given threshold from the series or list of series provided. Values above this threshole are assigned a value of None """ for s in seriesList: s.name = 'removeAboveValue(%s, %d)' % (s.name, n) s.pathExpression = s.name for (index, val) in enumerate(s): if val > n: s[index] = None return seriesList def removeBelowPercentile(requestContext, seriesList, n): """ Removes data below the nth percentile from the series or list of series provided. Values below this percentile are assigned a value of None. """ for s in seriesList: s.name = 'removeBelowPercentile(%s, %d)' % (s.name, n) s.pathExpression = s.name percentile = nPercentile(requestContext, [s], n)[0][0] for (index, val) in enumerate(s): if val < percentile: s[index] = None return seriesList def removeBelowValue(requestContext, seriesList, n): """ Removes data below the given threshold from the series or list of series provided. Values below this threshole are assigned a value of None """ for s in seriesList: s.name = 'removeBelowValue(%s, %d)' % (s.name, n) s.pathExpression = s.name for (index, val) in enumerate(s): if val < n: s[index] = None return seriesList def limit(requestContext, seriesList, n): """ Takes one metric or a wildcard seriesList followed by an integer N. Only draw the first N metrics. Useful when testing a wildcard in a metric. Example: .. code-block:: none &target=limit(server*.instance*.memory.free,5) Draws only the first 5 instance's memory free. """ return seriesList[0:n] def sortByName(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. Sorts the list of metrics by the metric name. """ def compare(x,y): return cmp(x.name, y.name) seriesList.sort(compare) return seriesList def sortByTotal(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. Sorts the list of metrics by the sum of values across the time period specified. """ def compare(x,y): return cmp(safeSum(y), safeSum(x)) seriesList.sort(compare) return seriesList def sortByMaxima(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. Sorts the list of metrics by the maximum value across the time period specified. Useful with the &areaMode=all parameter, to keep the lowest value lines visible. Example: .. code-block:: none &target=sortByMaxima(server*.instance*.memory.free) """ def compare(x,y): return cmp(max(y), max(x)) seriesList.sort(compare) return seriesList def sortByMinima(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. Sorts the list of metrics by the lowest value across the time period specified. Example: .. code-block:: none &target=sortByMinima(server*.instance*.memory.free) """ def compare(x,y): return cmp(min(x), min(y)) newSeries = [series for series in seriesList if max(series) > 0] newSeries.sort(compare) return newSeries def sortByName(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. Sorts the list of metrics by the name. """ def compare(x,y): return 1 if x.name > y.name else -1 seriesList.sort(compare) return seriesList def useSeriesAbove(requestContext, seriesList, value, search, replace): """ Compares the maximum of each series against the given `value`. 
  maximum is greater than `value`, the regular expression search and replace is
  applied against the series name to plot a related metric

  e.g. given useSeriesAbove(ganglia.metric1.reqs,10,'reqs','time'),
  the response time metric will be plotted only when the maximum value of the
  corresponding request/s metric is > 10

  .. code-block:: none

    &target=useSeriesAbove(ganglia.metric1.reqs,10,"reqs","time")
  """
  newSeries = []

  for series in seriesList:
    newname = re.sub(search, replace, series.name)
    if max(series) > value:
      n = evaluateTarget(requestContext, newname)
      if n is not None and len(n) > 0:
        newSeries.append(n[0])

  return newSeries

def mostDeviant(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Draws the N most deviant metrics.
  To find the deviants, the standard deviation (sigma) of each series
  is taken and ranked. The top N standard deviations are returned.

  Example:

  .. code-block:: none

    &target=mostDeviant(server*.instance*.memory.free, 5)

  Draws the 5 instances furthest from the average memory free.

  """
  deviants = []
  for series in seriesList:
    mean = safeDiv( safeSum(series), safeLen(series) )
    if mean is None: continue
    square_sum = sum([ (value - mean) ** 2 for value in series if value is not None ])
    sigma = safeDiv(square_sum, safeLen(series))
    if sigma is None: continue
    deviants.append( (sigma, series) )
  deviants.sort(key=lambda i: i[0], reverse=True) #sort by sigma
  return [ series for (sigma,series) in deviants ][:n] #return the n most deviant series
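
# Hedged note on the ranking above: `sigma` here is really the population
# variance (mean squared deviation), not the standard deviation, but since
# variance is monotonic in the standard deviation the resulting order of the
# N "most deviant" series is the same.
#
#   values [2, 4, 4, 4, 5, 5, 7, 9]: mean = 5, variance = 4, stddev = 2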

def stdev(requestContext, seriesList, points, windowTolerance=0.1):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Draw the Standard Deviation of all metrics passed for the past N datapoints.
  If the ratio of null points in the window is greater than windowTolerance,
  skip the calculation. The default for windowTolerance is 0.1 (up to 10% of
  points in the window can be missing). Note that if this is set to 0.0, it
  will cause large gaps in the output anywhere a single point is missing.

  Example:

  .. code-block:: none

    &target=stdev(server*.instance*.threads.busy,30)
    &target=stdev(server*.instance*.cpu.system,30,0.0)

  """

  # For this we take the standard deviation in terms of the moving average
  # and the moving average of series squares.
  for (seriesIndex,series) in enumerate(seriesList):
    stddevSeries = TimeSeries("stddev(%s,%d)" % (series.name, int(points)), series.start, series.end, series.step, [])
    stddevSeries.pathExpression = "stddev(%s,%d)" % (series.name, int(points))

    validPoints = 0
    currentSum = 0
    currentSumOfSquares = 0
    for (index, newValue) in enumerate(series):
      # Mark whether we've reached our window size - dont drop points out otherwise
      if index < points:
        bootstrapping = True
        droppedValue = None
      else:
        bootstrapping = False
        droppedValue = series[index - points]

      # Track non-None points in window
      if not bootstrapping and droppedValue is not None:
        validPoints -= 1
      if newValue is not None:
        validPoints += 1

      # Remove the value that just dropped out of the window
      if not bootstrapping and droppedValue is not None:
        currentSum -= droppedValue
        currentSumOfSquares -= droppedValue**2

      # Add in the value that just popped in the window
      if newValue is not None:
        currentSum += newValue
        currentSumOfSquares += newValue**2

      if validPoints > 0 and \
        float(validPoints)/points >= windowTolerance:

        try:
          deviation = math.sqrt(validPoints * currentSumOfSquares - currentSum**2)/validPoints
        except ValueError:
          deviation = None
        stddevSeries.append(deviation)
      else:
        stddevSeries.append(None)

    seriesList[seriesIndex] = stddevSeries

  return seriesList

def secondYAxis(requestContext, seriesList):
  """
  Graph the series on the secondary Y axis.
  """
  for series in seriesList:
    series.options['secondYAxis'] = True
    series.name= 'secondYAxis(%s)' % series.name
  return seriesList

def _fetchWithBootstrap(requestContext, seriesList, **delta_kwargs):
  'Request the same data but with a bootstrap period at the beginning'
  bootstrapContext = requestContext.copy()
  bootstrapContext['startTime'] = requestContext['startTime'] - timedelta(**delta_kwargs)
  bootstrapContext['endTime'] = requestContext['startTime']

  bootstrapList = []
  for series in seriesList:
    if series.pathExpression in [ b.pathExpression for b in bootstrapList ]:
      # This pathExpression returns multiple series and we already fetched it
      continue
    bootstraps = evaluateTarget(bootstrapContext, series.pathExpression)
    bootstrapList.extend(bootstraps)

  newSeriesList = []
  for bootstrap, original in zip(bootstrapList, seriesList):
    newValues = []
    if bootstrap.step != original.step:
      ratio = bootstrap.step / original.step
      for value in bootstrap:
        #XXX For series with aggregationMethod = sum this should also
        # divide by the ratio to bring counts to the same time unit
        # ...but we have no way of knowing whether that's the case
        newValues.extend([ value ] * ratio)
    else:
      newValues.extend(bootstrap)
    newValues.extend(original)

    newSeries = TimeSeries(original.name, bootstrap.start, original.end, original.step, newValues)
    newSeries.pathExpression = original.pathExpression
    newSeriesList.append(newSeries)

  return newSeriesList

def _trimBootstrap(bootstrap, original):
  'Trim the bootstrap period off the front of this series so it matches the original'
  original_len = len(original)
  bootstrap_len = len(bootstrap)
  length_limit = (original_len * original.step) / bootstrap.step
  trim_start = bootstrap.end - (length_limit * bootstrap.step)
  trimmed = TimeSeries(bootstrap.name, trim_start, bootstrap.end, bootstrap.step,
      bootstrap[-length_limit:])
  return trimmed

def holtWintersIntercept(alpha,actual,last_season,last_intercept,last_slope):
  return alpha * (actual - last_season) \
      + (1 - alpha) * (last_intercept + last_slope)

def holtWintersSlope(beta,intercept,last_intercept,last_slope):
  return beta * (intercept - last_intercept) + (1 - beta) * last_slope
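
# Hedged summary of the triple-exponential-smoothing update steps implemented
# by the helpers above and below (standard additive Holt-Winters):
#
#   level (intercept): L_t = alpha * (y_t - S_{t-season}) + (1 - alpha) * (L_{t-1} + B_{t-1})
#   trend (slope):     B_t = beta * (L_t - L_{t-1}) + (1 - beta) * B_{t-1}
#   seasonal:          S_t = gamma * (y_t - L_t) + (1 - gamma) * S_{t-season}
#   forecast:          F_{t+1} = L_t + B_t + S_{t+1-season}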

def holtWintersSeasonal(gamma,actual,intercept,last_season):
  return gamma * (actual - intercept) + (1 - gamma) * last_season

def holtWintersDeviation(gamma,actual,prediction,last_seasonal_dev):
  if prediction is None:
    prediction = 0
  return gamma * math.fabs(actual - prediction) + (1 - gamma) * last_seasonal_dev

def holtWintersAnalysis(series):
  alpha = gamma = 0.1
  beta = 0.0035
  # season is currently one day
  season_length = (24*60*60) / series.step
  intercept = 0
  slope = 0
  pred = 0
  intercepts = list()
  slopes = list()
  seasonals = list()
  predictions = list()
  deviations = list()

  def getLastSeasonal(i):
    j = i - season_length
    if j >= 0:
      return seasonals[j]
    return 0

  def getLastDeviation(i):
    j = i - season_length
    if j >= 0:
      return deviations[j]
    return 0

  last_seasonal = 0
  last_seasonal_dev = 0
  next_last_seasonal = 0
  next_pred = None

  for i,actual in enumerate(series):
    if actual is None:
      # missing input values break all the math
      # do the best we can and move on
      intercepts.append(None)
      slopes.append(0)
      seasonals.append(0)
      predictions.append(next_pred)
      deviations.append(0)
      next_pred = None
      continue

    if i == 0:
      last_intercept = actual
      last_slope = 0
      # seed the first prediction as the first actual
      prediction = actual
    else:
      last_intercept = intercepts[-1]
      last_slope = slopes[-1]
      if last_intercept is None:
        last_intercept = actual
      prediction = next_pred

    last_seasonal = getLastSeasonal(i)
    next_last_seasonal = getLastSeasonal(i+1)
    last_seasonal_dev = getLastDeviation(i)

    intercept = holtWintersIntercept(alpha,actual,last_seasonal
            ,last_intercept,last_slope)
    slope = holtWintersSlope(beta,intercept,last_intercept,last_slope)
    seasonal = holtWintersSeasonal(gamma,actual,intercept,last_seasonal)
    next_pred = intercept + slope + next_last_seasonal
    deviation = holtWintersDeviation(gamma,actual,prediction,last_seasonal_dev)

    intercepts.append(intercept)
    slopes.append(slope)
    seasonals.append(seasonal)
    predictions.append(prediction)
    deviations.append(deviation)

  # make the new forecast series
  forecastName = "holtWintersForecast(%s)" % series.name
  forecastSeries = TimeSeries(forecastName, series.start, series.end
    , series.step, predictions)
  forecastSeries.pathExpression = forecastName

  # make the new deviation series
  deviationName = "holtWintersDeviation(%s)" % series.name
  deviationSeries = TimeSeries(deviationName, series.start, series.end
          , series.step, deviations)
  deviationSeries.pathExpression = deviationName

  results = { 'predictions': forecastSeries
        , 'deviations': deviationSeries
        , 'intercepts': intercepts
        , 'slopes': slopes
        , 'seasonals': seasonals
        }
  return results

def holtWintersForecast(requestContext, seriesList):
  """
  Performs a Holt-Winters forecast using the series as input data. Data from
  one week previous to the series is used to bootstrap the initial forecast.
  """
  results = []
  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7)
  for bootstrap, series in zip(bootstrapList, seriesList):
    analysis = holtWintersAnalysis(bootstrap)
    results.append(_trimBootstrap(analysis['predictions'], series))
  return results

def holtWintersConfidenceBands(requestContext, seriesList, delta=3):
  """
  Performs a Holt-Winters forecast using the series as input data and plots
  upper and lower bands with the predicted forecast deviations.
""" results = [] bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7) for bootstrap,series in zip(bootstrapList, seriesList): analysis = holtWintersAnalysis(bootstrap) forecast = _trimBootstrap(analysis['predictions'], series) deviation = _trimBootstrap(analysis['deviations'], series) seriesLength = len(forecast) i = 0 upperBand = list() lowerBand = list() while i < seriesLength: forecast_item = forecast[i] deviation_item = deviation[i] i = i + 1 if forecast_item is None or deviation_item is None: upperBand.append(None) lowerBand.append(None) else: scaled_deviation = delta * deviation_item upperBand.append(forecast_item + scaled_deviation) lowerBand.append(forecast_item - scaled_deviation) upperName = "holtWintersConfidenceUpper(%s)" % series.name lowerName = "holtWintersConfidenceLower(%s)" % series.name upperSeries = TimeSeries(upperName, forecast.start, forecast.end , forecast.step, upperBand) lowerSeries = TimeSeries(lowerName, forecast.start, forecast.end , forecast.step, lowerBand) upperSeries.pathExpression = series.pathExpression lowerSeries.pathExpression = series.pathExpression results.append(lowerSeries) results.append(upperSeries) return results def holtWintersAberration(requestContext, seriesList, delta=3): """ Performs a Holt-Winters forecast using the series as input data and plots the positive or negative deviation of the series data from the forecast. """ results = [] for series in seriesList: confidenceBands = holtWintersConfidenceBands(requestContext, [series], delta) lowerBand = confidenceBands[0] upperBand = confidenceBands[1] aberration = list() for i, actual in enumerate(series): if series[i] is None: aberration.append(0) elif upperBand[i] is not None and series[i] > upperBand[i]: aberration.append(series[i] - upperBand[i]) elif lowerBand[i] is not None and series[i] < lowerBand[i]: aberration.append(series[i] - lowerBand[i]) else: aberration.append(0) newName = "holtWintersAberration(%s)" % series.name results.append(TimeSeries(newName, series.start, series.end , series.step, aberration)) return results def holtWintersConfidenceArea(requestContext, seriesList, delta=3): """ Performs a Holt-Winters forecast using the series as input data and plots the area between the upper and lower bands of the predicted forecast deviations. """ bands = holtWintersConfidenceBands(requestContext, seriesList, delta) results = areaBetween(requestContext, bands) for series in results: series.name = series.name.replace('areaBetween', 'holtWintersConfidenceArea') return results def drawAsInfinite(requestContext, seriesList): """ Takes one metric or a wildcard seriesList. If the value is zero, draw the line at 0. If the value is above zero, draw the line at infinity. If the value is null or less than zero, do not draw the line. Useful for displaying on/off metrics, such as exit codes. (0 = success, anything else = failure.) Example: .. code-block:: none drawAsInfinite(Testing.script.exitCode) """ for series in seriesList: series.options['drawAsInfinite'] = True series.name = 'drawAsInfinite(%s)' % series.name return seriesList def lineWidth(requestContext, seriesList, width): """ Takes one metric or a wildcard seriesList, followed by a float F. Draw the selected metrics with a line width of F, overriding the default value of 1, or the &lineWidth=X.X parameter. Useful for highlighting a single metric out of many, or having multiple line widths in one graph. Example: .. 

    &target=lineWidth(server01.instance01.memory.free,5)

  """
  for series in seriesList:
    series.options['lineWidth'] = width
  return seriesList

def dashed(requestContext, *seriesList):
  """
  Takes one metric or a wildcard seriesList, followed by a float F.

  Draw the selected metrics with a dotted line with segments of length F.
  If omitted, the default length of the segments is 5.0.

  Example:

  .. code-block:: none

    &target=dashed(server01.instance01.memory.free,2.5)

  """
  if len(seriesList) == 2:
    dashLength = seriesList[1]
  else:
    dashLength = 5
  for series in seriesList[0]:
    series.name = 'dashed(%s, %g)' % (series.name, dashLength)
    series.options['dashed'] = dashLength
  return seriesList[0]

def timeStack(requestContext, seriesList, timeShiftUnit, timeShiftStart, timeShiftEnd):
  """
  Takes one metric or a wildcard seriesList, followed by a quoted string with the
  length of time (See ``from / until`` in the render\_api_ for examples of time formats).
  Also takes a start multiplier and end multiplier for the length of time.

  Creates a seriesList which is composed of the original metric series stacked
  with time shifts, from the start multiplier through the end multiplier.

  Useful for looking at history, or feeding into seriesAverage or seriesStdDev.

  Example:

  .. code-block:: none

    # create a series for today and each of the previous 7 days
    &target=timeStack(Sales.widgets.largeBlue,"1d",0,7)

  """
  # Default to negative. parseTimeOffset defaults to +
  if timeShiftUnit[0].isdigit():
    timeShiftUnit = '-' + timeShiftUnit
  delta = parseTimeOffset(timeShiftUnit)
  # if len(seriesList) > 1, they will all have the same pathExpression, which is all we care about.
  series = seriesList[0]
  results = []
  timeShiftStartint = int(timeShiftStart)
  timeShiftEndint = int(timeShiftEnd)

  for shft in range(timeShiftStartint,timeShiftEndint):
    myContext = requestContext.copy()
    innerDelta = delta * shft
    myContext['startTime'] = requestContext['startTime'] + innerDelta
    myContext['endTime'] = requestContext['endTime'] + innerDelta
    for shiftedSeries in evaluateTarget(myContext, series.pathExpression):
      shiftedSeries.name = 'timeShift(%s, %s, %s)' % (shiftedSeries.name, timeShiftUnit,shft)
      shiftedSeries.pathExpression = shiftedSeries.name
      shiftedSeries.start = series.start
      shiftedSeries.end = series.end
      results.append(shiftedSeries)

  return results

def timeShift(requestContext, seriesList, timeShift, resetEnd=True):
  """
  Takes one metric or a wildcard seriesList, followed by a quoted string with the
  length of time (See ``from / until`` in the render\_api_ for examples of time formats).

  Draws the selected metrics shifted in time. If no sign is given, a minus sign ( - ) is
  implied which will shift the metric back in time. If a plus sign ( + ) is given, the
  metric will be shifted forward in time.

  Will reset the end date range automatically to the end of the base stat unless
  resetEnd is False. Example case is when you timeshift to last week and have the graph
  date range set to include a time in the future, will limit this timeshift to pretend
  ending at the current time. If resetEnd is False, will instead draw full range including
  future time.

  Useful for comparing a metric against itself at past periods or correcting data
  stored at an offset.

  Example:

  .. code-block:: none

    &target=timeShift(Sales.widgets.largeBlue,"7d")
    &target=timeShift(Sales.widgets.largeBlue,"-7d")
    &target=timeShift(Sales.widgets.largeBlue,"+1h")

  """
  # Default to negative. parseTimeOffset defaults to +
  if timeShift[0].isdigit():
    timeShift = '-' + timeShift
  delta = parseTimeOffset(timeShift)
  myContext = requestContext.copy()
  myContext['startTime'] = requestContext['startTime'] + delta
  myContext['endTime'] = requestContext['endTime'] + delta
  results = []
  if len(seriesList) > 0:
    # if len(seriesList) > 1, they will all have the same pathExpression, which is all we care about.
    series = seriesList[0]

    for shiftedSeries in evaluateTarget(myContext, series.pathExpression):
      shiftedSeries.name = 'timeShift(%s, %s)' % (shiftedSeries.name, timeShift)
      if resetEnd:
        shiftedSeries.end = series.end
      else:
        shiftedSeries.end = shiftedSeries.end - shiftedSeries.start + series.start
      shiftedSeries.start = series.start
      results.append(shiftedSeries)

  return results

def constantLine(requestContext, value):
  """
  Takes a float F.

  Draws a horizontal line at value F across the graph.

  Example:

  .. code-block:: none

    &target=constantLine(123.456)

  """
  start = timestamp( requestContext['startTime'] )
  end = timestamp( requestContext['endTime'] )
  step = (end - start) / 1.0
  series = TimeSeries(str(value), start, end, step, [value, value])
  return [series]

def aggregateLine(requestContext, seriesList, func='avg'):
  """
  Draws a horizontal line based on the function applied to the series.

  Note: By default, the graphite renderer consolidates data points by
  averaging data points over time. If you are using the 'min' or 'max'
  function for aggregateLine, this can cause an unusual gap in the
  line drawn by this function and the data itself. To fix this, you
  should use the consolidateBy() function with the same function
  argument you are using for aggregateLine. This will ensure that the
  proper data points are retained and the graph should line up
  correctly.

  Example:

  .. code-block:: none

    &target=aggregateLine(server.connections.total, 'avg')

  """
  t_funcs = { 'avg': safeAvg, 'min': safeMin, 'max': safeMax }

  if func not in t_funcs:
    raise ValueError("Invalid function %s" % func)

  value = t_funcs[func]( seriesList[0] )
  name = 'aggregateLine(%s,%g)' % (seriesList[0].pathExpression, value)

  series = constantLine(requestContext, value)[0]
  series.name = name
  return [series]

def threshold(requestContext, value, label=None, color=None):
  """
  Takes a float F, followed by a label (in double quotes) and a color.
  (See ``bgcolor`` in the render\_api_ for valid color names & formats.)

  Draws a horizontal line at value F across the graph.

  Example:

  .. code-block:: none

    &target=threshold(123.456, "omgwtfbbq", red)

  """
  series = constantLine(requestContext, value)[0]
  if label:
    series.name = label
  if color:
    series.color = color

  return [series]

def transformNull(requestContext, seriesList, default=0):
  """
  Takes a metric or wild card seriesList and an optional value
  to transform Nulls to. Default is 0. This method complements
  the drawNullAsZero flag in graphical mode, but also works in text-only
  mode.

  Example:

  .. code-block:: none

    &target=transformNull(webapp.pages.*.views,-1)

  This would take any page that didn't have values and supply negative 1 as a
  default. Any other numeric value may be used as well.
  """
  def transform(v):
    if v is None:
      return default
    else:
      return v

  for series in seriesList:
    series.name = "transformNull(%s,%g)" % (series.name, default)
    series.pathExpression = series.name
    values = [transform(v) for v in series]
    series.extend(values)
    del series[:len(values)]
  return seriesList
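
# Hedged note on the in-place update idiom used by transformNull() above and
# by isNonNull(), upperBound() and lowerBound() below: TimeSeries subclasses
# list, so
#
#   values = [transform(v) for v in series]
#   series.extend(values)      # append the transformed copy
#   del series[:len(values)]   # drop the original prefix
#
# replaces the contents while keeping the same TimeSeries object (and its
# name/start/end/step attributes) intact.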

def isNonNull(requestContext, seriesList):
  """
  Takes a metric or wild card seriesList and counts up how many
  non-null values are specified. This is useful for understanding
  which metrics have data at a given point in time (ie, to count
  which servers are alive).

  Example:

  .. code-block:: none

    &target=isNonNull(webapp.pages.*.views)

  Returns a seriesList where 1 is specified for non-null values, and
  0 is specified for null values.
  """

  def transform(v):
    if v is None:
      return 0
    else:
      return 1

  for series in seriesList:
    series.name = "isNonNull(%s)" % (series.name)
    series.pathExpression = series.name
    values = [transform(v) for v in series]
    series.extend(values)
    del series[:len(values)]
  return seriesList

def upperBound(requestContext, seriesList, boundary):
  """
  Takes a metric or wild card seriesList and returns min(value, boundary)
  for non-null values.

  This is useful for when you only care about the value up to a
  certain point - for example if you are logging error codes and you
  only care if the value is >= 1 and not the value itself.

  Example:

  .. code-block:: none

    &target=upperBound(application.myapp.*.exitcode, 1.0)

  Returns a seriesList where the maximum value is the boundary or lower.
  """

  def transform(v):
    if v is None:
      return None
    return min(v, boundary)

  for series in seriesList:
    series.name = "upperBound(%s, %d)" % (series.name, boundary)
    series.pathExpression = series.name
    values = [transform(v) for v in series]
    series.extend(values)
    del series[:len(values)]
  return seriesList

def lowerBound(requestContext, seriesList, boundary):
  """
  Takes a metric or wild card seriesList and returns max(value, boundary)
  for non-null values.

  This is useful for when you only care about the value up to a
  certain point - for example if you are logging error codes and you
  only care if the value is <= -1 and not the value itself.

  Example:

  .. code-block:: none

    &target=lowerBound(application.myapp.*.exitcode, -1.0)

  Returns a seriesList where the minimum value is the boundary or greater.
  """

  def transform(v):
    if v is None:
      return None
    return max(v, boundary)

  for series in seriesList:
    series.name = "lowerBound(%s, %d)" % (series.name, boundary)
    series.pathExpression = series.name
    values = [transform(v) for v in series]
    series.extend(values)
    del series[:len(values)]
  return seriesList

def identity(requestContext, name):
  """
  Identity function:
  Returns datapoints where the value equals the timestamp of the datapoint.
  Useful when you have another series where the value is a timestamp, and
  you want to compare it to the time of the datapoint, to render an age.

  Example:

  .. code-block:: none

    &target=identity("The.time.series")

  This would create a series named "The.time.series" that contains points where
  x(t) == t.
  """
  step = 60
  delta = timedelta(seconds=step)
  # mktime() returns a float; range() requires ints
  start = int(time.mktime(requestContext["startTime"].timetuple()))
  end = int(time.mktime(requestContext["endTime"].timetuple()))
  values = range(start, end, step)
  series = TimeSeries(name, start, end, step, values)
  series.pathExpression = 'identity("%s")' % name

  return [series]

def countSeries(requestContext, *seriesLists):
  """
  Draws a horizontal line representing the number of nodes found in the seriesList.

  .. code-block:: none

    &target=countSeries(carbon.agents.*.*)

  """
  (seriesList,start,end,step) = normalize(seriesLists)
  name = "countSeries(%s)" % formatPathExpressions(seriesList)
  values = ( int(len(row)) for row in izip(*seriesList) )
  series = TimeSeries(name,start,end,step,values)
  series.pathExpression = name
  return [series]
  This is used to pass multiple seriesLists to a function which only takes one.
  """
  seriesGroup = []
  for s in seriesLists:
    seriesGroup.extend(s)

  return seriesGroup


def mapSeries(requestContext, seriesList, mapNode):
  """
  Takes a seriesList and maps it to a list of sub-seriesLists. Each
  sub-seriesList has the given mapNode in common.

  Example:

  .. code-block:: none

    map(servers.*.cpu.*,1) =>
      [
        servers.server1.cpu.*,
        servers.server2.cpu.*,
        ...
        servers.serverN.cpu.*
      ]
  """
  metaSeries = {}
  keys = []
  for series in seriesList:
    key = series.name.split(".")[mapNode]
    if key not in metaSeries:
      metaSeries[key] = [series]
      keys.append(key)
    else:
      metaSeries[key].append(series)
  return [ metaSeries[key] for key in keys ]


def reduceSeries(requestContext, seriesLists, reduceFunction, reduceNode, *reduceMatchers):
  """
  Takes a list of seriesLists and reduces it to a list of series by
  means of the reduceFunction.

  Reduction is performed by matching the reduceNode in each series
  against the list of reduceMatchers. Each series is then passed to the
  reduceFunction as arguments in the order given by reduceMatchers. The
  reduceFunction should yield a single series.

  Example:

  .. code-block:: none

    reduce(map(servers.*.disk.*,1),3,"asPercent","bytes_used","total_bytes") =>
      asPercent(servers.server1.disk.bytes_used,servers.server1.disk.total_bytes),
      asPercent(servers.server2.disk.bytes_used,servers.server2.disk.total_bytes),
      ...
      asPercent(servers.serverN.disk.bytes_used,servers.serverN.disk.total_bytes)

  The resulting list of series are aliased so that they can easily be
  nested in other functions. In the above example, the resulting series
  names would become:

  .. code-block:: none

    servers.server1.disk.reduce.asPercent,
    servers.server2.disk.reduce.asPercent,
    ...
    servers.serverN.disk.reduce.asPercent
  """
  metaSeries = {}
  keys = []
  for seriesList in seriesLists:
    for series in seriesList:
      nodes = series.name.split('.')
      node = nodes[reduceNode]
      reduceSeriesName = '.'.join(nodes[0:reduceNode]) + '.reduce.' + reduceFunction
      if node in reduceMatchers:
        if reduceSeriesName not in metaSeries:
          metaSeries[reduceSeriesName] = [None] * len(reduceMatchers)
          keys.append(reduceSeriesName)
        i = reduceMatchers.index(node)
        metaSeries[reduceSeriesName][i] = series
  for key in keys:
    metaSeries[key] = SeriesFunctions[reduceFunction](requestContext,metaSeries[key])[0]
    metaSeries[key].name = key
  return [ metaSeries[key] for key in keys ]


def groupByNode(requestContext, seriesList, nodeNum, callback):
  """
  Takes a seriesList and maps a callback to subgroups within as defined
  by a common node.

  .. code-block:: none

    &target=groupByNode(ganglia.by-function.*.*.cpu.load5,2,"sumSeries")

  Would return multiple series which are each the result of applying the
  "sumSeries" function to groups joined on the second node (0 indexed),
  resulting in a list of targets like
  sumSeries(ganglia.by-function.server1.*.cpu.load5),sumSeries(ganglia.by-function.server2.*.cpu.load5),...
  """
  metaSeries = {}
  keys = []
  for series in seriesList:
    key = series.name.split(".")[nodeNum]
    if key not in metaSeries:
      metaSeries[key] = [series]
      keys.append(key)
    else:
      metaSeries[key].append(series)
  for key in keys:
    metaSeries[key] = SeriesFunctions[callback](requestContext, metaSeries[key])[0]
    metaSeries[key].name = key
  return [ metaSeries[key] for key in keys ]


def exclude(requestContext, seriesList, pattern):
  """
  Takes a metric or a wildcard seriesList, followed by a regular
  expression in double quotes. Excludes metrics that match the regular
  expression.

  Example:

  .. code-block:: none

    &target=exclude(servers*.instance*.threads.busy,"server02")
  """
  regex = re.compile(pattern)
  return [s for s in seriesList if not regex.search(s.name)]


def grep(requestContext, seriesList, pattern):
  """
  Takes a metric or a wildcard seriesList, followed by a regular
  expression in double quotes. Excludes metrics that don't match the
  regular expression.

  Example:

  .. code-block:: none

    &target=grep(servers*.instance*.threads.busy,"server02")
  """
  regex = re.compile(pattern)
  return [s for s in seriesList if regex.search(s.name)]


def smartSummarize(requestContext, seriesList, intervalString, func='sum', alignToFrom=False):
  """
  Smarter experimental version of summarize.

  The alignToFrom parameter has been deprecated, it no longer has any
  effect. Alignment happens automatically for days, hours, and minutes.
  """
  if alignToFrom:
    log.info("Deprecated parameter 'alignToFrom' is being ignored.")

  results = []
  delta = parseTimeOffset(intervalString)
  interval = delta.seconds + (delta.days * 86400)

  # Align the start time to a day, hour, or minute boundary,
  # depending on the size of the interval
  requestContext = requestContext.copy()
  s = requestContext['startTime']
  if interval >= DAY:
    requestContext['startTime'] = datetime(s.year, s.month, s.day)
  elif interval >= HOUR:
    requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour)
  elif interval >= MINUTE:
    requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour, s.minute)

  for series in seriesList:
    # XXX: breaks with summarize(metric.{a,b})
    #      each series.pathExpression == metric.{a,b}
    newSeries = evaluateTarget(requestContext, series.pathExpression)[0]
    series[0:len(series)] = newSeries
    series.start = newSeries.start
    series.end = newSeries.end
    series.step = newSeries.step

  for series in seriesList:
    buckets = {} # { timestamp: [values] }

    timestamps = range( int(series.start), int(series.end), int(series.step) )
    datapoints = zip(timestamps, series)

    # Populate buckets
    for (timestamp, value) in datapoints:
      bucketInterval = int((timestamp - series.start) / interval)

      if bucketInterval not in buckets:
        buckets[bucketInterval] = []

      if value is not None:
        buckets[bucketInterval].append(value)

    newValues = []
    for timestamp in range(series.start, series.end, interval):
      bucketInterval = int((timestamp - series.start) / interval)
      bucket = buckets.get(bucketInterval, [])

      if bucket:
        if func == 'avg':
          newValues.append( float(sum(bucket)) / float(len(bucket)) )
        elif func == 'last':
          newValues.append( bucket[len(bucket)-1] )
        elif func == 'max':
          newValues.append( max(bucket) )
        elif func == 'min':
          newValues.append( min(bucket) )
        else:
          newValues.append( sum(bucket) )
      else:
        newValues.append( None )

    newName = "smartSummarize(%s, \"%s\", \"%s\")" % (series.name, intervalString, func)
    alignedEnd = series.start + (bucketInterval * interval) + interval
    newSeries = TimeSeries(newName, series.start, alignedEnd, interval, newValues)
    newSeries.pathExpression = newName
    results.append(newSeries)

  return results


def summarize(requestContext, seriesList, intervalString, func='sum', alignToFrom=False):
  """
  Summarize the data into interval buckets of a certain size.

  By default, the contents of each interval bucket are summed together.
  This is useful for counters where each increment represents a discrete
  event and retrieving a "per X" value requires summing all the events
  in that interval.

  Specifying 'avg' instead will return the mean for each bucket, which
  can be more useful when the value is a gauge that represents a certain
  value in time.
  'max', 'min' or 'last' can also be specified.

  By default, buckets are calculated by rounding to the nearest interval. This
  works well for intervals smaller than a day. For example, 22:32 will end up
  in the bucket 22:00-23:00 when the interval=1hour.

  Passing alignToFrom=true will instead create buckets starting at the from
  time. In this case, the bucket for 22:32 depends on the from time. If
  from=6:30 then the 1hour bucket for 22:32 is 22:30-23:30.

  Example:

  .. code-block:: none

    &target=summarize(counter.errors, "1hour") # total errors per hour
    &target=summarize(nonNegativeDerivative(gauge.num_users), "1week") # new users per week
    &target=summarize(queue.size, "1hour", "avg") # average queue size per hour
    &target=summarize(queue.size, "1hour", "max") # maximum queue size during each hour
    &target=summarize(metric, "13week", "avg", true)&from=midnight+20100101 # 2010 Q1-4
  """
  results = []
  delta = parseTimeOffset(intervalString)
  interval = delta.seconds + (delta.days * 86400)

  for series in seriesList:
    buckets = {}

    timestamps = range( int(series.start), int(series.end), int(series.step) )
    datapoints = zip(timestamps, series)

    for (timestamp, value) in datapoints:
      if alignToFrom:
        bucketInterval = int((timestamp - series.start) / interval)
      else:
        bucketInterval = timestamp - (timestamp % interval)

      if bucketInterval not in buckets:
        buckets[bucketInterval] = []

      if value is not None:
        buckets[bucketInterval].append(value)

    if alignToFrom:
      newStart = series.start
      newEnd = series.end
    else:
      newStart = series.start - (series.start % interval)
      newEnd = series.end - (series.end % interval) + interval

    newValues = []
    for timestamp in range(newStart, newEnd, interval):
      if alignToFrom:
        newEnd = timestamp
        bucketInterval = int((timestamp - series.start) / interval)
      else:
        bucketInterval = timestamp - (timestamp % interval)

      bucket = buckets.get(bucketInterval, [])

      if bucket:
        if func == 'avg':
          newValues.append( float(sum(bucket)) / float(len(bucket)) )
        elif func == 'last':
          newValues.append( bucket[len(bucket)-1] )
        elif func == 'max':
          newValues.append( max(bucket) )
        elif func == 'min':
          newValues.append( min(bucket) )
        else:
          newValues.append( sum(bucket) )
      else:
        newValues.append( None )

    if alignToFrom:
      newEnd += interval

    newName = "summarize(%s, \"%s\", \"%s\"%s)" % (series.name, intervalString, func, alignToFrom and ", true" or "")
    newSeries = TimeSeries(newName, newStart, newEnd, interval, newValues)
    newSeries.pathExpression = newName
    results.append(newSeries)

  return results


def hitcount(requestContext, seriesList, intervalString, alignToInterval = False):
  """
  Estimate hit counts from a list of time series.

  This function assumes the values in each time series represent
  hits per second. It calculates hits per some larger interval
  such as per day or per hour. This function is like summarize(),
  except that it compensates automatically for different time scales
  (so that a similar graph results from using either fine-grained
  or coarse-grained records) and handles rarely-occurring events
  gracefully.
""" results = [] delta = parseTimeOffset(intervalString) interval = int(delta.seconds + (delta.days * 86400)) if alignToInterval: requestContext = requestContext.copy() s = requestContext['startTime'] if interval >= DAY: requestContext['startTime'] = datetime(s.year, s.month, s.day) elif interval >= HOUR: requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour) elif interval >= MINUTE: requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour, s.minute) for i,series in enumerate(seriesList): newSeries = evaluateTarget(requestContext, series.pathExpression)[0] intervalCount = int((series.end - series.start) / interval) series[0:len(series)] = newSeries series.start = newSeries.start series.end = newSeries.start + (intervalCount * interval) + interval series.step = newSeries.step for series in seriesList: length = len(series) step = int(series.step) bucket_count = int(math.ceil(float(series.end - series.start) / interval)) buckets = [[] for _ in range(bucket_count)] newStart = int(series.end - bucket_count * interval) for i, value in enumerate(series): if value is None: continue start_time = int(series.start + i * step) start_bucket, start_mod = divmod(start_time - newStart, interval) end_time = start_time + step end_bucket, end_mod = divmod(end_time - newStart, interval) if end_bucket >= bucket_count: end_bucket = bucket_count - 1 end_mod = interval if start_bucket == end_bucket: # All of the hits go to a single bucket. if start_bucket >= 0: buckets[start_bucket].append(value * (end_mod - start_mod)) else: # Spread the hits among 2 or more buckets. if start_bucket >= 0: buckets[start_bucket].append(value * (interval - start_mod)) hits_per_bucket = value * interval for j in range(start_bucket + 1, end_bucket): buckets[j].append(hits_per_bucket) if end_mod > 0: buckets[end_bucket].append(value * end_mod) newValues = [] for bucket in buckets: if bucket: newValues.append( sum(bucket) ) else: newValues.append(None) newName = 'hitcount(%s, "%s"%s)' % (series.name, intervalString, alignToInterval and ", true" or "") newSeries = TimeSeries(newName, newStart, series.end, interval, newValues) newSeries.pathExpression = newName results.append(newSeries) return results def timeFunction(requestContext, name): """ Short Alias: time() Just returns the timestamp for each X value. T Example: .. code-block:: none &target=time("The.time.series") This would create a series named "The.time.series" that contains in Y the same value (in seconds) as X. """ step = 60 delta = timedelta(seconds=step) when = requestContext["startTime"] values = [] while when < requestContext["endTime"]: values.append(time.mktime(when.timetuple())) when += delta series = TimeSeries(name, int(time.mktime(requestContext["startTime"].timetuple())), int(time.mktime(requestContext["endTime"].timetuple())), step, values) series.pathExpression = name return [series] def sinFunction(requestContext, name, amplitude=1): """ Short Alias: sin() Just returns the sine of the current time. The optional amplitude parameter changes the amplitude of the wave. Example: .. code-block:: none &target=sin("The.time.series", 2) This would create a series named "The.time.series" that contains sin(x)*2. 
""" step = 60 delta = timedelta(seconds=step) when = requestContext["startTime"] values = [] while when < requestContext["endTime"]: values.append(math.sin(time.mktime(when.timetuple()))*amplitude) when += delta return [TimeSeries(name, int(time.mktime(requestContext["startTime"].timetuple())), int(time.mktime(requestContext["endTime"].timetuple())), step, values)] def randomWalkFunction(requestContext, name): """ Short Alias: randomWalk() Returns a random walk starting at 0. This is great for testing when there is no real data in whisper. Example: .. code-block:: none &target=randomWalk("The.time.series") This would create a series named "The.time.series" that contains points where x(t) == x(t-1)+random()-0.5, and x(0) == 0. """ step = 60 delta = timedelta(seconds=step) when = requestContext["startTime"] values = [] current = 0 while when < requestContext["endTime"]: values.append(current) current += random.random() - 0.5 when += delta return [TimeSeries(name, int(time.mktime(requestContext["startTime"].timetuple())), int(time.mktime(requestContext["endTime"].timetuple())), step, values)] def events(requestContext, *tags): """ Returns the number of events at this point in time. Usable with drawAsInfinite. Example: .. code-block:: none &target=events("tag-one", "tag-two") &target=events("*") Returns all events tagged as "tag-one" and "tag-two" and the second one returns all events. """ def to_epoch(datetime_object): return int(time.mktime(datetime_object.timetuple())) step = 1 name = "events(" + ", ".join(tags) + ")" if tags == ("*",): tags = None # Django returns database timestamps in timezone-ignorant datetime objects # so we use epoch seconds and do the conversion ourselves start_timestamp = to_epoch(requestContext["startTime"]) start_timestamp = start_timestamp - start_timestamp % step end_timestamp = to_epoch(requestContext["endTime"]) end_timestamp = end_timestamp - end_timestamp % step points = (end_timestamp - start_timestamp)/step events = models.Event.find_events(datetime.fromtimestamp(start_timestamp), datetime.fromtimestamp(end_timestamp), tags=tags) values = [None] * points for event in events: event_timestamp = to_epoch(event.when) value_offset = (event_timestamp - start_timestamp)/step if values[value_offset] is None: values[value_offset] = 1 else: values[value_offset] += 1 result_series = TimeSeries(name, start_timestamp, end_timestamp, step, values, 'sum') result_series.pathExpression = name return [result_series] def pieAverage(requestContext, series): return safeDiv(safeSum(series),safeLen(series)) def pieMaximum(requestContext, series): return max(series) def pieMinimum(requestContext, series): return min(series) PieFunctions = { 'average' : pieAverage, 'maximum' : pieMaximum, 'minimum' : pieMinimum, } SeriesFunctions = { # Combine functions 'sumSeries' : sumSeries, 'sum' : sumSeries, 'multiplySeries' : multiplySeries, 'averageSeries' : averageSeries, 'stddevSeries' : stddevSeries, 'avg' : averageSeries, 'sumSeriesWithWildcards': sumSeriesWithWildcards, 'averageSeriesWithWildcards': averageSeriesWithWildcards, 'minSeries' : minSeries, 'maxSeries' : maxSeries, 'rangeOfSeries': rangeOfSeries, 'percentileOfSeries': percentileOfSeries, 'countSeries': countSeries, 'weightedAverage': weightedAverage, # Transform functions 'scale' : scale, 'invert' : invert, 'scaleToSeconds' : scaleToSeconds, 'offset' : offset, 'offsetToZero' : offsetToZero, 'derivative' : derivative, 'perSecond' : perSecond, 'integral' : integral, 'percentileOfSeries': percentileOfSeries, 
  'nonNegativeDerivative' : nonNegativeDerivative,
  'log' : logarithm,
  'timeStack': timeStack,
  'timeShift': timeShift,
  'summarize' : summarize,
  'smartSummarize' : smartSummarize,
  'hitcount' : hitcount,
  'absolute' : absolute,

  # Calculate functions
  'movingAverage' : movingAverage,
  'movingMedian' : movingMedian,
  'stdev' : stdev,
  'holtWintersForecast': holtWintersForecast,
  'holtWintersConfidenceBands': holtWintersConfidenceBands,
  'holtWintersConfidenceArea': holtWintersConfidenceArea,
  'holtWintersAberration': holtWintersAberration,
  'asPercent' : asPercent,
  'pct' : asPercent,
  'diffSeries' : diffSeries,
  'divideSeries' : divideSeries,

  # Series Filter functions
  'mostDeviant' : mostDeviant,
  'highestCurrent' : highestCurrent,
  'lowestCurrent' : lowestCurrent,
  'highestMax' : highestMax,
  'currentAbove' : currentAbove,
  'currentBelow' : currentBelow,
  'highestAverage' : highestAverage,
  'lowestAverage' : lowestAverage,
  'averageAbove' : averageAbove,
  'averageBelow' : averageBelow,
  'maximumAbove' : maximumAbove,
  'minimumAbove' : minimumAbove,
  'maximumBelow' : maximumBelow,
  'nPercentile' : nPercentile,
  'limit' : limit,
  'sortByTotal' : sortByTotal,
  'sortByName' : sortByName,
  'averageOutsidePercentile' : averageOutsidePercentile,
  'removeBetweenPercentile' : removeBetweenPercentile,
  'sortByMaxima' : sortByMaxima,
  'sortByMinima' : sortByMinima,
  'useSeriesAbove': useSeriesAbove,
  'exclude' : exclude,

  # Data Filter functions
  'removeAbovePercentile' : removeAbovePercentile,
  'removeAboveValue' : removeAboveValue,
  'removeBelowPercentile' : removeBelowPercentile,
  'removeBelowValue' : removeBelowValue,

  # Special functions
  'legendValue' : legendValue,
  'alias' : alias,
  'aliasSub' : aliasSub,
  'aliasByNode' : aliasByNode,
  'aliasByMetric' : aliasByMetric,
  'cactiStyle' : cactiStyle,
  'color' : color,
  'alpha' : alpha,
  'cumulative' : cumulative,
  'consolidateBy' : consolidateBy,
  'keepLastValue' : keepLastValue,
  'drawAsInfinite' : drawAsInfinite,
  'secondYAxis': secondYAxis,
  'lineWidth' : lineWidth,
  'dashed' : dashed,
  'substr' : substr,
  'group' : group,
  'map': mapSeries,
  'reduce': reduceSeries,
  'groupByNode' : groupByNode,
  'constantLine' : constantLine,
  'stacked' : stacked,
  'areaBetween' : areaBetween,
  'threshold' : threshold,
  'transformNull' : transformNull,
  'isNonNull' : isNonNull,
  'identity': identity,
  'aggregateLine' : aggregateLine,
  'upperBound' : upperBound,
  'lowerBound' : lowerBound,

  # test functions
  'time': timeFunction,
  'sin': sinFunction,
  'randomWalk': randomWalkFunction,
  'timeFunction': timeFunction,
  'sinFunction': sinFunction,
  'randomWalkFunction': randomWalkFunction,

  # events
  'events': events,
}

# Avoid import circularity
if not environ.get('READTHEDOCS'):
  from graphite.render.evaluator import evaluateTarget
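
# --- Hedged usage sketch (illustrative, not part of the upstream module) ---
# A minimal illustration of exercising two of the render functions above
# directly, assuming TimeSeries(name, start, end, step, values) as defined in
# graphite.render.datalib. The metric names and values are made up.
#
#   series = TimeSeries('webapp.pages.home.views', 0, 180, 60, [1, None, 3])
#   result = transformNull({}, [series], default=-1)
#   assert list(result[0]) == [1, -1, 3]
#   assert result[0].name == 'transformNull(webapp.pages.home.views,-1)'
#
# summarize() likewise only needs a populated TimeSeries; with a 60s step and
# a "2min" interval, adjacent pairs of points are summed into one bucket:
#
#   series = TimeSeries('counter.errors', 0, 240, 60, [1, 2, 3, 4])
#   result = summarize({}, [series], "2min")
#   # -> values [3, 7, None] (a trailing empty bucket is padded with None)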
dhtech/graphite-web
webapp/graphite/render/functions.py
Python
apache-2.0
101,346
package net.javaci.mobile.bomberman.core.mediator; import net.javaci.mobile.bomberman.core.BomberManGame; import net.javaci.mobile.bomberman.core.view.BomberManScreen; import net.javaci.mobile.bomberman.core.view.LobbyScreen; public class LobbyScreenMediator extends BomberManMediator { private LobbyScreen lobbyScreen; public LobbyScreenMediator(BomberManGame game) { super(game); } @Override public BomberManScreen createScreen() { this.screen = new LobbyScreen(this.game, this); this.lobbyScreen = (LobbyScreen) this.screen; return screen; } @Override protected void onScreenShow() { super.onScreenShow(); } }
firstthumb/Bomberman
core/src/main/java/net/javaci/mobile/bomberman/core/mediator/LobbyScreenMediator.java
Java
apache-2.0
696
require 'rails_helper' require 'pry' RSpec.describe ApiController, type: :controller do before :each do request.env["HTTP_ACCEPT"] = 'application/json' end describe "GET 'on_this_day'" do let(:event_params) do { date: "3/26/1991", type: "event" } end let(:song_params) do { date: "3/29/1986", type: "song" } end let(:movie_params) do { date: "1/5/1986", type: "movie" } end let(:weather_params) do { date: "3/13/1990", type: "weather" } end let(:null_song) do { date: "1/5/1986", type: "song" } end let(:null_movie) do { date: "1/6/1986", type: "movie" } end let(:null_event) do { date: "1/2/1986", type: "event" } end let(:null_weather) do { date: "5/14/1990", type: "weather" } end it "returns expected event for a given day" do get :on_this_day, event_params, {:format => :json} expect(JSON.parse(response.body).length).to eql 3 end it "returns expected song for a given day" do get :on_this_day, song_params, {:format => :json} expect(JSON.parse(response.body)[0]["artist"]).to eq "Falco" end it "returns expected movie for a given day" do get :on_this_day, movie_params, {:format => :json} expect(JSON.parse(response.body)[0]["title"]).to eq "Rocky IV" end it "returns expected weather for a given day" do get :on_this_day, weather_params, {:format => :json} expect(JSON.parse(response.body)[0]["message"]).to eq "A tornado caused $2.5M of damage in Nebraska." end it "handles a null movie day as expected" do get :on_this_day, null_movie, {:format => :json} expect(JSON.parse(response.body)[0]).to eq nil end it "handles a null song day as expected" do get :on_this_day, null_song, {:format => :json} expect(JSON.parse(response.body)[0]).to eq nil end it "handles a null event day as expected" do get :on_this_day, null_event, {:format => :json} expect(JSON.parse(response.body)[0]).to eq nil end it "handles a null weather day as expected" do get :on_this_day, null_weather, {:format => :json} expect(JSON.parse(response.body)[0]).to eq nil end end describe "GET 'on_this_day_range'" do let(:event_range_params) do { start: "1/5/1986", end: "1/20/1986", type: "event" } end let(:song_range_params) do { start: "1/5/1986", end: "1/20/1986", type: "song" } end let(:movie_range_params) do { start: "1/5/1986", end: "1/20/1986", type: "movie" } end let(:weather_range_params) do { start: "6/1/1990", end: "7/1/1990", type: "weather" } end let(:null_event_range) do { start: "1/2/1986", end: "1/8/1986", type: "event" } end let(:null_song_range) do { start: "1/5/1986", end: "1/10/1986", type: "song" } end let(:null_movie_range) do { start: "1/6/1986", end: "1/11/1986", type: "movie" } end let(:null_weather_range) do { start: "1/6/1986", end: "1/11/1986", type: "weather" } end it "returns the expected events for a given range" do get :on_this_day_range, event_range_params expect(JSON.parse(response.body).length).to eq 6 end it "returns the expected songs for a given range" do get :on_this_day_range, song_range_params expect(JSON.parse(response.body).length).to eq 2 end it "returns the expected movies for a given range" do get :on_this_day_range, movie_range_params expect(JSON.parse(response.body).length).to eq 3 end it "returns the expected weather events for a given range" do get :on_this_day_range, weather_range_params expect(JSON.parse(response.body).length).to eq 11 end it "returns a null response for a range with no events" do get :on_this_day_range, null_event_range expect(JSON.parse(response.body)).to eq [] end it "returns a null response for a range with no songs" do get :on_this_day_range, null_song_range 
      expect(JSON.parse(response.body)).to eq []
    end

    it "returns a null response for a range with no movies" do
      get :on_this_day_range, null_movie_range
      expect(JSON.parse(response.body)).to eq []
    end

    it "returns a null response for a range with no weather events" do
      get :on_this_day_range, null_weather_range
      expect(JSON.parse(response.body)).to eq []
    end
  end

  describe "GET 'conception_range'" do
    let(:params) do
      { birthday: "3/21/1991" }
    end

    let(:early_params) do
      { unusual: "true", birthday: "3/21/1991", number: 2, modifier: "early" }
    end

    let(:late_params) do
      { unusual: "true", birthday: "3/21/1991", number: 2, modifier: "late" }
    end

    it "returns a conception range for a normal birthday" do
      get :conception_range, params
      expect(JSON.parse(response.body)[0]).to eq "1990-06-25"
      expect(JSON.parse(response.body)[1]).to eq "1990-07-01"
    end

    it "returns a conception range for an early birthday" do
      get :conception_range, early_params
      expect(JSON.parse(response.body)[0]).to eq "1990-07-09"
      expect(JSON.parse(response.body)[1]).to eq "1990-07-15"
    end

    it "returns a conception range for a late birthday" do
      get :conception_range, late_params
      expect(JSON.parse(response.body)[0]).to eq "1990-06-11"
      expect(JSON.parse(response.body)[1]).to eq "1990-06-17"
    end
  end

  describe "GET 'get_youtube_id'" do
    let(:movie_params) do
      { type: "movie", title: "Lars and the Real Girl" }
    end

    let(:song_params) do
      { type: "song", title: "Your arms around me", artist: "Jens Lekman" }
    end

    it "returns correct video id for movie input" do
      get :get_youtube_id, movie_params
      expect(JSON.parse(response.body)[0]).to eq "XNcs9DrKYRU"
    end

    it "returns correct video id for song input" do
      get :get_youtube_id, song_params
      expect(JSON.parse(response.body)[0]).to eq "NIwIAbcLFhI"
    end
  end

  describe "GET 'old_movie'" do
    it "gets a movie from before 1960" do
      get :get_old_movie, year: 1957, code: "19570000"
      expect(JSON.parse(response.body)[0][0]["title"]).to eq "The Bridge on the River Kwai"
    end
  end
end
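
# Hedged usage note (illustrative, not part of the spec): under a standard
# rspec-rails setup this file would typically be run with something like
#
#   bundle exec rspec spec/controllers/api_controller_spec.rb
#
# The route names (:on_this_day, :on_this_day_range, :conception_range, ...)
# are taken from the examples above; any other invocation details are
# assumptions about the project's tooling.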
noglows/conception-calculator
spec/controllers/api_controller_spec.rb
Ruby
apache-2.0
6,855
package software.committed.rejux.impl; import software.committed.rejux.interfaces.State; public class AbstractState<S> implements State<S> { private final Class<S> stateClass; protected S state; public AbstractState(final Class<S> clazz, final S initial) { stateClass = clazz; state = initial; } @Override public Class<S> getType() { return stateClass; } protected boolean setState(final S newState) { if (newState != null && !newState.equals(state)) { state = newState; return true; } return false; } @Override public S state() { return state; } }
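
// Hedged usage sketch (illustrative, not part of the library): a minimal
// subclass showing how the protected setState() gates updates on actual
// change. The CounterState name and its methods are hypothetical.
//
//   class CounterState extends AbstractState<Integer> {
//     CounterState() { super(Integer.class, 0); }
//     boolean set(Integer v) { return setState(v); }
//   }
//
//   CounterState c = new CounterState();
//   c.set(1);    // true  -> state changed to 1
//   c.set(1);    // false -> equal value, state left untouched
//   c.set(null); // false -> null updates are rejected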
commitd/rejux
src/main/java/software/committed/rejux/impl/AbstractState.java
Java
apache-2.0
621
package se.ericsson.cf.scott.sandbox.whc.xtra.managers; import eu.scott.warehouse.lib.LoggingMqttCallback; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import se.ericsson.cf.scott.sandbox.whc.xtra.AdaptorHelper; import se.ericsson.cf.scott.sandbox.whc.xtra.WhcConfig; public class MqttManager { private final static Logger log = LoggerFactory.getLogger(MqttManager.class); private static MqttClient mqttClient; public static MqttClient initMqttClient() { try { final String mqttBroker = AdaptorHelper.p(WhcConfig.MQTT_TOPIC_PROP); mqttClient = new MqttClient(mqttBroker, AdaptorHelper.getMqttClientId()); final MqttConnectOptions mqttConnectOptions = new MqttConnectOptions(); mqttConnectOptions.setAutomaticReconnect(true); mqttClient.setCallback(new LoggingMqttCallback()); // TODO Andrew@2018-03-13: set highest QoS mqttClient.connect(mqttConnectOptions); return mqttClient; } catch (MqttException e) { log.error("MQTT connection failed", e); // TODO Andrew@2018-07-30: exception strategy throw new IllegalStateException(e); } } public static void disconnect() { if(mqttClient != null) { try { mqttClient.disconnect(); } catch (MqttException e) { log.warn("Unable to cleanly terminate the MQTT connection: ", e); } } } }
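
// Hedged usage sketch (illustrative, based only on the methods above): the
// expected lifecycle of this manager. Where the client is used in between is
// an assumption about the surrounding adaptor code.
//
//   MqttClient client = MqttManager.initMqttClient(); // connects; auto-reconnect enabled
//   // ... publish/subscribe through `client` ...
//   MqttManager.disconnect();                         // best-effort clean shutdown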
EricssonResearch/scott-eu
lyo-services/webapp-whc/src/main/java/se/ericsson/cf/scott/sandbox/whc/xtra/managers/MqttManager.java
Java
apache-2.0
1,668
// GENERATED FILE! // VERSION-INFO: com.valtech.source.ag.evf.codegen.Dispatcher // DATE/TIME-INFO: 17.05.04 09:36:44 package com.valtech.source.dependometer.app.controller.pack; import com.valtech.source.ag.evf.Dispatcher; import com.valtech.source.ag.evf.EventIf; final class DispatcherPackageCycleCollectedEvent extends Dispatcher { protected void dispatch(Object handler, EventIf event) { assert handler != null; assert event != null; ((HandlePackageCycleCollectedEventIf)handler) .handleEvent((com.valtech.source.dependometer.app.controller.pack.PackageCycleCollectedEvent)event); } }
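
// Hedged editor's note (would be lost on regeneration): this generated
// dispatcher performs the type-safe leg of EVF's double dispatch -- it casts
// the handler to the event-specific interface and forwards the already-typed
// event, so handlers never have to switch on the event type themselves.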
dheraclio/dependometer
dependometer-core/src/main/java/com/valtech/source/dependometer/app/controller/pack/DispatcherPackageCycleCollectedEvent.java
Java
apache-2.0
648
<?php
/**
 * Debug library
 *
 * @author Raymond van Velzen <raymond@latunyi.com>
 * @package LatunyiFramework
 **/

/**
 * pr
 *
 * Function to print anything in a convenient way.
 * Will print any string/number/etc, array, or object.
 *
 * @param mixed $x Anything
 * @param string $descr Description to print above output, optional
 * @return void
 **/
function pr($x, $descr = '') {
    if (is_object($x)) {
        printobj($x, $descr);
    } elseif (is_array($x)) {
        printarr($x, $descr);
    } else {
        if (is_string($x) && substr($x, 0, 5) == '<?xml') {
            printxml($x, $descr);
        } else {
            printbr($x, $descr);
        }
    }
}

/**
 * prd
 *
 * Print something (d)irectly to the screen, using pr().
 * Will flush (not destroy) output buffer.
 *
 * @param mixed $x Anything
 * @return void
 **/
function prd($x, $descr = '') {
    pr($x, $descr);
    ob_flush();
    flush();
}

/**
 * prx
 *
 * Print something using pr() and exit.
 *
 * @param mixed $x Anything
 * @param string $descr Description to print above output, optional
 * @return void
 **/
function prx($x, $descr = '') {
    pr($x, $descr);
    exit;
}

/**
 * printarr
 *
 * Print an array
 *
 * @param mixed $a Any array
 * @param string $descr Description to print above output, optional
 * @return void
 **/
function printarr($a, $descr = '') {
    if (!is_array($a)) {
        ob_start();
        if (!empty($descr)) {
            printbr('<b>' . $descr . '</b>');
        }
        var_dump($a);
        $str = ob_get_clean();
        printNicely($str);
    } else {
        ob_start();
        print '<pre>';
        if (!empty($descr)) {
            print('<b>' . $descr . '</b>' . "\n");
        }
        print_r($a);
        print '</pre>';
        $str = ob_get_clean();
        printNicely($str);
    }
}

/**
 * printobj
 *
 * Print an object
 *
 * @param mixed $a Any object
 * @param string $descr Description to print above output, optional
 * @return void
 **/
function printobj($a, $descr = '') {
    ob_start();
    print '<pre>';
    if (!empty($descr)) {
        print('<b>' . $descr . '</b>' . "\n");
    }
    print_r($a);
    print '</pre>';
    $str = ob_get_clean();
    printNicely($str);
}

/**
 * printbr
 *
 * Print something with a HTML line break. Obsolete; use pr() instead.
 *
 * @param string $text Any text
 * @param string $descr Description to print before output, optional
 * @return void
 **/
function printbr($text = '', $descr = '') {
    printNicely($descr . ' ' . $text);
}

// -----------------------------------------------------------------------

/**
 * printNicely
 *
 * Print some text in a box with fixed-width font
 *
 * @param string $str Any text
 * @return void
 **/
function printNicely($str) {
    print '<div style="font: normal 13px Courier New !important; border: 1px solid #CCC; background-color: #EEE; padding: 10px; margin: 10px; text-align: left !important;">';
    print '<pre>' . trim($str) . '</pre>';
    print '</div>';
}

// -----------------------------------------------------------------------

/**
 * prg
 *
 * Print GET array
 *
 * @return void
 **/
function prg() {
    printarr($_GET, '$_GET:');
}

/**
 * prgx
 *
 * Print GET array and exit
 *
 * @return void
 **/
function prgx() {
    prg();
    exit;
}

// -----------------------------------------------------------------------

/**
 * prp
 *
 * Print POST array
 *
 * @return void
 **/
function prp() {
    printarr($_POST, '$_POST:');
}

/**
 * prpx
 *
 * Print POST array and exit
 *
 * @return void
 **/
function prpx() {
    prp();
    exit;
}

// -----------------------------------------------------------------------

/**
 * prs
 *
 * Print SESSION
 *
 * @return void
 **/
function prs($key = null) {
    if (isset($key)) {
        if (isset($_SESSION[$key])) {
            pr($_SESSION[$key]);
        } else {
            pr('Key "' . $key . '" not found in SESSION object.');
        }
    } else {
        printarr($_SESSION, '$_SESSION:');
    }
}
'" not found in SESSION object.'); } } else { printarr($_SESSION, '$_SESSION:'); } } /** * prsx * * Print SESSION and exit * * @return void **/ function prsx($key = null) { prs($key); exit; } // ----------------------------------------------------------------------- /** * prc * * Print COOKIE * * @return void **/ function prc() { printarr($_COOKIE); } /** * prcx * * Print COOKIE and exit * * @return void **/ function prcx() { prc(); exit; } // ----------------------------------------------------------------------- /** * prsvr * * Print SERVER * * @return void **/ function prsvr() { printarr($_SERVER, '$_SERVER:'); } /** * prsvrx * * Print SERVER and exit * * @return void **/ function prsvrx() { prsvr(); exit; } // ----------------------------------------------------------------------- /** * prf * * Print $_FILES array * * @return void **/ function prf() { printarr($_FILES, '$_FILES:'); } /** * prfx * * Print $_FILES and exit * * @return void **/ function prfx() { prf(); exit; } // ----------------------------------------------------------------------- /** * Print ISO date from timestamp * @param int $ts */ function prdt($ts) { pr(date('Y-m-d', $ts)); } /** * Print ISO date from timestamp and exit * @param int $ts */ function prdtx($ts) { prdt($ts); exit; } /** * prv * * Print var_dump * * @param mixed $x Some variable * @return void **/ function prv($x, $descr = '') { ob_start(); if (!empty($descr)) { print('<b>' . $descr . "</b>\n"); } var_dump($x); $str = ob_get_clean(); printNicely($str); } /** * prvx * * Print var_dump and exit * * @param mixed $x Some variable * @return void **/ function prvx($x, $descr = '') { prv($x, $descr); exit; } /** * printxml * * Prints XML. * * @param mixed $xml Some XML variable * @return void **/ function printxml($xml, $descr = '') { pr(nl2br(htmlentities($xml)), $descr); } // ----------------------------------------------------------------------- /** * getExecutionTime * * Calculates the execution time of a script, depending on start time. * * @param float $start Start time, as float * @return void **/ function GetExecutionTime($start) { //return sprintf('%1.6f', (microtime(true) - $start)) . ' sec'; return (microtime(true) - $start) . ' sec'; } /** * GetMemory * * Reports the amount of memory used by PHP, wrapper * on memory_get_usage(). Returns warning if memory_get_usage * function doesn't exist. * * @param string $unit Measure unit: All (default), B, KB, MB * @return string 'x B', 'x KB', 'x MB'; **/ function GetMemory($unit = 'All') { if (!function_exists('memory_get_usage')) { return 'Can\'t report memory usage; memory_get_usage is not available.'; } else { $bytes = memory_get_usage(true); switch ($unit) { case 'All': $kb = number_format( ($bytes / 1024), 2) . ' KB'; $mb = number_format( (($bytes / 1024) / 1024), 2) . ' MB'; return $bytes . ' bytes, ' . $kb . ', ' . $mb; break; case 'B': return $bytes . ' bytes'; break; case 'KB': return number_format( ($bytes / 1024), 2) . ' KB'; break; case 'MB': return number_format( (($bytes / 1024) / 1024), 2) . ' MB'; break; } } } /** * Print GetMemory() */ function prmem() { pr(GetMemory()); } /** * Print GetMemory() and exit */ function prmemx() { prx(GetMemory()); } /** * Executes a var_dump and strips tags. * * @param mixed $x * @return string */ function vardump($x) { ob_start(); var_dump($x); $str = ob_get_clean(); return strip_tags($str); } /** * texc * * Throw an exception with the given message. * * @param string $x Message string * @return void **/ function texc($x) { throw new \Exception('Debug: ' . 
/**
 * show_errors
 *
 * Sets error reporting to E_ALL and display_errors to 1.
 *
 * @return void
 **/
function show_errors() {
    error_reporting(E_ALL);
    ini_set('display_errors', '1');
}

/**
 * prn
 *
 * Prints something followed by a newline.
 *
 * @param mixed $x Something to print
 * @return void
 **/
function prn($x) {
    if (is_array($x) || is_object($x)) {
        print_r($x);
    } else {
        print $x . "\n";
    }
}

/**
 * prnx
 *
 * Prints something followed by a newline, then exits.
 *
 * @param mixed $s Something to print
 * @return void
 **/
function prnx($s) {
    prn($s);
    exit;
}
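
// Hedged usage sketch (illustrative, not part of the library): typical
// debugging calls against the helpers above. The variable contents are
// made up for illustration.
//
//   pr('hello');                     // prints a scalar in the grey box
//   pr(['a' => 1, 'b' => 2], 'Cfg'); // pretty-prints an array with a label
//   prv($user, 'var_dump of $user'); // var_dump wrapped in printNicely()
//   prx($response);                  // print anything, then exit
//   show_errors();                   // E_ALL + display_errors for quick triage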
unimatrix02/latunyi-framework
system/includes/debug.inc.php
PHP
apache-2.0
8,463
package memap.helper.milp; import java.util.Arrays; import lpsolve.LpSolve; import lpsolve.LpSolveException; import memap.controller.TopologyController; import memap.helper.configurationOptions.OptimizationCriteria; import memap.helper.configurationOptions.ToolUsage; import memap.messages.BuildingMessage; import memap.messages.extension.NetworkType; import memap.messages.planning.CouplerMessage; import memap.messages.planning.DemandMessage; import memap.messages.planning.ProducerMessage; import memap.messages.planning.StorageMessage; import memap.messages.planning.VolatileProducerMessage; /** * This class is responsible to modify the problem of the MILP solver. It is * very similar to MILPProblemWithConnections. It would be good to merge these 2 * classes, but this is left for later. Be careful, when modifying those two * classes. */ public class MILPProblemNoConnections extends MILPProblem { public MILPProblemNoConnections(TopologyController topologyController, int currentTimeStep, int nStepsMPC, int nCols) { super(topologyController, currentTimeStep, nStepsMPC, nCols); } public LpSolve createNames(LpSolve problem, BuildingMessage buildingMessage) throws LpSolveException { MILPIndexHelper mihelper = new MILPIndexHelper(nStepsMPC); for (ProducerMessage producerMessage : buildingMessage.controllableProducerList) { MILPHelper.addControllableToProblem(producerMessage, problem, mihelper); mihelper.controllableHandled++; } for (ProducerMessage producerMessage : buildingMessage.volatileProducerList) { MILPHelper.addVolatileToProblem(producerMessage, problem, mihelper); mihelper.volatileHandled++; } for (CouplerMessage couplerMessage : buildingMessage.couplerList) { MILPHelper.addCouplerToProblem(couplerMessage, problem, mihelper); mihelper.couplerHandled++; } for (StorageMessage storageMessage : buildingMessage.storageList) { MILPHelper.addStorageToProblem(storageMessage, problem, mihelper); mihelper.storageHandled++; } MILPHelper.addMarkets(problem, mihelper); updateLambdaEURbuilding(buildingMessage, 0); updateLambdaEURMarket(); updateLambdaCO2building(buildingMessage, 0); updateLambdaCO2Market(); return problem; } public LpSolve createDemandConstraints(LpSolve problem, BuildingMessage buildingMessage) throws LpSolveException { double[] demand = buildingMessage.getCombinedDemandVector(); /* first HEAT components */ for (int i = 0; i < demand.length / 2; i++) { int controllableHandled = 0; int volatileHandled = 0; int couplerHandled = 0; int storageHandled = 0; double[] rowHEAT = new double[nCols + 1]; for (ProducerMessage pm : buildingMessage.controllableProducerList) { int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); if (pm.networkType == NetworkType.HEAT) rowHEAT[index] = -pm.efficiency; controllableHandled++; } for (ProducerMessage pm : buildingMessage.volatileProducerList) { int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); if (pm.networkType == NetworkType.HEAT) rowHEAT[index] = -1; volatileHandled++; } for (CouplerMessage cm : buildingMessage.couplerList) { int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); rowHEAT[index] = -cm.efficiencyHeat; couplerHandled++; } for (StorageMessage sm : buildingMessage.storageList) { int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); if (sm.networkType == 
NetworkType.HEAT) {
					rowHEAT[index] = -1;
					rowHEAT[index + nStepsMPC] = 1;
				}
				storageHandled++;
			}

			problem.addConstraint(rowHEAT, LpSolve.EQ, demand[i]);
			// System.out.println("Adding Constraint at i = " + i + ", --> rowHEAT: " + Arrays.toString(rowHEAT) + " EQ: " + demand[i]);
		}

		/* second ELEC components */
		int countTimeStep = 0;
		for (int i = demand.length / 2; i < demand.length; i++) {

			int controllableHandled = 0;
			int volatileHandled = 0;
			int couplerHandled = 0;
			int storageHandled = 0;

			double[] rowELEC = new double[nCols + 1];

			for (ProducerMessage pm : buildingMessage.controllableProducerList) {
				int index = countTimeStep + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				if (pm.networkType == NetworkType.ELECTRICITY)
					rowELEC[index] = -pm.efficiency;
				controllableHandled++;
			}

			for (ProducerMessage pm : buildingMessage.volatileProducerList) {
				int index = countTimeStep + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				if (pm.networkType == NetworkType.ELECTRICITY)
					rowELEC[index] = -1;
				volatileHandled++;
			}

			for (CouplerMessage cm : buildingMessage.couplerList) {
				int index = countTimeStep + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				rowELEC[index] = -cm.efficiencyElec;
				couplerHandled++;
			}

			for (StorageMessage sm : buildingMessage.storageList) {
				int index = countTimeStep + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				if (sm.networkType == NetworkType.ELECTRICITY) {
					rowELEC[index] = -1;
					rowELEC[index + nStepsMPC] = 1;
				}
				storageHandled++;
			}

			// ADD Markets (at last position)
			int index = countTimeStep + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
					+ (couplerHandled * 2) + (storageHandled * 2));
			rowELEC[index] = -1;
			rowELEC[index + nStepsMPC] = 1;

			problem.addConstraint(rowELEC, LpSolve.EQ, demand[i]);
			// System.out.println("Adding markets --> rowELEC: " + Arrays.toString(rowELEC) + " EQ: " + demand[i]);

			countTimeStep++;
		}

		return problem;
	}

	public LpSolve createComponentBoundaries(LpSolve problem, BuildingMessage buildingMessage) throws LpSolveException {

		int cts = currentTimeStep;
		double[] networkBuyCap = new double[nStepsMPC];
		Arrays.fill(networkBuyCap, 99999.0); // fill with 99999 kWh -> (no) upper limit

		for (int i = 0; i < nStepsMPC; i++) {

			MILPIndexHelper ih = new MILPIndexHelper(nStepsMPC);

			for (ProducerMessage pm : buildingMessage.controllableProducerList) {
				int addon = nStepsMPC * ((ih.controllableHandled * 2) + ih.volatileHandled
						+ (ih.couplerHandled * 2) + (ih.storageHandled * 2));
				int index = i + 1 + addon;

				// MAX constraints, for both el. and heat
				double[] row = new double[nCols + 1];
				row[index] = pm.efficiency;
				row[index + nStepsMPC] = pm.maxPower;
				problem.addConstraint(row, LpSolve.LE, pm.maxPower);
				// System.out.println("Adding max constraint CP --> row: " + Arrays.toString(row) + " <= " + pm.maxPower);

				// MIN constraints, for both el. and heat
				double[] row2 = new double[nCols + 1];
				row2[index] = -pm.efficiency;
				row2[index + nStepsMPC] = -pm.minPower;
				problem.addConstraint(row2, LpSolve.LE, -pm.minPower);
				// System.out.println("Adding min constraint CP --> row2: " + Arrays.toString(row2) + " <= -" + pm.minPower);

				ih.controllableHandled++;
			}

			for (ProducerMessage pm : buildingMessage.volatileProducerList) {
				int index = i + 1 + nStepsMPC * ((ih.controllableHandled * 2) + ih.volatileHandled
						+ (ih.couplerHandled * 2) + (ih.storageHandled * 2));

				VolatileProducerMessage vpm = (VolatileProducerMessage) pm;

				// MAX constraints, for both el. and heat
				double[] row = new double[nCols + 1];
				row[index] = 1;
				problem.addConstraint(row, LpSolve.LE, vpm.forecast[i]);
				// System.out.println("Adding max constraint VP --> row: " + Arrays.toString(row) + " <= " + vpm.forecast[i]);

				ih.volatileHandled++;
			}

			for (CouplerMessage cm : buildingMessage.couplerList) {
				int index = i + 1 + nStepsMPC * ((ih.controllableHandled * 2) + ih.volatileHandled
						+ (ih.couplerHandled * 2) + (ih.storageHandled * 2));

				// MAX constraints for heat
				double[] row = new double[nCols + 1];
				row[index] = cm.efficiencyHeat;
				row[index + nStepsMPC] = cm.maxPower;
				problem.addConstraint(row, LpSolve.LE, cm.maxPower);
				// System.out.println("Adding max constraint coupler --> row: " + Arrays.toString(row) + " <= " + cm.installedPower);

				// MIN constraints for heat
				double[] row2 = new double[nCols + 1];
				row2[index] = -cm.efficiencyHeat;
				row2[index + nStepsMPC] = -cm.minPower;
				problem.addConstraint(row2, LpSolve.LE, -cm.minPower);
				// System.out.println("Adding min constraint coupler --> row2: " + Arrays.toString(row2) + " <= -" + cm.minPower);

				ih.couplerHandled++;
			}

			for (StorageMessage sm : buildingMessage.storageList) {
				int index = i + 1 + nStepsMPC * ((ih.controllableHandled * 2) + ih.volatileHandled
						+ (ih.couplerHandled * 2) + (ih.storageHandled * 2));

				// MAX discharging constraints, for both el. and heat
				double[] row = new double[nCols + 1];
				row[index] = 1;
				problem.addConstraint(row, LpSolve.LE, sm.maxDischarge);
				// System.out.println("Adding max discharging ST --> row: " + Arrays.toString(row) + " <= " + sm.maxDischarge);

				// MAX charging constraints, for both el. and heat
				double[] row2 = new double[nCols + 1];
				row2[index + nStepsMPC] = 1;
				problem.addConstraint(row2, LpSolve.LE, sm.maxLoad);
				// System.out.println("Adding max charging --> row2: " + Arrays.toString(row2) + " <= " + sm.maxLoad);

				ih.storageHandled++;
			}

			// Market constraints:
			// takes the minimum electricity buy cap over all houses managed by MEMAP and
			// multiplies it by the time-dependent MaxBuyLimit function, if one is defined
			// in the global simulation settings.
			int index = i + 1 + nStepsMPC * ((ih.controllableHandled * 2) + ih.volatileHandled
					+ (ih.couplerHandled * 2) + (ih.storageHandled * 2));
			double[] rowN = new double[nCols + 1];

			for (DemandMessage dm : buildingMessage.demandList) {
				if ((dm.networkType == NetworkType.ELECTRICITY) && (dm.varNetworkBuyCap != null)) {
					if (dm.varNetworkBuyCap[i] < networkBuyCap[i]) {
						networkBuyCap[i] = dm.varNetworkBuyCap[i] * energyPrices.getMaxBuyLimit(cts + i);
						if (energyPrices.getMaxBuyLimit(cts + i) == 0) networkBuyCap[i] = 9999.0;
					}
				}
				// if (dm.networkType == NetworkType.ELECTRICITY && i == 0) System.out.println("ElecBuy for " + buildingMessage.name + " limited to " + networkBuyCap[0]);
			}

			rowN[index] = 1;
			problem.addConstraint(rowN, LpSolve.LE, networkBuyCap[i]);
			// System.out.println(Arrays.toString(rowN) + ": ElecBuy for " + buildingMessage.name + " limited to " + networkBuyCap[i] + " at index " + index);
		}

		return problem;
	}

	public LpSolve createSOCBoundaries(LpSolve problem, BuildingMessage buildingMessage) throws LpSolveException {

		int controllableHandled = buildingMessage.getNrOfControllableProducers();
		int volatileHandled = buildingMessage.getNrOfVolatileProducers();
		int couplerHandled = buildingMessage.getNrOfCouplers();
		int storageHandled = 0;

		double delta_time_factor = 24.0 / topologyConfig.getTimeStepsPerDay(); // = 0.25 for 96 steps per day

		for (StorageMessage sm : buildingMessage.storageList) {

			double SOC_perc = sm.stateOfCharge;
			double standbyLosses = sm.storageLosses;

			// check and enforce that SOC is between 0 and 1, due to numerical issues.
			// if (SOC_perc >= 1) {
			//     SOC_perc = 1;
			// }
			// if (SOC_perc <= 0) {
			//     SOC_perc = 0;
			// }

			// New for SOC within 0 and 1 and standby loss consideration:
			// helper parameters, only depend on time step length and storage parameters
			double alpha = 1 - standbyLosses * delta_time_factor; // Units [-]
			double beta_to = delta_time_factor / sm.capacity * sm.efficiencyCharge; // Units [h/kWh]
			double beta_fm = delta_time_factor / (sm.capacity * sm.efficiencyDischarge); // Units [h/kWh]

			// create new SOC constraints. They are not based on energy/capacity,
			// but solely on SOC, i.e. between 0 and 1.
			int index = 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
					+ (couplerHandled * 2) + (storageHandled * 2));

			for (int i = 0; i < nStepsMPC; i++) {

				double[] rowCHARGE = new double[nCols + 1];
				double[] rowDISCHARGE = new double[nCols + 1];

				for (int k = 0; k <= i; k++) {
					// First add the factor for the discharge decision variable (x_fm)
					rowCHARGE[index + k] = -beta_fm * Math.pow(alpha, i - k);
					rowDISCHARGE[index + k] = beta_fm * Math.pow(alpha, i - k);

					// Now add the factor for the charging decision variable (x_to):
					rowCHARGE[index + k + nStepsMPC] = beta_to * Math.pow(alpha, i - k);
					rowDISCHARGE[index + k + nStepsMPC] = -beta_to * Math.pow(alpha, i - k);
				}

				// Add the factor vectors to the problem as constraints:
				problem.addConstraint(rowCHARGE, LpSolve.LE, (1 - (SOC_perc * Math.pow(alpha, i + 1))));

				// for the last time step in the horizon, discharging is limited so that
				// the SOC does not fall below the storage's configured minimumSOC
				if (i == (nStepsMPC - 1) && (Double) sm.minimumSOC != null) {
					problem.addConstraint(rowDISCHARGE, LpSolve.LE, ((SOC_perc * Math.pow(alpha, i + 1)) - sm.minimumSOC));
				} else {
					problem.addConstraint(rowDISCHARGE, LpSolve.LE, (SOC_perc * Math.pow(alpha, i + 1)));
				}
			}

			storageHandled++;
		}

		return problem;
	}

	public LpSolve createObjectiveFunction(LpSolve problem, BuildingMessage buildingMessage) throws LpSolveException {

		int[] colno = new int[nCols];
		double[] row = new double[nCols];

		int cts = currentTimeStep;
		int counter = 0;

		for (int i = 0; i < nStepsMPC; i++) {

			int controllableHandled = 0;
			int volatileHandled = 0;
			int couplerHandled = 0;
			int storageHandled = 0;

			for (ProducerMessage pm : buildingMessage.controllableProducerList) {
				int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				colno[counter] = index;
				if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) {
					row[counter++] = pm.operationalCostEUR;
					// System.out.println("CP-costs " + pm.operationalCostEUR + " written at counter " + (counter-1));
				}
				if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) {
					row[counter++] = pm.operationalCostCO2;
				}

				if (topologyController.getToolUsage() == ToolUsage.SERVER && pm.varOperationalCostEUR != null) {
					// This part overwrites the previous costs if the above condition is given
					counter--; // TODO: solve better
					if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) {
						row[counter++] = pm.varOperationalCostEUR[i];
						// System.out.println("CP-costs " + pm.varOperationalCostEUR[i] + " overwritten at counter " + (counter-1));
					}
					if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) {
						row[counter++] = pm.varOperationalCostEUR[i];
					}
				}

				controllableHandled++;
			}

			for (ProducerMessage pm : buildingMessage.volatileProducerList) {
				int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				colno[counter] = index;
				if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) {
					row[counter++] = pm.operationalCostEUR;
				}
				if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) {
					row[counter++] = pm.operationalCostCO2;
				}
				volatileHandled++;
			}

			for (CouplerMessage cm : buildingMessage.couplerList) {
				int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled
						+ (couplerHandled * 2) + (storageHandled * 2));
				colno[counter] = index;
				if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) {
					row[counter++] = cm.operationalCostEUR;
				}
				if
(topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) { row[counter++] = cm.operationalCostCO2; } if (topologyController.getToolUsage() == ToolUsage.SERVER && cm.varOperationalCostEUR != null) { // This part overwrites the previous costs if above condition is given counter = counter -1; // TODO: solve better if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) { row[counter++] = cm.varOperationalCostEUR[i]; } if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) { row[counter++] = cm.varOperationalCostEUR[i]; } } couplerHandled++; } for (StorageMessage sm : buildingMessage.storageList) { double chargingCosts = 0; double dischargingCosts = 0; if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) { dischargingCosts = sm.operationalCostEUR; chargingCosts = sm.operationalCostEUR; } if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) { dischargingCosts = sm.operationalCostCO2; chargingCosts = -0.0001; // TODO hardcoded heuristic as discussed in Github } int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); colno[counter] = index; row[counter++] = dischargingCosts; // x_fm colno[counter] = index + nStepsMPC; row[counter++] = chargingCosts; // x_to storageHandled++; } // Check which House has the lowest buy-price for electricity: double[] bestBuyPrice = new double[nStepsMPC]; Arrays.fill(bestBuyPrice, 100.0); // fill with 100 €/kWh double[] bestSellPrice = new double[nStepsMPC]; Arrays.fill(bestSellPrice, 0.0); // fill with 0 €/kWh for (DemandMessage dm : buildingMessage.demandList) { if (dm.networkType == NetworkType.ELECTRICITY) { if (dm.varNetworkBuyCostEUR != null && dm.varNetworkBuyCostEUR[0] < bestBuyPrice[0]) bestBuyPrice = dm.varNetworkBuyCostEUR; if (dm.varNetworkSellCostEUR != null && dm.varNetworkSellCostEUR[0] > bestSellPrice[0]) bestSellPrice = dm.varNetworkSellCostEUR; } } // buy int index = i + 1 + nStepsMPC * ((controllableHandled * 2) + volatileHandled + (couplerHandled * 2) + (storageHandled * 2)); if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) { // buy colno[counter] = index; row[counter++] = energyPrices.getElecBuyingPrice(cts+i); // sell colno[counter] = index+nStepsMPC; row[counter++] = -energyPrices.getElecSellingPrice(cts+i); } if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) { // buy colno[counter] = index; row[counter++] = energyPrices.getCO2EmissionFactor(cts+i); // sell, no compensation for selling colno[counter] = index+nStepsMPC; row[counter++] = 0; } if (topologyController.getToolUsage() == ToolUsage.SERVER) { // This part overwrites the previous costs if above condition is given if (topologyController.getOptimizationCriteria() == OptimizationCriteria.EUR) { // TODO: Better solution for this to avoid double code for server / planning counter = counter - 2; // buy colno[counter] = index; row[counter++] = bestBuyPrice[i]; // sell colno[counter] = index+nStepsMPC; row[counter++] = -bestSellPrice[i]; } if (topologyController.getOptimizationCriteria() == OptimizationCriteria.CO2) { // buy colno[counter] = index; row[counter++] = energyPrices.getCO2EmissionFactor(cts+i); // sell, no compensation for selling colno[counter] = index+nStepsMPC; row[counter++] = 0; } } } /* set the objective in lpsolve */ problem.setObjFnex(counter, row, colno); /* set the object direction to minimize */ problem.setMinim(); return problem; } }
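
// Hedged editor's note on createSOCBoundaries() (my reading of the code above,
// not an authoritative derivation from the MEMAP documentation): the factors
// alpha, beta_to and beta_fm implement the standard storage recurrence
//
//   SOC[i+1] = alpha * SOC[i] + beta_to * x_to[i] - beta_fm * x_fm[i]
//
// Unrolling the recurrence over the MPC horizon gives
//
//   SOC[i+1] = alpha^(i+1) * SOC[0]
//            + sum_{k=0..i} alpha^(i-k) * (beta_to * x_to[k] - beta_fm * x_fm[k])
//
// which is exactly the weighted rowCHARGE/rowDISCHARGE sums being built; the
// two LE constraints then pin SOC[i+1] between 0 (or minimumSOC in the last
// step) and 1.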
SES-fortiss/SmartGridCoSimulation
projects/memapCore/src/main/java/memap/helper/milp/MILPProblemNoConnections.java
Java
apache-2.0
21,486
using System.Collections.Generic; namespace SuperMap.Web.iServerJava2 { /// <summary> /// <para>${iServer2_sqlParam_Title}</para> /// <para>${iServer2_sqlParam_Description}</para> /// </summary> public class SqlParam { /// <summary>${iServer2_sqlParam_constructor_None_D}</summary> public SqlParam() { } /// <summary>${iServer2_sqlParam_attribute_ids_D}</summary> public IList<int> IDs { get; set; } /// <summary>${iServer2_sqlParam_attribute_returnFields_D}</summary> public IList<string> ReturnFields { get; set; } /// <summary>${iServer2_sqlParam_attribute_sortClause_D}</summary> public string SortClause { get; set; } /// <summary>${iServer2_sqlParam_attribute_whereClause_D}</summary> public string WhereClause { get; set; } /// <summary>${iServer2_sqlParam_attribute_groupClause_D}</summary> public string GroupClause { get; set; } internal static string ToJson(SqlParam sqlParam) { if (sqlParam == null) { return null; } //if (mapName.Contains("@")) //{ // string str = mapName.Substring(0, mapName.IndexOf("@")); //} string json = "{"; List<string> list = new List<string>(); if (!string.IsNullOrEmpty(sqlParam.WhereClause)) { list.Add(string.Format("\"whereClause\":\"{0}\"", Bridge.EncodeString(sqlParam.WhereClause))); //if (!sqlParam.WhereClause.Contains(".") && mapName.Contains("@")) //{ // string str = mapName.Substring(0, mapName.IndexOf("@")); // //list.Add(string.Format("\"whereClause\":\"{0}.{1}\"", str, Bridge.EncodeString(sqlParam.WhereClause))); // list.Add(string.Format("\"whereClause\":\"{0}\"", Bridge.EncodeString(sqlParam.WhereClause))); //} //else //{ // list.Add(string.Format("\"whereClause\":\"{0}\"", Bridge.EncodeString(sqlParam.WhereClause))); //} } if (!string.IsNullOrEmpty(sqlParam.SortClause)) { list.Add(string.Format("\"sortClause\":\"{0}\"", Bridge.EncodeString(sqlParam.SortClause))); //if (!sqlParam.SortClause.Contains(".") && mapName.Contains("@")) //{ // string str = mapName.Substring(0, mapName.IndexOf("@")); // list.Add(string.Format("\"sortClause\":\"{0}.{1}\"", str, Bridge.EncodeString(sqlParam.SortClause))); //} //else //{ // list.Add(string.Format("\"sortClause\":\"{0}\"", Bridge.EncodeString(sqlParam.SortClause))); //} } if (!string.IsNullOrEmpty(sqlParam.GroupClause)) { list.Add(string.Format("\"groupClause\":\"{0}\"", Bridge.EncodeString(sqlParam.GroupClause))); //if (!sqlParam.GroupClause.Contains(".") && mapName.Contains("@")) //{ // string str = mapName.Substring(0, mapName.IndexOf("@")); // list.Add(string.Format("\"groupClause\":\"{0}.{1}\"", str, Bridge.EncodeString(sqlParam.GroupClause))); //} //else //{ // list.Add(string.Format("\"groupClause\":\"{0}\"", Bridge.EncodeString(sqlParam.GroupClause))); //} } // if (sqlParam.ReturnFields != null && sqlParam.ReturnFields.Count > 0) { List<string> temp = new List<string>(); for (int i = 0; i < sqlParam.ReturnFields.Count; i++) { temp.Add(string.Format("\"{0}\"", sqlParam.ReturnFields[i])); } list.Add(string.Format("\"returnFields\":[{0}]", string.Join(",", temp.ToArray()))); } if (sqlParam.IDs != null && sqlParam.IDs.Count > 0) { List<string> temp = new List<string>(); foreach (int id in sqlParam.IDs) { temp.Add(id.ToString()); } list.Add(string.Format("\"ids\":[{0}]", string.Join(",", temp.ToArray()))); } json += string.Join(",", list.ToArray()); json += "}"; return json; } } }
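
// Hedged usage sketch (illustrative, not part of the library): building a
// query filter and serializing it via the internal ToJson helper. The field
// names and values are made up, and the exact output may differ slightly
// depending on Bridge.EncodeString escaping.
//
//   var param = new SqlParam
//   {
//       WhereClause = "SMID > 100",
//       SortClause = "SMID DESC",
//       ReturnFields = new List<string> { "NAME", "SMID" },
//       IDs = new List<int> { 1, 2, 3 }
//   };
//   // SqlParam.ToJson(param) would yield roughly:
//   // {"whereClause":"SMID > 100","sortClause":"SMID DESC",
//   //  "returnFields":["NAME","SMID"],"ids":[1,2,3]}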
SuperMap/iClient-for-Silverlight
SuperMap.Web.iServerJava2/Query/SqlParam.cs
C#
apache-2.0
4,581
/** * Package: MAG - VistA Imaging WARNING: Per VHA Directive 2004-038, this routine should not be modified. Date Created: Apr, 2010 Site Name: Washington OI Field Office, Silver Spring, MD Developer: vhaiswlouthj Description: DICOM Study cache manager. Maintains the cache of study instances and expires old studies after 15 minutes. ;; +--------------------------------------------------------------------+ ;; Property of the US Government. ;; No permission to copy or redistribute this software is given. ;; Use of unreleased versions of this software requires the user ;; to execute a written test agreement with the VistA Imaging ;; Development Office of the Department of Veterans Affairs, ;; telephone (301) 734-0100. ;; ;; The Food and Drug Administration classifies this software as ;; a Class II medical device. As such, it may not be changed ;; in any way. Modifications to this software may result in an ;; adulterated medical device under 21CFR820, the use of which ;; is considered to be a violation of US Federal Statutes. ;; +--------------------------------------------------------------------+ */ package gov.va.med.imaging.core.router.commands.storage.datasource; import gov.va.med.imaging.core.interfaces.exceptions.ConnectionException; import gov.va.med.imaging.core.interfaces.exceptions.MethodException; import gov.va.med.imaging.core.router.commands.storage.AbstractStorageDataSourceCommandImpl; import gov.va.med.imaging.datasource.StorageDataSourceSpi; import gov.va.med.imaging.exchange.business.storage.ArtifactRetentionPolicy; public class PostArtifactRetentionPolicyCommandImpl extends AbstractStorageDataSourceCommandImpl<ArtifactRetentionPolicy> { private static final long serialVersionUID = 1L; private static final String SPI_METHOD_NAME = "createArtifactRetentionPolicy"; private final ArtifactRetentionPolicy artifactRetentionPolicy; public PostArtifactRetentionPolicyCommandImpl(ArtifactRetentionPolicy artifactRetentionPolicy) { super(); this.artifactRetentionPolicy = artifactRetentionPolicy; } @Override protected Class<?>[] getSpiMethodParameterTypes() { return new Class<?>[]{ArtifactRetentionPolicy.class}; } @Override protected Object[] getSpiMethodParameters() { return new Object[]{artifactRetentionPolicy} ; } @Override protected String parameterToString() { // TODO Auto-generated method stub return null; } /* (non-Javadoc) * @see gov.va.med.imaging.core.router.AbstractDataSourceCommandImpl#getCommandResult(gov.va.med.imaging.datasource.VersionableDataSourceSpi) */ @Override protected ArtifactRetentionPolicy getCommandResult(StorageDataSourceSpi spi) throws ConnectionException, MethodException { return spi.createArtifactRetentionPolicy(artifactRetentionPolicy); } /* (non-Javadoc) * @see gov.va.med.imaging.core.router.AbstractDataSourceCommandImpl#getSpiMethodName() */ @Override protected String getSpiMethodName() { return SPI_METHOD_NAME; } }
VHAINNOVATIONS/Telepathology
Source/Java/CoreRouter/main/src/java/gov/va/med/imaging/core/router/commands/storage/datasource/PostArtifactRetentionPolicyCommandImpl.java
Java
apache-2.0
3,120
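The command above pairs a typed getCommandResult call with reflective metadata (getSpiMethodName, parameter types, parameter values) that the abstract base class presumably uses for dispatch. A self-contained sketch of that reflective-dispatch pattern; StorageSpi and createWidget are stand-ins, not the VistA Imaging API:

import java.lang.reflect.Method;

public class ReflectiveCommandSketch {
    // Hypothetical stand-in for a data-source SPI.
    public interface StorageSpi {
        String createWidget(String name);
    }

    // Mirrors the getSpiMethodName()/getSpiMethodParameterTypes()/getSpiMethodParameters() triple.
    static Object invokeSpi(Class<?> spiType, Object spi, String methodName,
                            Class<?>[] parameterTypes, Object[] parameters) throws Exception {
        Method m = spiType.getMethod(methodName, parameterTypes);
        return m.invoke(spi, parameters);
    }

    public static void main(String[] args) throws Exception {
        StorageSpi spi = name -> "created:" + name;
        Object result = invokeSpi(StorageSpi.class, spi, "createWidget",
                new Class<?>[]{String.class}, new Object[]{"retention-policy"});
        System.out.println(result); // created:retention-policy
    }
}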
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.referencing.operation; import java.util.Map; import java.util.HashMap; import java.util.Collection; import javax.xml.bind.annotation.XmlTransient; import javax.measure.Unit; import org.opengis.metadata.Identifier; import org.opengis.metadata.quality.PositionalAccuracy; import org.opengis.util.InternationalString; import org.opengis.parameter.ParameterValue; import org.opengis.parameter.ParameterValueGroup; import org.opengis.parameter.ParameterDescriptor; import org.opengis.parameter.GeneralParameterValue; import org.opengis.parameter.GeneralParameterDescriptor; import org.opengis.referencing.operation.OperationMethod; import org.opengis.referencing.operation.SingleOperation; import org.apache.sis.internal.metadata.ReferencingServices; import org.apache.sis.internal.referencing.SignReversalComment; import org.apache.sis.internal.referencing.provider.AbstractProvider; import org.apache.sis.metadata.iso.ImmutableIdentifier; import org.apache.sis.util.collection.Containers; import org.apache.sis.util.Deprecable; /** * Description of the inverse of another method. This class should be used only when no operation is defined * for the inverse, or when the inverse operation can not be represented by inverting the sign of parameters. * * @author Martin Desruisseaux (Geomatys) * @version 0.8 * @since 0.7 * @module */ @XmlTransient final class InverseOperationMethod extends DefaultOperationMethod { /** * For cross-version compatibility. */ private static final long serialVersionUID = 6395008927817202180L; /** * The original operation method for which this {@code InverseOperationMethod} is the inverse. */ private final OperationMethod inverse; /** * Creates the inverse of the given method. */ private InverseOperationMethod(final Map<String,?> properties, final OperationMethod method) { super(properties, method.getTargetDimensions(), method.getSourceDimensions(), method.getParameters()); inverse = method; } /** * Returns {@code true} if the given method flags itself as invertible. */ private static boolean isInvertible(final OperationMethod method) { return method instanceof AbstractProvider && ((AbstractProvider) method).isInvertible(); } /** * Returns or create the inverse of the given operation method. If the same operation method can be used * for the inverse operation either with the exact same parameter values or with the sign of some values * reversed, then the given method is returned as-is. Otherwise a synthetic method is created. 
*/ static OperationMethod create(final OperationMethod method) { if (method instanceof InverseOperationMethod) { return ((InverseOperationMethod) method).inverse; } if (!isInvertible(method)) { boolean useSameParameters = false; for (final GeneralParameterDescriptor descriptor : method.getParameters().descriptors()) { useSameParameters = (descriptor.getRemarks() instanceof SignReversalComment); if (!useSameParameters) break; } if (!useSameParameters) { Identifier name = method.getName(); name = new ImmutableIdentifier(null, null, "Inverse of " + name.getCode()); final Map<String,Object> properties = new HashMap<>(6); properties.put(NAME_KEY, name); properties.put(FORMULA_KEY, method.getFormula()); properties.put(REMARKS_KEY, method.getRemarks()); if (method instanceof Deprecable) { properties.put(DEPRECATED_KEY, ((Deprecable) method).isDeprecated()); } return new InverseOperationMethod(properties, method); } } return method; } /** * Infers the properties to give to an inverse coordinate operation. * The returned map will contains three kind of information: * * <ul> * <li>Metadata (domain of validity, accuracy)</li> * <li>Parameter values, if possible</li> * </ul> * * This method copies accuracy and domain of validity metadata from the given operation. * We presume that the inverse operation has the same accuracy than the direct operation. * * <div class="note"><b>Note:</b> * in many cases, the inverse operation is numerically less accurate than the direct operation because it * uses approximations like series expansions or iterative methods. However the numerical errors caused by * those approximations are not of interest here, because they are usually much smaller than the inaccuracy * due to the stochastic nature of coordinate transformations (not to be confused with coordinate conversions; * see ISO 19111 for more information).</div> * * If the inverse of the given operation can be represented by inverting the sign of all numerical * parameter values, then this method copies also those parameters in a {@code "parameters"} entry. * * @param source the operation for which to get the inverse parameters. * @param target where to store the inverse parameters. */ static void properties(final SingleOperation source, final Map<String,Object> target) { target.put(SingleOperation.DOMAIN_OF_VALIDITY_KEY, source.getDomainOfValidity()); final Collection<PositionalAccuracy> accuracy = source.getCoordinateOperationAccuracy(); if (!Containers.isNullOrEmpty(accuracy)) { target.put(SingleOperation.COORDINATE_OPERATION_ACCURACY_KEY, accuracy.toArray(new PositionalAccuracy[accuracy.size()])); } /* * If the inverse of the given operation can be represented by inverting the sign of all numerical * parameter values, copies those parameters in a "parameters" entry in the properties map. * Otherwise does nothing. 
*/ final ParameterValueGroup parameters = source.getParameterValues(); final ParameterValueGroup copy = parameters.getDescriptor().createValue(); for (final GeneralParameterValue gp : parameters.values()) { if (gp instanceof ParameterValue<?>) { final ParameterValue<?> src = (ParameterValue<?>) gp; final Object value = src.getValue(); if (value instanceof Number) { final ParameterDescriptor<?> descriptor = src.getDescriptor(); final InternationalString remarks = descriptor.getRemarks(); if (remarks != SignReversalComment.SAME) { if (remarks != SignReversalComment.OPPOSITE) { /* * The parameter descriptor does not specify whether the values for the inverse operation * have the same sign or opposite sign. We could heuristically presume that we can invert * the sign if the minimum value has the opposite sign than the maximum value (as in the * [-10 … 10] range), but such assumption is dangerous. For example the values in a matrix * could be bounded to a range like [-1 … 1], which would mislead above heuristic rule. * * Note that abandoning here does not mean that we will never know the parameter values. * As a fallback, AbstractCoordinateOperation will try to get the parameter values from * the MathTransform. This is the appropriate thing to do at least for Affine operation. */ return; } /* * The parameter value of the inverse operation is (or is presumed to be) the negative of * the parameter value of the source operation. We need to preserve units of measurement * if they were specified. */ final ParameterValue<?> tgt = copy.parameter(descriptor.getName().getCode()); final Unit<?> unit = src.getUnit(); if (unit != null) { tgt.setValue(-src.doubleValue(), unit); } else if (value instanceof Integer || value instanceof Short || value instanceof Byte) { tgt.setValue(-src.intValue()); } else { tgt.setValue(-src.doubleValue()); } // No need to add 'tgt' to 'copy' since it was done by the call to copy.parameter(…). continue; } } } copy.values().add(gp); } target.put(ReferencingServices.PARAMETERS_KEY, copy); } }
Geomatys/sis
core/sis-referencing/src/main/java/org/apache/sis/referencing/operation/InverseOperationMethod.java
Java
apache-2.0
9,914
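Stripped of the GeoAPI parameter machinery, the sign-reversal fallback above reduces to negating every numeric parameter while preserving its integer-ness. A minimal sketch under that assumption, using a plain map instead of ParameterValueGroup:

import java.util.LinkedHashMap;
import java.util.Map;

public class SignReversalSketch {
    /** Builds inverse parameters by negating every numeric value (illustrative only). */
    static Map<String, Number> inverseParameters(Map<String, Number> forward) {
        Map<String, Number> inverse = new LinkedHashMap<>();
        for (Map.Entry<String, Number> e : forward.entrySet()) {
            Number v = e.getValue();
            // Preserve integer-ness the way the real code special-cases int/short/byte.
            if (v instanceof Integer || v instanceof Short || v instanceof Byte) {
                inverse.put(e.getKey(), -v.intValue());
            } else {
                inverse.put(e.getKey(), -v.doubleValue());
            }
        }
        return inverse;
    }

    public static void main(String[] args) {
        Map<String, Number> shift = new LinkedHashMap<>();
        shift.put("X-axis translation", 84.87);
        shift.put("Y-axis translation", 96.49);
        shift.put("Z-axis translation", 116.95);
        System.out.println(inverseParameters(shift));
        // {X-axis translation=-84.87, Y-axis translation=-96.49, Z-axis translation=-116.95}
    }
}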
import alt from './../../alt'; class AsyncActions { constructor() { this.generateActions( 'toggle' ); } } export default alt.createActions(AsyncActions);
Lesha-spr/greed
public/src/actions/async/async.actions.js
JavaScript
apache-2.0
191
import * as index from "./index"; describe("icon-button/index", () => { [ { name: "default", value: expect.any(Function) } ].forEach(({ name, value }) => { it(`exports ${name}`, () => { expect(index).toHaveProperty(name, value); }); }); });
Autodesk/hig
packages/icon-button/src/index.test.js
JavaScript
apache-2.0
282
package unit import ( "reflect" "testing" ) func TestDeserialize(t *testing.T) { contents := ` This=Ignored [Unit] ;ignore this guy Description = Foo [Service] ExecStart=echo "ping"; ExecStop=echo "pong" # ignore me, too ExecStop=echo post [Fleet] X-ConditionMachineMetadata=foo=bar X-ConditionMachineMetadata=baz=qux ` expected := map[string]map[string][]string{ "Unit": { "Description": {"Foo"}, }, "Service": { "ExecStart": {`echo "ping";`}, "ExecStop": {`echo "pong"`, "echo post"}, }, "Fleet": { "X-ConditionMachineMetadata": {"foo=bar", "baz=qux"}, }, } unitFile, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } if !reflect.DeepEqual(expected, unitFile.Contents) { t.Fatalf("Map func did not produce expected output.\nActual=%v\nExpected=%v", unitFile.Contents, expected) } } func TestDeserializedUnitGarbage(t *testing.T) { contents := ` >>>>>>>>>>>>> [Service] ExecStart=jim # As long as a line has an equals sign, systemd is happy, so we should pass it through <<<<<<<<<<<=bar ` expected := map[string]map[string][]string{ "Service": { "ExecStart": {"jim"}, "<<<<<<<<<<<": {"bar"}, }, } unitFile, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } if !reflect.DeepEqual(expected, unitFile.Contents) { t.Fatalf("Map func did not produce expected output.\nActual=%v\nExpected=%v", unitFile.Contents, expected) } } func TestDeserializeEscapedMultilines(t *testing.T) { contents := ` [Service] ExecStart=echo \ "pi\ ng" ExecStop=\ echo "po\ ng" # comments within continuation should not be ignored ExecStopPre=echo\ #pang ExecStopPost=echo\ #peng\ pung ` expected := map[string]map[string][]string{ "Service": { "ExecStart": {`echo "pi ng"`}, "ExecStop": {`echo "po ng"`}, "ExecStopPre": {`echo #pang`}, "ExecStopPost": {`echo #peng pung`}, }, } unitFile, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } if !reflect.DeepEqual(expected, unitFile.Contents) { t.Fatalf("Map func did not produce expected output.\nActual=%v\nExpected=%v", unitFile.Contents, expected) } } func TestSerializeDeserialize(t *testing.T) { contents := ` [Unit] Description = Foo ` deserialized, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } section := deserialized.Contents["Unit"] if val, ok := section["Description"]; !ok || val[0] != "Foo" { t.Errorf("Failed to persist data through serialize/deserialize: %v", val) } serialized := deserialized.String() deserialized, err = NewUnit(serialized) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", serialized, err) } section = deserialized.Contents["Unit"] if val, ok := section["Description"]; !ok || val[0] != "Foo" { t.Errorf("Failed to persist data through serialize/deserialize: %v", val) } } func TestDescription(t *testing.T) { contents := ` [Unit] Description = Foo [Service] ExecStart=echo "ping"; ExecStop=echo "pong"; ` unitFile, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } if unitFile.Description() != "Foo" { t.Fatalf("Unit.Description is incorrect") } } func TestDescriptionNotDefined(t *testing.T) { contents := ` [Unit] [Service] ExecStart=echo "ping"; ExecStop=echo "pong"; ` unitFile, err := NewUnit(contents) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", contents, err) } if unitFile.Description() != "" { t.Fatalf("Unit.Description is incorrect") } } func 
TestNewSystemdUnitFileFromLegacyContents(t *testing.T) { legacy := map[string]map[string]string{ "Unit": { "Description": "foobar", }, "Service": { "Type": "oneshot", "ExecStart": "/usr/bin/echo bar", }, } expected := map[string]map[string][]string{ "Unit": { "Description": {"foobar"}, }, "Service": { "Type": {"oneshot"}, "ExecStart": {"/usr/bin/echo bar"}, }, } u, err := NewUnitFromLegacyContents(legacy) if err != nil { t.Fatalf("Unexpected error parsing unit %q: %v", legacy, err) } actual := u.Contents if !reflect.DeepEqual(actual, expected) { t.Fatalf("Map func did not produce expected output.\nActual=%v\nExpected=%v", actual, expected) } } func TestDeserializeLine(t *testing.T) { deserializeLineExamples := map[string][]string{ `key=foo=bar`: {`foo=bar`}, `key="foo=bar"`: {`foo=bar`}, `key="foo=bar" "baz=qux"`: {`foo=bar`, `baz=qux`}, `key="foo=bar baz"`: {`foo=bar baz`}, `key="foo=bar" baz`: {`"foo=bar" baz`}, `key=baz "foo=bar"`: {`baz "foo=bar"`}, `key="foo=bar baz=qux"`: {`foo=bar baz=qux`}, } for q, w := range deserializeLineExamples { k, g, err := deserializeUnitLine(q) if err != nil { t.Fatalf("Unexpected error testing %q: %v", q, err) } if k != "key" { t.Fatalf("Unexpected key, got %q, want %q", k, "key") } if !reflect.DeepEqual(g, w) { t.Errorf("Unexpected line parse for %q:\ngot %q\nwant %q", q, g, w) } } // Any non-empty line without an '=' is bad badLines := []string{ `<<<<<<<<<<<<<<<<<<<<<<<<`, `asdjfkl;`, `>>>>>>>>>>>>>>>>>>>>>>>>`, `!@#$%^&&*`, } for _, l := range badLines { _, _, err := deserializeUnitLine(l) if err == nil { t.Fatalf("Did not get expected error deserializing %q", l) } } } func TestBadUnitsFail(t *testing.T) { bad := []string{ ` [Unit] [Service] <<<<<<<<<<<<<<<< `, ` [Unit] nonsense upon stilts `, } for _, tt := range bad { if _, err := NewUnit(tt); err == nil { t.Fatalf("Did not get expected error creating Unit from %q", tt) } } }
tclavier/fleet
unit/file_test.go
Go
apache-2.0
5,792
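The escaped-multiline tests above pin down one behavior: a line ending in a backslash is joined to its successor with a single space before key=value parsing, and comments inside a continuation survive. A hedged Java sketch of just that joining step (fleet's actual parser is the Go code above):

import java.util.ArrayList;
import java.util.List;

public class ContinuationJoinSketch {
    /** Joins lines ending in '\' with their successors, inserting a space as the tests expect. */
    static List<String> joinContinuations(String[] rawLines) {
        List<String> out = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        for (String line : rawLines) {
            if (line.endsWith("\\")) {
                current.append(line, 0, line.length() - 1).append(' ');
            } else {
                current.append(line);
                out.add(current.toString());
                current.setLength(0);
            }
        }
        if (current.length() > 0) {
            out.add(current.toString()); // trailing backslash on the very last line
        }
        return out;
    }

    public static void main(String[] args) {
        String[] unit = {"ExecStart=echo \\", "\"pi\\", "ng\""};
        System.out.println(joinContinuations(unit)); // [ExecStart=echo "pi ng"]
    }
}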
const loginJudger = require('./loginJudger'); const registerJudger = require('./registerJudger'); const adminJudger = require('./adminJudger'); module.exports = { loginJudger, registerJudger, adminJudger, };
sugerPocket/sp-blog
server/middlewares/index.js
JavaScript
apache-2.0
214
/* * Copyright 2012 Kristaps Kohs<kristaps.kohs@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.skinnyelephant.framework.core; import org.skinnyelephant.framework.util.EntityPool; import java.util.HashMap; import java.util.Map; /** * Manager for pooling entities to be reused later to avoid allocating new memory for creating new entities that are used often.. * Date: 13.1.2 * Time: 12:37 * * @author Kristaps Kohs */ public class PoolManager implements Manager { /** * Core class. */ private Core frameWorkCore; /** * Map containing Entity pool with specific id. */ private Map<Long, EntityPool> pooledEntities; /** * Flag indicating if manager has been initialized. */ private boolean initialized; /** * Period in which pool should be cleaned. */ private float removalPeriod; /** * Current time. */ private float accumulatedDelta; /** * Created new Entity Pool manager. * * @param frameWorkCore frameworks core. */ public PoolManager(Core frameWorkCore) { this.frameWorkCore = frameWorkCore; } private int singleMaxPoolSize = 512; @Override public void initialize() { if (initialized) { throw new IllegalStateException("Manager already initialized!"); } pooledEntities = new HashMap<Long, EntityPool>(); initialized = true; } /** * Creates or retrieves entity with given array of components from pool. * * @param components array of components * @return Entity from pool. */ protected Entity createPooledEntity(Class<?>... components) { if (!initialized) { throw new IllegalStateException("Manager not initialized"); } long componentIds = 0; for (Class<?> component : components) { componentIds |= frameWorkCore.getComponentId(component); } if (pooledEntities.containsKey(componentIds)) { Entity e = pooledEntities.get(componentIds).get(); if (e == null) { return createEntityWithComponents(components); } else { return e; } } else { pooledEntities.put(componentIds, new EntityPool(singleMaxPoolSize)); return createEntityWithComponents(components); } } /** * Destroys entity and returns it back to pool for later use. * * @param entity to be destroyed. */ public void destroyPooledEntity(Entity entity) { if (!initialized) { throw new IllegalStateException("Manager not initialized"); } final long componentIds = entity.getComponentsIds(); if (pooledEntities.containsKey(componentIds)) { pooledEntities.get(componentIds).put(entity); } else { pooledEntities.put(componentIds, new EntityPool(singleMaxPoolSize)); pooledEntities.get(componentIds).put(entity); } } private Entity createEntityWithComponents(Class<?>... components) { if (!initialized) { throw new IllegalStateException("Manager not initialized"); } Entity e = frameWorkCore.createEntity(); e.setPooled(true); for (Class<?> component : components) { try { e.addComponent(component.newInstance()); } catch (Exception e1) { throw new IllegalStateException("Failed to create pooled component", e1); } } return e; } /** * Cleans up entity pool if period delta > {@link PoolManager#removalPeriod}. * * @param delta time passed in milliseconds. 
*/ public void cleanUpPool(final float delta) { if (!initialized) { throw new IllegalStateException("Manager not initialized"); } if (removalPeriod <= 0) { return; } if ((accumulatedDelta += (delta * .001f)) >= removalPeriod) { for (EntityPool entityPool : pooledEntities.values()) { entityPool.releaseFromPool(); } accumulatedDelta = 0; } } /** * Sets removal period in seconds. * * @param removalPeriod period in seconds */ public void setRemovalPeriod(float removalPeriod) { this.removalPeriod = removalPeriod; } @Override public void dispose() { if (!initialized) { throw new IllegalStateException("Manager not initialized"); } pooledEntities.clear(); } public int getSingleMaxPoolSize() { return singleMaxPoolSize; } public void setSingleMaxPoolSize(int singleMaxPoolSize) { this.singleMaxPoolSize = singleMaxPoolSize; } }
kkohs/SkinnyElephant-EntityFramework
src/main/java/org/skinnyelephant/framework/core/PoolManager.java
Java
apache-2.0
5,388
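The core idea in PoolManager, recycling objects and clearing idle pools once an accumulated delta passes the removal period, can be shown without the framework's Core/Entity types. A self-contained, JDK-only sketch of that life cycle (not the framework's API):

import java.util.ArrayDeque;
import java.util.Deque;

public class TinyPoolSketch {
    private final Deque<StringBuilder> pool = new ArrayDeque<>();
    private final float removalPeriodSeconds;
    private float accumulatedDelta;

    TinyPoolSketch(float removalPeriodSeconds) {
        this.removalPeriodSeconds = removalPeriodSeconds;
    }

    StringBuilder obtain() {
        StringBuilder sb = pool.poll();
        return sb != null ? sb : new StringBuilder(); // reuse if possible, else allocate
    }

    void free(StringBuilder sb) {
        sb.setLength(0); // reset before returning to the pool
        pool.push(sb);
    }

    /** Mirrors cleanUpPool(delta): delta is milliseconds, the period is seconds. */
    void cleanUp(float deltaMillis) {
        if ((accumulatedDelta += deltaMillis * .001f) >= removalPeriodSeconds) {
            pool.clear();
            accumulatedDelta = 0;
        }
    }

    public static void main(String[] args) {
        TinyPoolSketch pool = new TinyPoolSketch(0.05f);
        StringBuilder a = pool.obtain();
        pool.free(a);
        System.out.println(pool.obtain() == a); // true: recycled, no new allocation
        pool.cleanUp(60);                       // 60 ms >= 50 ms period -> pool cleared
    }
}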
import {Component, EventEmitter, Input, Output} from '@angular/core' @Component({ template: require('./spinner.component.html'), selector: 'spinner' }) export class SpinnerComponent { @Input() value: number @Output() valueChange: EventEmitter<number> @Input() step: number = 1 timeout: number constructor() { this.valueChange = new EventEmitter<any>() } onChange(value: number) { this.value = value this.valueChange.emit(this.value) } startSpinUp() { this.timeout = window.setTimeout(() => { this.stepUp() this.startSpinUp() }, 100); } stopSpin() { clearTimeout(this.timeout) } startSpinDown() { this.timeout = window.setTimeout(() => { this.stepDown() this.startSpinDown() }, 100); } stepUp() { this.stepValue(this.step) } stepDown() { this.stepValue(-this.step) } stepValue(step: number) { this.value += step this.valueChange.emit(this.value) } }
timofeevda/seismic-beachballs-demo
app/src/components/spinner/spinner.component.ts
TypeScript
apache-2.0
1,091
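The spinner's press-and-hold behavior is a self-rescheduling 100 ms timer started on press and cleared on release. A Swing analogue of the same pattern, for illustration only (the component above is Angular/TypeScript):

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import java.awt.FlowLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

public class HoldToSpinSketch {
    private static int value = 0;

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JLabel label = new JLabel("0");
            // Fires every 100 ms while running, like startSpinUp's setTimeout loop.
            Timer repeat = new Timer(100, e -> label.setText(String.valueOf(++value)));

            JButton up = new JButton("+");
            up.addMouseListener(new MouseAdapter() {
                @Override public void mousePressed(MouseEvent e) { repeat.start(); }
                @Override public void mouseReleased(MouseEvent e) { repeat.stop(); }
            });

            JFrame frame = new JFrame("spinner");
            frame.setLayout(new FlowLayout());
            frame.add(up);
            frame.add(label);
            frame.pack();
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setVisible(true);
        });
    }
}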
<?php use yii\helpers\Html; use yii\grid\GridView; /* @var $this yii\web\View */ /* @var $dataProvider yii\data\ActiveDataProvider */ $this->title = Yii::t('app', 'Locations'); $this->params['breadcrumbs'][] = $this->title; ?> <div class="location-index"> <h1><?= Html::encode($this->title) ?></h1> <p> <?= Html::a(Yii::t('app', 'Create Location'), ['create'], ['class' => 'btn btn-success']) ?> </p> <?= GridView::widget([ 'dataProvider' => $dataProvider, 'columns' => [ ['class' => 'yii\grid\SerialColumn'], 'id', 'name', 'address:ntext', 'postcode', 'country', ['class' => 'yii\grid\ActionColumn'], ], ]); ?> </div>
yodathedark/moltis-tickets
tickets/views/location/index.php
PHP
apache-2.0
764
package ar.edu.ungs.presentation.componente;

public class DownloadButton extends IconButton {

	/** Serialization version for this Swing component. */
	private static final long serialVersionUID = 1L;

	public DownloadButton() {
		super();
		initialize("images/downloadIcon.png");
		setToolTipText("Descarga archivo"); // "Download file"
	}

}
alefherrera/sisalud
SiSaludSRL/src/main/java/ar/edu/ungs/presentation/componente/DownloadButton.java
Java
apache-2.0
284
// Package id provides an interface that defines a random N-byte id generator,
// and an implementation of the ID interface.
package id

import (
	"github.com/Sirupsen/logrus"
)

const (
	// DefaultSize is the default size of generated IDs.
	DefaultSize int = 16
)

var log = logrus.New()

func init() {
	log.Debugln("Initialized id")
}

// ID interface provides functionality for creating bufferizable IDs to send
// in packets.
type ID interface {
	ServeIDs(chan ID) // sends IDs through the provided channel
	Equals(ID) bool // tells whether or not two ids are equal
	GetBytes() []byte // returns a byte-slice representation of the ID
	GetLengthInBytes() int // returns the number of bytes needed for the ID
	SetLength(int) // sets the number of bytes an ID uses
	CreateFromBytes([]byte) (ID, error)
	GetZeroID() (ID, error)
	createID() (ID, error)
}
KodyKantor/p2p-gossip
id/id.go
Go
apache-2.0
876
package com.itfvck.wechatframework.api.coupon.location.model; import com.itfvck.wechatframework.core.common.BaseData; public class CardInfo extends BaseData { private static final long serialVersionUID = 241391040579231164L; // 卡券类型 private String card_type; // 基本的卡券数据,见下表,所有卡券通用,描述 private GeneralCoupon general_coupon; // 文本卡卷 private Groupon groupon; // 礼品卡卷 private Gift gift; // 代金卡卷 private Cash cash; // 折扣卡卷 private Discount discount; // 积分卷 private MemberCard member_card; // 票类型 private ScenicTicket scenic_ticket; // 电影卡卷 private MovieTicket movie_ticket; // 机票卡卷 private BoardingPass boarding_pass; // 红包 private LuckyMoney lucky_money; // 会议详情 private MeetingTicket meeting_ticket; public String getCard_type() { return card_type; } public void setCard_type(String card_type) { this.card_type = card_type; } public GeneralCoupon getGeneral_coupon() { return general_coupon; } public void setGeneral_coupon(GeneralCoupon general_coupon) { this.general_coupon = general_coupon; } public Groupon getGroupon() { return groupon; } public void setGroupon(Groupon groupon) { this.groupon = groupon; } public Gift getGift() { return gift; } public void setGift(Gift gift) { this.gift = gift; } public Cash getCash() { return cash; } public void setCash(Cash cash) { this.cash = cash; } public Discount getDiscount() { return discount; } public void setDiscount(Discount discount) { this.discount = discount; } public MemberCard getMember_card() { return member_card; } public void setMember_card(MemberCard member_card) { this.member_card = member_card; } public ScenicTicket getScenic_ticket() { return scenic_ticket; } public void setScenic_ticket(ScenicTicket scenic_ticket) { this.scenic_ticket = scenic_ticket; } public MovieTicket getMovie_ticket() { return movie_ticket; } public void setMovie_ticket(MovieTicket movie_ticket) { this.movie_ticket = movie_ticket; } public BoardingPass getBoarding_pass() { return boarding_pass; } public void setBoarding_pass(BoardingPass boarding_pass) { this.boarding_pass = boarding_pass; } public LuckyMoney getLucky_money() { return lucky_money; } public void setLucky_money(LuckyMoney lucky_money) { this.lucky_money = lucky_money; } public MeetingTicket getMeeting_ticket() { return meeting_ticket; } public void setMeeting_ticket(MeetingTicket meeting_ticket) { this.meeting_ticket = meeting_ticket; } }
vcdemon/wechat-framework
wechat-api/src/main/java/com/itfvck/wechatframework/api/coupon/location/model/CardInfo.java
Java
apache-2.0
2,627
package android.widget; import android.content.Context; import android.graphics.Canvas; import android.util.AttributeSet; import android.view.MotionEvent; import android.widget.SeekBar; public class VerticalSeekBar extends SeekBar { public VerticalSeekBar(Context context) { super(context); } public VerticalSeekBar(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } public VerticalSeekBar(Context context, AttributeSet attrs) { super(context, attrs); } protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(h, w, oldh, oldw); } @Override public synchronized void setProgress(int progress) // it is necessary for calling setProgress on click of a button { super.setProgress(progress); onSizeChanged(getWidth(), getHeight(), 0, 0); } @Override protected synchronized void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(heightMeasureSpec, widthMeasureSpec); setMeasuredDimension(getMeasuredHeight(), getMeasuredWidth()); } protected void onDraw(Canvas c) { c.rotate(-90); c.translate(-getHeight(), 0); super.onDraw(c); } @Override public boolean onTouchEvent(MotionEvent event) { if (!isEnabled()) { return false; } switch (event.getAction()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_UP: setProgress(getMax() - (int) (getMax() * event.getY() / getHeight())); onSizeChanged(getWidth(), getHeight(), 0, 0); break; case MotionEvent.ACTION_CANCEL: break; } return true; } }
EspLight/EspLight-APP
app/src/main/java/android/widget/VerticalSeekBar.java
Java
apache-2.0
1,851
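A hedged usage sketch for the rotated seek bar above, written as a programmatically built Android activity; BrightnessActivity and the layout sizes are assumptions, not part of the EspLight app. Because the component swaps width and height, it should be given a tall, narrow slot:

import android.app.Activity;
import android.os.Bundle;
import android.widget.LinearLayout;
import android.widget.VerticalSeekBar;

public class BrightnessActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        VerticalSeekBar slider = new VerticalSeekBar(this);
        slider.setMax(255);      // inherited SeekBar API
        slider.setProgress(128); // also re-triggers onSizeChanged, per the override above

        LinearLayout root = new LinearLayout(this);
        // Tall, narrow slot: the component draws rotated -90 degrees inside it.
        root.addView(slider, new LinearLayout.LayoutParams(80, 400));
        setContentView(root);
    }
}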
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.navigation; import com.intellij.codeInsight.CodeInsightActionHandler; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.daemon.DaemonBundle; import com.intellij.codeInsight.daemon.impl.PsiElementListNavigator; import com.intellij.ide.util.MethodCellRenderer; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.project.Project; import com.intellij.psi.*; import com.intellij.psi.util.PsiSuperMethodUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; public class JavaGotoSuperHandler implements CodeInsightActionHandler { public void invoke(@NotNull final Project project, @NotNull final Editor editor, @NotNull final PsiFile file) { int offset = editor.getCaretModel().getOffset(); PsiElement[] superElements = findSuperElements(file, offset); if (superElements == null || superElements.length == 0) return; if (superElements.length == 1) { PsiElement superElement = superElements[0].getNavigationElement(); OpenFileDescriptor descriptor = new OpenFileDescriptor(project, superElement.getContainingFile().getVirtualFile(), superElement.getTextOffset()); FileEditorManager.getInstance(project).openTextEditor(descriptor, true); } else { if (superElements[0] instanceof PsiMethod) { boolean showMethodNames = !PsiUtil.allMethodsHaveSameSignature((PsiMethod[])superElements); PsiElementListNavigator.openTargets(editor, (PsiMethod[])superElements, CodeInsightBundle.message("goto.super.method.chooser.title"), new MethodCellRenderer(showMethodNames)); } else { NavigationUtil.getPsiElementPopup(superElements, CodeInsightBundle.message("goto.super.class.chooser.title")).showInBestPositionFor(editor); } } } @Nullable private static PsiElement[] findSuperElements(PsiFile file, int offset) { PsiElement element = file.findElementAt(offset); if (element == null) return null; PsiMember e = PsiTreeUtil.getParentOfType(element, PsiMethod.class, PsiClass.class); if (e instanceof PsiClass) { PsiClass aClass = (PsiClass) e; List<PsiClass> allSupers = new ArrayList<PsiClass>(Arrays.asList(aClass.getSupers())); for (Iterator<PsiClass> iterator = allSupers.iterator(); iterator.hasNext();) { PsiClass superClass = iterator.next(); if ("java.lang.Object".equals(superClass.getQualifiedName())) iterator.remove(); } return allSupers.toArray(new PsiClass[allSupers.size()]); } else if (e instanceof PsiMethod) { PsiMethod method = (PsiMethod) e; if (method.isConstructor()) { PsiMethod constructorInSuper = PsiSuperMethodUtil.findConstructorInSuper(method); if (constructorInSuper != null) { return new PsiMethod[]{constructorInSuper}; } } else { 
return method.findSuperMethods(false); } } return null; } public boolean startInWriteAction() { return false; } }
joewalnes/idea-community
java/java-impl/src/com/intellij/codeInsight/navigation/JavaGotoSuperHandler.java
Java
apache-2.0
4,019
/** * Original work: copyright 1999-2004 The Apache Software Foundation * (http://www.apache.org/) * * This project is based on the work licensed to the Apache Software * Foundation (ASF) under one or more contributor license agreements. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * Modified work: copyright 2013-2022 Valery Silaev (http://vsilaev.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.javaflow.providers.asm5; import static org.objectweb.asm.Opcodes.*; import java.util.List; import org.objectweb.asm.Handle; import org.objectweb.asm.Label; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import org.objectweb.asm.tree.AbstractInsnNode; import org.objectweb.asm.tree.InvokeDynamicInsnNode; import org.objectweb.asm.tree.MethodInsnNode; import org.objectweb.asm.tree.analysis.BasicValue; import org.objectweb.asm.tree.analysis.Frame; class ContinuableMethodVisitor extends MethodVisitor { private static final String STACK_RECORDER = "org/apache/commons/javaflow/core/StackRecorder"; private static final String POP_METHOD = "pop"; private static final String PUSH_METHOD = "push"; private final ContinuableMethodNode methodNode; private final Label startLabel = new Label(); private final List<Label> labels; private final List<AbstractInsnNode> nodes; private final int stackRecorderVar; private int currentIndex = 0; private Frame currentFrame = null; ContinuableMethodVisitor(int api, ContinuableMethodNode a) { super(api, a.mv); this.methodNode = a; this.labels = a.labels; this.nodes = a.nodes; this.stackRecorderVar = a.stackRecorderVar; } private static Type[] getArgumentTypes(AbstractInsnNode node) { if (node instanceof MethodInsnNode) { MethodInsnNode mnode = (MethodInsnNode)node; return Type.getArgumentTypes(mnode.desc); } else { InvokeDynamicInsnNode mnode = (InvokeDynamicInsnNode)node; return Type.getArgumentTypes(mnode.desc); } } private static int getOwnerSize(AbstractInsnNode node) { if (node instanceof MethodInsnNode) { return node.getOpcode() == INVOKESTATIC ? 
0 : 1; } else { // INVOKEDYNAMIC return 0; } } @Override public void visitCode() { mv.visitCode(); int fsize = labels.size(); Label[] restoreLabels = new Label[fsize]; for (int i = 0; i < restoreLabels.length; i++) { restoreLabels[i] = new Label(); } // verify if restoring Label l0 = new Label(); // PC: StackRecorder stackRecorder = StackRecorder.get(); mv.visitMethodInsn(INVOKESTATIC, STACK_RECORDER, "get", "()L" + STACK_RECORDER + ";", false); mv.visitInsn(DUP); mv.visitVarInsn(ASTORE, stackRecorderVar); mv.visitLabel(startLabel); // PC: if (stackRecorder != null && !stackRecorder.isRestoring) { mv.visitJumpInsn(IFNULL, l0); mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitFieldInsn(GETFIELD, STACK_RECORDER, "isRestoring", "Z"); mv.visitJumpInsn(IFEQ, l0); mv.visitVarInsn(ALOAD, stackRecorderVar); // PC: stackRecorder.popInt(); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, "popInt", "()I", false); mv.visitTableSwitchInsn(0, fsize - 1, l0, restoreLabels); // switch cases for (int i = 0; i < fsize; i++) { Label frameLabel = (Label) labels.get(i); mv.visitLabel(restoreLabels[i]); AbstractInsnNode mnode = (AbstractInsnNode) nodes.get(i); //Frame frame = analyzer.getFrames()[methodNode.getIndex(mnode)]; Frame frame = methodNode.getFrameByNode(mnode); // for each local variable store the value in locals popping it from the stack! // locals int lsize = frame.getLocals(); for (int j = lsize - 1; j >= 0; j--) { BasicValue value = (BasicValue) frame.getLocal(j); if (isNull(value)) { mv.visitInsn(ACONST_NULL); mv.visitVarInsn(ASTORE, j); } else if (value == BasicValue.UNINITIALIZED_VALUE) { // TODO ?? } else if (value == BasicValue.RETURNADDRESS_VALUE) { // TODO ?? } else { mv.visitVarInsn(ALOAD, stackRecorderVar); Type type = value.getType(); if (value.isReference()) { mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Object", "()Ljava/lang/Object;", false); Type t = value.getType(); String desc = t.getDescriptor(); if (desc.charAt(0) == '[') { mv.visitTypeInsn(CHECKCAST, desc); } else { mv.visitTypeInsn(CHECKCAST, t.getInternalName()); } mv.visitVarInsn(ASTORE, j); } else { mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPopMethod(type), "()" + type.getDescriptor(), false); mv.visitVarInsn(type.getOpcode(ISTORE), j); } } } if (frame instanceof MonitoringFrame) { int[] monitoredLocals = ((MonitoringFrame) frame).getMonitored(); //System.out.println(System.identityHashCode(frame)+" AMonitored locals "+monitoredLocals.length); for (int j = 0; j < monitoredLocals.length; j++) { //System.out.println(System.identityHashCode(frame)+" AMonitored local "+monitoredLocals[j]); mv.visitVarInsn(ALOAD, monitoredLocals[j]); mv.visitInsn(MONITORENTER); } } // stack Type[] paramTypes = getArgumentTypes(mnode); int argSize = paramTypes.length; int ownerSize = getOwnerSize(mnode); int initSize = mnode.getOpcode() == INVOKESPECIAL && MethodInsnNode.class.cast(mnode).name.equals("<init>") ? 2 : 0; int ssize = frame.getStackSize(); for (int j = 0; j < ssize - argSize - ownerSize - initSize; j++) { BasicValue value = (BasicValue) frame.getStack(j); if (isNull(value)) { mv.visitInsn(ACONST_NULL); } else if (value == BasicValue.UNINITIALIZED_VALUE) { // TODO ?? } else if (value == BasicValue.RETURNADDRESS_VALUE) { // TODO ?? 
} else if (value.isReference()) { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Object", "()Ljava/lang/Object;", false); mv.visitTypeInsn(CHECKCAST, value.getType().getInternalName()); } else { Type type = value.getType(); mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPopMethod(type), "()" + type.getDescriptor(), false); } } if (ownerSize > 0) { // Load the object whose method we are calling BasicValue value = ((BasicValue) frame.getStack(ssize - argSize - 1)); if (isNull(value)) { // If user code causes NPE, then we keep this behavior: load null to get NPE at runtime mv.visitInsn(ACONST_NULL); } else { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, POP_METHOD + "Reference", "()Ljava/lang/Object;", false); mv.visitTypeInsn(CHECKCAST, value.getType().getInternalName()); } } // Create null types for the parameters of the method invocation for (int j = 0; j < argSize; j++) { pushDefault(paramTypes[j]); } // continue to the next method mv.visitJumpInsn(GOTO, frameLabel); } // PC: } // end of start block mv.visitLabel(l0); } @Override public void visitLabel(Label label) { if (currentIndex < labels.size() && label == labels.get(currentIndex)) { //int i = methodNode.getIndex((AbstractInsnNode)nodes.get(currentIndex)); //currentFrame = analyzer.getFrames()[i]; currentFrame = methodNode.getFrameByNode(nodes.get(currentIndex)); } mv.visitLabel(label); } @Override public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) { mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); visitCall(Opcodes.INVOKEDYNAMIC, desc); } @Override public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean intf) { mv.visitMethodInsn(opcode, owner, name, desc, intf); visitCall(opcode, desc); } @Deprecated @Override public void visitMethodInsn(int opcode, String owner, String name, String desc) { mv.visitMethodInsn(opcode, owner, name, desc); visitCall(opcode, desc); } private void visitCall(int opcode, String desc) { if (currentFrame != null) { Label fl = new Label(); mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitJumpInsn(IFNULL, fl); mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitFieldInsn(GETFIELD, STACK_RECORDER, "isCapturing", "Z"); mv.visitJumpInsn(IFEQ, fl); // save stack Type returnType = Type.getReturnType(desc); boolean hasReturn = returnType != Type.VOID_TYPE; if (hasReturn) { mv.visitInsn(returnType.getSize() == 1 ? POP : POP2); } Type[] params = Type.getArgumentTypes(desc); int argSize = params.length; int ownerSize = opcode == INVOKESTATIC || opcode == INVOKEDYNAMIC ? 0 : 1; // TODO int ssize = currentFrame.getStackSize() - argSize - ownerSize; for (int i = ssize - 1; i >= 0; i--) { BasicValue value = (BasicValue) currentFrame.getStack(i); if (isNull(value)) { mv.visitInsn(POP); } else if (value == BasicValue.UNINITIALIZED_VALUE) { // TODO ?? 
} else if (value.isReference()) { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitInsn(SWAP); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Object", "(Ljava/lang/Object;)V", false); } else { Type type = value.getType(); if (type.getSize() > 1) { mv.visitInsn(ACONST_NULL); // dummy stack entry mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitInsn(DUP2_X2); // swap2 for long/double mv.visitInsn(POP2); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false); mv.visitInsn(POP); // remove dummy stack entry } else { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitInsn(SWAP); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false); } } } boolean isInstanceMethod = (methodNode.access & ACC_STATIC) == 0; if (isInstanceMethod) { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitVarInsn(ALOAD, 0); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Reference", "(Ljava/lang/Object;)V", false); } // save locals int fsize = currentFrame.getLocals(); for (int j = 0; j < fsize; j++) { BasicValue value = (BasicValue) currentFrame.getLocal(j); if (isNull(value)) { // no need to save null } else if (value == BasicValue.UNINITIALIZED_VALUE) { // no need to save uninitialized objects } else if (value.isReference()) { mv.visitVarInsn(ALOAD, stackRecorderVar); mv.visitVarInsn(ALOAD, j); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, PUSH_METHOD + "Object", "(Ljava/lang/Object;)V", false); } else { mv.visitVarInsn(ALOAD, stackRecorderVar); Type type = value.getType(); mv.visitVarInsn(type.getOpcode(ILOAD), j); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, getPushMethod(type), "(" + type.getDescriptor() + ")V", false); } } mv.visitVarInsn(ALOAD, stackRecorderVar); if (currentIndex <= 5) mv.visitInsn(ICONST_0 + currentIndex); else mv.visitIntInsn(SIPUSH, currentIndex); mv.visitMethodInsn(INVOKEVIRTUAL, STACK_RECORDER, "pushInt", "(I)V", false); if (currentFrame instanceof MonitoringFrame) { int[] monitoredLocals = ((MonitoringFrame) currentFrame).getMonitored(); //System.out.println(System.identityHashCode(currentFrame)+" Monitored locals "+monitoredLocals.length); for (int j = 0; j < monitoredLocals.length; j++) { //System.out.println(System.identityHashCode(currentFrame)+" Monitored local "+monitoredLocals[j]); mv.visitVarInsn(ALOAD, monitoredLocals[j]); mv.visitInsn(MONITOREXIT); } } Type methodReturnType = Type.getReturnType(methodNode.desc); pushDefault(methodReturnType); mv.visitInsn(methodReturnType.getOpcode(IRETURN)); mv.visitLabel(fl); currentIndex++; currentFrame = null; } } @Override public void visitMaxs(int maxStack, int maxLocals) { Label endLabel = new Label(); mv.visitLabel(endLabel); mv.visitLocalVariable("__stackRecorder", "L" + STACK_RECORDER + ";", null, startLabel, endLabel, stackRecorderVar); mv.visitMaxs(maxStack, maxLocals + 1); //was mv.visitMaxs(0, 0); } private static boolean isNull(BasicValue value) { if (null == value) return true; if (!value.isReference()) return false; Type type = value.getType(); return "Lnull;".equals(type.getDescriptor()); } private void pushDefault(Type type) { switch (type.getSort()) { case Type.VOID: break; case Type.DOUBLE: mv.visitInsn(DCONST_0); break; case Type.LONG: mv.visitInsn(LCONST_0); break; case Type.FLOAT: mv.visitInsn(FCONST_0); break; case Type.OBJECT: case Type.ARRAY: mv.visitInsn(ACONST_NULL); break; default: mv.visitInsn(ICONST_0); break; } } private static String[] SUFFIXES = { "Object", 
// 0 void "Int", // 1 boolean "Int", // 2 char "Int", // 3 byte "Int", // 4 short "Int", // 5 int "Float", // 6 float "Long", // 7 long "Double", // 8 double "Object", // 9 array "Object", // 10 object }; private static String getPopMethod(Type type) { return POP_METHOD + SUFFIXES[type.getSort()]; } private static String getPushMethod(Type type) { return PUSH_METHOD + SUFFIXES[type.getSort()]; } }
vsilaev/tascalate-javaflow
net.tascalate.javaflow.providers.asm5/src/main/java/org/apache/commons/javaflow/providers/asm5/ContinuableMethodVisitor.java
Java
apache-2.0
17,135
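The visitor above is dense because it re-materializes stack and locals around every call site, but the underlying ASM mechanism is just a chained MethodVisitor emitting extra instructions from visitCode. A minimal self-contained sketch of that mechanism, injecting a print at every method entry (assumes ASM 5.x on the classpath; this is not javaflow's actual transformation):

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

public class EntryTraceSketch {
    static byte[] instrument(byte[] classfile) {
        ClassReader reader = new ClassReader(classfile);
        ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
        reader.accept(new ClassVisitor(Opcodes.ASM5, writer) {
            @Override
            public MethodVisitor visitMethod(int access, String name, String desc,
                                             String signature, String[] exceptions) {
                MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
                return new MethodVisitor(Opcodes.ASM5, mv) {
                    @Override
                    public void visitCode() {
                        super.visitCode();
                        // Injected preamble: System.out.println("enter");
                        super.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System",
                                "out", "Ljava/io/PrintStream;");
                        super.visitLdcInsn("enter");
                        super.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
                                "println", "(Ljava/lang/String;)V", false);
                    }
                };
            }
        }, 0);
        return writer.toByteArray();
    }
}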
#include "DrowsyDriverModel.h" #include "BeliefSet.h" #include <iostream> #include <list> #include <fstream> #include <cstdlib> using namespace std; int main(int argc, char** argv) { DrowsyDriverModel hmshsmodel; int nBeliefs = 50000; CState initCState(hmshsmodel.getNumCStateVar(), 0); DState initDState = 0; vector<double> initQprob(hmshsmodel.getNumDState(), 0); initQprob[0] = 1.0; BeliefSet RandomBeliefs(nBeliefs); RandomBeliefs.SampleBelief(hmshsmodel, initCState, initDState, initQprob); return 0; }
ppnathan/HMSHS
problems/DrowsyDriverModel/BeliefsCollection.cc
C++
apache-2.0
565
<?php

use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
use Illuminate\Support\Facades\Schema;

class CreateUsersTable extends Migration {

	/**
	 * Run the migrations.
	 *
	 * @return void
	 */
	public function up()
	{
		Schema::dropIfExists('users');
		Schema::create('users', function($table)
		{
			$table->increments('id', 5);
			$table->string('email', 60);
			$table->string('password', 60);
			$table->string('name', 20)->nullable();
			$table->string('remember_token', 100)->nullable();
			$table->timestamp('last_visit');
			$table->timestamps();

			$table->unique('email');
		});
	}

	/**
	 * Reverse the migrations.
	 *
	 * @return void
	 */
	public function down()
	{
		Schema::dropIfExists('users');
	}

}
ilfate/laravel.ilfate.net
app/database/migrations/2014_11_20_152233_create_users_table.php
PHP
apache-2.0
836
import type { IChangesObject, IComponentOptions, IController, IOnChangesObject } from 'angular'; import { module } from 'angular'; import type { INavigationPage } from './PageNavigationState'; import { PageNavigationState } from './PageNavigationState'; interface IPageSectionOnChanges extends IOnChangesObject { visible: IChangesObject<boolean>; label: IChangesObject<string>; badge: IChangesObject<string>; } class PageSectionController implements IController { public key: string; public label: string; public badge: string; public visible: boolean; public noWrapper: boolean; private pageConfig: INavigationPage; public $onInit(): void { this.visible = this.visible !== false; this.pageConfig = { key: this.key, label: this.label, visible: this.visible, badge: this.badge, }; PageNavigationState.registerPage(this.pageConfig); } public $onChanges(changes: IPageSectionOnChanges): void { if (changes.visible && !changes.visible.isFirstChange()) { this.pageConfig.visible = changes.visible.currentValue; } if (changes.label && !changes.label.isFirstChange()) { this.pageConfig.label = changes.label.currentValue; } if (changes.badge && !changes.badge.isFirstChange()) { this.pageConfig.badge = changes.badge.currentValue; } } } const pageSectionComponent: IComponentOptions = { bindings: { key: '@', label: '@', badge: '<', visible: '<', noWrapper: '<', }, controller: PageSectionController, transclude: true, template: ` <div ng-if="$ctrl.pageConfig.visible" class="page-subheading flex-1" data-page-id="{{$ctrl.pageConfig.key}}"> <h4 class="sticky-header">{{$ctrl.pageConfig.label}}</h4> <div ng-class="$ctrl.noWrapper ? 'no-wrapper' : 'section-body'" data-page-content="{{$ctrl.pageConfig.key}}" ng-transclude></div> </div> `, }; export const PAGE_SECTION_COMPONENT = 'spinnaker.core.presentation.navigation.pageSection'; module(PAGE_SECTION_COMPONENT, []).component('pageSection', pageSectionComponent);
spinnaker/deck
packages/core/src/presentation/navigation/pageSection.component.ts
TypeScript
apache-2.0
2,078
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.execution.buffer; import com.facebook.presto.OutputBuffers; import com.facebook.presto.OutputBuffers.OutputBufferId; import com.facebook.presto.block.BlockAssertions; import com.facebook.presto.operator.PageAssertions; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.units.DataSize; import io.airlift.units.Duration; import java.util.List; import java.util.Optional; import java.util.concurrent.Future; import java.util.stream.Collectors; import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde; import static com.google.common.base.Preconditions.checkArgument; import static io.airlift.concurrent.MoreFutures.tryGetFutureValue; import static io.airlift.units.DataSize.Unit.BYTE; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; public final class BufferTestUtils { private BufferTestUtils() {} static final PagesSerde PAGES_SERDE = testingPagesSerde(); static final Duration NO_WAIT = new Duration(0, MILLISECONDS); static final Duration MAX_WAIT = new Duration(1, SECONDS); private static final DataSize BUFFERED_PAGE_SIZE = new DataSize(PAGES_SERDE.serialize(createPage(42)).getRetainedSizeInBytes(), BYTE); static BufferResult getFuture(ListenableFuture<BufferResult> future, Duration maxWait) { Optional<BufferResult> bufferResult = tryGetFutureValue(future, (int) maxWait.toMillis(), MILLISECONDS); checkArgument(bufferResult.isPresent(), "bufferResult is empty"); return bufferResult.get(); } static void assertBufferResultEquals(List<? 
extends Type> types, BufferResult actual, BufferResult expected) { assertEquals(actual.getSerializedPages().size(), expected.getSerializedPages().size(), "page count"); assertEquals(actual.getToken(), expected.getToken(), "token"); for (int i = 0; i < actual.getSerializedPages().size(); i++) { Page actualPage = PAGES_SERDE.deserialize(actual.getSerializedPages().get(i)); Page expectedPage = PAGES_SERDE.deserialize(expected.getSerializedPages().get(i)); assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount()); PageAssertions.assertPageEquals(types, actualPage, expectedPage); } assertEquals(actual.isBufferComplete(), expected.isBufferComplete(), "buffer complete"); } static BufferResult createBufferResult(String bufferId, long token, List<Page> pages) { checkArgument(!pages.isEmpty(), "pages is empty"); return new BufferResult( bufferId, token, token + pages.size(), false, pages.stream() .map(PAGES_SERDE::serialize) .collect(Collectors.toList())); } public static Page createPage(int i) { return new Page(BlockAssertions.createLongsBlock(i)); } static DataSize sizeOfPages(int count) { return new DataSize(BUFFERED_PAGE_SIZE.toBytes() * count, BYTE); } static BufferResult getBufferResult(OutputBuffer buffer, OutputBufferId bufferId, long sequenceId, DataSize maxSize, Duration maxWait) { ListenableFuture<BufferResult> future = buffer.get(bufferId, sequenceId, maxSize); return getFuture(future, maxWait); } // TODO: remove this after PR #7987 is landed static void acknowledgeBufferResult(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, long sequenceId) { buffer.acknowledge(bufferId, sequenceId); } static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page) { ListenableFuture<?> future = buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page))); assertFalse(future.isDone()); return future; } static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page, int partition) { ListenableFuture<?> future = buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page))); assertFalse(future.isDone()); return future; } public static void addPage(OutputBuffer buffer, Page page) { assertTrue(buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page))).isDone(), "Expected add page to not block"); } public static void addPage(OutputBuffer buffer, Page page, int partition) { assertTrue(buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page))).isDone(), "Expected add page to not block"); } static void assertQueueState( OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, int bufferedPages, int pagesSent) { assertEquals( getBufferInfo(buffer, bufferId), new BufferInfo( bufferId, false, bufferedPages, pagesSent, new PageBufferInfo( bufferId.getId(), bufferedPages, sizeOfPages(bufferedPages).toBytes(), bufferedPages + pagesSent, // every page has one row bufferedPages + pagesSent))); } static void assertQueueState( OutputBuffer buffer, int unassignedPages, OutputBuffers.OutputBufferId bufferId, int bufferedPages, int pagesSent) { OutputBufferInfo outputBufferInfo = buffer.getInfo(); long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum(); assertEquals( outputBufferInfo.getTotalBufferedPages() - assignedPages, unassignedPages, "unassignedPages"); BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream() .filter(info -> info.getBufferId().equals(bufferId)) .findAny() .orElse(null); assertEquals( bufferInfo, new BufferInfo( bufferId, false, bufferedPages, pagesSent, new PageBufferInfo( 
bufferId.getId(), bufferedPages, sizeOfPages(bufferedPages).toBytes(), bufferedPages + pagesSent, // every page has one row bufferedPages + pagesSent))); } @SuppressWarnings("ConstantConditions") static void assertQueueClosed(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, int pagesSent) { BufferInfo bufferInfo = getBufferInfo(buffer, bufferId); assertEquals(bufferInfo.getBufferedPages(), 0); assertEquals(bufferInfo.getPagesSent(), pagesSent); assertEquals(bufferInfo.isFinished(), true); } @SuppressWarnings("ConstantConditions") static void assertQueueClosed(OutputBuffer buffer, int unassignedPages, OutputBuffers.OutputBufferId bufferId, int pagesSent) { OutputBufferInfo outputBufferInfo = buffer.getInfo(); long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum(); assertEquals( outputBufferInfo.getTotalBufferedPages() - assignedPages, unassignedPages, "unassignedPages"); BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream() .filter(info -> info.getBufferId().equals(bufferId)) .findAny() .orElse(null); assertEquals(bufferInfo.getBufferedPages(), 0); assertEquals(bufferInfo.getPagesSent(), pagesSent); assertEquals(bufferInfo.isFinished(), true); } static void assertFinished(OutputBuffer buffer) { assertTrue(buffer.isFinished()); for (BufferInfo bufferInfo : buffer.getInfo().getBuffers()) { assertTrue(bufferInfo.isFinished()); assertEquals(bufferInfo.getBufferedPages(), 0); } } static void assertFutureIsDone(Future<?> future) { tryGetFutureValue(future, 5, SECONDS); assertTrue(future.isDone()); } private static BufferInfo getBufferInfo(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId) { for (BufferInfo bufferInfo : buffer.getInfo().getBuffers()) { if (bufferInfo.getBufferId().equals(bufferId)) { return bufferInfo; } } return null; } }
yuananf/presto
presto-main/src/test/java/com/facebook/presto/execution/buffer/BufferTestUtils.java
Java
apache-2.0
9,646
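The helpers in the file above compose into short buffer round-trip tests. The following is a minimal sketch, not taken from the Presto test suite itself: it assumes it lives in the same package as the utility class (several helpers are package-private), and the OutputBuffer instance and its OutputBufferId are hypothetical stand-ins supplied by the caller. Needed imports: io.airlift.units.Duration and java.util.concurrent.TimeUnit.

// Hedged usage sketch; `buffer` is any concrete OutputBuffer under test.
static void roundTrip(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId)
{
    Page page = createPage(1);                  // one-row page holding the long value 1
    addPage(buffer, page);                      // asserts the enqueue completes immediately
    assertQueueState(buffer, bufferId, 1, 0);   // one page buffered, none sent yet
    BufferResult result = getBufferResult(buffer, bufferId, 0L, sizeOfPages(10), new Duration(1, TimeUnit.SECONDS));
    // Acknowledge everything read so far; the next sequence id is token + page count.
    acknowledgeBufferResult(buffer, bufferId, result.getToken() + result.getSerializedPages().size());
    assertQueueState(buffer, bufferId, 0, 1);   // the acknowledged page now counts as sent
}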
require 'ecloud/spec_helper' if Fog.mocking? describe "Fog::Compute::Ecloud::BackupInternetService", :type => :mock_tmrk_ecloud_model do subject { @vcloud.vdcs[0].backup_internet_services[0] } describe :class do subject { Fog::Compute::Ecloud::BackupInternetService } it { should have_identity(:href) } it { should have_only_these_attributes([:href, :name, :id, :protocol, :enabled, :description, :timeout, :redirect_url, :monitor]) } end context "with no uri" do subject { Fog::Compute::Ecloud::BackupInternetService.new() } it { should have_all_attributes_be_nil } end context "as a collection member" do subject { @vcloud.vdcs[0].backup_internet_services[0].reload } let(:composed_service_data) { @vcloud.vdcs[0].backup_internet_services[0].send(:_compose_service_data) } it { should be_an_instance_of(Fog::Compute::Ecloud::BackupInternetService) } its(:href) { should == @mock_backup_service.href } its(:identity) { should == @mock_backup_service.href } its(:name) { should == @mock_backup_service.name } its(:id) { should == @mock_backup_service.object_id.to_s } its(:protocol) { should == @mock_backup_service.protocol } its(:enabled) { should == @mock_backup_service.enabled.to_s } its(:description) { should == @mock_backup_service.description } its(:timeout) { should == @mock_backup_service.timeout.to_s } its(:redirect_url) { should == (@mock_backup_service.redirect_url || "") } its(:monitor) { should == nil } specify { composed_service_data[:href].should == subject.href.to_s } specify { composed_service_data[:name].should == subject.name } specify { composed_service_data[:id].should == subject.id.to_s } specify { composed_service_data[:protocol].should == subject.protocol } specify { composed_service_data[:enabled].should == subject.enabled.to_s } specify { composed_service_data[:description].should == subject.description } specify { composed_service_data[:timeout].should == subject.timeout.to_s } end end end
krobertson/knife-xenserver
vendor/fog/spec/ecloud/models/backup_internet_service_spec.rb
Ruby
apache-2.0
2,266
/* * Copyright 2017, Sascha Häberling * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.retrostore.resources; import java.util.Optional; /** * Functionality around the image service. */ public interface ImageServiceWrapper { int DEFAULT_SCREENSHOT_SIZE = 800; /** * Returns a URL that serves the image with the given blob key. * * @param blobKey the blob key of the image to serve. * @param imageSize the maximum size of the longest side. * @return The URL to serve the image in the given size. */ Optional<String> getServingUrl(String blobKey, int imageSize); /** * Like {@link #getServingUrl(String, int)} but with a default size. */ Optional<String> getServingUrl(String blobKey); }
shaeberling/retrostore
appengine/src/main/java/org/retrostore/resources/ImageServiceWrapper.java
Java
apache-2.0
1,268
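A hedged sketch of a trivial implementation of the interface above, e.g. for tests. The host name and the "=s<size>" suffix are illustrative assumptions, not the real serving-URL format; a production implementation would delegate to the App Engine image service this interface wraps.

import java.util.Optional;

// Hypothetical in-memory implementation for tests; URL shape is an assumption.
public class FakeImageServiceWrapper implements ImageServiceWrapper {
  @Override
  public Optional<String> getServingUrl(String blobKey, int imageSize) {
    if (blobKey == null || blobKey.isEmpty()) {
      return Optional.empty();
    }
    // Encode the requested size the way a serving URL plausibly would.
    return Optional.of("https://images.example.com/" + blobKey + "=s" + imageSize);
  }

  @Override
  public Optional<String> getServingUrl(String blobKey) {
    // Fall back to the default size, mirroring the interface's documented intent.
    return getServingUrl(blobKey, DEFAULT_SCREENSHOT_SIZE);
  }
}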
/* * Copyright 2010-2013 Ning, Inc. * Copyright 2014-2015 Groupon, Inc * Copyright 2014-2015 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.invoice; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.UUID; import javax.annotation.Nullable; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.killbill.billing.ErrorCode; import org.killbill.billing.account.api.Account; import org.killbill.billing.account.api.AccountApiException; import org.killbill.billing.account.api.AccountInternalApi; import org.killbill.billing.callcontext.InternalCallContext; import org.killbill.billing.callcontext.InternalTenantContext; import org.killbill.billing.catalog.api.BillingActionPolicy; import org.killbill.billing.catalog.api.BillingMode; import org.killbill.billing.catalog.api.CatalogApiException; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.catalog.api.PlanPhasePriceOverride; import org.killbill.billing.catalog.api.PlanPhaseSpecifier; import org.killbill.billing.catalog.api.Usage; import org.killbill.billing.entitlement.api.SubscriptionEventType; import org.killbill.billing.events.BusInternalEvent; import org.killbill.billing.events.EffectiveSubscriptionInternalEvent; import org.killbill.billing.events.InvoiceAdjustmentInternalEvent; import org.killbill.billing.events.InvoiceInternalEvent; import org.killbill.billing.events.InvoiceNotificationInternalEvent; import org.killbill.billing.invoice.InvoiceDispatcher.FutureAccountNotifications.SubscriptionNotification; import org.killbill.billing.invoice.api.DefaultInvoiceService; import org.killbill.billing.invoice.api.DryRunArguments; import org.killbill.billing.invoice.api.Invoice; import org.killbill.billing.invoice.api.InvoiceApiException; import org.killbill.billing.invoice.api.InvoiceItem; import org.killbill.billing.invoice.api.InvoiceItemType; import org.killbill.billing.invoice.api.InvoiceNotifier; import org.killbill.billing.invoice.api.user.DefaultInvoiceAdjustmentEvent; import org.killbill.billing.invoice.api.user.DefaultInvoiceCreationEvent; import org.killbill.billing.invoice.api.user.DefaultInvoiceNotificationInternalEvent; import org.killbill.billing.invoice.api.user.DefaultNullInvoiceEvent; import org.killbill.billing.invoice.dao.InvoiceDao; import org.killbill.billing.invoice.dao.InvoiceItemModelDao; import org.killbill.billing.invoice.dao.InvoiceModelDao; import org.killbill.billing.invoice.generator.BillingIntervalDetail; import org.killbill.billing.invoice.generator.InvoiceGenerator; import org.killbill.billing.invoice.model.DefaultInvoice; import org.killbill.billing.invoice.model.FixedPriceInvoiceItem; import org.killbill.billing.invoice.model.InvoiceItemFactory; import org.killbill.billing.invoice.model.RecurringInvoiceItem; import 
org.killbill.billing.invoice.notification.DefaultNextBillingDateNotifier; import org.killbill.billing.invoice.notification.NextBillingDateNotificationKey; import org.killbill.billing.junction.BillingEvent; import org.killbill.billing.junction.BillingEventSet; import org.killbill.billing.junction.BillingInternalApi; import org.killbill.billing.subscription.api.SubscriptionBaseInternalApi; import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType; import org.killbill.billing.subscription.api.user.SubscriptionBaseApiException; import org.killbill.billing.util.callcontext.CallContext; import org.killbill.billing.util.callcontext.InternalCallContextFactory; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.config.InvoiceConfig; import org.killbill.billing.util.globallocker.LockerType; import org.killbill.billing.util.timezone.DateAndTimeZoneContext; import org.killbill.bus.api.PersistentBus; import org.killbill.bus.api.PersistentBus.EventBusException; import org.killbill.clock.Clock; import org.killbill.commons.locker.GlobalLock; import org.killbill.commons.locker.GlobalLocker; import org.killbill.commons.locker.LockFailedException; import org.killbill.notificationq.api.NotificationEventWithMetadata; import org.killbill.notificationq.api.NotificationQueue; import org.killbill.notificationq.api.NotificationQueueService; import org.killbill.notificationq.api.NotificationQueueService.NoSuchNotificationQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.inject.Inject; public class InvoiceDispatcher { private static final Logger log = LoggerFactory.getLogger(InvoiceDispatcher.class); private static final int NB_LOCK_TRY = 5; private static final Ordering<DateTime> UPCOMING_NOTIFICATION_DATE_ORDERING = Ordering.natural(); private static final NullDryRunArguments NULL_DRY_RUN_ARGUMENTS = new NullDryRunArguments(); private final InvoiceGenerator generator; private final BillingInternalApi billingApi; private final AccountInternalApi accountApi; private final SubscriptionBaseInternalApi subscriptionApi; private final InvoiceDao invoiceDao; private final InternalCallContextFactory internalCallContextFactory; private final InvoiceNotifier invoiceNotifier; private final InvoicePluginDispatcher invoicePluginDispatcher; private final GlobalLocker locker; private final PersistentBus eventBus; private final Clock clock; private final NotificationQueueService notificationQueueService; private final InvoiceConfig invoiceConfig; @Inject public InvoiceDispatcher(final InvoiceGenerator generator, final AccountInternalApi accountApi, final BillingInternalApi billingApi, final SubscriptionBaseInternalApi SubscriptionApi, final InvoiceDao invoiceDao, final InternalCallContextFactory internalCallContextFactory, final InvoiceNotifier invoiceNotifier, final InvoicePluginDispatcher invoicePluginDispatcher, final GlobalLocker locker, final PersistentBus eventBus, final NotificationQueueService notificationQueueService, final InvoiceConfig invoiceConfig, final Clock clock) { this.generator = generator; this.billingApi = billingApi; 
this.subscriptionApi = SubscriptionApi; this.accountApi = accountApi; this.invoiceDao = invoiceDao; this.internalCallContextFactory = internalCallContextFactory; this.invoiceNotifier = invoiceNotifier; this.invoicePluginDispatcher = invoicePluginDispatcher; this.locker = locker; this.eventBus = eventBus; this.clock = clock; this.notificationQueueService = notificationQueueService; this.invoiceConfig = invoiceConfig; } public void processSubscriptionForInvoiceGeneration(final EffectiveSubscriptionInternalEvent transition, final InternalCallContext context) throws InvoiceApiException { final UUID subscriptionId = transition.getSubscriptionId(); final DateTime targetDate = transition.getEffectiveTransitionTime(); processSubscriptionForInvoiceGeneration(subscriptionId, targetDate, context); } public void processSubscriptionForInvoiceGeneration(final UUID subscriptionId, final DateTime targetDate, final InternalCallContext context) throws InvoiceApiException { processSubscriptionInternal(subscriptionId, targetDate, false, context); } public void processSubscriptionForInvoiceNotification(final UUID subscriptionId, final DateTime targetDate, final InternalCallContext context) throws InvoiceApiException { final Invoice dryRunInvoice = processSubscriptionInternal(subscriptionId, targetDate, true, context); if (dryRunInvoice != null && dryRunInvoice.getBalance().compareTo(BigDecimal.ZERO) > 0) { final InvoiceNotificationInternalEvent event = new DefaultInvoiceNotificationInternalEvent(dryRunInvoice.getAccountId(), dryRunInvoice.getBalance(), dryRunInvoice.getCurrency(), targetDate, context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken()); try { eventBus.post(event); } catch (EventBusException e) { log.error("Failed to post event " + event, e); } } } private Invoice processSubscriptionInternal(final UUID subscriptionId, final DateTime targetDate, final boolean dryRunForNotification, final InternalCallContext context) throws InvoiceApiException { try { if (subscriptionId == null) { log.error("Failed handling SubscriptionBase change.", new InvoiceApiException(ErrorCode.INVOICE_INVALID_TRANSITION)); return null; } final UUID accountId = subscriptionApi.getAccountIdFromSubscriptionId(subscriptionId, context); final DryRunArguments dryRunArguments = dryRunForNotification ? NULL_DRY_RUN_ARGUMENTS : null; return processAccount(accountId, targetDate, dryRunArguments, context); } catch (final SubscriptionBaseApiException e) { log.error("Failed handling SubscriptionBase change.", new InvoiceApiException(ErrorCode.INVOICE_NO_ACCOUNT_ID_FOR_SUBSCRIPTION_ID, subscriptionId.toString())); return null; } } public Invoice processAccount(final UUID accountId, final DateTime targetDate, @Nullable final DryRunArguments dryRunArguments, final InternalCallContext context) throws InvoiceApiException { GlobalLock lock = null; try { lock = locker.lockWithNumberOfTries(LockerType.ACCNT_INV_PAY.toString(), accountId.toString(), NB_LOCK_TRY); return processAccountWithLock(accountId, targetDate, dryRunArguments, context); } catch (final LockFailedException e) { // Not good! 
log.error(String.format("Failed to process invoice for account %s, targetDate %s", accountId.toString(), targetDate), e); } finally { if (lock != null) { lock.release(); } } return null; } private Invoice processAccountWithLock(final UUID accountId, @Nullable final DateTime inputTargetDateTime, @Nullable final DryRunArguments dryRunArguments, final InternalCallContext context) throws InvoiceApiException { final boolean isDryRun = dryRunArguments != null; // inputTargetDateTime is only allowed in dryRun mode to have the system compute it Preconditions.checkArgument(inputTargetDateTime != null || isDryRun, "inputTargetDateTime is required in non dryRun mode"); try { // Make sure to first set the BCD if needed then get the account object (to have the BCD set) final BillingEventSet billingEvents = billingApi.getBillingEventsForAccountAndUpdateAccountBCD(accountId, dryRunArguments, context); final List<DateTime> candidateDateTimes = (inputTargetDateTime != null) ? ImmutableList.of(inputTargetDateTime) : getUpcomingInvoiceCandidateDates(context); for (final DateTime curTargetDateTime : candidateDateTimes) { final Invoice invoice = processAccountWithLockAndInputTargetDate(accountId, curTargetDateTime, billingEvents, isDryRun, context); if (invoice != null) { return invoice; } } return null; } catch (CatalogApiException e) { log.error("Failed handling SubscriptionBase change.", e); return null; } } private Invoice processAccountWithLockAndInputTargetDate(final UUID accountId, final DateTime targetDateTime, final BillingEventSet billingEvents, final boolean isDryRun, final InternalCallContext context) throws InvoiceApiException { try { final Account account = accountApi.getAccountById(accountId, context); final DateAndTimeZoneContext dateAndTimeZoneContext = billingEvents.iterator().hasNext() ? new DateAndTimeZoneContext(billingEvents.iterator().next().getEffectiveDate(), account.getTimeZone(), clock) : null; final List<Invoice> invoices = billingEvents.isAccountAutoInvoiceOff() ? ImmutableList.<Invoice>of() : ImmutableList.<Invoice>copyOf(Collections2.transform(invoiceDao.getInvoicesByAccount(context), new Function<InvoiceModelDao, Invoice>() { @Override public Invoice apply(final InvoiceModelDao input) { return new DefaultInvoice(input); } })); final Currency targetCurrency = account.getCurrency(); final LocalDate targetDate = (dateAndTimeZoneContext != null && targetDateTime != null) ? dateAndTimeZoneContext.computeTargetDate(targetDateTime) : null; final Invoice invoice = targetDate != null ? 
generator.generateInvoice(account, billingEvents, invoices, targetDate, targetCurrency, context) : null; // // If invoice comes back null, there is nothing new to generate, we can bail early // if (invoice == null) { log.info("Generated null invoice for accountId {} and targetDate {} (targetDateTime {})", new Object[]{accountId, targetDate, targetDateTime}); if (!isDryRun) { final BusInternalEvent event = new DefaultNullInvoiceEvent(accountId, clock.getUTCToday(), context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken()); postEvent(event, accountId, context); } return invoice; } // Generate missing credit (> 0 for generation and < 0 for use) prior we call the plugin final InvoiceItem cbaItem = computeCBAOnExistingInvoice(invoice, context); if (cbaItem != null) { invoice.addInvoiceItem(cbaItem); } // // Ask external invoice plugins if additional items (tax, etc) shall be added to the invoice // final CallContext callContext = buildCallContext(context); invoice.addInvoiceItems(invoicePluginDispatcher.getAdditionalInvoiceItems(invoice, callContext)); if (!isDryRun) { commitInvoiceStateAndNotifyAccountIfConfigured(account, invoice, billingEvents, dateAndTimeZoneContext, targetDate, context); } return invoice; } catch (final AccountApiException e) { log.error("Failed handling SubscriptionBase change.", e); return null; } catch (SubscriptionBaseApiException e) { log.error("Failed handling SubscriptionBase change.", e); return null; } } private void commitInvoiceStateAndNotifyAccountIfConfigured(final Account account, final Invoice invoice, final BillingEventSet billingEvents, final DateAndTimeZoneContext dateAndTimeZoneContext, final LocalDate targetDate, final InternalCallContext context) throws SubscriptionBaseApiException, InvoiceApiException { boolean isRealInvoiceWithNonEmptyItems = false; // Extract the set of invoiceId for which we see items that don't belong to current generated invoice final Set<UUID> adjustedUniqueOtherInvoiceId = new TreeSet<UUID>(); adjustedUniqueOtherInvoiceId.addAll(Collections2.transform(invoice.getInvoiceItems(), new Function<InvoiceItem, UUID>() { @Nullable @Override public UUID apply(@Nullable final InvoiceItem input) { return input.getInvoiceId(); } })); boolean isRealInvoiceWithItems = adjustedUniqueOtherInvoiceId.remove(invoice.getId()); if (isRealInvoiceWithItems) { log.info("Generated invoice {} with {} items for accountId {} and targetDate {}", new Object[]{invoice.getId(), invoice.getNumberOfItems(), account.getId(), targetDate}); } else { final Joiner joiner = Joiner.on(","); final String adjustedInvoices = joiner.join(adjustedUniqueOtherInvoiceId.toArray(new UUID[adjustedUniqueOtherInvoiceId.size()])); log.info("Adjusting existing invoices {} with {} items for accountId {} and targetDate {})", new Object[]{adjustedInvoices, invoice.getNumberOfItems(), account.getId(), targetDate}); } // Transformation to Invoice -> InvoiceModelDao final InvoiceModelDao invoiceModelDao = new InvoiceModelDao(invoice); final Iterable<InvoiceItemModelDao> invoiceItemModelDaos = Iterables.transform(invoice.getInvoiceItems(), new Function<InvoiceItem, InvoiceItemModelDao>() { @Override public InvoiceItemModelDao apply(final InvoiceItem input) { return new InvoiceItemModelDao(input); } }); final FutureAccountNotifications futureAccountNotifications = createNextFutureNotificationDate(invoiceItemModelDaos, billingEvents, dateAndTimeZoneContext, context); // We filter any zero amount for USAGE items prior we generate the invoice, which may leave us with 
an invoice with no items; // we recompute the isRealInvoiceWithItems flag based on what is left (the call to invoice is still necessary to set the future notifications). final Iterable<InvoiceItemModelDao> filteredInvoiceItemModelDaos = Iterables.filter(invoiceItemModelDaos, new Predicate<InvoiceItemModelDao>() { @Override public boolean apply(@Nullable final InvoiceItemModelDao input) { return (input.getType() != InvoiceItemType.USAGE || input.getAmount().compareTo(BigDecimal.ZERO) != 0); } }); final boolean isThereAnyItemsLeft = filteredInvoiceItemModelDaos.iterator().hasNext(); isRealInvoiceWithNonEmptyItems = isThereAnyItemsLeft ? isRealInvoiceWithItems : false; if (isThereAnyItemsLeft) { invoiceDao.createInvoice(invoiceModelDao, ImmutableList.copyOf(filteredInvoiceItemModelDaos), isRealInvoiceWithItems, futureAccountNotifications, context); } else { invoiceDao.setFutureAccountNotificationsForEmptyInvoice(account.getId(), futureAccountNotifications, context); } final List<InvoiceItem> fixedPriceInvoiceItems = invoice.getInvoiceItems(FixedPriceInvoiceItem.class); final List<InvoiceItem> recurringInvoiceItems = invoice.getInvoiceItems(RecurringInvoiceItem.class); setChargedThroughDates(dateAndTimeZoneContext, fixedPriceInvoiceItems, recurringInvoiceItems, context); final List<InvoiceInternalEvent> events = new ArrayList<InvoiceInternalEvent>(); if (isRealInvoiceWithNonEmptyItems) { events.add(new DefaultInvoiceCreationEvent(invoice.getId(), invoice.getAccountId(), invoice.getBalance(), invoice.getCurrency(), context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken())); } for (final UUID cur : adjustedUniqueOtherInvoiceId) { final InvoiceAdjustmentInternalEvent event = new DefaultInvoiceAdjustmentEvent(cur, invoice.getAccountId(), context.getAccountRecordId(), context.getTenantRecordId(), context.getUserToken()); events.add(event); } for (final InvoiceInternalEvent event : events) { postEvent(event, account.getId(), context); } if (account.isNotifiedForInvoices() && isRealInvoiceWithNonEmptyItems) { // Need to re-hydrate the invoice object to get the invoice number (record id) // API_FIX InvoiceNotifier public API? invoiceNotifier.notify(account, new DefaultInvoice(invoiceDao.getById(invoice.getId(), context)), buildTenantContext(context)); } } private InvoiceItem computeCBAOnExistingInvoice(final Invoice invoice, final InternalCallContext context) throws InvoiceApiException { // Transformation to Invoice -> InvoiceModelDao final InvoiceModelDao invoiceModelDao = new InvoiceModelDao(invoice); final List<InvoiceItemModelDao> invoiceItemModelDaos = ImmutableList.copyOf(Collections2.transform(invoice.getInvoiceItems(), new Function<InvoiceItem, InvoiceItemModelDao>() { @Override public InvoiceItemModelDao apply(final InvoiceItem input) { return new InvoiceItemModelDao(input); } })); invoiceModelDao.addInvoiceItems(invoiceItemModelDaos); final InvoiceItemModelDao cbaItem = invoiceDao.doCBAComplexity(invoiceModelDao, context); return cbaItem != null ? 
InvoiceItemFactory.fromModelDao(cbaItem) : null; } private TenantContext buildTenantContext(final InternalTenantContext context) { return internalCallContextFactory.createTenantContext(context); } private CallContext buildCallContext(final InternalCallContext context) { return internalCallContextFactory.createCallContext(context); } @VisibleForTesting FutureAccountNotifications createNextFutureNotificationDate(final Iterable<InvoiceItemModelDao> invoiceItems, final BillingEventSet billingEvents, final DateAndTimeZoneContext dateAndTimeZoneContext, final InternalCallContext context) { final Map<UUID, List<SubscriptionNotification>> result = new HashMap<UUID, List<SubscriptionNotification>>(); final Map<String, LocalDate> perSubscriptionUsage = new HashMap<String, LocalDate>(); // For each subscription that has a positive (amount) recurring item, create the date // at which we should be called back for next invoice. // for (final InvoiceItemModelDao item : invoiceItems) { List<SubscriptionNotification> perSubscriptionCallback = result.get(item.getSubscriptionId()); if (perSubscriptionCallback == null && (item.getType() == InvoiceItemType.RECURRING || item.getType() == InvoiceItemType.USAGE)) { perSubscriptionCallback = new ArrayList<SubscriptionNotification>(); result.put(item.getSubscriptionId(), perSubscriptionCallback); } switch (item.getType()) { case RECURRING: if ((item.getEndDate() != null) && (item.getAmount() == null || item.getAmount().compareTo(BigDecimal.ZERO) >= 0)) { perSubscriptionCallback.add(new SubscriptionNotification(dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(item.getEndDate()), true)); } break; case USAGE: final String key = item.getSubscriptionId().toString() + ":" + item.getUsageName(); final LocalDate perSubscriptionUsageRecurringDate = perSubscriptionUsage.get(key); if (perSubscriptionUsageRecurringDate == null || perSubscriptionUsageRecurringDate.compareTo(item.getEndDate()) < 0) { perSubscriptionUsage.put(key, item.getEndDate()); } break; default: // Ignore } } for (final String key : perSubscriptionUsage.keySet()) { final String[] parts = key.split(":"); final UUID subscriptionId = UUID.fromString(parts[0]); final List<SubscriptionNotification> perSubscriptionCallback = result.get(subscriptionId); final String usageName = parts[1]; final LocalDate endDate = perSubscriptionUsage.get(key); final DateTime subscriptionUsageCallbackDate = getNextUsageBillingDate(subscriptionId, usageName, endDate, dateAndTimeZoneContext, billingEvents); perSubscriptionCallback.add(new SubscriptionNotification(subscriptionUsageCallbackDate, true)); } // If dryRunNotification is enabled we also need to fetch the upcoming PHASE dates (we add SubscriptionNotification with isForInvoiceNotificationTrigger = false) final boolean isInvoiceNotificationEnabled = invoiceConfig.getDryRunNotificationSchedule().getMillis() > 0; if (isInvoiceNotificationEnabled) { final Map<UUID, DateTime> upcomingPhasesForSubscriptions = subscriptionApi.getNextFutureEventForSubscriptions(SubscriptionBaseTransitionType.PHASE, context); for (UUID cur : upcomingPhasesForSubscriptions.keySet()) { final DateTime curDate = upcomingPhasesForSubscriptions.get(cur); List<SubscriptionNotification> resultValue = result.get(cur); if (resultValue == null) { resultValue = new ArrayList<SubscriptionNotification>(); } resultValue.add(new SubscriptionNotification(curDate, false)); result.put(cur, resultValue); } } return new FutureAccountNotifications(dateAndTimeZoneContext, result); } private DateTime 
getNextUsageBillingDate(final UUID subscriptionId, final String usageName, final LocalDate chargedThroughDate, final DateAndTimeZoneContext dateAndTimeZoneContext, final BillingEventSet billingEvents) { final Usage usage = billingEvents.getUsages().get(usageName); final BillingEvent billingEventSubscription = Iterables.tryFind(billingEvents, new Predicate<BillingEvent>() { @Override public boolean apply(@Nullable final BillingEvent input) { return input.getSubscription().getId().equals(subscriptionId); } }).orNull(); final LocalDate nextCallbackUsageDate = (usage.getBillingMode() == BillingMode.IN_ARREAR) ? BillingIntervalDetail.alignProposedBillCycleDate(chargedThroughDate.plusMonths(usage.getBillingPeriod().getNumberOfMonths()), billingEventSubscription.getBillCycleDayLocal()) : chargedThroughDate; return dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(nextCallbackUsageDate); } private void setChargedThroughDates(final DateAndTimeZoneContext dateAndTimeZoneContext, final Collection<InvoiceItem> fixedPriceItems, final Collection<InvoiceItem> recurringItems, final InternalCallContext context) throws SubscriptionBaseApiException { final Map<UUID, DateTime> chargeThroughDates = new HashMap<UUID, DateTime>(); addInvoiceItemsToChargeThroughDates(dateAndTimeZoneContext, chargeThroughDates, fixedPriceItems); addInvoiceItemsToChargeThroughDates(dateAndTimeZoneContext, chargeThroughDates, recurringItems); for (final UUID subscriptionId : chargeThroughDates.keySet()) { if (subscriptionId != null) { final DateTime chargeThroughDate = chargeThroughDates.get(subscriptionId); subscriptionApi.setChargedThroughDate(subscriptionId, chargeThroughDate, context); } } } private void postEvent(final BusInternalEvent event, final UUID accountId, final InternalCallContext context) { try { eventBus.post(event); } catch (final EventBusException e) { log.error(String.format("Failed to post event %s for account %s", event.getBusEventType(), accountId), e); } } private void addInvoiceItemsToChargeThroughDates(final DateAndTimeZoneContext dateAndTimeZoneContext, final Map<UUID, DateTime> chargeThroughDates, final Collection<InvoiceItem> items) { for (final InvoiceItem item : items) { final UUID subscriptionId = item.getSubscriptionId(); final LocalDate endDate = (item.getEndDate() != null) ? 
item.getEndDate() : item.getStartDate(); final DateTime proposedChargedThroughDate = dateAndTimeZoneContext.computeUTCDateTimeFromLocalDate(endDate); if (chargeThroughDates.containsKey(subscriptionId)) { if (chargeThroughDates.get(subscriptionId).isBefore(proposedChargedThroughDate)) { chargeThroughDates.put(subscriptionId, proposedChargedThroughDate); } } else { chargeThroughDates.put(subscriptionId, proposedChargedThroughDate); } } } public static class FutureAccountNotifications { private final DateAndTimeZoneContext accountDateAndTimeZoneContext; private final Map<UUID, List<SubscriptionNotification>> notifications; public FutureAccountNotifications(final DateAndTimeZoneContext accountDateAndTimeZoneContext, final Map<UUID, List<SubscriptionNotification>> notifications) { this.accountDateAndTimeZoneContext = accountDateAndTimeZoneContext; this.notifications = notifications; } public DateAndTimeZoneContext getAccountDateAndTimeZoneContext() { return accountDateAndTimeZoneContext; } public Map<UUID, List<SubscriptionNotification>> getNotifications() { return notifications; } public static class SubscriptionNotification { private final DateTime effectiveDate; private final boolean isForNotificationTrigger; public SubscriptionNotification(final DateTime effectiveDate, final boolean isForNotificationTrigger) { this.effectiveDate = effectiveDate; this.isForNotificationTrigger = isForNotificationTrigger; } public DateTime getEffectiveDate() { return effectiveDate; } public boolean isForInvoiceNotificationTrigger() { return isForNotificationTrigger; } } } private List<DateTime> getUpcomingInvoiceCandidateDates(final InternalCallContext internalCallContext) { final Iterable<DateTime> nextScheduledInvoiceDates = getNextScheduledInvoiceEffectiveDate(internalCallContext); final Iterable<DateTime> nextScheduledSubscriptionsEventDates = subscriptionApi.getFutureNotificationsForAccount(internalCallContext); Iterables.concat(nextScheduledInvoiceDates, nextScheduledSubscriptionsEventDates); return UPCOMING_NOTIFICATION_DATE_ORDERING.sortedCopy(Iterables.concat(nextScheduledInvoiceDates, nextScheduledSubscriptionsEventDates)); } private Iterable<DateTime> getNextScheduledInvoiceEffectiveDate(final InternalCallContext internalCallContext) { try { final NotificationQueue notificationQueue = notificationQueueService.getNotificationQueue(DefaultInvoiceService.INVOICE_SERVICE_NAME, DefaultNextBillingDateNotifier.NEXT_BILLING_DATE_NOTIFIER_QUEUE); final List<NotificationEventWithMetadata<NextBillingDateNotificationKey>> futureNotifications = notificationQueue.getFutureNotificationForSearchKeys(internalCallContext.getAccountRecordId(), internalCallContext.getTenantRecordId()); final Iterable<NotificationEventWithMetadata<NextBillingDateNotificationKey>> filtered = Iterables.filter(futureNotifications, new Predicate<NotificationEventWithMetadata<NextBillingDateNotificationKey>>() { @Override public boolean apply(@Nullable final NotificationEventWithMetadata<NextBillingDateNotificationKey> input) { final boolean isEventDryRunForNotifications = input.getEvent().isDryRunForInvoiceNotification() != null ? 
input.getEvent().isDryRunForInvoiceNotification() : false; return !isEventDryRunForNotifications; } }); return Iterables.transform(filtered, new Function<NotificationEventWithMetadata<NextBillingDateNotificationKey>, DateTime>() { @Nullable @Override public DateTime apply(@Nullable final NotificationEventWithMetadata<NextBillingDateNotificationKey> input) { return input.getEffectiveDate(); } }); } catch (final NoSuchNotificationQueue noSuchNotificationQueue) { throw new IllegalStateException(noSuchNotificationQueue); } } private final static class NullDryRunArguments implements DryRunArguments { @Override public PlanPhaseSpecifier getPlanPhaseSpecifier() { return null; } @Override public SubscriptionEventType getAction() { return null; } @Override public UUID getSubscriptionId() { return null; } @Override public DateTime getEffectiveDate() { return null; } @Override public UUID getBundleId() { return null; } @Override public BillingActionPolicy getBillingActionPolicy() { return null; } @Override public List<PlanPhasePriceOverride> getPlanPhasePriceoverrides() { return null; } } }
aeq/killbill
invoice/src/main/java/org/killbill/billing/invoice/InvoiceDispatcher.java
Java
apache-2.0
37,120
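The heart of processAccount in the class above is a lock-retry-release pattern around the per-account invoice run. A minimal sketch of that pattern under stated assumptions: it reuses the locker, log, and LockerType names visible in the file, while doWork() and accountId are hypothetical stand-ins for processAccountWithLock(...) and the account under processing.

GlobalLock lock = null;
try {
    // Retry the account-level lock up to NB_LOCK_TRY (5) times before giving up.
    lock = locker.lockWithNumberOfTries(LockerType.ACCNT_INV_PAY.toString(), accountId.toString(), 5);
    doWork(); // hypothetical stand-in for processAccountWithLock(...)
} catch (final LockFailedException e) {
    log.error(String.format("Failed to acquire invoice lock for account %s", accountId), e);
} finally {
    if (lock != null) {
        lock.release();
    }
}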
from pyparsing import ( CaselessLiteral, Combine, Literal, ParseException, Regex, Suppress, Word, alphanums, alphas, ) from great_expectations.exceptions import GreatExpectationsError try: import pyspark.sql.functions as F except ImportError: F = None try: import sqlalchemy as sa except ImportError: sa = None def _set_notnull(s, l, t): t["notnull"] = True column_name = Combine( Suppress(Literal('col("')) + Word(alphas, f"{alphanums}_.").setResultsName("column") + Suppress(Literal('")')) ) gt = Literal(">") lt = Literal("<") ge = Literal(">=") le = Literal("<=") eq = Literal("==") ops = (gt ^ lt ^ ge ^ le ^ eq).setResultsName("op") fnumber = Regex(r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?").setResultsName("fnumber") condition_value = Suppress('"') + Word(f"{alphanums}.").setResultsName( "condition_value" ) + Suppress('"') ^ Suppress("'") + Word(f"{alphanums}.").setResultsName( "condition_value" ) + Suppress( "'" ) not_null = CaselessLiteral(".notnull()").setResultsName("notnull") condition = (column_name + not_null).setParseAction(_set_notnull) ^ ( column_name + ops + (fnumber ^ condition_value) ) class ConditionParserError(GreatExpectationsError): pass def _parse_great_expectations_condition(row_condition: str): try: return condition.parseString(row_condition) except ParseException: raise ConditionParserError(f"unable to parse condition: {row_condition}") # noinspection PyUnresolvedReferences def parse_condition_to_spark(row_condition: str) -> "pyspark.sql.Column": parsed = _parse_great_expectations_condition(row_condition) column = parsed["column"] if "condition_value" in parsed: if parsed["op"] == "==": return F.col(column) == parsed["condition_value"] else: raise ConditionParserError( f"Invalid operator: {parsed['op']} for string literal spark condition." ) elif "fnumber" in parsed: try: num = int(parsed["fnumber"]) except ValueError: num = float(parsed["fnumber"]) op = parsed["op"] if op == ">": return F.col(column) > num elif op == "<": return F.col(column) < num elif op == ">=": return F.col(column) >= num elif op == "<=": return F.col(column) <= num elif op == "==": return F.col(column) == num elif "notnull" in parsed and parsed["notnull"] is True: return F.col(column).isNotNull() else: raise ConditionParserError(f"unrecognized column condition: {row_condition}") def parse_condition_to_sqlalchemy( row_condition: str, ) -> "sqlalchemy.sql.expression.ColumnElement": parsed = _parse_great_expectations_condition(row_condition) column = parsed["column"] if "condition_value" in parsed: if parsed["op"] == "==": return sa.column(column) == parsed["condition_value"] else: raise ConditionParserError( f"Invalid operator: {parsed['op']} for string literal spark condition." ) elif "fnumber" in parsed: try: num = int(parsed["fnumber"]) except ValueError: num = float(parsed["fnumber"]) op = parsed["op"] if op == ">": return sa.column(column) > num elif op == "<": return sa.column(column) < num elif op == ">=": return sa.column(column) >= num elif op == "<=": return sa.column(column) <= num elif op == "==": return sa.column(column) == num elif "notnull" in parsed and parsed["notnull"] is True: return sa.not_(sa.column(column).is_(None)) else: raise ConditionParserError(f"unrecognized column condition: {row_condition}")
great-expectations/great_expectations
great_expectations/expectations/row_conditions.py
Python
apache-2.0
3,909
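A hedged usage sketch of the parsers above, matching this row's own language; the column names and threshold are illustrative, and the printed SQL is approximate (SQLAlchemy chooses the bind-parameter name).

from great_expectations.expectations.row_conditions import (
    parse_condition_to_sqlalchemy,
)

# Numeric comparison: renders roughly as `passenger_count > :passenger_count_1`.
print(parse_condition_to_sqlalchemy('col("passenger_count") > 2'))

# Null check: renders roughly as `pickup_datetime IS NOT NULL`.
print(parse_condition_to_sqlalchemy('col("pickup_datetime").notnull()'))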
package com.nedap.archie.adlparser.treewalkers; import com.nedap.archie.adlparser.ADLParserErrors; import com.nedap.archie.adlparser.antlr.AdlParser.*; import com.nedap.archie.aom.*; import com.nedap.archie.base.MultiplicityInterval; import com.nedap.archie.rules.Assertion; import org.antlr.v4.runtime.tree.TerminalNode; import java.util.ArrayList; import java.util.List; /** * Parser for the definition part of an archetype * * Created by pieter.bos on 15/10/15. */ public class CComplexObjectParser extends BaseTreeWalker { private final PrimitivesConstraintParser primitivesConstraintParser; public CComplexObjectParser(ADLParserErrors errors) { super(errors); primitivesConstraintParser = new PrimitivesConstraintParser(errors); } public RulesSection parseRules(Rules_sectionContext context) { RulesSection result = new RulesSection(); result.setContent(context.getText()); RulesParser rulesParser = new RulesParser(getErrors()); for(AssertionContext assertion:context.assertion_list().assertion()) { result.addRule(rulesParser.parse(assertion)); } return result; } public CComplexObject parseComplexObject(C_complex_objectContext context) { CComplexObject object = new CComplexObject(); if(context.type_id() != null) { object.setRmTypeName(context.type_id().getText()); } if(context.ID_CODE() != null) { object.setNodeId(context.ID_CODE().getText()); } else if (context.ROOT_ID_CODE() != null) { object.setNodeId(context.ROOT_ID_CODE().getText()); } //TODO: object.setDeprecated(context.) ?; if (context.c_occurrences() != null) { object.setOccurrences(parseMultiplicityInterval(context.c_occurrences())); } for (C_attribute_defContext attribute : context.c_attribute_def()) { parseCAttribute(object, attribute); } return object; } private void parseCAttribute(CComplexObject parent, C_attribute_defContext attributeDefContext) { if (attributeDefContext.c_attribute() != null) { CAttribute attribute = new CAttribute(); C_attributeContext attributeContext = attributeDefContext.c_attribute(); if(attributeContext.attribute_id() != null) { attribute.setRmAttributeName(attributeContext.attribute_id().getText()); } else { attribute.setDifferentialPath(attributeContext.ADL_PATH().getText()); attribute.setRmAttributeName(getLastAttributeFromPath(attribute.getDifferentialPath())); } if (attributeContext.c_existence() != null) { attribute.setExistence(parseMultiplicityInterval(attributeContext.c_existence())); } if (attributeContext.c_cardinality() != null) { attribute.setCardinality(this.parseCardinalityInterval(attributeContext.c_cardinality())); } if (attributeContext.c_objects() != null) { attribute.setChildren(parseCObjects(attributeContext.c_objects())); } else if (attributeContext.CONTAINED_REGEXP() != null) { attribute.addChild(primitivesConstraintParser.parseRegex(attributeContext.CONTAINED_REGEXP())); } parent.addAttribute(attribute); } else if (attributeDefContext.c_attribute_tuple() != null) { parent.addAttributeTuple(parseAttributeTuple(parent, attributeDefContext.c_attribute_tuple())); } } public static String getFirstAttributeOfPath(String path) { return path.substring(0, path.indexOf('/')); } public static String getPathMinusFirstAttribute(String path) { return path.substring(path.indexOf('/')); } public static String getLastAttributeFromPath(String path) { return path.substring(path.lastIndexOf('/')+1); } private CAttributeTuple parseAttributeTuple(CComplexObject parent, C_attribute_tupleContext attributeTupleContext) { List<Attribute_idContext> attributeIdList = attributeTupleContext.attribute_id(); 
CAttributeTuple tuple = new CAttributeTuple(); for(Attribute_idContext idContext:attributeIdList) { CAttribute attribute = new CAttribute(); String id = idContext.getText();//TODO? parse odin string value? attribute.setRmAttributeName(id); tuple.addMember(attribute); parent.addAttribute(attribute); } List<C_object_tupleContext> tupleContexts = attributeTupleContext.c_object_tuple(); for(C_object_tupleContext tupleContext:tupleContexts) { CPrimitiveTuple primitiveTuple = new CPrimitiveTuple(); List<C_object_tuple_itemContext> primitiveObjectContexts = tupleContext.c_object_tuple_items().c_object_tuple_item(); int i = 0; for(C_object_tuple_itemContext tupleObjectContext:primitiveObjectContexts) { CPrimitiveObject primitiveObject = null; if(tupleObjectContext.c_primitive_object() != null) { primitiveObject = primitivesConstraintParser.parsePrimitiveObject(tupleObjectContext.c_primitive_object()); } else if (tupleObjectContext.CONTAINED_REGEXP() != null) { primitiveObject = primitivesConstraintParser.parseRegex(tupleObjectContext.CONTAINED_REGEXP()); } tuple.getMembers().get(i).addChild(primitiveObject); primitiveTuple.addMember(primitiveObject); i++; } tuple.addTuple(primitiveTuple); } return tuple; } private List<CObject> parseCObjects(C_objectsContext objectsContext) { ArrayList<CObject> result = new ArrayList<>(); if (objectsContext.c_primitive_object() != null) { result.add(primitivesConstraintParser.parsePrimitiveObject(objectsContext.c_primitive_object())); } else { List<C_non_primitive_object_orderedContext> nonPrimitiveObjectOrderedContext = objectsContext.c_non_primitive_object_ordered(); if (nonPrimitiveObjectOrderedContext != null) { for (C_non_primitive_object_orderedContext object : nonPrimitiveObjectOrderedContext) { CObject cobject = parseNonPrimitiveObject(object.c_non_primitive_object()); Sibling_orderContext siblingOrderContext = object.sibling_order(); if(siblingOrderContext != null) { SiblingOrder siblingOrder = new SiblingOrder(); if(siblingOrderContext.SYM_AFTER() != null) { siblingOrder.setBefore(false); } else if (siblingOrderContext.SYM_BEFORE() != null) { siblingOrder.setBefore(true); } siblingOrder.setSiblingNodeId(siblingOrderContext.ID_CODE().getText()); cobject.setSiblingOrder(siblingOrder); } result.add(cobject); } } } return result; } private CObject parseNonPrimitiveObject(C_non_primitive_objectContext objectContext) { /* c_complex_object | c_archetype_root | c_complex_object_proxy | archetype_slot */ if (objectContext.c_complex_object() != null) { return parseComplexObject(objectContext.c_complex_object()); } else if (objectContext.c_archetype_root() != null) { return parseArchetypeRoot(objectContext.c_archetype_root()); } else if (objectContext.c_complex_object_proxy() != null) { return parseCComplexObjectProxy(objectContext.c_complex_object_proxy()); } else if (objectContext.archetype_slot() != null) { return parseArchetypeSlot(objectContext.archetype_slot()); } return null; } private CComplexObjectProxy parseCComplexObjectProxy(C_complex_object_proxyContext proxyContext) { CComplexObjectProxy proxy = new CComplexObjectProxy(); proxy.setOccurrences(this.parseMultiplicityInterval(proxyContext.c_occurrences())); proxy.setTargetPath(proxyContext.adl_path().getText()); proxy.setRmTypeName(proxyContext.type_id().getText()); proxy.setNodeId(proxyContext.ID_CODE().getText()); return proxy; } private CArchetypeRoot parseArchetypeRoot(C_archetype_rootContext archetypeRootContext) { CArchetypeRoot root = new CArchetypeRoot(); 
root.setRmTypeName(archetypeRootContext.type_id().getText()); root.setNodeId(archetypeRootContext.ID_CODE().getText()); if(archetypeRootContext.archetype_ref() != null) { root.setArchetypeRef(archetypeRootContext.archetype_ref().getText()); } root.setOccurrences(this.parseMultiplicityInterval(archetypeRootContext.c_occurrences())); for (C_attribute_defContext attributeContext : archetypeRootContext.c_attribute_def()) { parseCAttribute(root, attributeContext); } //((Archetype_slotContext) slotContext).start.getInputStream().getText(slotContext.getSourceInterval()) return root; } private ArchetypeSlot parseArchetypeSlot(Archetype_slotContext slotContext) { ArchetypeSlot slot = new ArchetypeSlot(); C_archetype_slot_headContext headContext = slotContext.c_archetype_slot_head(); slot.setNodeId(headContext.c_archetype_slot_id().ID_CODE().getText()); slot.setRmTypeName(headContext.c_archetype_slot_id().type_id().getText()); if(headContext.c_archetype_slot_id().SYM_CLOSED() != null) { slot.setClosed(true); } if (headContext.c_occurrences() != null) { slot.setOccurrences(parseMultiplicityInterval(headContext.c_occurrences())); } RulesParser assertionParser = new RulesParser(getErrors()); if (slotContext.c_excludes() != null) { for (AssertionContext assertionContext : slotContext.c_excludes().assertion()) { slot.getExcludes().add((Assertion) assertionParser.parse(assertionContext)); } } if (slotContext.c_includes() != null) { for (AssertionContext assertionContext : slotContext.c_includes().assertion()) { slot.getIncludes().add((Assertion) assertionParser.parse(assertionContext)); } } return slot; } private Cardinality parseCardinalityInterval(C_cardinalityContext context) { Cardinality cardinality = new Cardinality(); MultiplicityInterval interval = parseMultiplicity(context.cardinality().multiplicity()); cardinality.setInterval(interval); List<Multiplicity_modContext> modContexts = context.cardinality().multiplicity_mod(); for(Multiplicity_modContext modContext:modContexts) { if(modContext.ordering_mod() != null) { cardinality.setOrdered(modContext.ordering_mod().SYM_ORDERED() != null); } if(modContext.unique_mod() != null) { cardinality.setUnique(true); } } return cardinality; } private MultiplicityInterval parseMultiplicityInterval(C_existenceContext existenceContext) { MultiplicityInterval interval = new MultiplicityInterval(); List<TerminalNode> integers = existenceContext.existence().INTEGER(); if(integers.size() == 1) { interval.setLower(Integer.parseInt(integers.get(0).getText())); interval.setUpper(interval.getLower()); } else if (integers.size() == 2) { interval.setLower(Integer.parseInt(integers.get(0).getText())); interval.setUpper(Integer.parseInt(integers.get(1).getText())); } return interval; } private MultiplicityInterval parseMultiplicityInterval(C_occurrencesContext occurrencesContext) { if(occurrencesContext == null) { return null; } return parseMultiplicity(occurrencesContext.multiplicity()); } private MultiplicityInterval parseMultiplicity(MultiplicityContext multiplicity) { if(multiplicity == null) { return null; } MultiplicityInterval interval = new MultiplicityInterval(); List<TerminalNode> integers = multiplicity.INTEGER(); if(multiplicity.SYM_INTERVAL_SEP() != null) { if(multiplicity.getText().contains("*")) { interval.setLower(Integer.parseInt(integers.get(0).getText())); interval.setUpperUnbounded(true); } else { interval.setLower(Integer.parseInt(integers.get(0).getText())); interval.setUpper(Integer.parseInt(integers.get(1).getText())); } } else { //one integer or * 
if(multiplicity.getText().contains("*")) { interval.setLowerUnbounded(false); interval.setLower(0); interval.setUpperUnbounded(true); } else { interval.setLower(Integer.parseInt(integers.get(0).getText())); interval.setUpper(interval.getLower()); } } return interval; } }
nedap/archie
src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java
Java
apache-2.0
13,477
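parseMultiplicity in the parser above maps the three ADL occurrence spellings onto MultiplicityInterval in three branches. A hedged sketch of the objects each branch produces, using only the setters the parser itself calls:

// "1..3"  -> bounded interval
MultiplicityInterval oneToThree = new MultiplicityInterval();
oneToThree.setLower(1);
oneToThree.setUpper(3);

// "0..*"  -> fixed lower bound, unbounded upper
MultiplicityInterval zeroToMany = new MultiplicityInterval();
zeroToMany.setLower(0);
zeroToMany.setUpperUnbounded(true);

// "*"     -> shorthand for 0..*
MultiplicityInterval star = new MultiplicityInterval();
star.setLowerUnbounded(false);
star.setLower(0);
star.setUpperUnbounded(true);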
/* * Copyright (C) 2005-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.audit.spi; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.TimeZone; import java.util.TimerTask; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.dom4j.DocumentFactory; import org.dom4j.Element; import org.jivesoftware.openfire.audit.AuditManager; import org.jivesoftware.openfire.audit.Auditor; import org.jivesoftware.openfire.session.Session; import org.jivesoftware.util.FastDateFormat; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.util.LocaleUtils; import org.jivesoftware.util.StringUtils; import org.jivesoftware.util.TaskEngine; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.packet.IQ; import org.xmpp.packet.Message; import org.xmpp.packet.Packet; import org.xmpp.packet.Presence; public class AuditorImpl implements Auditor { private static final Logger Log = LoggerFactory.getLogger(AuditorImpl.class); private AuditManager auditManager; private File currentAuditFile; private Writer writer; private org.jivesoftware.util.XMLWriter xmlWriter; /** * Limit date used to detect when we need to rollover files. This date will be * configured as the last second of the day. */ private Date currentDateLimit; /** * Max size in bytes that all audit log files may have. When the limit is reached * oldest audit log files will be removed until total size is under the limit. */ private long maxTotalSize; /** * Max size in bytes that each audit log file may have. Once the limit has been * reached a new audit file will be created. */ private long maxFileSize; /** * Max number of days to keep audit information. Once the limit has been reached * audit files that contain information that exceeds the limit will be deleted. */ private int maxDays; /** * Flag that indicates if packets can still be accepted to be saved to the audit log. */ private boolean closed = false; /** * Directory (absolute path) where the audit files will be saved. */ private String logDir; /** * File (or better say directory) of the folder that contains the audit logs. */ private File baseFolder; /** * Queue that holds the audited packets that will be later saved to an XML file. */ private BlockingQueue<AuditPacket> logQueue = new LinkedBlockingQueue<>(); /** * Allow only a limited number of files for each day, max. three digits (000-999) */ private final int maxTotalFilesDay = 1000; /** * Track the current index number `...-nnn.log´ */ private int filesIndex = 0; /** * Timer to save queued logs to the XML file.
*/ private SaveQueuedPacketsTask saveQueuedPacketsTask; private FastDateFormat dateFormat; private static FastDateFormat auditFormat; public AuditorImpl(AuditManager manager) { auditManager = manager; dateFormat = FastDateFormat.getInstance("yyyyMMdd", TimeZone.getTimeZone("UTC")); auditFormat = FastDateFormat.getInstance("MMM dd, yyyy hh:mm:ss:SSS a", JiveGlobals.getLocale()); } protected void setMaxValues(int totalSize, int fileSize, int days) { maxTotalSize = (long) totalSize * 1024l * 1024l; maxFileSize = (long) fileSize * 1024l * 1024l; maxDays = days; } public void setLogTimeout(int logTimeout) { // Cancel any existing task because the timeout has changed if (saveQueuedPacketsTask != null) { saveQueuedPacketsTask.cancel(); } // Create a new task and schedule it with the new timeout saveQueuedPacketsTask = new SaveQueuedPacketsTask(); TaskEngine.getInstance().schedule(saveQueuedPacketsTask, logTimeout, logTimeout); } public void setLogDir(String logDir) { this.logDir = logDir; // Create and catch file of the base folder that will contain audit files baseFolder = new File(logDir); // Create the folder if it does not exist if (!baseFolder.exists()) { if ( !baseFolder.mkdir() ) { Log.error( "Unable to create log directory: {}", baseFolder ); } } } @Override public int getQueuedPacketsNumber() { return logQueue.size(); } @Override public void audit(Packet packet, Session session) { if (auditManager.isEnabled()) { if (packet instanceof Message) { if (auditManager.isAuditMessage()) { writePacket(packet, session); } } else if (packet instanceof Presence) { if (auditManager.isAuditPresence()) { writePacket(packet, session); } } else if (packet instanceof IQ) { if (auditManager.isAuditIQ()) { writePacket(packet, session); } } } } private void writePacket(Packet packet, Session session) { if (!closed) { // Add to the logging queue this new entry that will be saved later logQueue.add(new AuditPacket(packet.createCopy(), session)); } } @Override public void stop() { // Stop queuing packets since we are being stopped closed = true; // Save all remaining queued packets to the XML file saveQueuedPackets(); close(); } private void close() { if (xmlWriter != null) { try { xmlWriter.flush(); writer.write("</jive>"); xmlWriter.close(); writer = null; xmlWriter = null; } catch (Exception e) { Log.error(LocaleUtils.getLocalizedString("admin.error"), e); } } } private void prepareAuditFile(Date auditDate) throws IOException { ensureMaxTotalSize(); // Rotate file if: we just started, current file size exceeded limit or date has changed if (currentAuditFile == null || currentAuditFile.length() > maxFileSize || xmlWriter == null || currentDateLimit == null || auditDate.after(currentDateLimit)) { createAuditFile(auditDate); } } /** * Ensures that max total size limit is not exceeded. If total size of audit files * exceed the limit then oldest audit files will be removed until total size does * not exceed limit. 
*/ private void ensureMaxTotalSize() { // Get list of existing audit files FilenameFilter filter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith("jive.audit-") && name.endsWith(".log"); } }; File[] files = baseFolder.listFiles(filter); if (files == null) { Log.debug( "Path '{}' does not denote a directory, or an IO exception occured while trying to list its content.", baseFolder ); return; } long totalLength = 0; for (File file : files) { totalLength = totalLength + file.length(); } // Check if total size has been exceeded if (totalLength > maxTotalSize) { // Sort files by name (chronological order) List<File> sortedFiles = new ArrayList<>(Arrays.asList(files)); Collections.sort(sortedFiles, new Comparator<File>() { @Override public int compare(File o1, File o2) { return o1.getName().compareTo(o2.getName()); } }); // Delete as many old files as required to be under the limit while (totalLength > maxTotalSize && !sortedFiles.isEmpty()) { File fileToDelete = sortedFiles.remove(0); totalLength = totalLength - fileToDelete.length(); if (fileToDelete.equals(currentAuditFile)) { // Close current file close(); } // Delete oldest file if ( !fileToDelete.delete() ) { Log.warn( "Unable to delete file '{}' as part of regular log rotation based on size of files (Openfire failed to clean up after itself)!", fileToDelete ); } } } } /** * Deletes old audit files that exceeded the max number of days limit. */ private void ensureMaxDays() { if (maxDays == -1) { // Do nothing since we don't have any limit return; } // Set limit date after which we need to delete old audit files Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, maxDays * -1); final String oldestFile = "jive.audit-" + dateFormat.format(calendar.getTime()) + "-000.log"; // Get list of audit files to delete FilenameFilter filter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith("jive.audit-") && name.endsWith(".log") && name.compareTo(oldestFile) < 0; } }; File[] files = baseFolder.listFiles(filter); // Delete old audit files for (File fileToDelete : files) { if (fileToDelete.equals(currentAuditFile)) { // Close current file close(); } if ( !fileToDelete.delete() ) { Log.warn( "Unable to delete file '{}' as part of regular log rotation based on age of file. 
(Openfire failed to clean up after itself)!", fileToDelete ); } } } /* if this new logic still causes problems one may want to * use log4j or change the file format from YYYYmmdd-nnn to YYYYmmdd-HHMM */ /** * Sets <b>xmlWriter</b> so this class can use it to write audit logs<br> * The audit filename <b>currentAuditFile</b> will be `jive.audit-YYYYmmdd-nnn.log´<br> * `nnn´ will be reset to `000´ when a new log file is created the next day <br> * `nnn´ will be increased for log files which belong to the same day<br> * <b>WARNING:</b> If log files of the current day are deleted and the server is restarted then * the value of `nnn´ may be random (it's calculated by `Math.max(files.length, filesIndex);´ * with `filesIndex=0´ and `files.length=nr(existing jive.audit-YYYYmmdd-???.log files)´ - * if there are 10 audit files (033-043) then nnn will be 10 instead of 44).<br> * If `nnn=999´ then all audit data will be written to this file till the next day.<br> * @param auditDate * @throws IOException */ private void createAuditFile(Date auditDate) throws IOException { final String filePrefix = "jive.audit-" + dateFormat.format(auditDate) + "-"; if (currentDateLimit == null || auditDate.after(currentDateLimit)) { // Set limit date after which we need to rollover the audit file (based on the date) Calendar calendar = Calendar.getInstance(); calendar.setTime(auditDate); calendar.set(Calendar.HOUR_OF_DAY, 23); calendar.set(Calendar.MINUTE, 59); calendar.set(Calendar.SECOND, 59); calendar.set(Calendar.MILLISECOND, 999); currentDateLimit = calendar.getTime(); filesIndex = 0; } // Get list of existing audit files FilenameFilter filter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith(filePrefix) && name.endsWith(".log"); } }; File[] files = baseFolder.listFiles(filter); // if some daily files were already deleted then files.length will be smaller than filesIndex // see also WARNING above filesIndex = Math.max(files.length, filesIndex); if (filesIndex >= maxTotalFilesDay) { // don't close this file, continue auditing to it return; } File tmpAuditFile = new File(logDir, filePrefix + StringUtils.zeroPadString(Integer.toString(filesIndex), 3) + ".log"); if ( (filesIndex == maxTotalFilesDay-1) && !tmpAuditFile.exists() ) { Log.warn("Creating last audit file for this date: " + dateFormat.format(auditDate)); } while ( (filesIndex<(maxTotalFilesDay-1)) && (tmpAuditFile.exists()) ) { Log.debug("Audit file '"+ tmpAuditFile.getName() +"' does already exist."); filesIndex++; tmpAuditFile = new File(logDir, filePrefix + StringUtils.zeroPadString(Integer.toString(filesIndex), 3) + ".log"); } currentAuditFile = tmpAuditFile; close(); // always append to an existing file (after restart) writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(currentAuditFile, true), StandardCharsets.UTF_8)); writer.write("<jive xmlns=\"http://www.jivesoftware.org\">"); xmlWriter = new org.jivesoftware.util.XMLWriter(writer); } /** * Saves the queued entries to an XML file and checks that very old files are deleted. 
*/ private class SaveQueuedPacketsTask extends TimerTask { @Override public void run() { try { // Ensure that saved audit logs are not too old ensureMaxDays(); // Save queued packets to the audit logs saveQueuedPackets(); } catch (Throwable e) { Log.error(LocaleUtils.getLocalizedString("admin.error"), e); } } } private void saveQueuedPackets() { List<AuditPacket> packets = new ArrayList<>(logQueue.size()); logQueue.drainTo(packets); for (AuditPacket auditPacket : packets) { try { prepareAuditFile(auditPacket.getCreationDate()); Element element = auditPacket.getElement(); // Protect against null elements. if (element != null) { xmlWriter.write(element); } } catch (IOException e) { Log.error(LocaleUtils.getLocalizedString("admin.error"), e); // Add the entry back to the queue so it can be saved later if (xmlWriter != null) { logQueue.add(auditPacket); } } } try { if (xmlWriter != null) { xmlWriter.flush(); } } catch (IOException ioe) { Log.error(ioe.getMessage(), ioe); } } /** * Wrapper on a Packet with information about the packet's status at the moment * when the message was queued.<p> * * The idea is to wrap every packet that needs to be audited and then add the * wrapper to a queue that is processed later (i.e. saved to the XML file). */ private static class AuditPacket { private static DocumentFactory docFactory = DocumentFactory.getInstance(); private Element element; private Date creationDate; public AuditPacket(Packet packet, Session session) { element = docFactory.createElement("packet", "http://www.jivesoftware.org"); creationDate = new Date(); if (session != null && session.getStreamID() != null) { element.addAttribute("streamID", session.getStreamID().toString()); } switch (session == null ? 0 : session.getStatus()) { case Session.STATUS_AUTHENTICATED: element.addAttribute("status", "auth"); break; case Session.STATUS_CLOSED: element.addAttribute("status", "closed"); break; case Session.STATUS_CONNECTED: element.addAttribute("status", "connected"); // This is a workaround. Since we don't want to have an incorrect FROM attribute // value we need to clean up the FROM attribute. The FROM attribute will contain // an incorrect value since we are setting a fake JID until the user actually // authenticates with the server. packet.setFrom((String) null); break; default: element.addAttribute("status", "unknown"); break; } element.addAttribute("timestamp", auditFormat.format(creationDate)); element.add(packet.getElement()); } /** * Returns the Element associated with this audit packet. * * @return the Element. */ public Element getElement() { return element; } /** * Returns the date when the packet was audited. This is the time when the * packet was queued to be saved. * * @return the date when the packet was audited. */ public Date getCreationDate() { return creationDate; } } }
Gugli/Openfire
src/java/org/jivesoftware/openfire/audit/spi/AuditorImpl.java
Java
apache-2.0
18,482
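The javadoc in the file above describes the `jive.audit-YYYYmmdd-nnn.log` naming scheme with a zero-padded daily index. A minimal, self-contained sketch of that naming logic follows; `SimpleDateFormat` and `String.format("%03d", ...)` are stand-ins for Openfire's own `dateFormat` field and `StringUtils.zeroPadString`, and the index value is a hypothetical example, not taken from the sources above.

import java.text.SimpleDateFormat;
import java.util.Date;

// Illustrative sketch only -- not part of the Openfire sources above.
class AuditFileNameSketch {
    public static void main(String[] args) {
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");
        int filesIndex = 7; // hypothetical: seven audit files already written today
        // Mirrors: "jive.audit-" + date + "-" + zero-padded index + ".log"
        String name = "jive.audit-" + dateFormat.format(new Date())
                + "-" + String.format("%03d", filesIndex) + ".log";
        System.out.println(name); // e.g. jive.audit-20240101-007.log
    }
}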
// Copyright 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package testutil provides common assembly stubs for testing. package testutil import ( "fmt" "strings" ) // Getpid executes a trivial system call. func Getpid() // AddrOfGetpid returns the address of Getpid. // // In Go 1.17+, Go references to assembly functions resolve to an ABIInternal // wrapper function rather than the function itself. We must reference from // assembly to get the ABI0 (i.e., primary) address. func AddrOfGetpid() uintptr // AddrOfTouch returns the address of a function that touches the value in the // first register. func AddrOfTouch() uintptr func touch() // AddrOfSyscallLoop returns the address of a function that executes a syscall // and loops. func AddrOfSyscallLoop() uintptr func syscallLoop() // AddrOfSpinLoop returns the address of a function that spins on the CPU. func AddrOfSpinLoop() uintptr func spinLoop() // AddrOfHaltLoop returns the address of a function that immediately halts and // loops. func AddrOfHaltLoop() uintptr func haltLoop() // AddrOfTwiddleRegsFault returns the address of a function that twiddles // registers then faults. func AddrOfTwiddleRegsFault() uintptr func twiddleRegsFault() // AddrOfTwiddleRegsSyscall returns the address of a function that twiddles // registers then executes a syscall. func AddrOfTwiddleRegsSyscall() uintptr func twiddleRegsSyscall() // FloatingPointWorks is a floating point test. // // It returns true or false. func FloatingPointWorks() bool // RegisterMismatchError is used for checking registers. type RegisterMismatchError []string // Error returns a human-readable error. func (r RegisterMismatchError) Error() string { return strings.Join([]string(r), ";") } // addRegisterMismatch allows simple chaining of register mismatches. func addRegisterMismatch(err error, reg string, got, expected interface{}) error { errStr := fmt.Sprintf("%s got %08x, expected %08x", reg, got, expected) switch r := err.(type) { case nil: // Return a new register mismatch. return RegisterMismatchError{errStr} case RegisterMismatchError: // Append the error. r = append(r, errStr) return r default: // Leave as is. return err } }
google/gvisor
pkg/sentry/platform/kvm/testutil/testutil.go
Go
apache-2.0
2,747
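The addRegisterMismatch helper above accumulates individual register mismatches into one error whose message joins them with ";". A rough Java analog of that accumulate-then-join pattern is sketched below; it is purely illustrative (gVisor itself is Go), and the class name is invented.

import java.util.ArrayList;
import java.util.List;

// Illustrative Java analog of gVisor's RegisterMismatchError.
class RegisterMismatches extends Exception {
    private final List<String> mismatches = new ArrayList<>();

    void add(String reg, long got, long expected) {
        // Same message shape as the Go helper: "<reg> got %08x, expected %08x"
        mismatches.add(String.format("%s got %08x, expected %08x", reg, got, expected));
    }

    boolean isEmpty() {
        return mismatches.isEmpty();
    }

    @Override
    public String getMessage() {
        return String.join(";", mismatches);
    }
}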
/* * Copyright (C) 2015 Archie L. Cobbs. All rights reserved. */ package org.jsimpledb.core; import java.util.NavigableMap; import java.util.NavigableSet; import org.jsimpledb.index.Index; import org.jsimpledb.index.Index2; import org.jsimpledb.index.Index3; import org.jsimpledb.kv.KeyFilter; import org.jsimpledb.tuple.Tuple2; import org.jsimpledb.tuple.Tuple3; import org.jsimpledb.tuple.Tuple4; /** * Core API {@link Index} implementation representing a composite index on three fields. * * <p> * Instances are immutable. * </p> * * @param <V1> first index value type * @param <V2> second index value type * @param <V3> third index value type * @param <T> index target type */ public class CoreIndex3<V1, V2, V3, T> extends AbstractCoreIndex implements Index3<V1, V2, V3, T> { // Constructors CoreIndex3(Transaction tx, Index3View<V1, V2, V3, T> indexView) { super(tx, 4, indexView); } // Methods @Override public CoreIndex3<V1, V2, V3, T> filter(int index, KeyFilter filter) { return new CoreIndex3<V1, V2, V3, T>(this.tx, this.getIndex3View().filter(index, filter)); } @SuppressWarnings("unchecked") Index3View<V1, V2, V3, T> getIndex3View() { return (Index3View<V1, V2, V3, T>)this.indexView; } // Index3 @Override public NavigableSet<Tuple4<V1, V2, V3, T>> asSet() { // Get index view final Index3View<V1, V2, V3, T> iv = this.getIndex3View(); // Create field type for Tuple4<V1, V2, V3, T> final Tuple4FieldType<V1, V2, V3, T> fieldType = new Tuple4FieldType<V1, V2, V3, T>( iv.getValue1Type(), iv.getValue2Type(), iv.getValue3Type(), iv.getTargetType()); // Build set and apply filtering IndexSet<Tuple4<V1, V2, V3, T>> indexSet = new IndexSet<Tuple4<V1, V2, V3, T>>(this.tx, fieldType, iv.prefixMode, iv.prefix); if (iv.hasFilters()) indexSet = indexSet.filterKeys(new IndexKeyFilter(this.tx, iv, 4)); // Done return indexSet; } @Override public NavigableMap<Tuple3<V1, V2, V3>, NavigableSet<T>> asMap() { // Get index view final Index3View<V1, V2, V3, T> iv = this.getIndex3View(); // Create new IndexView final IndexView<Tuple3<V1, V2, V3>, T> tupleIV = iv.asTuple3IndexView(); // Build map and apply filtering IndexMap<Tuple3<V1, V2, V3>, NavigableSet<T>> indexMap = new IndexMap.OfValues<Tuple3<V1, V2, V3>, T>(this.tx, tupleIV); if (tupleIV.hasFilters()) indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, tupleIV, 1)); // Done return indexMap; } @Override public NavigableMap<Tuple2<V1, V2>, Index<V3, T>> asMapOfIndex() { // Get index view final Index3View<V1, V2, V3, T> iv = this.getIndex3View(); // Create new IndexView final Index2View<Tuple2<V1, V2>, V3, T> tupleIV = iv.asTuple2Index2View(); // Build map and apply filtering IndexMap<Tuple2<V1, V2>, Index<V3, T>> indexMap = new IndexMap.OfIndex<Tuple2<V1, V2>, V3, T>(this.tx, tupleIV); if (iv.hasFilters()) indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, tupleIV, 1)); // Done return indexMap; } @Override public NavigableMap<V1, Index2<V2, V3, T>> asMapOfIndex2() { // Get index view final Index3View<V1, V2, V3, T> iv = this.getIndex3View(); // Build map and apply filtering IndexMap<V1, Index2<V2, V3, T>> indexMap = new IndexMap.OfIndex2<V1, V2, V3, T>(this.tx, iv); if (iv.hasFilters()) indexMap = indexMap.filterKeys(new IndexKeyFilter(this.tx, iv, 1)); // Done return indexMap; } @Override public CoreIndex2<V1, V2, V3> asIndex2() { return new CoreIndex2<V1, V2, V3>(this.tx, this.getIndex3View().asIndex2View()); } @Override public CoreIndex<V1, V2> asIndex() { return new CoreIndex<V1, V2>(this.tx, this.getIndex3View().asIndex2View().asIndexView()); 
} }
tempbottle/jsimpledb
src/java/org/jsimpledb/core/CoreIndex3.java
Java
apache-2.0
4,089
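A hypothetical consumer of the class above, showing how its view methods progressively narrow a composite index. The index instance itself would come from a core-API Transaction, which is assumed here; the concrete value types and ObjId target are chosen for illustration only.

import java.util.NavigableMap;
import java.util.NavigableSet;
import org.jsimpledb.core.CoreIndex3;
import org.jsimpledb.core.ObjId;
import org.jsimpledb.index.Index;
import org.jsimpledb.tuple.Tuple2;
import org.jsimpledb.tuple.Tuple3;

// Sketch only: assumes an index on (Integer, String, Long) -> ObjId obtained elsewhere.
class Index3Sketch {
    static void dump(CoreIndex3<Integer, String, Long, ObjId> index) {
        // Full view: one entry per distinct (v1, v2, v3) value combination.
        NavigableMap<Tuple3<Integer, String, Long>, NavigableSet<ObjId>> byTuple = index.asMap();
        byTuple.forEach((values, targets) ->
            System.out.println(values + " -> " + targets.size() + " object(s)"));

        // Partial view: group by (v1, v2), keeping a nested (v3 -> target) index.
        NavigableMap<Tuple2<Integer, String>, Index<Long, ObjId>> nested = index.asMapOfIndex();
        System.out.println("distinct (v1, v2) pairs: " + nested.size());
    }
}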
package com.chanven.lib.cptr.utils; import android.content.Context; import android.util.DisplayMetrics; import android.view.View; import android.view.WindowManager; public class PtrLocalDisplay { public static int SCREEN_WIDTH_PIXELS; public static int SCREEN_HEIGHT_PIXELS; public static float SCREEN_DENSITY; public static int SCREEN_WIDTH_DP; public static int SCREEN_HEIGHT_DP; public static void init(Context context) { if (context == null) { return; } DisplayMetrics dm = new DisplayMetrics(); WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); wm.getDefaultDisplay().getMetrics(dm); SCREEN_WIDTH_PIXELS = dm.widthPixels; SCREEN_HEIGHT_PIXELS = dm.heightPixels; SCREEN_DENSITY = dm.density; SCREEN_WIDTH_DP = (int) (SCREEN_WIDTH_PIXELS / dm.density); SCREEN_HEIGHT_DP = (int) (SCREEN_HEIGHT_PIXELS / dm.density); } public static int dp2px(float dp) { final float scale = SCREEN_DENSITY; return (int) (dp * scale + 0.5f); } public static int designedDP2px(float designedDp) { if (SCREEN_WIDTH_DP != 320) { designedDp = designedDp * SCREEN_WIDTH_DP / 320f; } return dp2px(designedDp); } public static void setPadding(final View view, float left, float top, float right, float bottom) { view.setPadding(designedDP2px(left), dp2px(top), designedDP2px(right), dp2px(bottom)); } }
Chanven/CommonPullToRefresh
cptr/src/com/chanven/lib/cptr/utils/PtrLocalDisplay.java
Java
apache-2.0
1,526
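A worked example of the dp-to-px rounding used by dp2px above, assuming a hypothetical screen density of 2.0 (an xhdpi device); the +0.5f rounds to the nearest whole pixel before the cast truncates.

// Standalone sketch of the conversion; DENSITY is a hypothetical value
// that PtrLocalDisplay would normally read from DisplayMetrics.
class Dp2PxSketch {
    static final float DENSITY = 2.0f;

    static int dp2px(float dp) {
        return (int) (dp * DENSITY + 0.5f); // round-to-nearest via +0.5, then truncate
    }

    public static void main(String[] args) {
        System.out.println(dp2px(12f));   // 24  (24.0 + 0.5 -> 24.5 -> 24)
        System.out.println(dp2px(12.3f)); // 25  (24.6 + 0.5 -> 25.1 -> 25)
    }
}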
/* * Copyright 2016 Ratha Long * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nz.co.testamation.testcommon.fixture; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import org.apache.commons.lang3.RandomStringUtils; import java.math.BigDecimal; import java.math.BigInteger; import java.time.Duration; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.Collection; import java.util.concurrent.ThreadLocalRandom; public class SomeFixture { public static String someString( int length ) { return RandomStringUtils.randomAlphabetic( length ); } public static String someString() { return RandomStringUtils.randomAlphabetic( 7 ); } public static int someInt() { return ThreadLocalRandom.current().nextInt(); } public static long someLong() { return ThreadLocalRandom.current().nextLong(); } public static int someIntLessThan( int maxExclusive ) { return ThreadLocalRandom.current().nextInt( maxExclusive ); } public static int someYear() { return 2000 + someIntLessThan( 11 ); } public static Duration someDuration() { return Duration.ofSeconds( someLong() ); } public static String someEmail() { return String.format( "%s@%s.com", RandomStringUtils.randomAlphabetic( 5 ), RandomStringUtils.randomAlphabetic( 5 ) ).toLowerCase(); } public static LocalDate someLocalDate() { return LocalDate.of( someYear(), someIntLessThan( 12 ) + 1, someIntLessThan( 28 ) + 1 ); } public static LocalDateTime someDateTime() { return LocalDateTime.of( someYear(), someIntLessThan( 12 ) + 1, someIntLessThan( 28 ) + 1, someIntBetween( 4, 24 ), // avoid sometimes invalid 2am due to daylight savings someIntLessThan( 60 ), someIntLessThan( 60 ) ); } public static Boolean someBoolean() { return ThreadLocalRandom.current().nextBoolean(); } public static <E extends Enum> E someEnum( Class<E> enumClazz ) { return someValue( enumClazz.getEnumConstants() ); } public static <E extends Enum> E someEnum( Class<E> enumClazz, Predicate<E> predicate ) { E result; do { result = someEnum( enumClazz ); } while ( !predicate.apply( result ) ); return result; } public static <T extends Enum> T someEnumOtherThan( Class<T> enumClazz, T... 
excluded ) { return someValue( Sets.difference( ImmutableSet.copyOf( enumClazz.getEnumConstants() ), ImmutableSet.copyOf( excluded ) ) ); } private static int someInt( int length ) { return Integer.parseInt( RandomStringUtils.randomNumeric( length ) ); } public static Integer someIntBetween( int minInclusive, int maxExclusive ) { return ThreadLocalRandom.current().nextInt( minInclusive, maxExclusive ); } public static String someEmail( String prefix ) { return prefix + "_" + someEmail(); } public static BigDecimal someBigDecimal() { return new BigDecimal( somePositiveInt() ); } public static BigDecimal someBigDecimalPercentage() { return new BigDecimal( someDouble() ); } public static double someDouble() { return ThreadLocalRandom.current().nextDouble(); } public static byte[] someBytes() { return someString().getBytes(); } public static int somePositiveInt() { return Math.abs( someInt() ); } public static <T> T someValue( T... values ) { return values[ someIntLessThan( values.length ) ]; } public static <T> T someValue( Collection<T> values ) { return Iterables.get( values, someIntLessThan( values.size() ) ); } public static String someString( String... choices ) { return choices[ SomeFixture.someIntBetween( 0, choices.length ) ]; } public static String someString( Iterable<String> choices ) { return someString( Iterables.toArray( choices, String.class ) ); } public static BigInteger someBigInteger() { return new BigInteger( String.valueOf( somePositiveInt() ) ); } public static <T> T someThing( T... things ) { return things[ someIntBetween( 0, things.length ) ]; } }
rlon008/testamation
testamation-test-common/src/main/java/nz/co/testamation/testcommon/fixture/SomeFixture.java
Java
apache-2.0
5,010
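A small usage sketch for the fixture helpers above. Since all values are random, a test would assert on shapes and ranges rather than exact values; the assertions below reflect someYear()'s 2000-2010 range and someEmail(prefix)'s prefixing behavior, and the prefix "billing" is an invented example.

import nz.co.testamation.testcommon.fixture.SomeFixture;

// Usage sketch only; run with assertions enabled (java -ea).
class SomeFixtureUsageSketch {
    public static void main(String[] args) {
        String email = SomeFixture.someEmail("billing"); // e.g. billing_abcde@fghij.com
        int year = SomeFixture.someYear();               // 2000 + [0, 11) => 2000..2010
        assert email.startsWith("billing_") && email.contains("@");
        assert year >= 2000 && year <= 2010;
        System.out.println(email + " / " + year);
    }
}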
package me.com.hookdemo.hook; import android.app.Instrumentation; import java.lang.reflect.Field; import java.lang.reflect.Method; public class HookHelper { /** * Uses reflection to locate the ActivityThread class and replaces the * Instrumentation object it holds with our own implementation. */ public static void attachContext() throws Exception { Class<?> activityThreadClass = Class.forName("android.app.ActivityThread"); Method currentActivityThreadMethod = activityThreadClass.getDeclaredMethod("currentActivityThread"); currentActivityThreadMethod.setAccessible(true); Object currentActivityThread = currentActivityThreadMethod.invoke(null); Field mInstrumentationField = activityThreadClass.getDeclaredField("mInstrumentation"); mInstrumentationField.setAccessible(true); Instrumentation mInstrumentation = (Instrumentation) mInstrumentationField.get(currentActivityThread); Instrumentation demonInstrumentation = new DemonInstrumentation(mInstrumentation); mInstrumentationField.set(currentActivityThread, demonInstrumentation); } }
wmh-demos/HookDemo
binder-hook/src/main/java/me/com/hookdemo/hook/HookHelper.java
Java
apache-2.0
1,128
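A hypothetical call site for attachContext(): the Instrumentation swap must happen before any Activity is launched, so a natural place is Application#attachBaseContext. This host class is illustrative and not part of the repository above.

import android.app.Application;
import android.content.Context;
import me.com.hookdemo.hook.HookHelper;

// Illustrative host Application that installs the hook at process start.
public class DemoApplication extends Application {
    @Override
    protected void attachBaseContext(Context base) {
        super.attachBaseContext(base);
        try {
            HookHelper.attachContext(); // swaps in DemonInstrumentation via reflection
        } catch (Exception e) {
            // Hidden-API layout differs across Android versions; fail loudly in a demo.
            throw new RuntimeException("Failed to hook Instrumentation", e);
        }
    }
}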
/* * The University of Wales, Cardiff Triana Project Software License (Based * on the Apache Software License Version 1.1) * * Copyright (c) 2007 University of Wales, Cardiff. All rights reserved. * * Redistribution and use of the software in source and binary forms, with * or without modification, are permitted provided that the following * conditions are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, * must include the following acknowledgment: "This product includes * software developed by the University of Wales, Cardiff for the Triana * Project (http://www.trianacode.org)." Alternately, this * acknowledgment may appear in the software itself, if and wherever * such third-party acknowledgments normally appear. * * 4. The names "Triana" and "University of Wales, Cardiff" must not be * used to endorse or promote products derived from this software * without prior written permission. For written permission, please * contact triana@trianacode.org. * * 5. Products derived from this software may not be called "Triana," nor * may Triana appear in their name, without prior written permission of * the University of Wales, Cardiff. * * 6. This software may not be sold, used or incorporated into any product * for sale to third parties. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN * NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. * * ------------------------------------------------------------------------ * * This software consists of voluntary contributions made by many * individuals on behalf of the Triana Project. For more information on the * Triana Project, please see. http://www.trianacode.org. * * This license is based on the BSD license as adopted by the Apache * Foundation and is governed by the laws of England and Wales. * */ package org.trianacode.taskgraph.proxy.java; /** * Constants used by java units. * * @author Ian Wang * @version $Revision: 4048 $ */ public interface JavaConstants { /** * the proxy type for java units */ public static final String JAVA_PROXY_TYPE = "Java"; // the unit name for java units public static final String UNIT_NAME = "unitName"; // the unit package for java units public static final String UNIT_PACKAGE = "unitPackage"; /** * the rendering hint for java units */ public static final String JAVA_RENDERING_HINT = "Java"; }
CSCSI/Triana
triana-core/src/main/java/org/trianacode/taskgraph/proxy/java/JavaConstants.java
Java
apache-2.0
3,506
<aside class="alignLeft"> <div class="container_24 alignLeft"> <div class="wrapper"> <article class="grid_5"> <h6>Nos services</h6> <ul> <li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit immobilier</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_2') ?>">Crédit patrimonial</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_3') ?>">Mobilisation du patrimoine</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_4') ?>">Financement Entreprise</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_5') ?>">French Mortgage</a></li> </ul> </article> <article class="grid_5"> <h6>Nos solutions</h6> <ul> <li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit Immobilier résidentiel</a></li> <li><a href="<?php echo href_by_pagename('page_credit_1') ?>">Crédit Immobilier commercial</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_3') ?>">Crédit hypothécaire</a></li> <li><a href="<?php echo href_by_pagename('page_gestion_4') ?>">Crédit lombard</a></li> <li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement de fond de commerce</a></li> <li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Acquisition de titres</a></li> <li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement LBO</a></li> <li><a href="<?php echo href_by_pagename('page_entreprise_1') ?>">Financement de BFR</a></li> <li>Affacturage</li> </ul> </article> <article class="grid_5"> <h6>Qui sommes-nous?</h6> <ul> <li><a href="<?php echo href_by_pagename('page_quisommesnous_2') ?>">Courtier en immobilier</a></li> <li><a href="<?php echo href_by_pagename('page_quisommesnous_4') ?>">Équipe</a></li> <li><a href="<?php echo href_by_pagename('page_partenaires') ?>">Partenaires</a></li> <li><a href="<?php echo href_by_pagename('page_mentionslegales') ?>">Mentions légales</a></li> <li><a href="<?php echo url_for('@contact') ?>">Nous contacter</a></li> <li><a href="<?php echo url_for('@news-list') ?>">Presse</a></li> <li><a href="<?php echo url_for('@faq-list') ?>">FAQ</a></li> <li><a href="<?php echo href_by_pagename('page_infospratiques_fr') ?>">Informations pratiques</a></li> </ul> </article> <article class="grid_9"> <h6>S'inscrire à la newsletter</h6> <form id="newsletter-form"> <input type="text" value="e-mail address" onFocus="if(this.value=='e-mail address'){this.value=''}" onBlur="if(this.value==''){this.value='e-mail address'}"> <a class="button1" onClick="document.getElementById('newsletter-form').submit()">S'inscrire</a> </form> <h6>Nous contacter</h6> <div class="icons"> <a class="normaltip" target="_blank" title="Facebook" href="http://www.facebook.com/pages/Carte-Financement/103833006320861"><img src="/images/socialIcons/facebook-grey.png" alt=""><img src="/images/socialIcons/facebook.png" alt=""></a> <a class="normaltip" target="_blank" title="Twitter" href="http://twitter.com/courtiercredit"><img src="/images/socialIcons/twitter-grey.png" alt=""><img src="/images/socialIcons/twitter.png" alt=""></a> <a class="normaltip" target="_blank" title="LinkedIn" href="http://www.linkedin.com/company/carte-financement?trk=cp_followed_name_carte-financement"><img src="/images/socialIcons/linkedin-grey.png" alt=""><img src="/images/socialIcons/linkedin.png" alt=""></a> <a class="normaltip" target="_blank" title="Google plus" href="https://plus.google.com/111303088006101634610" rel="publisher"><img src="/images/socialIcons/google-plus-grey.png" alt=""><img src="/images/socialIcons/google-plus.png" 
alt=""></a> </div> </article> </div> </div> </aside> <!--==============================footer=================================--> <footer> <span>CarteFinancement &copy; 2012 &nbsp;|&nbsp; </span> </footer> <script type="text/javascript"> Cufon.now(); </script>
vteco/cfin
apps/frontend/modules/standalone/templates/_footer_fr.php
PHP
apache-2.0
4,252
"use strict"; var async = require("async"); var helper = require("../../../../helper.js"); var config = require("../../../../../config.js"); var utils = helper.requireModule('./lib/environment/drivers/infra.js'); const nock = require('nock'); var req = { soajs: { registry: { coreDB: { provision: { name: 'core_provision', prefix: '', servers: [ { host: '127.0.0.1', port: 27017 } ], credentials: null, streaming: { batchSize: 10000, colName: { batchSize: 10000 } }, URLParam: { maxPoolSize: 2, bufferMaxEntries: 0 }, registryLocation: { l1: 'coreDB', l2: 'provision', env: 'dev' }, timeConnected: 1491861560912 } }, services: { controller : { port : 80 } } }, log: { debug: function (data) { }, error: function (data) { }, info: function (data) { } }, inputmaskData: { specs : {} }, validator: { Validator: function () { return { validate: function () { return { errors: [] }; } }; } }, awareness: { getHost: function (service, cb) { return cb ("dashboard.com"); } } }, headers: { key : "key", soajsauth: "auth", }, query: { "access_token": "token" } }; var mongoStub = { findEntry: function (soajs, opts, cb) { if (opts.collection === 'infra') { cb (null, {'_id' : 123123}) } else { cb(null, { "productize": { "modifyTemplateStatus": true }, "cluster": {}, "controller": {}, "urac": {}, "oauth": {}, "nginx": {}, "user": {} }); } }, updateEntry: function (soajs, opts, cb) { cb(null, true); }, saveEntry: function (soajs, opts, cb) { cb(null, true); }, removeEntry: function (soajs, opts, cb) { cb(null, true); }, closeConnection: function (soajs) { return true; }, validateCustomId : function(soajs, id) { return true }, onboardVM : function(soajs, id) { return true } }; var BL = { customRegistry :{ module : {} }, model: mongoStub, cd : { module : {} }, cloud :{ deploy :{ module :{} }, services :{ module :{} }, resources :{ module :{} }, infra : { module : {} } }, resources: { module : {} } }; var template = { "type": "_template", "name": "MGTT", "description": "Mike Generic Test Template", "link": "", "content": { "custom_registry": { "data": [ { "name": "ciConfig", "value": { "apiPrefix": "cloud-api", "domain": "herrontech.com", "protocol": "https", "port": 443.0 } }, { "name": "ciConfig2", "value": "string value here ..." 
}, { "name": "ciConfig3", "value": { "apiPrefix": "dashboard-api", "domain": "soajs.org", "protocol": "https", "port": 443.0 } } ] }, "productization": { "data": [ { "code": "MIKE", "name": "Mike Product", "description": "Mike Product Description", "packages": [ { "code": "BASIC", "name": "Basic Package", "description": "Basic Package Description", "TTL": 2160000.0, "acl": { "oauth": {}, "urac": {}, "daas": {} } }, { "code": "MAIN", "name": "Main Package", "description": "Main Package Description", "TTL": 2160000.0, "acl": {} } ] } ] }, "tenant": { "data": [ { "code": "MIKE", "name": "Mike Tenant", "description": "Mike Tenant Description", "applications": [ { "product": "MIKE", "package": "MIKE_MAIN", "description": "Mike main application", "_TTL": 2160000.0, "keys": [ { "extKeys": [ { "device": {}, "geo": {}, "dashboardAccess": false, "expDate": null } ], "config": { "a": "b" } } ] }, { "product": "MIKE", "package": "MIKE_USER", "description": "Mike Logged In user Application", "_TTL": 2160000.0, "keys": [ { "extKeys": [ { "device": {}, "geo": {}, "dashboardAccess": true, "expDate": null } ], "config": { "c": "d" } } ] } ] } ] }, "secrets": { "data": [ { "name": "mike" } ] }, "deployments": { "repo": { "controller": { "label": "SOAJS API Gateway", "name": "controller", "type": "service", "category": "soajs", "deploy": { "memoryLimit": 500.0, "mode": "replicated", "replicas": 1.0 } } }, "resources": { "nginx": { "label": "Nginx", "type": "server", "category": "nginx", "ui": "${REF:resources/drivers/server/nginx}", "deploy": { "memoryLimit": 500.0, "mode": "global", "secrets": "mike" } }, "external": { "label": "External Mongo", "type": "cluster", "category": "mongo", "limit": 1.0, "ui": "${REF:resources/drivers/cluster/mongo}", "deploy": null } } } }, "deploy": { database: { pre: { custom_registry: { imfv: [ { name: 'ciConfig', locked: true, plugged: false, shared: true, value: { test1: true } }, { name: 'ciConfig2', locked: true, plugged: false, shared: true, value: { test2: true } }, { name: 'ciConfig3', locked: true, plugged: false, shared: true, value: { test3: true } } ] } }, steps: { productization: { ui: { readOnly: true } }, tenant: { ui: { readOnly: true } } }, post: { 'deployments__dot__resources__dot__external': { imfv: [ { name: 'external', type: 'cluster', category: 'mongo', locked: false, shared: false, plugged: false, config: { username: 'username', password: 'pwd' } } ], "status":{ "done": true, "data":[ { "db": "mongo id of this resource" } ] } } } }, deployments: { pre: { "infra.cluster.deploy": { "imfv" : [ { "command":{ "method" : "post", "routeName" : "/bridge/executeDriver", //change the path "data" : { "type" : "infra", "name" : "google", "driver" : "google", "command" : "deployCluster", "project" : "demo", "options" : { "region" : "us-east1-b", "workernumber" : 3, "workerflavor" : "n1-standard-2", "regionLabel" : "us-east1-b", "technology" : "kubernetes", "envCode" : "PORTAL" } } }, "check" : { "id" : { "type" : "string", "required": true } } }, { "recursive" : { "max" : 5, "delay": 300 }, "check" : { "id" : { "type" : "string", "required": true }, "ip" : { "type" : "string", "required": true } }, "command": { "method" : "post", "routeName" : "/bridge/executeDriver", "data" : { "type" : "infra", "name" : "google", "driver" : "google", "command" : "getDeployClusterStatus", "project" : "demo", "options" : { "envCode" : "PORTAL" } } } } ], "status": { "done": true, "data": { "id": "kaza", "ip": "kaza", "dns": { "a":"b" } }, "rollback" : { "command":{ "method" : "post", 
"routeName" : "/bridge/executeDriver", "params": {}, "data" : { "type" : "infra", "name" : "google", "driver" : "google", "command" : "deleteCluster", "project" : "demo", "options" : { "envCode" : "PORTAL", "force" : true } } } } }, } }, steps: { secrets: { imfv: [ { name: 'mike', type: 'Generic', data: 'something in secret' } ] }, 'deployments.repo.controller': { imfv: [ { name: 'controller', options: { deployConfig: { replication: { mode: 'replicated', replicas: 1 }, memoryLimit: 524288000 }, gitSource: { owner: 'soajs', repo: 'soajs.controller', branch: 'master', commit: '468588b0a89e55020f26b805be0ff02e0f31a7d8' }, custom: { sourceCode: {}, name: 'controller', type: 'service' }, recipe: '5ab4d65bc261bdb38a9fe363', env: 'MIKE' }, deploy: true, type: 'custom' } ], "status": { } }, 'deployments.resources.nginx': { imfv: [ { name: 'nginx', type: 'server', category: 'nginx', locked: false, shared: false, plugged: false, config: null, deploy: { options: { deployConfig: { replication: { mode: 'global' }, memoryLimit: 524288000 }, custom: { sourceCode: {}, secrets: [ { name: 'mike', mountPath: '/etc/soajs/certs', type: 'certificate' } ], name: 'mynginx', type: 'server' }, recipe: '5ab4d65bc261bdb38a9fe363', env: 'MIKE' }, deploy: true, type: 'custom' } } ] } }, post: { "infra.dns": { "imfv": [ { "recursive" : { "max" : 5, "delay": 300 }, "check" : { "dns" : { "type" : "object", "required": true }, "ip" : { "type" : "string", "required": true } }, "command": { "method" : "post", "routeName" : "/bridge/executeDriver", "data" : { "type" : "infra", "name" : "google", "driver" : "google", "command" : "getDNSInfo", "project" : "demo", "options" : { "envCode" : "PORTAL" } } } } ], "status": { "done": true, "data": { "ip": "kaza", "dns": { "a":"b" } } }, } } } }, soajs_project: "soajs_project" }; var environmentRecord = { _id: '5a58d942ace01a5325fa3e4c', code: 'DASHBORAD', deployer: { "type": "container", "selected": "container.docker.local", "container": { "docker": { "local": { "socketPath": "/var/run/docker.sock" }, "remote": { "nodes": "" } }, "kubernetes": { "local": { "nginxDeployType": "", "namespace": {}, "auth": { "token": "" } }, "remote": { "nginxDeployType": "", "namespace": {}, "auth": { "token": "" } } } } }, dbs: { clusters: { oneCluster: { servers: {} } }, config: { session: { cluster: 'oneCluster' } } }, services: {}, profile: '', "restriction":{ "1231231":{ "eastus": { group: "grouptest", network: "networktest" } } } }; var infraRecord = { "_id":'5af2b621a0e17acc56000001', "name": "test", "technologies": [ "test" ], "templates": [ "local" ], "label": "test", "deployments": [] }; var lib = { initBLModel : function(module, modelName, cb){ return cb(null, { add : function (context, req, data, cb) { return cb(null, true); }, delete : function (context, req, data, cb) { return cb(true); }, saveConfig : function (context, req, data, cb) { return cb(null, true); }, deployService : function (context, req, data, cb) { return cb(null, {service: { id: "1" }}); }, deleteService : function (context, req, data, cb) { return cb(null, true); }, addResource: function (context, req, data, cb) { return cb(null, {_id: "1"}); }, setConfig: function (context, req, data, cb) { return cb(null, true); }, deleteResource: function (context, req, data, cb) { return cb(true); }, list : function (config, soajs, deployer, cb) { return cb(null, true); }, activate : function (config, soajs, deployer, cb) { return cb(true); }, modify : function (config, soajs, deployer, cb) { return cb(null, true); }, deactivate : 
function (config, soajs, deployer, cb) { return cb(null, {service: { id: "1" }}); }, removeDeployment : function (config, soajs, deployer, cb) { return cb(null, true); }, getDeployClusterStatus: function (config, soajs, req ,deployer, cbMain) { return cbMain(null, true); }, deployCluster: function (config, soajs, deployer,req, cb) { return cb(null, true); }, scaleCluster: function (config, soajs, deployer, cb) { return cb(true); }, removeEnvFromDeployment: function (config, soajs, req, deployer, cb) { return cb(true); }, getCluster: function (config, soajs, deployer, cb) { return cb(true); }, updateCluster: function (config, soajs, deployer, cb) { return cb(true); }, getDNSInfo: function (config, req, soajs, deployer, cb) { return cb(true); }, removeTemplate: function (config, soajs, deployer, cb) { return cb(true); }, addTemplate: function (config, soajs, deployer, cb) { return cb(true); }, updateTemplate: function (config, soajs, deployer, cb) { return cb(true); }, uploadTemplate: function (config, soajs, deployer, cb) { return cb(true); }, uploadTemplateInputsFile: function (config, soajs, deployer, cb) { return cb(true); }, downloadTemplate: function (config, soajs, deployer, cb) { return cb(true); }, getDeployVMStatus: function (config, req, soajs, deployer, cb) { return cb(true); }, onboardVM: function (config, req, soajs, deployer, cb) { return cb(true); }, destroyVM: function (config, req, soajs, deployer, cb) { return cb(true); }, deployVM: function (config, req, soajs, deployer, cb) { return cb(true); }, }); }, checkReturnError: function(req, {}, {}, cb){ return cb(null, true); } }; var context = {}; describe("testing infra.js", function () { describe("testing validate", function () { it("success", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": { "method": "post", "routeName": "/bridge/executeDriver", "data": { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } } }, "check": { "id": { "type": "string", "required": true } } }, { "recursive": { "max": 5, "delay": 300 }, "check": { "id": { "type": "string", "required": true }, "ip": { "type": "string", "required": true } }, "command": { "method": "post", "routeName": "/bridge/executeDriver", "data": { "type": "infra", "name": "google", "driver": "google", "command": "getDeployClusterStatus", "project": "demo", "options": { "envCode": "PORTAL" } } } } ] } }; utils.validate(req, context, lib, async, BL, 'mongo', function (err, body) { done(); }) }); it("success with errors", function (done) { req.soajs.validator = { Validator: function () { return { validate: function () { return { errors: [{err: "msg"}] }; } }; } }; utils.validate(req, context, lib, async, BL, 'mongo', function (err, body) { done(); }) }); }); describe("testing deploy", function () { it("success infra already deployed", function (done) { context = { BL: BL, environmentRecord: environmentRecord, infraProvider : JSON.parse(JSON.stringify(infraRecord)), template: JSON.parse(JSON.stringify(template)), config: config, errors: [], opts: { "stage": 
"deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "getDeployClusterStatus", "check": { "id": { "type": "string", "required": true } } }, { "recursive": { "max": 0, "delay": 0 }, "check": { "id": { "type": "string", "required": true }, "ip": { "type": "string", "required": true } }, "command": "getDeployClusterStatus", } ] } }; context.template.deploy.deployments.pre["infra.cluster.deploy"].status = { done: true }; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { done(); }) }); it("success infra with error", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } } }, { // "recursive": { // "max": 5, // "delay": 1 // }, "check": { "id": { "type": "string", "required": true }, "ip": { "type": "string", "required": true } }, "command": "deployCluster" } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: {} }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra without command", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": '', "check": { "id": { "type": "string", "required": true } } }, { "recursive": { "max": 5, "delay": 300 }, "check": { "id": { "type": "string", "required": true }, "ip": { "type": "string", "required": true } }, "command": 'deployCluster' } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: false }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra without response", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", 
"check": { "id": { "type": "string", "required": true } } }, { "recursive": { "max": 5, "delay": 300 }, "check": { "id": { "type": "string", "required": true }, "ip": { "type": "string", "required": true } }, "command": { "method": "post", "routeName": "/bridge/executeDriver", "data": { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "envCode": "PORTAL" } } } } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: false }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: true }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case getDeployVMStatus", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "getDeployVMStatus", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : ['test'], data : ['test'] } }, ] } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case onBoard", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "onboardVM", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : ['test'], data : ['test'] } }, ] } }; 
context.infraProvider.command = 'onboardVM'; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case dnsInfo", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "getDNSInfo", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : ['test'], data : ['test'] } }, ] } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case deployVm", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployVM", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : {'specs' : {}}, data: [{"test": 'test', "specs": {}}], } }, ] } }; req.soajs.inputmaskData.specs = { layerName : '' }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case releaseVm", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "releaseVM", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : ['test'], data : ['test'] } }, ] } }; context.infraProvider.command = 'onboardVM'; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with response case destroyVm", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "destroyVM", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, options : { params : ['test'], data : ['test'] } }, ] } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra ", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: 
JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: true }); req.soajs.validator = { Validator: function () { return { validate: function () { return { valid: true }; } }; } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra max count", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 0, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: false }); req.soajs.validator = { Validator: function () { return { validate: function () { return { valid: true }; } }; } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra inputs object", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 5, "delay": 300 }, } } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: false }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra inputs empty", function 
(done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: false }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra no steps", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ null ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deployCluster", "project": "demo", "options": { "region": "us-east1-b", "workernumber": 3, "workerflavor": "n1-standard-2", "regionLabel": "us-east1-b", "technology": "kubernetes", "envCode": "PORTAL" } }).reply(200, { result: true, data: true }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.deploy(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); }); describe("testing rollback", function () { it("success infra no status", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra done false", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; context.template.deploy.deployments.pre["infra.cluster.deploy"].status = { done: false }; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra rollback empty", function (done) { context = { BL: BL, environmentRecord: environmentRecord,
template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; context.template.deploy.deployments.pre["infra.cluster.deploy"].status = { done: true, rollback: {} }; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with rollback", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deleteCluster", "project": "demo", "options": { "envCode": "PORTAL", "force": true } }).reply(200, { result: true, data: true }); utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra with bad rollback", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deleteCluster", "project": "demo", "options": { "envCode": "PORTAL", "force": true } }).reply(200, { result: false, errors: { details: [{ message: "err" }] } }); context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = [template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback]; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra no rollback", function (done) { context = { BL: BL, environmentRecord: environmentRecord,
}); context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = []; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra rollback null", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deleteCluster", "project": "demo", "options": { "envCode": "PORTAL", "force": true } }).reply(200, { result: false, errors: { details: [{ message: "err" }] } }); context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback = [null]; utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); it("success infra no rollback", function (done) { context = { BL: BL, environmentRecord: environmentRecord, template: JSON.parse(JSON.stringify(template)), infraProvider : JSON.parse(JSON.stringify(infraRecord)), config: config, errors: [], opts: { "stage": "deployments", "group": "pre", "stepPath": "infra.cluster.deploy", "section": [ "infra", "cluster", "deploy" ], "inputs": [ { "command": "deployCluster", "check": { "id": { "type": "string", "required": true } }, "recursive": { "max": 1, "delay": 300 }, } ] } }; nock("http://dashboard.com:80").post('/bridge/executeDriver?access_token=token&soajs_project=soajs_project', { "type": "infra", "name": "google", "driver": "google", "command": "deleteCluster", "project": "demo", "options": { "envCode": "PORTAL", "force": true } }).reply(200, { result: false, errors: { details: [{ message: "err" }] } }); delete context.template.deploy.deployments.pre["infra.cluster.deploy"].status.rollback utils.rollback(req, context, lib, async, BL, 'mongo', function (err, body) { nock.cleanAll(); done(); }) }); }) });
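// Shape of the template step status that the rollback cases above toggle.
// This is a sketch inferred from the assignments in these tests, not an
// authoritative schema; the rollback entry shown is a hypothetical example
// of a recorded driver call that utils.rollback would replay.
const sampleStepStatus = {
	done: true,
	rollback: [
		{
			"command": "deleteCluster",
			"options": { "envCode": "PORTAL", "force": true }
		}
	]
};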
soajs/soajs.dashboard
test/unit/lib/environment/drivers/infra.test.js
JavaScript
apache-2.0
49,180
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.systemui.settings; import android.content.ContentResolver; import android.content.Context; import android.database.ContentObserver; import android.net.Uri; import android.os.AsyncTask; import android.os.Handler; import android.os.IPowerManager; import android.os.PowerManager; import android.os.RemoteException; import android.os.ServiceManager; import android.os.UserHandle; import android.provider.Settings; import android.widget.ImageView; import com.android.internal.logging.MetricsLogger; import com.android.internal.logging.MetricsProto.MetricsEvent; import java.util.ArrayList; public class BrightnessController implements ToggleSlider.Listener { private static final String TAG = "StatusBar.BrightnessController"; private static final boolean SHOW_AUTOMATIC_ICON = false; /** * {@link android.provider.Settings.System#SCREEN_AUTO_BRIGHTNESS_ADJ} uses the range [-1, 1]. * Using this factor, it is converted to [0, BRIGHTNESS_ADJ_RESOLUTION] for the SeekBar. */ public static final float BRIGHTNESS_ADJ_RESOLUTION = 2048; private final int mMinimumBacklight; private final int mMaximumBacklight; private final Context mContext; private final ImageView mIcon; private final ToggleSlider mControl; private final boolean mAutomaticAvailable; private final IPowerManager mPower; private final CurrentUserTracker mUserTracker; private final Handler mHandler; private final BrightnessObserver mBrightnessObserver; private ArrayList<BrightnessStateChangeCallback> mChangeCallbacks = new ArrayList<BrightnessStateChangeCallback>(); private boolean mAutomatic; private boolean mListening; private boolean mExternalChange; public interface BrightnessStateChangeCallback { public void onBrightnessLevelChanged(); } /** ContentObserver to watch brightness **/ private class BrightnessObserver extends ContentObserver { private final Uri BRIGHTNESS_MODE_URI = Settings.System.getUriFor(Settings.System.SCREEN_BRIGHTNESS_MODE); private final Uri BRIGHTNESS_URI = Settings.System.getUriFor(Settings.System.SCREEN_BRIGHTNESS); private final Uri BRIGHTNESS_ADJ_URI = Settings.System.getUriFor(Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ); public BrightnessObserver(Handler handler) { super(handler); } @Override public void onChange(boolean selfChange) { onChange(selfChange, null); } @Override public void onChange(boolean selfChange, Uri uri) { if (selfChange) return; try { mExternalChange = true; if (BRIGHTNESS_MODE_URI.equals(uri)) { updateMode(); updateSlider(); } else if (BRIGHTNESS_URI.equals(uri) && !mAutomatic) { updateSlider(); } else if (BRIGHTNESS_ADJ_URI.equals(uri) && mAutomatic) { updateSlider(); } else { updateMode(); updateSlider(); } for (BrightnessStateChangeCallback cb : mChangeCallbacks) { cb.onBrightnessLevelChanged(); } } finally { mExternalChange = false; } } public void startObserving() { final ContentResolver cr = mContext.getContentResolver(); cr.unregisterContentObserver(this); cr.registerContentObserver( 
BRIGHTNESS_MODE_URI, false, this, UserHandle.USER_ALL); cr.registerContentObserver( BRIGHTNESS_URI, false, this, UserHandle.USER_ALL); cr.registerContentObserver( BRIGHTNESS_ADJ_URI, false, this, UserHandle.USER_ALL); } public void stopObserving() { final ContentResolver cr = mContext.getContentResolver(); cr.unregisterContentObserver(this); } } public BrightnessController(Context context, ImageView icon, ToggleSlider control) { mContext = context; mIcon = icon; mControl = control; mHandler = new Handler(); mUserTracker = new CurrentUserTracker(mContext) { @Override public void onUserSwitched(int newUserId) { updateMode(); updateSlider(); } }; mBrightnessObserver = new BrightnessObserver(mHandler); PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE); mMinimumBacklight = pm.getMinimumScreenBrightnessSetting(); mMaximumBacklight = pm.getMaximumScreenBrightnessSetting(); mAutomaticAvailable = context.getResources().getBoolean( com.android.internal.R.bool.config_automatic_brightness_available); mPower = IPowerManager.Stub.asInterface(ServiceManager.getService("power")); } public void addStateChangedCallback(BrightnessStateChangeCallback cb) { mChangeCallbacks.add(cb); } public boolean removeStateChangedCallback(BrightnessStateChangeCallback cb) { return mChangeCallbacks.remove(cb); } @Override public void onInit(ToggleSlider control) { // Do nothing } public void registerCallbacks() { if (mListening) { return; } mBrightnessObserver.startObserving(); mUserTracker.startTracking(); // Update the slider and mode before attaching the listener so we don't // receive the onChanged notifications for the initial values. updateMode(); updateSlider(); mControl.setOnChangedListener(this); mListening = true; } /** Unregister all call backs, both to and from the controller */ public void unregisterCallbacks() { if (!mListening) { return; } mBrightnessObserver.stopObserving(); mUserTracker.stopTracking(); mControl.setOnChangedListener(null); mListening = false; } @Override public void onChanged(ToggleSlider view, boolean tracking, boolean automatic, int value, boolean stopTracking) { updateIcon(mAutomatic); if (mExternalChange) return; if (!mAutomatic) { final int val = value + mMinimumBacklight; if (stopTracking) { MetricsLogger.action(mContext, MetricsEvent.ACTION_BRIGHTNESS, val); } setBrightness(val); if (!tracking) { AsyncTask.execute(new Runnable() { public void run() { Settings.System.putIntForUser(mContext.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS, val, UserHandle.USER_CURRENT); } }); } } else { final float adj = value / (BRIGHTNESS_ADJ_RESOLUTION / 2f) - 1; if (stopTracking) { MetricsLogger.action(mContext, MetricsEvent.ACTION_BRIGHTNESS_AUTO, value); } setBrightnessAdj(adj); if (!tracking) { AsyncTask.execute(new Runnable() { public void run() { Settings.System.putFloatForUser(mContext.getContentResolver(), Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ, adj, UserHandle.USER_CURRENT); } }); } } for (BrightnessStateChangeCallback cb : mChangeCallbacks) { cb.onBrightnessLevelChanged(); } } private void setMode(int mode) { Settings.System.putIntForUser(mContext.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS_MODE, mode, mUserTracker.getCurrentUserId()); } private void setBrightness(int brightness) { try { mPower.setTemporaryScreenBrightnessSettingOverride(brightness); } catch (RemoteException ex) { } } private void setBrightnessAdj(float adj) { try { mPower.setTemporaryScreenAutoBrightnessAdjustmentSettingOverride(adj); } catch (RemoteException ex) { } } 
private void updateIcon(boolean automatic) { if (mIcon != null) { mIcon.setImageResource(automatic && SHOW_AUTOMATIC_ICON ? com.android.systemui.R.drawable.ic_qs_brightness_auto_on : com.android.systemui.R.drawable.ic_qs_brightness_auto_off); } } /** Fetch the brightness mode from the system settings and update the icon */ private void updateMode() { if (mAutomaticAvailable) { int automatic; automatic = Settings.System.getIntForUser(mContext.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS_MODE, Settings.System.SCREEN_BRIGHTNESS_MODE_MANUAL, UserHandle.USER_CURRENT); mAutomatic = automatic != Settings.System.SCREEN_BRIGHTNESS_MODE_MANUAL; updateIcon(mAutomatic); } else { mControl.setChecked(false); updateIcon(false /*automatic*/); } } /** Fetch the brightness from the system settings and update the slider */ private void updateSlider() { if (mAutomatic) { float value = Settings.System.getFloatForUser(mContext.getContentResolver(), Settings.System.SCREEN_AUTO_BRIGHTNESS_ADJ, 0, UserHandle.USER_CURRENT); mControl.setMax((int) BRIGHTNESS_ADJ_RESOLUTION); mControl.setValue((int) ((value + 1) * BRIGHTNESS_ADJ_RESOLUTION / 2f)); } else { int value; value = Settings.System.getIntForUser(mContext.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS, mMaximumBacklight, UserHandle.USER_CURRENT); mControl.setMax(mMaximumBacklight - mMinimumBacklight); mControl.setValue(value - mMinimumBacklight); } } }
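/**
 * A minimal wiring sketch for the controller above. This example class is
 * hypothetical and not part of the original file; the caller is assumed to
 * own the icon and slider views.
 */
class BrightnessControllerExample {
    static BrightnessController attach(Context context, ImageView icon, ToggleSlider slider) {
        final BrightnessController controller = new BrightnessController(context, icon, slider);
        controller.addStateChangedCallback(
                new BrightnessController.BrightnessStateChangeCallback() {
            @Override
            public void onBrightnessLevelChanged() {
                // Refresh any UI that mirrors the brightness state here.
            }
        });
        // Starts the settings observer and user tracker; call
        // unregisterCallbacks() when the hosting view is detached.
        controller.registerCallbacks();
        return controller;
    }
}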
xorware/android_frameworks_base
packages/SystemUI/src/com/android/systemui/settings/BrightnessController.java
Java
apache-2.0
11,052
// // System.Web.HttpUtility // // Authors: // Patrik Torstensson (Patrik.Torstensson@labs2.com) // Wictor Wilén (decode/encode functions) (wictor@ibizkit.se) // Tim Coleman (tim@timcoleman.com) // Gonzalo Paniagua Javier (gonzalo@ximian.com) // // Copyright (C) 2005-2010 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System.Collections; using System.Collections.Generic; using System.IO; using System.Text; using System; namespace Rivets { public sealed class Utility { #region Constructors public Utility () { } #endregion // Constructors #region Methods public static Dictionary<string, string> ParseQueryString(string query) { var queryDict = new Dictionary<string, string>(); if (string.IsNullOrEmpty(query)) return queryDict; foreach (string token in query.TrimStart(new char[] { '?' 
}).Split(new char[] { '&' }, StringSplitOptions.RemoveEmptyEntries)) { string[] parts = token.Split(new char[] { '=' }, StringSplitOptions.RemoveEmptyEntries); if (parts.Length == 2) queryDict[parts[0].Trim()] = Utility.UrlDecode(parts[1]).Trim(); else queryDict[parts[0].Trim()] = ""; } return queryDict; } public static string UrlDecode (string str) { return UrlDecode(str, Encoding.UTF8); } static void WriteCharBytes (IList buf, char ch, Encoding e) { if (ch > 255) { foreach (byte b in e.GetBytes (new char[] { ch })) buf.Add (b); } else buf.Add ((byte)ch); } public static string UrlDecode (string s, Encoding e) { if (null == s) return null; if (s.IndexOf ('%') == -1 && s.IndexOf ('+') == -1) return s; if (e == null) e = Encoding.UTF8; long len = s.Length; var bytes = new List <byte> (); int xchar; char ch; for (int i = 0; i < len; i++) { ch = s [i]; if (ch == '%' && i + 2 < len && s [i + 1] != '%') { if (s [i + 1] == 'u' && i + 5 < len) { // unicode hex sequence xchar = GetChar (s, i + 2, 4); if (xchar != -1) { WriteCharBytes (bytes, (char)xchar, e); i += 5; } else WriteCharBytes (bytes, '%', e); } else if ((xchar = GetChar (s, i + 1, 2)) != -1) { WriteCharBytes (bytes, (char)xchar, e); i += 2; } else { WriteCharBytes (bytes, '%', e); } continue; } if (ch == '+') WriteCharBytes (bytes, ' ', e); else WriteCharBytes (bytes, ch, e); } byte[] buf = bytes.ToArray (); bytes = null; return e.GetString (buf, 0, buf.Length); } static int GetInt (byte b) { char c = (char) b; if (c >= '0' && c <= '9') return c - '0'; if (c >= 'a' && c <= 'f') return c - 'a' + 10; if (c >= 'A' && c <= 'F') return c - 'A' + 10; return -1; } static int GetChar (byte [] bytes, int offset, int length) { int value = 0; int end = length + offset; for (int i = offset; i < end; i++) { int current = GetInt (bytes [i]); if (current == -1) return -1; value = (value << 4) + current; } return value; } static int GetChar (string str, int offset, int length) { int val = 0; int end = length + offset; for (int i = offset; i < end; i++) { char c = str [i]; if (c > 127) return -1; int current = GetInt ((byte) c); if (current == -1) return -1; val = (val << 4) + current; } return val; } internal static bool NotEncoded (char c) { return (c == '!' || c == '(' || c == ')' || c == '*' || c == '-' || c == '.' 
|| c == '_' #if !NET_4_0 || c == '\'' #endif ); } public static string UrlEncode(string str) { return UrlEncode(str, Encoding.UTF8); } public static string UrlEncode (string s, Encoding Enc) { if (s == null) return null; if (s == String.Empty) return String.Empty; bool needEncode = false; int len = s.Length; for (int i = 0; i < len; i++) { char c = s [i]; if ((c < '0') || (c < 'A' && c > '9') || (c > 'Z' && c < 'a') || (c > 'z')) { if (NotEncoded (c)) continue; needEncode = true; break; } } if (!needEncode) return s; // avoided GetByteCount call byte [] bytes = new byte[Enc.GetMaxByteCount(s.Length)]; int realLen = Enc.GetBytes (s, 0, s.Length, bytes, 0); #if PORTABLE || WINDOWS_PHONE || WINRT || UWP var strData = UrlEncodeToBytes (bytes, 0, realLen); return Encoding.UTF8.GetString (strData, 0, strData.Length); #else return Encoding.ASCII.GetString (UrlEncodeToBytes (bytes, 0, realLen)); #endif } public static byte [] UrlEncodeToBytes (string str) { return UrlEncodeToBytes (str, Encoding.UTF8); } public static byte [] UrlEncodeToBytes (string str, Encoding e) { if (str == null) return null; if (str.Length == 0) return new byte [0]; byte [] bytes = e.GetBytes (str); return UrlEncodeToBytes (bytes, 0, bytes.Length); } public static byte [] UrlEncodeToBytes (byte [] bytes) { if (bytes == null) return null; if (bytes.Length == 0) return new byte [0]; return UrlEncodeToBytes (bytes, 0, bytes.Length); } internal static byte[] UrlEncodeToBytes (byte[] bytes, int offset, int count) { if (bytes == null) throw new ArgumentNullException ("bytes"); int blen = bytes.Length; if (blen == 0) return new byte [0]; if (offset < 0 || offset >= blen) throw new ArgumentOutOfRangeException("offset"); if (count < 0 || count > blen - offset) throw new ArgumentOutOfRangeException("count"); MemoryStream result = new MemoryStream (count); int end = offset + count; for (int i = offset; i < end; i++) UrlEncodeChar ((char)bytes [i], result, false); return result.ToArray(); } static char [] hexChars = "0123456789abcdef".ToCharArray (); internal static void UrlEncodeChar (char c, Stream result, bool isUnicode) { if (c > 255) { //FIXME: what happens when there is an internal error? //if (!isUnicode) // throw new ArgumentOutOfRangeException ("c", c, "c must be less than 256"); int idx; int i = (int) c; result.WriteByte ((byte)'%'); result.WriteByte ((byte)'u'); idx = i >> 12; result.WriteByte ((byte)hexChars [idx]); idx = (i >> 8) & 0x0F; result.WriteByte ((byte)hexChars [idx]); idx = (i >> 4) & 0x0F; result.WriteByte ((byte)hexChars [idx]); idx = i & 0x0F; result.WriteByte ((byte)hexChars [idx]); return; } if (c > ' ' && NotEncoded (c)) { result.WriteByte ((byte)c); return; } if (c==' ') { result.WriteByte ((byte)'+'); return; } if ( (c < '0') || (c < 'A' && c > '9') || (c > 'Z' && c < 'a') || (c > 'z')) { if (isUnicode && c > 127) { result.WriteByte ((byte)'%'); result.WriteByte ((byte)'u'); result.WriteByte ((byte)'0'); result.WriteByte ((byte)'0'); } else result.WriteByte ((byte)'%'); int idx = ((int) c) >> 4; result.WriteByte ((byte)hexChars [idx]); idx = ((int) c) & 0x0F; result.WriteByte ((byte)hexChars [idx]); } else result.WriteByte ((byte)c); } #endregion // Methods } }
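// A minimal, self-contained usage sketch of the helpers above. The class
// name UtilityExample is hypothetical and not part of the original file.
namespace Rivets
{
	public static class UtilityExample
	{
		public static void Run ()
		{
			// Round-trip a value through the encoder/decoder: the space becomes
			// '+', and '&'/'=' become lowercase percent escapes.
			string encoded = Utility.UrlEncode ("a b&c=d"); // "a+b%26c%3dd"
			string decoded = Utility.UrlDecode (encoded);   // "a b&c=d"

			// ParseQueryString trims a leading '?', splits on '&'/'=' and
			// URL-decodes each value.
			var query = Utility.ParseQueryString ("?ref=app&target=%2Fprofile%2F42");
			string target = query ["target"]; // "/profile/42"
		}
	}
}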
xamarin/Rivets
Rivets/Utility.cs
C#
apache-2.0
8,263
package ru.bvn13.licenseserverjclient.soap; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for checkClientLicense complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="checkClientLicense"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="request" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="clientId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;element name="systemId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;element name="properties" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "checkClientLicense", propOrder = { "request" }) public class CheckClientLicense { @XmlElement(namespace = "") protected CheckClientLicense.Request request; /** * Gets the value of the request property. * * @return * possible object is * {@link CheckClientLicense.Request } * */ public CheckClientLicense.Request getRequest() { return request; } /** * Sets the value of the request property. * * @param value * allowed object is * {@link CheckClientLicense.Request } * */ public void setRequest(CheckClientLicense.Request value) { this.request = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="clientId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;element name="systemId" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;element name="properties" type="{http://www.w3.org/2001/XMLSchema}string" form="qualified"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "clientId", "systemId", "properties" }) public static class Request { @XmlElement(required = true) protected String clientId; @XmlElement(required = true) protected String systemId; @XmlElement(required = true) protected String properties; /** * Gets the value of the clientId property. * * @return * possible object is * {@link String } * */ public String getClientId() { return clientId; } /** * Sets the value of the clientId property. * * @param value * allowed object is * {@link String } * */ public void setClientId(String value) { this.clientId = value; } /** * Gets the value of the systemId property. * * @return * possible object is * {@link String } * */ public String getSystemId() { return systemId; } /** * Sets the value of the systemId property. * * @param value * allowed object is * {@link String } * */ public void setSystemId(String value) { this.systemId = value; } /** * Gets the value of the properties property. 
* * @return * possible object is * {@link String } * */ public String getProperties() { return properties; } /** * Sets the value of the properties property. * * @param value * allowed object is * {@link String } * */ public void setProperties(String value) { this.properties = value; } } }
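/**
 * A minimal construction sketch for the generated type above. The values and
 * the helper class itself are hypothetical, not part of the generated code.
 */
class CheckClientLicenseExample {
    static CheckClientLicense buildRequest() {
        CheckClientLicense.Request request = new CheckClientLicense.Request();
        request.setClientId("client-42");
        request.setSystemId("system-1");
        request.setProperties("edition=pro");

        CheckClientLicense payload = new CheckClientLicense();
        payload.setRequest(request);
        return payload; // ready to hand to the generated SOAP port
    }
}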
bvn13/LicenseServerJClient
src/main/java/ru/bvn13/licenseserverjclient/soap/CheckClientLicense.java
Java
apache-2.0
5,102
package org.gradle.test.performance.mediummonolithicjavaproject.p111; public class Production2220 { private String property0; public String getProperty0() { return property0; } public void setProperty0(String value) { property0 = value; } private String property1; public String getProperty1() { return property1; } public void setProperty1(String value) { property1 = value; } private String property2; public String getProperty2() { return property2; } public void setProperty2(String value) { property2 = value; } private String property3; public String getProperty3() { return property3; } public void setProperty3(String value) { property3 = value; } private String property4; public String getProperty4() { return property4; } public void setProperty4(String value) { property4 = value; } private String property5; public String getProperty5() { return property5; } public void setProperty5(String value) { property5 = value; } private String property6; public String getProperty6() { return property6; } public void setProperty6(String value) { property6 = value; } private String property7; public String getProperty7() { return property7; } public void setProperty7(String value) { property7 = value; } private String property8; public String getProperty8() { return property8; } public void setProperty8(String value) { property8 = value; } private String property9; public String getProperty9() { return property9; } public void setProperty9(String value) { property9 = value; } }
oehme/analysing-gradle-performance
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p111/Production2220.java
Java
apache-2.0
1,891
#!/usr/bin/python # -*- coding: utf-8 -*- """Tests for the zip path specification implementation.""" import unittest from dfvfs.path import zip_path_spec from tests.path import test_lib class ZipPathSpecTest(test_lib.PathSpecTestCase): """Tests for the zip path specification implementation.""" def testInitialize(self): """Tests the path specification initialization.""" path_spec = zip_path_spec.ZipPathSpec( location=u'/test', parent=self._path_spec) self.assertNotEqual(path_spec, None) with self.assertRaises(ValueError): _ = zip_path_spec.ZipPathSpec(location=u'/test', parent=None) with self.assertRaises(ValueError): _ = zip_path_spec.ZipPathSpec(location=None, parent=self._path_spec) with self.assertRaises(ValueError): _ = zip_path_spec.ZipPathSpec( location=u'/test', parent=self._path_spec, bogus=u'BOGUS') def testComparable(self): """Tests the path specification comparable property.""" path_spec = zip_path_spec.ZipPathSpec( location=u'/test', parent=self._path_spec) self.assertNotEqual(path_spec, None) expected_comparable = u'\n'.join([ u'type: TEST', u'type: ZIP, location: /test', u'']) self.assertEqual(path_spec.comparable, expected_comparable) if __name__ == '__main__': unittest.main()
manashmndl/dfvfs
tests/path/zip_path_spec.py
Python
apache-2.0
1,347
<?php namespace DCarbone\PHPFHIRGenerated\R4; /*! * This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using * class definitions from HL7 FHIR (https://www.hl7.org/fhir/) * * Class creation date: December 26th, 2019 15:44+0000 * * PHPFHIR Copyright: * * Copyright 2016-2019 Daniel Carbone (daniel.p.carbone@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * FHIR Copyright Notice: * * Copyright (c) 2011+, HL7, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of HL7 nor the names of its contributors may be used to * endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * * Generated on Fri, Nov 1, 2019 09:29+1100 for FHIR v4.0.1 * * Note: the schemas & schematrons do not contain all of the rules about what makes resources * valid. Implementers will still need to be familiar with the content of the specification and with * any profiles that apply to the resources in order to make a conformant implementation. 
* */ /** * Class FHIRDecimalPrimitive * @package \DCarbone\PHPFHIRGenerated\R4 */ class FHIRDecimalPrimitive implements PHPFHIRTypeInterface { use PHPFHIRValidationAssertionsTrait; // name of FHIR type this class describes const FHIR_TYPE_NAME = PHPFHIRConstants::TYPE_NAME_DECIMAL_HYPHEN_PRIMITIVE; const FIELD_VALUE = 'value'; /** @var string */ private $_xmlns = 'http://hl7.org/fhir'; /** * @var null|double */ protected $value = null; /** * Validation map for fields in type decimal-primitive * @var array */ private static $_validationRules = [ ]; /** * FHIRDecimalPrimitive Constructor * @param null|double $value */ public function __construct($value = null) { $this->setValue($value); } /** * @return string */ public function _getFHIRTypeName() { return self::FHIR_TYPE_NAME; } /** * @return string|null */ public function _getFHIRXMLNamespace() { return '' === $this->_xmlns ? null : $this->_xmlns; } /** * @param null|string $xmlNamespace * @return static */ public function _setFHIRXMLNamespace($xmlNamespace) { if (null === $xmlNamespace || is_string($xmlNamespace)) { $this->_xmlns = (string)$xmlNamespace; return $this; } throw new \InvalidArgumentException(sprintf( '$xmlNamespace must be a null or string value, %s seen.', gettype($xmlNamespace) )); } /** * @return string */ public function _getFHIRXMLElementDefinition() { $xmlns = $this->_getFHIRXMLNamespace(); if (null !== $xmlns) { $xmlns = " xmlns=\"{$xmlns}\""; } return "<decimal_primitive{$xmlns}></decimal_primitive>"; } /** * @return null|double */ public function getValue() { return $this->value; } /** * @param null|float|string $value * @return static */ public function setValue($value) { if (null === $value) { $this->value = null; } elseif (is_scalar($value)) { $this->value = floatval($value); } else { throw new \InvalidArgumentException(sprintf('decimal-primitive value must be null, float, or numeric string, %s seen.', gettype($value))); } return $this; } /** * Returns the validation rules that this type's fields must comply with to be considered "valid" * The returned array is in ["fieldname[.offset]" => ["rule" => {constraint}]] * * @return array */ public function _getValidationRules() { return self::$_validationRules; } /** * Validates that this type conforms to the specifications set forth for it by FHIR. An empty array must be seen as * passing. 
* * @return array */ public function _getValidationErrors() { $errs = []; $validationRules = $this->_getValidationRules(); if (isset($validationRules[self::FIELD_VALUE]) && null !== ($v = $this->getValue())) { foreach($validationRules[self::FIELD_VALUE] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_DECIMAL_HYPHEN_PRIMITIVE, self::FIELD_VALUE, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_VALUE])) { $errs[self::FIELD_VALUE] = []; } $errs[self::FIELD_VALUE][$rule] = $err; } } } return $errs; } /** * @param \SimpleXMLElement|string|null $sxe * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive $type * @param null|int $libxmlOpts * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive */ public static function xmlUnserialize($sxe = null, PHPFHIRTypeInterface $type = null, $libxmlOpts = 591872) { if (null === $sxe) { return null; } if (is_string($sxe)) { libxml_use_internal_errors(true); $sxe = new \SimpleXMLElement($sxe, $libxmlOpts, false); if ($sxe === false) { throw new \DomainException(sprintf('FHIRDecimalPrimitive::xmlUnserialize - String provided is not parseable as XML: %s', implode(', ', array_map(function(\libXMLError $err) { return $err->message; }, libxml_get_errors())))); } libxml_use_internal_errors(false); } if (!($sxe instanceof \SimpleXMLElement)) { throw new \InvalidArgumentException(sprintf('FHIRDecimalPrimitive::xmlUnserialize - $sxe value must be null, \\SimpleXMLElement, or valid XML string, %s seen', gettype($sxe))); } if (null === $type) { $type = new FHIRDecimalPrimitive; } elseif (!is_object($type) || !($type instanceof FHIRDecimalPrimitive)) { throw new \RuntimeException(sprintf( 'FHIRDecimalPrimitive::xmlUnserialize - $type must be instance of \DCarbone\PHPFHIRGenerated\R4\FHIRDecimalPrimitive or null, %s seen.', is_object($type) ? get_class($type) : gettype($type) )); } $xmlNamespaces = $sxe->getDocNamespaces(false, false); if ([] !== $xmlNamespaces) { $ns = reset($xmlNamespaces); if (false !== $ns && '' !== $ns) { $type->_xmlns = $ns; } } $attributes = $sxe->attributes(); $children = $sxe->children(); if (isset($attributes->value)) { $type->setValue((string)$attributes->value); } elseif (isset($children->value)) { $type->setValue((string)$children->value); } elseif ('' !== ($v = (string)$sxe)) { $type->setValue($v); } return $type; } /** * @param null|\SimpleXMLElement $sxe * @param null|int $libxmlOpts * @return \SimpleXMLElement */ public function xmlSerialize(\SimpleXMLElement $sxe = null, $libxmlOpts = 591872) { if (null === $sxe) { $sxe = new \SimpleXMLElement($this->_getFHIRXMLElementDefinition(), $libxmlOpts, false); } $sxe->addAttribute(self::FIELD_VALUE, (string)$this); return $sxe; } /** * @return null|double */ public function jsonSerialize() { return $this->getValue(); } /** * @return string */ public function __toString() { return (string)$this->getValue(); } }
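/**
 * A minimal round-trip sketch of the type above. This helper function is
 * hypothetical and not part of the generated class.
 */
function _fhirDecimalPrimitiveExample()
{
    // Numeric strings are coerced to float via setValue()/floatval().
    $weight = new FHIRDecimalPrimitive('72.5');

    // Serializes to <decimal_primitive xmlns="http://hl7.org/fhir" value="72.5"/>.
    $xml = $weight->xmlSerialize()->asXML();

    // xmlUnserialize() accepts an XML string and reads the "value" attribute back.
    $copy = FHIRDecimalPrimitive::xmlUnserialize($xml);

    return $copy->getValue(); // float(72.5)
}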
dcarbone/php-fhir-generated
src/DCarbone/PHPFHIRGenerated/R4/FHIRDecimalPrimitive.php
PHP
apache-2.0
9,630
/**
 * Copyright 2013 @ Fazal Code
 * All Rights Reserved to Fazal Code
 */
package com.java.struts.fazalcode;

import com.opensymphony.xwork2.ActionSupport;

/**
 * @author Tatarao voleti
 * @date Nov 11, 2013
 */
public class LoginAction extends ActionSupport {

	private static final long serialVersionUID = 1L;

	private String username;
	private String password;

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}

	@Override
	public String toString() {
		return "LoginAction [username=" + username + ", password=" + password + "]";
	}

	public String execute() {
		// Compare with the constant first so missing request parameters
		// (null username/password) fail the login instead of throwing an NPE.
		if ("user".equalsIgnoreCase(username) && "pass".equalsIgnoreCase(password)) {
			return "success";
		} else {
			return "failure";
		}
	}
}
Tataraovoleti/Struts2App
src/com/java/struts/fazalcode/LoginAction.java
Java
apache-2.0
873
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.write.objectmapper.flatrecords;

import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.copy.HollowRecordCopier;
import com.netflix.hollow.tools.combine.OrdinalRemapper;

import java.util.HashMap;
import java.util.Map;

/**
 * Warning: Experimental. The FlatRecord feature is subject to breaking changes.
*/ public class FlatRecordExtractor { private final HollowReadStateEngine extractFrom; private final FlatRecordWriter writer; private final ExtractorOrdinalRemapper ordinalRemapper; private final Map<String, HollowRecordCopier> recordCopiersByType; public FlatRecordExtractor(HollowReadStateEngine extractFrom, HollowSchemaIdentifierMapper schemaIdMapper) { this.extractFrom = extractFrom; this.writer = new FlatRecordWriter(extractFrom, schemaIdMapper); this.ordinalRemapper = new ExtractorOrdinalRemapper(); this.recordCopiersByType = new HashMap<>(); } public FlatRecord extract(String type, int ordinal) { ordinalRemapper.clear(); writer.reset(); HollowTypeReadState typeState = extractFrom.getTypeState(type); extractHollowRecord(typeState, ordinal); return writer.generateFlatRecord(); } private void extractHollowRecord(HollowTypeReadState typeState, int ordinal) { traverse(typeState, ordinal); String type = typeState.getSchema().getName(); HollowRecordCopier recordCopier = recordCopier(type); HollowWriteRecord rec = recordCopier.copy(ordinal); int flatOrdinal = writer.write(typeState.getSchema(), rec); ordinalRemapper.remapOrdinal(type, ordinal, flatOrdinal); } private void traverse(HollowTypeReadState typeState, int ordinal) { switch(typeState.getSchema().getSchemaType()) { case OBJECT: traverseObject((HollowObjectTypeReadState)typeState, ordinal); break; case LIST: traverseList((HollowListTypeReadState)typeState, ordinal); break; case SET: traverseSet((HollowSetTypeReadState)typeState, ordinal); break; case MAP: traverseMap((HollowMapTypeReadState)typeState, ordinal); break; } } private void traverseObject(HollowObjectTypeReadState typeState, int ordinal) { HollowObjectSchema schema = typeState.getSchema(); for(int i=0;i<schema.numFields();i++) { if(schema.getFieldType(i) == FieldType.REFERENCE) { HollowTypeReadState refTypeState = schema.getReferencedTypeState(i); int refOrdinal = typeState.readOrdinal(ordinal, i); extractHollowRecord(refTypeState, refOrdinal); } } } private void traverseList(HollowListTypeReadState typeState, int ordinal) { HollowListSchema schema = typeState.getSchema(); int size = typeState.size(ordinal); for(int i=0;i<size;i++) { int refOrdinal = typeState.getElementOrdinal(ordinal, i); if(refOrdinal != HollowConstants.ORDINAL_NONE) extractHollowRecord(schema.getElementTypeState(), refOrdinal); } } private void traverseSet(HollowSetTypeReadState typeState, int ordinal) { HollowSetSchema schema = typeState.getSchema(); HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal); int refOrdinal = iter.next(); while(refOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) { if(refOrdinal != HollowConstants.ORDINAL_NONE) extractHollowRecord(schema.getElementTypeState(), refOrdinal); refOrdinal = iter.next(); } } private void traverseMap(HollowMapTypeReadState typeState, int ordinal) { HollowMapSchema schema = typeState.getSchema(); HollowMapEntryOrdinalIterator iter = typeState.ordinalIterator(ordinal); while(iter.next()) { if(iter.getKey() != HollowConstants.ORDINAL_NONE) extractHollowRecord(schema.getKeyTypeState(), iter.getKey()); if(iter.getValue() != HollowConstants.ORDINAL_NONE) extractHollowRecord(schema.getValueTypeState(), iter.getValue()); } } private HollowRecordCopier recordCopier(String type) { HollowRecordCopier recordCopier = recordCopiersByType.get(type); if(recordCopier == null) { recordCopier = HollowRecordCopier.createCopier(extractFrom.getTypeState(type), ordinalRemapper, false); recordCopiersByType.put(type, recordCopier); } return recordCopier; } 
private static class ExtractorOrdinalRemapper implements OrdinalRemapper { private final Map<TypedOrdinal, Integer> mappedFlatOrdinals = new HashMap<>(); @Override public int getMappedOrdinal(String type, int originalOrdinal) { return mappedFlatOrdinals.get(new TypedOrdinal(type, originalOrdinal)); } @Override public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) { mappedFlatOrdinals.put(new TypedOrdinal(type, originalOrdinal), mappedOrdinal); } @Override public boolean ordinalIsMapped(String type, int originalOrdinal) { throw new UnsupportedOperationException(); } public void clear() { mappedFlatOrdinals.clear(); } private static class TypedOrdinal { private final String type; private final int ordinal; public TypedOrdinal(String type, int ordinal) { this.type = type; this.ordinal = ordinal; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ordinal; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; TypedOrdinal other = (TypedOrdinal) obj; if (ordinal != other.ordinal) return false; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } } } }
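/**
 * A minimal usage sketch for the extractor above (not part of the original
 * class): the type name "Movie" and the variable names are hypothetical
 * stand-ins for a caller's own dataset.
 */
class FlatRecordExtractorExample {
    static FlatRecord extractOne(HollowReadStateEngine readEngine,
                                 HollowSchemaIdentifierMapper idMapper,
                                 int movieOrdinal) {
        // The extractor walks the record graph under ("Movie", movieOrdinal),
        // copying every reachable record into a single self-contained FlatRecord.
        FlatRecordExtractor extractor = new FlatRecordExtractor(readEngine, idMapper);
        return extractor.extract("Movie", movieOrdinal);
    }
}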
Netflix/hollow
hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/flatrecords/FlatRecordExtractor.java
Java
apache-2.0
8,517
require File.dirname(__FILE__) + '/../../../../../../spec_helper'
#require File.dirname(__FILE__) + '/shared_examples_spec'

include OpenEHR::RM::Composition::Content::Entry
include OpenEHR::RM::DataTypes::Quantity::DateTime
include OpenEHR::RM::DataTypes::Text
include OpenEHR::RM::DataStructures::ItemStructure

describe Action do
  let(:name) { DvText.new(:value => 'entry package') }
  let(:language) { double('language', :code_string => 'ja') }
  let(:encoding) { double('encoding', :code_string => 'UTF-8') }
  let(:subject) { double('PartyProxy') }

#  it_should_behave_like 'entry'

  before(:each) do
    time = DvDateTime.new(:value => '2009-11-18T20:17:18')
    description = double(ItemStructure, :archetype_node_id => 'at0002')
    current_state = double(DvCodedText, :value => 'planned')
    ism_transition = double(IsmTransition, :current_state => current_state)
    instruction_details = double(InstructionDetails, :activity_id => 'at0003')
    @action = Action.new(:archetype_node_id => 'at0001',
                         :name => name,
                         :language => language,
                         :encoding => encoding,
                         :subject => subject,
                         :time => time,
                         :description => description,
                         :ism_transition => ism_transition,
                         :instruction_details => instruction_details)
  end

  it 'should be an instance of Action' do
    expect(@action).to be_an_instance_of Action
  end

  it 'time should be assigned properly' do
    expect(@action.time.value).to eq('2009-11-18T20:17:18')
  end

  it 'should raise ArgumentError with nil assigned to time' do
    expect { @action.time = nil }.to raise_error ArgumentError
  end

  it 'description should be assigned properly' do
    expect(@action.description.archetype_node_id).to eq('at0002')
  end

  it 'should raise ArgumentError with nil description' do
    expect { @action.description = nil }.to raise_error ArgumentError
  end

  it 'ism_transition should be assigned properly' do
    expect(@action.ism_transition.current_state.value).to eq('planned')
  end

  it 'should raise ArgumentError with nil ism_transition' do
    expect { @action.ism_transition = nil }.to raise_error ArgumentError
  end

  it 'instruction_details should be assigned properly' do
    expect(@action.instruction_details.activity_id).to eq('at0003')
  end
end
skoba/openehr-ruby
spec/lib/openehr/rm/composition/content/entry/action_spec.rb
Ruby
apache-2.0
2,444
using System.Linq; using System.Collections.Generic; using UnityEngine; using UnityEditor; using UnityEditor.SceneManagement; using AGXUnity; using AGXUnity.Collide; using GUI = AGXUnity.Utils.GUI; namespace AGXUnityEditor.Tools { [CustomTool( typeof( RigidBody ) )] public class RigidBodyTool : CustomTargetTool { private List<Constraint> m_constraints = new List<Constraint>(); public RigidBody RigidBody { get { return Targets[ 0 ] as RigidBody; } } public bool FindTransformGivenPointTool { get { return GetChild<FindPointTool>() != null; } set { if ( value && !FindTransformGivenPointTool ) { RemoveAllChildren(); var pointTool = new FindPointTool(); pointTool.OnPointFound = data => { Undo.RecordObject( RigidBody.transform, "Rigid body transform" ); RigidBody.transform.position = data.RaycastResult.Point; RigidBody.transform.rotation = data.Rotation; EditorUtility.SetDirty( RigidBody ); }; AddChild( pointTool ); } else if ( !value ) RemoveChild( GetChild<FindPointTool>() ); } } public bool FindTransformGivenEdgeTool { get { return GetChild<EdgeDetectionTool>() != null; } set { if ( value && !FindTransformGivenEdgeTool ) { RemoveAllChildren(); var edgeTool = new EdgeDetectionTool(); edgeTool.OnEdgeFound = data => { Undo.RecordObject( RigidBody.transform, "Rigid body transform" ); RigidBody.transform.position = data.Position; RigidBody.transform.rotation = data.Rotation; EditorUtility.SetDirty( RigidBody ); }; AddChild( edgeTool ); } else if ( !value ) RemoveChild( GetChild<EdgeDetectionTool>() ); } } public bool ShapeCreateTool { get { return GetChild<ShapeCreateTool>() != null; } set { if ( value && !ShapeCreateTool ) { RemoveAllChildren(); var shapeCreateTool = new ShapeCreateTool( RigidBody.gameObject ); AddChild( shapeCreateTool ); } else if ( !value ) RemoveChild( GetChild<ShapeCreateTool>() ); } } public bool ConstraintCreateTool { get { return GetChild<ConstraintCreateTool>() != null; } set { if ( value && !ConstraintCreateTool ) { RemoveAllChildren(); var constraintCreateTool = new ConstraintCreateTool( RigidBody.gameObject, false, newConstraint => m_constraints.Add( newConstraint ) ); AddChild( constraintCreateTool ); } else if ( !value ) RemoveChild( GetChild<ConstraintCreateTool>() ); } } public bool DisableCollisionsTool { get { return GetChild<DisableCollisionsTool>() != null; } set { if ( value && !DisableCollisionsTool ) { RemoveAllChildren(); var disableCollisionsTool = new DisableCollisionsTool( RigidBody.gameObject ); AddChild( disableCollisionsTool ); } else if ( !value ) RemoveChild( GetChild<DisableCollisionsTool>() ); } } public bool RigidBodyVisualCreateTool { get { return GetChild<RigidBodyVisualCreateTool>() != null; } set { if ( value && !RigidBodyVisualCreateTool ) { RemoveAllChildren(); var createRigidBodyVisualTool = new RigidBodyVisualCreateTool( RigidBody ); AddChild( createRigidBodyVisualTool ); } else if ( !value ) RemoveChild( GetChild<RigidBodyVisualCreateTool>() ); } } public bool ToolsActive = true; public RigidBodyTool( Object[] targets ) : base( targets ) { #if UNITY_2019_1_OR_NEWER var allConstraints = StageUtility.GetCurrentStageHandle().Contains( RigidBody.gameObject ) ? 
StageUtility.GetCurrentStageHandle().FindComponentsOfType<Constraint>() : Object.FindObjectsOfType<Constraint>(); #else var allConstraints = Object.FindObjectsOfType<Constraint>(); #endif foreach ( var constraint in allConstraints ) { foreach ( var rb in GetTargets<RigidBody>() ) if ( constraint.AttachmentPair.Contains( rb ) ) m_constraints.Add( constraint ); } } public override void OnAdd() { foreach ( var rb in GetTargets<RigidBody>() ) rb.MassProperties.OnForcedMassInertiaUpdate(); } public override void OnSceneViewGUI( SceneView sceneView ) { int rbIndex = 0; foreach ( var rb in GetTargets<RigidBody>() ) { var cmPosition = rb.transform.position + rb.transform.TransformDirection( rb.MassProperties.CenterOfMassOffset.Value ); var cmTransformToolVisible = !rb.MassProperties.CenterOfMassOffset.UseDefault; if ( cmTransformToolVisible ) { var newPosition = PositionTool( cmPosition, rb.transform.rotation, 0.6f, 1.0f ); if ( Vector3.SqrMagnitude( cmPosition - newPosition ) > 1.0E-6 ) { Undo.RecordObject( rb.MassProperties, "Center of mass changed" ); cmPosition = newPosition; rb.MassProperties.CenterOfMassOffset.UserValue = rb.transform.InverseTransformDirection( newPosition - rb.transform.position ); EditorUtility.SetDirty( rb ); } } var rbId = "rb_vis_" + (rbIndex++).ToString(); var vp = GetOrCreateVisualPrimitive<Utils.VisualPrimitiveSphere>( rbId, "GUI/Text Shader" ); vp.Color = new Color( 0, 0, 1, 0.25f ); vp.Visible = true; vp.Pickable = false; vp.SetTransform( cmPosition, rb.transform.rotation, 0.05f, true, 0.0f, 0.25f ); //var shapes = rb.Shapes; //if ( shapes.Length < 2 ) // continue; //int shapeIndex = 0; //foreach ( var shape in shapes ) { // var shapeLine = GetOrCreateVisualPrimitive<Utils.VisualPrimitiveCylinder>( rbId + "_shape_" + (shapeIndex++).ToString(), // "GUI/Text Shader" ); // shapeLine.Color = new Color( 0, 1, 0, 0.05f ); // shapeLine.Visible = true; // shapeLine.Pickable = false; // shapeLine.SetTransform( cmPosition, shape.transform.position, 0.015f ); //} } } public override void OnPreTargetMembersGUI() { var skin = InspectorEditor.Skin; bool toggleShapeCreate = false; bool toggleConstraintCreate = false; bool toggleDisableCollisions = false; bool toggleRigidBodyVisualCreate = false; if ( !IsMultiSelect && ToolsActive ) { InspectorGUI.ToolButtons( InspectorGUI.ToolButtonData.Create( ToolIcon.CreateConstraint, ConstraintCreateTool, "Create new constraint to this rigid body.", () => toggleConstraintCreate = true ), InspectorGUI.ToolButtonData.Create( ToolIcon.DisableCollisions, DisableCollisionsTool, "Disable collisions against other objects.", () => toggleDisableCollisions = true ), InspectorGUI.ToolButtonData.Create( ToolIcon.CreateShapeGivenVisual, ShapeCreateTool, "Create shape from child visual object.", () => toggleShapeCreate = true ), InspectorGUI.ToolButtonData.Create( ToolIcon.CreateVisual, RigidBodyVisualCreateTool, "Create visual representation of each physical shape in this body.", () => toggleRigidBodyVisualCreate = true, Tools.RigidBodyVisualCreateTool.ValidForNewShapeVisuals( RigidBody ) ) ); } if ( ConstraintCreateTool ) { GetChild<ConstraintCreateTool>().OnInspectorGUI(); } if ( DisableCollisionsTool ) { GetChild<DisableCollisionsTool>().OnInspectorGUI(); } if ( ShapeCreateTool ) { GetChild<ShapeCreateTool>().OnInspectorGUI(); } if ( RigidBodyVisualCreateTool ) { GetChild<RigidBodyVisualCreateTool>().OnInspectorGUI(); } EditorGUILayout.LabelField( GUI.MakeLabel( "Mass properties", true ), skin.Label ); using ( InspectorGUI.IndentScope.Single ) 
InspectorEditor.DrawMembersGUI( GetTargets<RigidBody>().Select( rb => rb.MassProperties ).ToArray() ); if ( toggleConstraintCreate ) ConstraintCreateTool = !ConstraintCreateTool; if ( toggleDisableCollisions ) DisableCollisionsTool = !DisableCollisionsTool; if ( toggleShapeCreate ) ShapeCreateTool = !ShapeCreateTool; if ( toggleRigidBodyVisualCreate ) RigidBodyVisualCreateTool = !RigidBodyVisualCreateTool; } public override void OnPostTargetMembersGUI() { if ( IsMultiSelect ) return; InspectorGUI.ToolArrayGUI( this, RigidBody.Shapes, "Shapes" ); InspectorGUI.ToolArrayGUI( this, m_constraints.ToArray(), "Constraints" ); } } }
Algoryx/agxUnity
Editor/AGXUnityEditor/Tools/RigidBodyTool.cs
C#
apache-2.0
10,413
/* -*-C++-*- */
/*
 * Copyright (C) 2003 Carnegie Mellon University and Rutgers University
 *
 * Permission is hereby granted to distribute this software for
 * non-commercial research purposes, provided that this copyright
 * notice is included with any such distribution.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
 * EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
 * SOFTWARE IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
 * ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
 *
 */
#include "rational.h"
#include "exceptions.h"
#include <limits>
#include <math.h>
#include <stdlib.h>

namespace ppddl_parser {

/* ====================================================================== */
/* Rational */

int Rational::iterations_ = 100;

/* Returns the greatest common divisor of the two integers. */
static int gcd(int n, int m) {
  int a = abs(n);
  int b = abs(m);
  while (b > 0) {
    int c = b;
    b = a % b;
    a = c;
  }
  return a;
}

/* Returns the least common multiple of the two integers. */
static int lcm(int n, int m) {
  return n/gcd(n, m)*m;
}

/* Returns the multipliers for the two integers. */
std::pair<int, int> Rational::multipliers(int n, int m) {
  int f = lcm(n, m);
  return std::make_pair(f/n, f/m);
}

/* Constructs a rational number. */
Rational::Rational(int n, int m) {
  if (m == 0) {
    throw Exception("division by zero");
  } else {
    int d = gcd(n, m);
    numerator_ = n/d;
    denominator_ = m/d;
    if (denominator_ < 0) {
      numerator_ *= -1;
      denominator_ *= -1;
    }
  }
}

/* Constructs a rational number. */
Rational::Rational (const double &x) {
  Rational r = Rational::toRational (x, Rational::iterations_);
  numerator_ = r.numerator ();
  denominator_ = r.denominator ();
}

Rational Rational::toRational (const double &x, int iterations) {
  if (x == 0.0
      || x < std::numeric_limits<long>::min ()
      || x > std::numeric_limits<long>::max ())
    return Rational (0,1);
  else {
    int sign = x < 0.0 ? -1 : 1;
    return sign * Rational::toRational (sign * x, 1.0e9, iterations); // was 1.0e12
  }
}

Rational Rational::toRational (const double &x, const double &limit, int iterations) {
  double intpart;
  //std::cout << "x: " << x << std::endl;
  double fractpart = modf(x, &intpart);
  //debug
  //std::cout << "fractpart: " << fractpart << std::endl;
  //debug
  double d = 1.0 / fractpart;
  int left = static_cast<int> (intpart);
  //debug
  /* std::cout << "left: " << left << " -- d: " << d
     << " -- limit: " << limit << std::endl; */
  //debug
  if (d > limit || iterations == 0)
    return Rational (left, 1);
  else {
    Rational tempr = Rational::toRational (d, limit * 0.1, --iterations);
    Rational r = Rational (tempr.denominator (), tempr.numerator ());
    return Rational (left, 1) + r;
  }
}

/* Constructs a rational number. */
/*Rational::Rational(const char* s) : numerator_(0) {
  std::cout << "rational from char: " << s << std::endl;
  const char* si = s;
  for (; *si != '\0' && *si != '.'
&& *si != '/'; si++) { numerator_ = 10*numerator_ + (*si - '0'); } std::cout << "numerator: " << numerator_ << std::endl; if (*si == '/') { denominator_ = 0; for (si++; *si != '\0'; si++) { denominator_ = 10*denominator_ + (*si - '0'); } if (denominator_ == 0) { throw Exception("division by zero"); } int d = gcd(numerator_, denominator_); numerator_ /= d; denominator_ /= d; } else if (*si == '.') { int a = numerator_; numerator_ = 0; denominator_ = 1; for (si++; *si != '\0'; si++) { numerator_ = 10*numerator_ + (*si - '0'); denominator_ *= 10; } int d = gcd(numerator_, denominator_); numerator_ /= d; denominator_ /= d; numerator_ += a*denominator_; } else { denominator_ = 1; } }*/ /* Less-than comparison operator for rational numbers. */ bool operator<(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first < p.numerator()*m.second; } /* Less-than-or-equal comparison operator for rational numbers. */ bool operator<=(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first <= p.numerator()*m.second; } /* Equality comparison operator for rational numbers. */ bool operator==(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first == p.numerator()*m.second; } /* Inequality comparison operator for rational numbers. */ bool operator!=(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first != p.numerator()*m.second; } /* Greater-than-or-equal comparison operator for rational numbers. */ bool operator>=(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first >= p.numerator()*m.second; } /* Greater-than comparison operator for rational numbers. */ bool operator>(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return q.numerator()*m.first > p.numerator()*m.second; } /* Addition operator for rational numbers. */ Rational operator+(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return Rational(q.numerator()*m.first + p.numerator()*m.second, q.denominator()*m.first); } /* Subtraction operator for rational numbers. */ Rational operator-(const Rational& q, const Rational& p) { std::pair<int, int> m = Rational::multipliers(q.denominator(), p.denominator()); return Rational(q.numerator()*m.first - p.numerator()*m.second, q.denominator()*m.first); } /* Multiplication operator for rational numbers. */ Rational operator*(const Rational& q, const Rational& p) { int d1 = gcd(q.numerator(), p.denominator()); int d2 = gcd(p.numerator(), q.denominator()); return Rational((q.numerator()/d1)*(p.numerator()/d2), (q.denominator()/d2)*(p.denominator()/d1)); } /* Division operator for rational numbers. */ Rational operator/(const Rational& q, const Rational& p) { if (p == 0) { throw Exception("division by zero"); } int d1 = gcd(q.numerator(), p.numerator()); int d2 = gcd(p.denominator(), q.denominator()); return Rational((q.numerator()/d1)*(p.denominator()/d2), (q.denominator()/d2)*(p.numerator()/d1)); } /* Output operator for rational numbers. 
*/ std::ostream& operator<<(std::ostream& os, const Rational& q) { os << q.numerator(); if (q.denominator() != 1) { os << '/' << q.denominator(); } return os; } } /* end of namespace */
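/* Usage sketch (illustrative, not part of the original file): a minimal
 * example of the exact-arithmetic API above, assuming it is compiled and
 * linked together with rational.h from this directory.
 *
 *   using ppddl_parser::Rational;
 *   Rational a(1, 3), b(1, 6);
 *   std::cout << (a + b) << std::endl;  // prints "1/2" (reduced via gcd)
 *   Rational c(0.25);                   // continued-fraction conversion
 *   std::cout << c << std::endl;        // prints "1/4"
 */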
beniz/hmdp
src/hmdpsim/rational.cc
C++
apache-2.0
7,168
package io.github.threetenjaxb.core; import java.time.OffsetTime; import java.time.ZoneOffset; import java.util.HashMap; import java.util.Map; class OffsetTimeXmlAdapterTest extends AbstractXmlAdapterTest<String, OffsetTime, OffsetTimeXmlAdapter> { private static final Map<String, OffsetTime> STRING_OFFSET_TIME_MAP = new HashMap<>(); static { STRING_OFFSET_TIME_MAP.put("10:15:30+01:00", OffsetTime .of(10, 15, 30, 0, ZoneOffset.ofHours(1)) ); } OffsetTimeXmlAdapterTest() { super(new OffsetTimeXmlAdapter(), STRING_OFFSET_TIME_MAP); } }
migesok/jaxb-java-time-adapters
threeten-jaxb-core/src/test/java/io/github/threetenjaxb/core/OffsetTimeXmlAdapterTest.java
Java
apache-2.0
605
// // Last.Backend LLC CONFIDENTIAL // __________________ // // [2014] - [2020] Last.Backend LLC // All Rights Reserved. // // NOTICE: All information contained herein is, and remains // the property of Last.Backend LLC and its suppliers, // if any. The intellectual and technical concepts contained // herein are proprietary to Last.Backend LLC // and its suppliers and may be covered by Russian Federation and Foreign Patents, // patents in process, and are protected by trade secret or copyright law. // Dissemination of this information or reproduction of this material // is strictly forbidden unless prior written permission is obtained // from Last.Backend LLC. // package cluster import ( "context" "encoding/json" "errors" "fmt" "github.com/lastbackend/lastbackend/tools/logger" "io" "os" "strings" "github.com/lastbackend/lastbackend/internal/cli/views" "github.com/lastbackend/lastbackend/internal/pkg/models" "github.com/lastbackend/lastbackend/pkg/api/types/v1/request" "github.com/spf13/cobra" ) const serviceListExample = ` # Get all services for 'ns-demo' namespace lb service ls ns-demo ` const serviceInspectExample = ` # Get information for 'redis' service in 'ns-demo' namespace lb service inspect ns-demo redis ` const serviceCreateExample = ` # Create new redis service with description and 256 MB limit memory lb service create ns-demo redis --desc "Example description" -m 256mib ` const serviceRemoveExample = ` # Remove 'redis' service in 'ns-demo' namespace lb service remove ns-demo redis ` const serviceUpdateExample = ` # Update info for 'redis' service in 'ns-demo' namespace lb service update ns-demo redis --desc "Example new description" -m 128 ` const serviceLogsExample = ` # Get 'redis' service logs for 'ns-demo' namespace lb service logs ns-demo redis ` func (c *command) NewServiceCmd() *cobra.Command { log := logger.WithContext(context.Background()) cmd := &cobra.Command{ Use: "service", Short: "Manage your service", Run: func(cmd *cobra.Command, args []string) { if err := cmd.Help(); err != nil { log.Error(err.Error()) return } }, } cmd.AddCommand(c.serviceListCmd()) cmd.AddCommand(c.serviceInspectCmd()) cmd.AddCommand(c.serviceCreateCmd()) cmd.AddCommand(c.serviceRemoveCmd()) cmd.AddCommand(c.serviceUpdateCmd()) cmd.AddCommand(c.serviceLogsCmd()) return cmd } func (c *command) serviceListCmd() *cobra.Command { return &cobra.Command{ Use: "ls [NAMESPACE]", Short: "Display the services list", Example: serviceListExample, Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { namespace := args[0] response, err := c.client.cluster.V1().Namespace(namespace).Service().List(context.Background()) if err != nil { fmt.Println(err) return } if response == nil || len(*response) == 0 { fmt.Println("no services available") return } list := views.FromApiServiceListView(response) list.Print() }, } } func (c *command) serviceInspectCmd() *cobra.Command { return &cobra.Command{ Use: "inspect [NAMESPACE]/[NAME]", Short: "Service info by name", Example: serviceInspectExample, Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { namespace, name, err := serviceParseSelfLink(args[0]) checkError(err) svc, err := c.client.cluster.V1().Namespace(namespace).Service(name).Get(context.Background()) if err != nil { fmt.Println(err) return } routes, err := c.client.cluster.V1().Namespace(namespace).Route().List(context.Background()) if err != nil { fmt.Println(err) return } for _, r := range *routes { for _, rule := range r.Spec.Rules { if rule.Service == svc.Meta.Name { 
fmt.Println("exposed:", r.Status.State, r.Spec.Domain, r.Spec.Port) } } } ss := views.FromApiServiceView(svc) ss.Print() }, } } func (c *command) serviceCreateCmd() *cobra.Command { return &cobra.Command{ Use: "create [NAMESPACE]/[NAME] [IMAGE]", Short: "Create service", Example: serviceCreateExample, Args: cobra.ExactArgs(2), Run: func(cmd *cobra.Command, args []string) { namespace, name, err := serviceParseSelfLink(args[0]) checkError(err) image := args[1] opts, err := serviceParseManifest(cmd, name, image) checkError(err) response, err := c.client.cluster.V1().Namespace(namespace).Service().Create(context.Background(), opts) if err != nil { fmt.Println(err) return } fmt.Println(fmt.Sprintf("Service `%s` is created", name)) service := views.FromApiServiceView(response) service.Print() }, } } func (c *command) serviceRemoveCmd() *cobra.Command { return &cobra.Command{ Use: "remove [NAMESPACE] [NAME]", Short: "Remove service by name", Example: serviceRemoveExample, Args: cobra.ExactArgs(2), Run: func(cmd *cobra.Command, args []string) { namespace := args[0] name := args[1] opts := &request.ServiceRemoveOptions{Force: false} if err := opts.Validate(); err != nil { fmt.Println(err.Err()) return } c.client.cluster.V1().Namespace(namespace).Service(name).Remove(context.Background(), opts) fmt.Println(fmt.Sprintf("Service `%s` is successfully removed", name)) }, } } func (c *command) serviceUpdateCmd() *cobra.Command { return &cobra.Command{ Use: "update [NAMESPACE]/[NAME]", Short: "Change configuration of the service", Example: serviceUpdateExample, Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { namespace, name, err := serviceParseSelfLink(args[0]) checkError(err) opts, err := serviceParseManifest(cmd, name, models.EmptyString) checkError(err) response, err := c.client.cluster.V1().Namespace(namespace).Service(name).Update(context.Background(), opts) if err != nil { fmt.Println(err) return } fmt.Println(fmt.Sprintf("Service `%s` is updated", name)) ss := views.FromApiServiceView(response) ss.Print() }, } } func (c *command) serviceLogsCmd() *cobra.Command { return &cobra.Command{ Use: "logs [NAMESPACE]/[NAME]", Short: "Get service logs", Example: serviceLogsExample, Args: cobra.ExactArgs(1), Run: func(cmd *cobra.Command, args []string) { opts := new(request.ServiceLogsOptions) var err error opts.Tail, err = cmd.Flags().GetInt("tail") if err != nil { fmt.Println(err.Error()) return } opts.Follow, err = cmd.Flags().GetBool("follow") if err != nil { fmt.Println(err.Error()) return } namespace, name, err := serviceParseSelfLink(args[0]) checkError(err) reader, _, err := c.client.cluster.V1().Namespace(namespace).Service(name).Logs(context.Background(), opts) if err != nil { fmt.Println(err) return } dec := json.NewDecoder(reader) for { var doc models.LogMessage err := dec.Decode(&doc) if err == io.EOF { // all done break } if err != nil { fmt.Errorf(err.Error()) os.Exit(1) } fmt.Println(">", doc.Selflink, doc.Data) } }, } } func serviceParseSelfLink(selflink string) (string, string, error) { match := strings.Split(selflink, "/") var ( namespace, name string ) switch len(match) { case 2: namespace = match[0] name = match[1] case 1: fmt.Println("Use default namespace:", models.DEFAULT_NAMESPACE) namespace = models.DEFAULT_NAMESPACE name = match[0] default: return "", "", errors.New("invalid service name provided") } return namespace, name, nil } func serviceManifestFlags(cmd *cobra.Command) { cmd.Flags().StringP("name", "n", "", "set service name") 
cmd.Flags().StringP("desc", "d", "", "set service description") cmd.Flags().StringP("memory", "m", "128MIB", "set service spec memory") cmd.Flags().IntP("replicas", "r", 0, "set service replicas") cmd.Flags().StringArrayP("port", "p", make([]string, 0), "set service ports") cmd.Flags().StringArrayP("env", "e", make([]string, 0), "set service env") cmd.Flags().StringArray("env-from-secret", make([]string, 0), "set service env from secret") cmd.Flags().StringArray("env-from-config", make([]string, 0), "set service env from config") cmd.Flags().StringP("image", "i", "", "set service image") cmd.Flags().String("image-secret-name", "", "set service image auth secret name") cmd.Flags().String("image-secret-key", "", "set service image auth secret key") } func serviceParseManifest(cmd *cobra.Command, name, image string) (*request.ServiceManifest, error) { var err error description, err := cmd.Flags().GetString("desc") checkFlagParseError(err) memory, err := cmd.Flags().GetString("memory") checkFlagParseError(err) if name == models.EmptyString { name, err = cmd.Flags().GetString("name") checkFlagParseError(err) } if image == models.EmptyString { image, err = cmd.Flags().GetString("image") checkFlagParseError(err) } ports, err := cmd.Flags().GetStringArray("ports") checkFlagParseError(err) env, err := cmd.Flags().GetStringArray("env") checkFlagParseError(err) senv, err := cmd.Flags().GetStringArray("env-from-secret") checkFlagParseError(err) cenv, err := cmd.Flags().GetStringArray("env-from-config") checkFlagParseError(err) replicas, err := cmd.Flags().GetInt("replicas") checkFlagParseError(err) authName, err := cmd.Flags().GetString("image-secret-name") checkFlagParseError(err) authKey, err := cmd.Flags().GetString("image-secret-key") checkFlagParseError(err) opts := new(request.ServiceManifest) css := make([]request.ManifestSpecTemplateContainer, 0) cs := request.ManifestSpecTemplateContainer{} if len(name) != 0 { opts.Meta.Name = &name } if len(description) != 0 { opts.Meta.Description = &description } if memory != models.EmptyString { cs.Resources.Request.RAM = memory } if replicas != 0 { opts.Spec.Replicas = &replicas } if len(ports) > 0 { opts.Spec.Network = new(request.ManifestSpecNetwork) opts.Spec.Network.Ports = make([]string, 0) opts.Spec.Network.Ports = ports } es := make(map[string]request.ManifestSpecTemplateContainerEnv) if len(env) > 0 { for _, e := range env { kv := strings.SplitN(e, "=", 2) eo := request.ManifestSpecTemplateContainerEnv{ Name: kv[0], } if len(kv) > 1 { eo.Value = kv[1] } es[eo.Name] = eo } } if len(senv) > 0 { for _, e := range senv { kv := strings.SplitN(e, "=", 3) eo := request.ManifestSpecTemplateContainerEnv{ Name: kv[0], } if len(kv) < 3 { return nil, errors.New("Service env from secret is in wrong format, should be [NAME]=[SECRET NAME]=[SECRET STORAGE KEY]") } if len(kv) == 3 { eo.Secret.Name = kv[1] eo.Secret.Key = kv[2] } es[eo.Name] = eo } } if len(cenv) > 0 { for _, e := range cenv { kv := strings.SplitN(e, "=", 3) eo := request.ManifestSpecTemplateContainerEnv{ Name: kv[0], } if len(kv) < 3 { return nil, errors.New("Service env from config is in wrong format, should be [NAME]=[CONFIG NAME]=[CONFIG KEY]") } if len(kv) == 3 { eo.Config.Name = kv[1] eo.Config.Key = kv[2] } es[eo.Name] = eo } } if len(es) > 0 { senvs := make([]request.ManifestSpecTemplateContainerEnv, 0) for _, e := range es { senvs = append(senvs, e) } cs.Env = senvs } opts.Meta.Description = &description cs.Image.Name = image if authName != models.EmptyString { cs.Image.Secret.Name = 
authName } if authKey != models.EmptyString { cs.Image.Secret.Key = authKey } css = append(css, cs) if err := opts.Validate(); err != nil { return nil, err.Err() } return opts, nil }
unloop/lastbackend
internal/cli/command/cluster/service.go
Go
apache-2.0
11,710
package org.ayo.app.tmpl;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;

import org.ayo.app.common.AyoSwipeBackActivityAttacher;

import genius.android.view.R;

/**
 * One Activity hosts a single Fragment;
 * the Fragment owns the UI.
 */
public abstract class AyoJigsawActivityAttacher extends AyoSwipeBackActivityAttacher {

    protected abstract Fragment getFragment();
    protected abstract View getTopView();
    protected abstract View getBottomView();
    protected abstract View getCoverView();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.ayo_tmpl_ac_fragment_container);

        View topView = getTopView();
        if(topView != null) {
            ViewGroup top = (ViewGroup) findViewById(R.id.top);
            top.addView(topView);
        }

        View bottomView = getBottomView();
        if(bottomView != null){
            ViewGroup bottom = (ViewGroup) findViewById(R.id.bottom);
            bottom.addView(bottomView);
        }

        View coverView = getCoverView();
        if(coverView != null){
            ViewGroup root = (ViewGroup) findViewById(R.id.root);
            root.addView(coverView);
        }

        FrameLayout fl_root = (FrameLayout) findViewById(R.id.fl_root);
        getSupportFragmentManager().beginTransaction().replace(fl_root.getId(), getFragment()).commit();
    }
}
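/* Usage sketch (illustrative, not part of the original file): a concrete
 * subclass only supplies the pieces; any of the top/bottom/cover views may be
 * null to leave that slot empty. DemoFragment is a hypothetical name.
 *
 *   public class DemoActivity extends AyoJigsawActivityAttacher {
 *       protected Fragment getFragment()  { return new DemoFragment(); }
 *       protected View getTopView()       { return null; }
 *       protected View getBottomView()    { return null; }
 *       protected View getCoverView()     { return null; }
 *   }
 */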
cowthan/AyoWeibo
ayoview/src/main/java/org/ayo/app/tmpl/AyoJigsawActivityAttacher.java
Java
apache-2.0
1,539
'use strict'; angular.module('ictsAppApp') .controller('TelKpnDeleteModalCtrl', ['$scope', '$modalInstance', 'record', function ($scope, $modalInstance, record) { $scope.record = record; $scope.delete = function () { $modalInstance.close(); }; $scope.cancel = function () { $modalInstance.dismiss('cancel'); }; }]);
utwente/lisa-telefonie
client/app/tel/kpn/kpnDelete.controller.js
JavaScript
apache-2.0
339
/*
 * Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
 */
package jsystem.utils;

import java.io.File;
import java.io.FilenameFilter;

/**
 * Filename filter that accepts files whose names end with a given suffix.
 */
public class ExtentionFilter implements FilenameFilter {

	String endWith = null;

	/**
	 * Create an instance of ExtentionFilter.
	 * 
	 * @param endWith
	 *            the suffix that accepted file names must end with
	 */
	public ExtentionFilter(String endWith) {
		this.endWith = endWith;
	}

	/**
	 * Filter files.
	 * 
	 * @param name
	 *            file name
	 * @return true if the name ends with the suffix set in the constructor
	 *         (case-insensitive); always true when no suffix was set
	 */
	public boolean accept(File dir, String name) {
		if (endWith == null) {
			return true;
		}
		return (name.toLowerCase().endsWith(endWith.toLowerCase()));
	}
}
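/* Usage sketch (illustrative, not part of the original file): listing the XML
 * files in a directory with this filter; the directory path is made up.
 *
 *   File dir = new File("/tmp/reports");
 *   File[] xmlFiles = dir.listFiles(new ExtentionFilter(".xml"));
 */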
Top-Q/jsystem
jsystem-core-projects/jsystemCore/src/main/java/jsystem/utils/ExtentionFilter.java
Java
apache-2.0
773
<?php /** * This example creates a new proposal line item that targets the whole network. * To determine which proposal line items exist, run * GetAllProposalLineItems.php. * * Tags: NetworkService.getCurrentNetwork * Tags: ProposalLineItemService.createProposalLineItems * * PHP version 5 * * Copyright 2014, Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @package GoogleApiAdsDfp * @subpackage v201502 * @category WebServices * @copyright 2014, Google Inc. All Rights Reserved. * @license http://www.apache.org/licenses/LICENSE-2.0 Apache License, * Version 2.0 * @author Vincent Tsao */ error_reporting(E_STRICT | E_ALL); // You can set the include path to src directory or reference // DfpUser.php directly via require_once. // $path = '/path/to/dfp_api_php_lib/src'; $path = dirname(__FILE__) . '/../../../../src'; set_include_path(get_include_path() . PATH_SEPARATOR . $path); require_once 'Google/Api/Ads/Dfp/Lib/DfpUser.php'; require_once 'Google/Api/Ads/Dfp/Util/DateTimeUtils.php'; require_once dirname(__FILE__) . '/../../../Common/ExampleUtils.php'; // Set the ID of the proposal that the proposal line items will belong to. $proposalId = 'INSERT_PROPOSAL_ID_HERE'; // Set the ID of the product that the proposal line items should be created // from. $productId = 'INSERT_PRODUCT_ID_HERE'; // Set the ID of the rate card that the proposal line items should be priced // with. $rateCardId = 'INSERT_RATE_CARD_ID_HERE'; try { // Get DfpUser from credentials in "../auth.ini" // relative to the DfpUser.php file's directory. $user = new DfpUser(); // Log SOAP XML request and response. $user->LogDefaults(); // Get the ProposalLineItemService. $proposalLineItemService = $user->GetService('ProposalLineItemService', 'v201502'); // Get the NetworkService. $networkService = $user->GetService('NetworkService', 'v201502'); // Get the root ad unit ID used to target the whole site. $rootAdUnitId = $networkService->getCurrentNetwork()->effectiveRootAdUnitId; // Create inventory targeting. $inventoryTargeting = new InventoryTargeting(); // Create ad unit targeting for the root ad unit (i.e. the whole network). $adUnitTargeting = new AdUnitTargeting(); $adUnitTargeting->adUnitId = $rootAdUnitId; $adUnitTargeting->includeDescendants = true; $inventoryTargeting->targetedAdUnits = array($adUnitTargeting); // Create targeting. $targeting = new Targeting(); $targeting->inventoryTargeting = $inventoryTargeting; // Create a proposal line item. $proposalLineItem = new ProposalLineItem(); $proposalLineItem->name = sprintf('Proposal line item #%s', uniqid()); $proposalLineItem->proposalId = $proposalId; $proposalLineItem->rateCardId = $rateCardId; $proposalLineItem->productId = $productId; $proposalLineItem->targeting = $targeting; // Set the length of the proposal line item to run. 
$proposalLineItem->startDateTime = DateTimeUtils::GetDfpDateTime(new DateTime()); $proposalLineItem->endDateTime = DateTimeUtils::GetDfpDateTime(new DateTime('+1 month')); // Set delivery specifications for the proposal line item. $proposalLineItem->deliveryRateType = 'EVENLY'; $proposalLineItem->creativeRotationType = 'OPTIMIZED'; // Set billing specifications for the proposal line item. $proposalLineItem->billingCap = 'CAPPED_CUMULATIVE'; $proposalLineItem->billingSource = 'THIRD_PARTY_VOLUME'; // Set pricing for the proposal line item for 1000 impressions at a CPM of $2 // for a total value of $2. $goal = new Goal(); $goal->units = 1000; $goal->unitType = 'IMPRESSIONS'; $proposalLineItem->goal = $goal; $proposalLineItem->cost = new Money('USD', 2000000); $proposalLineItem->costPerUnit = new Money('USD', 2000000); $proposalLineItem->rateType = 'CPM'; // Create the proposal line item on the server. $proposalLineItems = $proposalLineItemService->createProposalLineItems( array($proposalLineItem)); foreach ($proposalLineItems as $createdProposalLineItem) { printf("A proposal line item with ID %d and name '%s' was created.\n", $createdProposalLineItem->id, $createdProposalLineItem->name); } } catch (OAuth2Exception $e) { ExampleUtils::CheckForOAuth2Errors($e); } catch (ValidationException $e) { ExampleUtils::CheckForOAuth2Errors($e); } catch (Exception $e) { printf("%s\n", $e->getMessage()); }
freddiedfre/google_bing_ads
examples/Dfp/v201502/ProposalLineItemService/CreateProposalLineItems.php
PHP
apache-2.0
4,966
function imagecreate(image, pool, url, cmd){
    if ( image === undefined ) {
        image = $("#image").val();
    }
    if ( pool === undefined ) {
        pool = $("#pool").val();
    }
    if ( url === undefined ) {
        url = $("#url").val();
    }
    if ( cmd === undefined ) {
        cmd = $("#cmd").val();
    }
    $("#wheel").show();
    var data = {'image': image, 'action': 'create', 'pool': pool, 'url': url, 'cmd': cmd};
    $.ajax({
         type: "POST",
         url: '/imageaction',
         data: data,
         success: function(data) {
            $("#wheel").hide();
            $("#urllabel").hide();
            $("#url").hide();
            if (data.result == 'success') {
                $('.top-right').notify({message: { text: "Image "+image+" created!!!" }, type: 'success'}).show();
            } else {
                $('.top-right').notify({message: { text: "Image "+image+" not created because "+data.reason }, type: 'danger'}).show();
            }
         }
    });
}

function imageurl(){
    var image = $( "#image option:selected" ).text();
    if (~image.indexOf("rhel")) {
        $("#url").show();
        $("#urllabel").show();
        var url = $( "#image option:selected" ).attr("url");
        window.open(url, "_blank");
    }
}
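/* Usage sketch (illustrative, not part of the original file): wiring the two
 * handlers to hypothetical page elements with jQuery; the element ids below
 * are assumptions, not taken from the original page.
 *
 *   $("#create").on("click", function () { imagecreate(); });
 *   $("#image").on("change", imageurl);
 */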
karmab/kcli
kvirt/web/static/js/imageaction.js
JavaScript
apache-2.0
1,212
/* * Copyright 2013-2020 consulo.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package consulo.ui.layout; import consulo.disposer.Disposable; import consulo.localize.LocalizeValue; import consulo.ui.Component; import consulo.ui.internal.UIInternal; import consulo.ui.annotation.RequiredUIAccess; import javax.annotation.Nonnull; import java.util.EventListener; /** * @author VISTALL * @since 2020-05-29 */ public interface FoldoutLayout extends Layout { @FunctionalInterface static interface StateListener extends EventListener { @RequiredUIAccess void stateChanged(boolean state); } @Nonnull static FoldoutLayout create(@Nonnull LocalizeValue titleValue, @Nonnull Component component) { return create(titleValue, component, true); } @Nonnull static FoldoutLayout create(@Nonnull LocalizeValue titleValue, @Nonnull Component component, boolean state) { return UIInternal.get()._Layouts_foldout(titleValue, component, state); } @Nonnull @RequiredUIAccess FoldoutLayout setState(boolean showing); @Nonnull @RequiredUIAccess FoldoutLayout setTitle(@Nonnull LocalizeValue title); @Nonnull Disposable addStateListener(@Nonnull StateListener stateListener); }
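/* Usage sketch (illustrative, not part of the original file): `title` stands
 * for a LocalizeValue and `content` for a Component obtained elsewhere; only
 * methods declared in this interface are used.
 *
 *   FoldoutLayout layout = FoldoutLayout.create(title, content, false);
 *   layout.addStateListener(expanded ->
 *       System.out.println("foldout expanded: " + expanded));
 *   layout.setState(true); // expand programmatically
 */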
consulo/consulo
modules/base/ui-api/src/main/java/consulo/ui/layout/FoldoutLayout.java
Java
apache-2.0
1,731
/* * Copyright 2010-2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cn.lhfei.hadoop.hbase.crud; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import cn.lhfei.hadoop.hbase.common.HBaseHelper; /** * @version 1.0.0 * * @author Hefei Li * * @since Dec 15, 2015 */ public class DeleteApp { private static final Logger log = LoggerFactory.getLogger(DeleteApp.class); public static void main(String[] args) { try { Configuration conf = HBaseConfiguration.create(); HBaseHelper helper = HBaseHelper.getHelper(conf); helper.dropTable("testtable"); helper.createTable("testtable", 100, "colfam1", "colfam2"); helper.put("testtable", new String[] { "row1" }, new String[] { "colfam1", "colfam2" }, new String[] { "qual1", "qual1", "qual2", "qual2", "qual3", "qual3" }, new long[] { 1, 2, 3, 4, 5, 6 }, new String[] { "val1", "val1", "val2", "val2", "val3", "val3" }); log.info("Before delete call..."); helper.dump("testtable", new String[]{ "row1" }, null, null); Connection connection = ConnectionFactory.createConnection(conf); Table table = connection.getTable(TableName.valueOf("testtable")); // DeleteExample Delete delete = new Delete(Bytes.toBytes("row1")); // DeleteExample-1-NewDel Create delete with specific row. delete.setTimestamp(1); // DeleteExample-2-SetTS Set timestamp for row deletes. delete.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); // DeleteExample-3-DelColNoTS Delete the latest version only in one column. delete.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual3"), 3); // DeleteExample-4-DelColTS Delete specific version in one column. delete.addColumns(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); // DeleteExample-5-DelColsNoTS Delete all versions in one column. delete.addColumns(Bytes.toBytes("colfam1"), Bytes.toBytes("qual3"), 2); // DeleteExample-6-DelColsTS Delete the given and all older versions in one column. delete.addFamily(Bytes.toBytes("colfam1")); // DeleteExample-7-AddCol Delete entire family, all columns and versions. delete.addFamily(Bytes.toBytes("colfam1"), 3); // DeleteExample-8-AddCol Delete the given and all older versions in the entire column family, i.e., from all columns therein. table.delete(delete); // DeleteExample-9-DoDel Delete the data from the HBase table. // ^^ DeleteExample table.close(); connection.close(); log.info("After delete call..."); helper.dump("testtable", new String[] { "row1" }, null, null); helper.close(); } catch (IOException e) { log.error(e.getMessage(), e); } } }
lhfei/hadoop-in-action
src/main/java/cn/lhfei/hadoop/hbase/crud/DeleteApp.java
Java
apache-2.0
3,599
class Solution {
public:
    int findMin(vector<int> &num) {
        return findMin(num, 0, num.size()-1);
    }

    inline int min(int a, int b) {
        if (a > b) {
            return b;
        }
        return a;
    }

    int findMin(vector<int> &num, int start, int end) {
        if(start == end) {
            return num[start];
        }
        if(start + 1 == end) {
            if(num[start] < num[end]) {
                return num[start];
            }
            return num[end];
        }
        int mid = ((end-start)>>1) + start;
        if(num[start] < num[mid]) {
            return min(num[start], findMin(num, mid, end));
        }
        if(num[mid] < num[end]) {
            return findMin(num, start, mid);
        }
        return min(findMin(num, mid+1, end), findMin(num, start, mid));
    }
};
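/* Usage sketch (illustrative, not part of the original file): find the
 * minimum of a rotated sorted array that may contain duplicates. Without
 * duplicates the recursion is O(log n); with duplicates the final case must
 * search both halves, degrading to O(n) in the worst case.
 *
 *   vector<int> v{4, 5, 6, 7, 0, 1, 2};
 *   Solution s;
 *   int m = s.findMin(v);  // m == 0
 */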
hongtaocai/code_interview
cpp/FindMinimuminRotatedSortedArrayII.cpp
C++
apache-2.0
831
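# NOTE: the functions below use `ut`, `join`, `dirname`, and `basename`, but
# the snippet as captured has no top-level imports; these are the assumed
# missing ones.
from os.path import basename, dirname, join  # NOQA
import utool as ut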
def get_injured_sharks(): """ >>> from ibeis.scripts.getshark import * # NOQA """ import requests url = 'http://www.whaleshark.org/getKeywordImages.jsp' resp = requests.get(url) assert resp.status_code == 200 keywords = resp.json()['keywords'] key_list = ut.take_column(keywords, 'indexName') key_to_nice = {k['indexName']: k['readableName'] for k in keywords} injury_patterns = [ 'injury', 'net', 'hook', 'trunc', 'damage', 'scar', 'nicks', 'bite', ] injury_keys = [key for key in key_list if any([pat in key for pat in injury_patterns])] noninjury_keys = ut.setdiff(key_list, injury_keys) injury_nice = ut.lmap(lambda k: key_to_nice[k], injury_keys) # NOQA noninjury_nice = ut.lmap(lambda k: key_to_nice[k], noninjury_keys) # NOQA key_list = injury_keys keyed_images = {} for key in ut.ProgIter(key_list, lbl='reading index', bs=True): key_url = url + '?indexName={indexName}'.format(indexName=key) key_resp = requests.get(key_url) assert key_resp.status_code == 200 key_imgs = key_resp.json()['images'] keyed_images[key] = key_imgs key_hist = {key: len(imgs) for key, imgs in keyed_images.items()} key_hist = ut.sort_dict(key_hist, ut.identity) print(ut.repr3(key_hist)) nice_key_hist = ut.map_dict_keys(lambda k: key_to_nice[k], key_hist) nice_key_hist = ut.sort_dict(nice_key_hist, ut.identity) print(ut.repr3(nice_key_hist)) key_to_urls = {key: ut.take_column(vals, 'url') for key, vals in keyed_images.items()} overlaps = {} import itertools overlap_img_list = [] for k1, k2 in itertools.combinations(key_to_urls.keys(), 2): overlap_imgs = ut.isect(key_to_urls[k1], key_to_urls[k2]) num_overlap = len(overlap_imgs) overlaps[(k1, k2)] = num_overlap overlaps[(k1, k1)] = len(key_to_urls[k1]) if num_overlap > 0: #print('[%s][%s], overlap=%r' % (k1, k2, num_overlap)) overlap_img_list.extend(overlap_imgs) all_img_urls = list(set(ut.flatten(key_to_urls.values()))) num_all = len(all_img_urls) # NOQA print('num_all = %r' % (num_all,)) # Determine super-categories categories = ['nicks', 'scar', 'trunc'] # Force these keys into these categories key_to_cat = {'scarbite': 'other_injury'} cat_to_keys = ut.ddict(list) for key in key_to_urls.keys(): flag = 1 if key in key_to_cat: cat = key_to_cat[key] cat_to_keys[cat].append(key) continue for cat in categories: if cat in key: cat_to_keys[cat].append(key) flag = 0 if flag: cat = 'other_injury' cat_to_keys[cat].append(key) cat_urls = ut.ddict(list) for cat, keys in cat_to_keys.items(): for key in keys: cat_urls[cat].extend(key_to_urls[key]) cat_hist = {} for cat in list(cat_urls.keys()): cat_urls[cat] = list(set(cat_urls[cat])) cat_hist[cat] = len(cat_urls[cat]) print(ut.repr3(cat_to_keys)) print(ut.repr3(cat_hist)) key_to_cat = dict([(val, key) for key, vals in cat_to_keys.items() for val in vals]) #ingestset = { # '__class__': 'ImageSet', # 'images': ut.ddict(dict) #} #for key, key_imgs in keyed_images.items(): # for imgdict in key_imgs: # url = imgdict['url'] # encid = imgdict['correspondingEncounterNumber'] # # Make structure # encdict = encounters[encid] # encdict['__class__'] = 'Encounter' # imgdict = ut.delete_keys(imgdict.copy(), ['correspondingEncounterNumber']) # imgdict['__class__'] = 'Image' # cat = key_to_cat[key] # annotdict = {'relative_bbox': [.01, .01, .98, .98], 'tags': [cat, key]} # annotdict['__class__'] = 'Annotation' # # Ensure structures exist # encdict['images'] = encdict.get('images', []) # imgdict['annots'] = imgdict.get('annots', []) # # Add an image to this encounter # encdict['images'].append(imgdict) # # Add an annotation to this image # 
imgdict['annots'].append(annotdict) ##http://springbreak.wildbook.org/rest/org.ecocean.Encounter/1111 #get_enc_url = 'http://www.whaleshark.org/rest/org.ecocean.Encounter/%s' % (encid,) #resp = requests.get(get_enc_url) #print(ut.repr3(encdict)) #print(ut.repr3(encounters)) # Download the files to the local disk #fpath_list = all_urls = ut.unique(ut.take_column( ut.flatten( ut.dict_subset(keyed_images, ut.flatten(cat_to_keys.values())).values() ), 'url')) dldir = ut.truepath('~/tmpsharks') from os.path import commonprefix, basename # NOQA prefix = commonprefix(all_urls) suffix_list = [url_[len(prefix):] for url_ in all_urls] fname_list = [suffix.replace('/', '--') for suffix in suffix_list] fpath_list = [] for url, fname in ut.ProgIter(zip(all_urls, fname_list), lbl='downloading imgs', freq=1): fpath = ut.grab_file_url(url, download_dir=dldir, fname=fname, verbose=False) fpath_list.append(fpath) # Make sure we keep orig info #url_to_keys = ut.ddict(list) url_to_info = ut.ddict(dict) for key, imgdict_list in keyed_images.items(): for imgdict in imgdict_list: url = imgdict['url'] info = url_to_info[url] for k, v in imgdict.items(): info[k] = info.get(k, []) info[k].append(v) info['keys'] = info.get('keys', []) info['keys'].append(key) #url_to_keys[url].append(key) info_list = ut.take(url_to_info, all_urls) for info in info_list: if len(set(info['correspondingEncounterNumber'])) > 1: assert False, 'url with two different encounter nums' # Combine duplicate tags hashid_list = [ut.get_file_uuid(fpath_, stride=8) for fpath_ in ut.ProgIter(fpath_list, bs=True)] groupxs = ut.group_indices(hashid_list)[1] # Group properties by duplicate images #groupxs = [g for g in groupxs if len(g) > 1] fpath_list_ = ut.take_column(ut.apply_grouping(fpath_list, groupxs), 0) url_list_ = ut.take_column(ut.apply_grouping(all_urls, groupxs), 0) info_list_ = [ut.map_dict_vals(ut.flatten, ut.dict_accum(*info_)) for info_ in ut.apply_grouping(info_list, groupxs)] encid_list_ = [ut.unique(info_['correspondingEncounterNumber'])[0] for info_ in info_list_] keys_list_ = [ut.unique(info_['keys']) for info_ in info_list_] cats_list_ = [ut.unique(ut.take(key_to_cat, keys)) for keys in keys_list_] clist = ut.ColumnLists({ 'gpath': fpath_list_, 'url': url_list_, 'encid': encid_list_, 'key': keys_list_, 'cat': cats_list_, }) #for info_ in ut.apply_grouping(info_list, groupxs): # info = ut.dict_accum(*info_) # info = ut.map_dict_vals(ut.flatten, info) # x = ut.unique(ut.flatten(ut.dict_accum(*info_)['correspondingEncounterNumber'])) # if len(x) > 1: # info = info.copy() # del info['keys'] # print(ut.repr3(info)) flags = ut.lmap(ut.fpath_has_imgext, clist['gpath']) clist = clist.compress(flags) import ibeis ibs = ibeis.opendb('WS_Injury', allow_newdir=True) gid_list = ibs.add_images(clist['gpath']) clist['gid'] = gid_list failed_flags = ut.flag_None_items(clist['gid']) print('# failed %s' % (sum(failed_flags)),) passed_flags = ut.not_list(failed_flags) clist = clist.compress(passed_flags) ut.assert_all_not_None(clist['gid']) #ibs.get_image_uris_original(clist['gid']) ibs.set_image_uris_original(clist['gid'], clist['url'], overwrite=True) #ut.zipflat(clist['cat'], clist['key']) if False: # Can run detection instead clist['tags'] = ut.zipflat(clist['cat']) aid_list = ibs.use_images_as_annotations(clist['gid'], adjust_percent=0.01, tags_list=clist['tags']) aid_list import plottool as pt from ibeis import core_annots pt.qt4ensure() #annots = ibs.annots() #aids = [1, 2] #ibs.depc_annot.get('hog', aids , 'hog') 
#ibs.depc_annot.get('chip', aids, 'img') for aid in ut.InteractiveIter(ibs.get_valid_aids()): hogs = ibs.depc_annot.d.get_hog_hog([aid]) chips = ibs.depc_annot.d.get_chips_img([aid]) chip = chips[0] hogimg = core_annots.make_hog_block_image(hogs[0]) pt.clf() pt.imshow(hogimg, pnum=(1, 2, 1)) pt.imshow(chip, pnum=(1, 2, 2)) fig = pt.gcf() fig.show() fig.canvas.draw() #print(len(groupxs)) #if False: #groupxs = ut.find_duplicate_items(ut.lmap(basename, suffix_list)).values() #print(ut.repr3(ut.apply_grouping(all_urls, groupxs))) # # FIX # for fpath, fname in zip(fpath_list, fname_list): # if ut.checkpath(fpath): # ut.move(fpath, join(dirname(fpath), fname)) # print('fpath = %r' % (fpath,)) #import ibeis #from ibeis.dbio import ingest_dataset #dbdir = ibeis.sysres.lookup_dbdir('WS_ALL') #self = ingest_dataset.Ingestable2(dbdir) if False: # Show overlap matrix import plottool as pt import pandas as pd import numpy as np dict_ = overlaps s = pd.Series(dict_, index=pd.MultiIndex.from_tuples(overlaps)) df = s.unstack() lhs, rhs = df.align(df.T) df = lhs.add(rhs, fill_value=0).fillna(0) label_texts = df.columns.values def label_ticks(label_texts): import plottool as pt truncated_labels = [repr(lbl[0:100]) for lbl in label_texts] ax = pt.gca() ax.set_xticks(list(range(len(label_texts)))) ax.set_xticklabels(truncated_labels) [lbl.set_rotation(-55) for lbl in ax.get_xticklabels()] [lbl.set_horizontalalignment('left') for lbl in ax.get_xticklabels()] #xgrid, ygrid = np.meshgrid(range(len(label_texts)), range(len(label_texts))) #pt.plot_surface3d(xgrid, ygrid, disjoint_mat) ax.set_yticks(list(range(len(label_texts)))) ax.set_yticklabels(truncated_labels) [lbl.set_horizontalalignment('right') for lbl in ax.get_yticklabels()] [lbl.set_verticalalignment('center') for lbl in ax.get_yticklabels()] #[lbl.set_rotation(20) for lbl in ax.get_yticklabels()] #df = df.sort(axis=0) #df = df.sort(axis=1) sortx = np.argsort(df.sum(axis=1).values)[::-1] df = df.take(sortx, axis=0) df = df.take(sortx, axis=1) fig = pt.figure(fnum=1) fig.clf() mat = df.values.astype(np.int32) mat[np.diag_indices(len(mat))] = 0 vmax = mat[(1 - np.eye(len(mat))).astype(np.bool)].max() import matplotlib.colors norm = matplotlib.colors.Normalize(vmin=0, vmax=vmax, clip=True) pt.plt.imshow(mat, cmap='hot', norm=norm, interpolation='none') pt.plt.colorbar() pt.plt.grid('off') label_ticks(label_texts) fig.tight_layout() #overlap_df = pd.DataFrame.from_dict(overlap_img_list) class TmpImage(ut.NiceRepr): pass from skimage.feature import hog from skimage import data, color, exposure import plottool as pt image2 = color.rgb2gray(data.astronaut()) # NOQA fpath = './GOPR1120.JPG' import vtool as vt for fpath in [fpath]: """ http://scikit-image.org/docs/dev/auto_examples/plot_hog.html """ image = vt.imread(fpath, grayscale=True) image = pt.color_funcs.to_base01(image) fig = pt.figure(fnum=2) fd, hog_image = hog(image, orientations=8, pixels_per_cell=(16, 16), cells_per_block=(1, 1), visualise=True) fig, (ax1, ax2) = pt.plt.subplots(1, 2, figsize=(8, 4), sharex=True, sharey=True) ax1.axis('off') ax1.imshow(image, cmap=pt.plt.cm.gray) ax1.set_title('Input image') ax1.set_adjustable('box-forced') # Rescale histogram for better display hog_image_rescaled = exposure.rescale_intensity(hog_image, in_range=(0, 0.02)) ax2.axis('off') ax2.imshow(hog_image_rescaled, cmap=pt.plt.cm.gray) ax2.set_title('Histogram of Oriented Gradients') ax1.set_adjustable('box-forced') pt.plt.show() #for def detect_sharks(ibs, gids): #import ibeis #ibs = ibeis.opendb('WS_ALL') 
config = { 'algo' : 'yolo', 'sensitivity' : 0.2, 'config_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg'), 'weight_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.39000.weights'), 'class_filepath' : ut.truepath('~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg.classes'), } depc = ibs.depc_image #imgsets = ibs.imagesets(text='Injured Sharks') #images = ibs.images(imgsets.gids[0]) images = ibs.images(gids) images = images.compress([ext not in ['.gif'] for ext in images.exts]) gid_list = images.gids # result is a tuple: # (score, bbox_list, theta_list, conf_list, class_list) results_list = depc.get_property('localizations', gid_list, None, config=config) results_list2 = [] multi_gids = [] failed_gids = [] #ibs.set_image_imagesettext(failed_gids, ['Fixme'] * len(failed_gids)) ibs.set_image_imagesettext(multi_gids, ['Fixme2'] * len(multi_gids)) failed_gids for gid, res in zip(gid_list, results_list): score, bbox_list, theta_list, conf_list, class_list = res if len(bbox_list) == 0: failed_gids.append(gid) elif len(bbox_list) == 1: results_list2.append((gid, bbox_list, theta_list)) elif len(bbox_list) > 1: multi_gids.append(gid) idx = conf_list.argmax() res2 = (gid, bbox_list[idx:idx + 1], theta_list[idx:idx + 1]) results_list2.append(res2) ut.dict_hist(([t[1].shape[0] for t in results_list])) localized_imgs = ibs.images(ut.take_column(results_list2, 0)) assert all([len(a) == 1 for a in localized_imgs.aids]) old_annots = ibs.annots(ut.flatten(localized_imgs.aids)) #old_tags = old_annots.case_tags # Override old bboxes import numpy as np bboxes = np.array(ut.take_column(results_list2, 1))[:, 0, :] ibs.set_annot_bboxes(old_annots.aids, bboxes) if False: import plottool as pt pt.qt4ensure() inter = pt.MultiImageInteraction( ibs.get_image_paths(ut.take_column(results_list2, 0)), bboxes_list=ut.take_column(results_list2, 1) ) inter.dump_to_disk('shark_loc', num=50, prefix='shark_loc') inter.start() inter = pt.MultiImageInteraction(ibs.get_image_paths(failed_gids)) inter.start() inter = pt.MultiImageInteraction(ibs.get_image_paths(multi_gids)) inter.start() def train_part_detector(): """ Problem: healthy sharks usually have a mostly whole body shot injured sharks usually have a close up shot. This distribution of images is likely what the injur-shark net is picking up on. The goal is to train a detector that looks for things that look like the distribution of injured sharks. 
We will run this on healthy sharks to find the parts of """ import ibeis ibs = ibeis.opendb('WS_ALL') imgset = ibs.imagesets(text='Injured Sharks') injured_annots = imgset.annots[0] # NOQA #config = { # 'dim_size': (224, 224), # 'resize_dim': 'wh' #} from pydarknet import Darknet_YOLO_Detector data_path = ibs.export_to_xml() output_path = join(ibs.get_cachedir(), 'training', 'localizer') ut.ensuredir(output_path) dark = Darknet_YOLO_Detector() results = dark.train(data_path, output_path) del dark localizer_weight_path, localizer_config_path, localizer_class_path = results classifier_model_path = ibs.classifier_train() labeler_model_path = ibs.labeler_train() output_path = join(ibs.get_cachedir(), 'training', 'detector') ut.ensuredir(output_path) ut.copy(localizer_weight_path, join(output_path, 'localizer.weights')) ut.copy(localizer_config_path, join(output_path, 'localizer.config')) ut.copy(localizer_class_path, join(output_path, 'localizer.classes')) ut.copy(classifier_model_path, join(output_path, 'classifier.npy')) ut.copy(labeler_model_path, join(output_path, 'labeler.npy')) # ibs.detector_train() def purge_ensure_one_annot_per_images(ibs): """ pip install Pipe """ # Purge all but one annotation images = ibs.images() #images.aids groups = images._annot_groups import numpy as np # Take all but the largest annotations per images large_masks = [ut.index_to_boolmask([np.argmax(x)], len(x)) for x in groups.bbox_area] small_masks = ut.lmap(ut.not_list, large_masks) # Remove all but the largets annotation small_aids = ut.zipcompress(groups.aid, small_masks) small_aids = ut.flatten(small_aids) # Fix any empty images images = ibs.images() empty_images = ut.where(np.array(images.num_annotations) == 0) print('empty_images = %r' % (empty_images,)) #list(map(basename, map(dirname, images.uris_original))) def VecPipe(func): import pipe @pipe.Pipe def wrapped(sequence): return map(func, sequence) #return (None if item is None else func(item) for item in sequence) return wrapped name_list = list(images.uris_original | VecPipe(dirname) | VecPipe(basename)) aids_list = images.aids ut.assert_all_eq(list(aids_list | VecPipe(len))) annots = ibs.annots(ut.flatten(aids_list)) annots.names = name_list def shark_misc(): import ibeis ibs = ibeis.opendb('WS_ALL') aid_list = ibs.get_valid_aids() flag_list = ibs.get_annot_been_adjusted(aid_list) adjusted_aids = ut.compress(aid_list, flag_list) return adjusted_aids #if False: # # TRY TO FIGURE OUT WHY URLS ARE MISSING IN STEP 1 # encounter_to_parsed1 = parsed1.group_items('encounter') # encounter_to_parsed2 = parsed2.group_items('encounter') # url_to_parsed1 = parsed1.group_items('img_url') # url_to_parsed2 = parsed2.group_items('img_url') # def set_overlap(set1, set2): # set1 = set(set1) # set2 = set(set2) # return ut.odict([ # ('s1', len(set1)), # ('s2', len(set2)), # ('isect', len(set1.intersection(set2))), # ('union', len(set1.union(set2))), # ('s1 - s2', len(set1.difference(set2))), # ('s2 - s1', len(set2.difference(set1))), # ]) # print('encounter overlap: ' + ut.repr3(set_overlap(encounter_to_parsed1, encounter_to_parsed2))) # print('url overlap: ' + ut.repr3(set_overlap(url_to_parsed1, url_to_parsed2))) # url1 = list(url_to_parsed1.keys()) # url2 = list(url_to_parsed2.keys()) # # remove common prefixes # from os.path import commonprefix, basename # NOQA # cp1 = commonprefix(url1) # cp2 = commonprefix(url2) # #suffix1 = sorted([u[len(cp1):].lower() for u in url1]) # #suffix2 = sorted([u[len(cp2):].lower() for u in url2]) # suffix1 = sorted([u[len(cp1):] for 
u in url1]) # suffix2 = sorted([u[len(cp2):] for u in url2]) # print('suffix overlap: ' + ut.repr3(set_overlap(suffix1, suffix2))) # set1 = set(suffix1) # set2 = set(suffix2) # only1 = list(set1 - set1.intersection(set2)) # only2 = list(set2 - set1.intersection(set2)) # import numpy as np # for suf in ut.ProgIter(only2, bs=True): # dist = np.array(ut.edit_distance(suf, only1)) # idx = ut.argsort(dist)[0:3] # if dist[idx][0] < 3: # close = ut.take(only1, idx) # print('---') # print('suf = %r' % (join(cp2, suf),)) # print('close = %s' % (ut.repr3([join(cp1, c) for c in close]),)) # print('---') # break # # Associate keywords with original images # #lower_urls = [x.lower() for x in parsed['img_url']] # url_to_idx = ut.make_index_lookup(parsed1['img_url']) # parsed1['keywords'] = [[] for _ in range(len(parsed1))] # for url, keys in url_to_keys.items(): # # hack because urls are note in the same format # url = url.replace('wildbook_data_dir', 'shepherd_data_dir') # url = url.lower() # if url in url_to_idx: # idx = url_to_idx[url] # parsed1['keywords'][idx].extend(keys) #healthy_annots = ibs.annots(ibs.imagesets(text='Non-Injured Sharks').aids[0]) #ibs.set_annot_prop('healthy', healthy_annots.aids, [True] * len(healthy_annots)) #['healthy' in t and len(t) > 0 for t in single_annots.case_tags] #healthy_tags = [] #ut.find_duplicate_items(cur_img_uuids) #ut.find_duplicate_items(new_img_uuids) #cur_uuids = set(cur_img_uuids) #new_uuids = set(new_img_uuids) #both_uuids = new_uuids.intersection(cur_uuids) #only_cur = cur_uuids - both_uuids #only_new = new_uuids - both_uuids #print('len(cur_uuids) = %r' % (len(cur_uuids))) #print('len(new_uuids) = %r' % (len(new_uuids))) #print('len(both_uuids) = %r' % (len(both_uuids))) #print('len(only_cur) = %r' % (len(only_cur))) #print('len(only_new) = %r' % (len(only_new))) # Ensure that data in both sets are syncronized #images_both = [] #if False: # print('Removing small images') # import numpy as np # import vtool as vt # imgsize_list = np.array([vt.open_image_size(gpath) for gpath in parsed['new_fpath']]) # sqrt_area_list = np.sqrt(np.prod(imgsize_list, axis=1)) # areq_flags_list = sqrt_area_list >= 750 # parsed = parsed.compress(areq_flags_list)
SU-ECE-17-7/ibeis
ibeis/scripts/getshark_old.py
Python
apache-2.0
22,458
package com.geekdos.app; import com.geekdos.midelwar.interfaces.GestionDesNotesInterface; import com.geekdos.model.*; import java.net.MalformedURLException; import java.rmi.Naming; import java.rmi.NotBoundException; import java.rmi.Remote; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.List; /** * Created by theXuser on 11/12/2016. */ public class App { public static void main(String[] args){ System.out.println("-----------------------------------------------------"); System.out.println("Bienvenue dans l'application Gestion des Notes Client"); System.out.println("-----------------------------------------------------"); Etudiant oussama = new Etudiant(); Etudiant dina = new Etudiant(); Etudiant ouail = new Etudiant(); Etudiant ayoub = new Etudiant(); Etudiant yassin = new Etudiant(); Etudiant halima = new Etudiant(); oussama.setNom("KHACHIAI");oussama.setPrenom("Oussama");oussama.setCne("1128764379");oussama.setAge(25); dina.setNom("BEN HALIMA");dina.setPrenom("Dina");dina.setCne("1228764379");dina.setAge(21); ouail.setNom("KERDAD");ouail.setPrenom("Ouail");ouail.setCne("1028764379");ouail.setAge(25); ayoub.setNom("BOUCHAREB");ayoub.setPrenom("Ayoub");ayoub.setCne("1228764380");ayoub.setAge(25); yassin.setNom("AKESBI");yassin.setPrenom("Yassin");yassin.setCne("1228764385");yassin.setAge(23); halima.setNom("BOUJRA");halima.setPrenom("Halima");halima.setCne("1228764386");halima.setAge(23); Note note1 = new Note();Note note2 = new Note(); Note note3 = new Note();Note note4 = new Note(); Note note5 = new Note();Note note6 = new Note(); Note note7 = new Note();Note note8 = new Note(); Note note9 = new Note();Note note10 = new Note(); Note note11 = new Note();Note note12 = new Note(); Note note13 = new Note();Note note14 = new Note(); note1.setName("M1");note2.setName("M2"); note3.setName("M3");note4.setName("M4"); note5.setName("M5");note6.setName("M6"); note7.setName("M7");note8.setName("M8"); note9.setName("M9");note10.setName("M10"); note11.setName("M11");note12.setName("M12"); note13.setName("M13");note14.setName("M14"); note1.setValue(18);note2.setValue(18); note3.setValue(17);note4.setValue(17); note5.setValue(15);note6.setValue(15); note7.setValue(15);note8.setValue(10); note9.setValue(10);note10.setValue(10); note11.setValue(8);note12.setValue(5); note13.setValue(12);note14.setValue(7); List<Note> oussamaNotes = new ArrayList<Note>(); List<Note> dinaNotes = new ArrayList<Note>(); List<Note> ouailNotes = new ArrayList<Note>(); List<Note> ayoubNotes = new ArrayList<Note>(); oussamaNotes.add(note1); oussamaNotes.add(note2); dinaNotes.add(note3); dinaNotes.add(note4); ouailNotes.add(note5); ouailNotes.add(note6); ayoubNotes.add(note7); ayoubNotes.add(note8); oussama.setNotes(oussamaNotes); dina.setNotes(dinaNotes); ouail.setNotes(ouailNotes); ayoub.setNotes(ayoubNotes); List<Etudiant> etudiants = new ArrayList<>(); etudiants.add(oussama); etudiants.add(dina); etudiants.add(ouail); etudiants.add(ayoub); try { Remote gestionDesNotes = Naming.lookup("rmi://169.254.12.27/GestionDesNotes"); ((GestionDesNotesInterface) gestionDesNotes).setLes_etudiants(etudiants); String messsageOussama = "La moyenne des note de : "+ oussama.getNom() +" "+ oussama.getPrenom(); messsageOussama += " Qui porte le CNE "+oussama.getCne(); messsageOussama += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("KHACHIAI"); String messsageOuail = "La moyenne des note de : "+ ouail.getNom() +" "+ ouail.getPrenom(); messsageOuail += " Qui porte le CNE "+ouail.getCne(); 
messsageOuail += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("KERDAD"); String messsageDina = "La moyenne des note de : "+ dina.getNom() +" "+ dina.getPrenom(); messsageDina += " Qui porte le CNE "+dina.getCne(); messsageDina += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("BEN HALIMA"); String messsageAyoub = "La moyenne des note de : "+ ayoub.getNom() +" "+ ayoub.getPrenom(); messsageAyoub += " Qui porte le CNE "+ayoub.getCne(); messsageAyoub += " est : "+ ((GestionDesNotesInterface) gestionDesNotes).getNote("BOUCHAREB"); /** * Pour affichier la liste des étudiants qui sont enregistrer avec leur note du moyenne */ System.out.println("-----------------------------------------------------"); System.out.println("La liste des étudiants enregistrer"); System.out.println("-----------------------------------------------------"); System.out.println(messsageOussama); System.out.println(messsageDina); System.out.println(messsageOuail); System.out.println(messsageAyoub); System.out.println("-----------------------------------------------------"); System.out.println("L'etudiant magoron est:"); System.out.println("-----------------------------------------------------"); String nomMagoron = ((GestionDesNotesInterface) gestionDesNotes).getMajoran(etudiants).getNom(); System.out.println("L'etudiant Majoran est : "+nomMagoron); /**------------------------------- * Pour affichier la liste des étudiants qu'en validé les module avec un moyenne sup a 12 */ System.out.println("-----------------------------------------------------"); System.out.println("La liste des étudiant qu'en valider les modules"); System.out.println("-----------------------------------------------------"); for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getvalidation()) { String message = "L'étudiant : "+etudiant.getNom(); message += " à validé les modules "; for (int i = 0; i < etudiant.getNotes().size() ;i++) { message += "<<" + etudiant.getNotes().get(i).getName(); message += ", " + etudiant.getNotes().get(i).getValue()+">> "; } System.out.println(message); System.out.println("-----------------------------------------------------"); } /** * Pour affichier la liste des étudiants qu'en un ratrapage dans des module avec un 12 < moyenne >= 7 */ System.out.println("-----------------------------------------------------"); System.out.println("La liste des étudiant qu'en ratrapage"); System.out.println("-----------------------------------------------------"); for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getRat()) { String message = "L'étudiant : "+etudiant.getNom(); message += " à un ratrapage dans les modules "; for (int i = 0; i < etudiant.getNotes().size() ;i++) { message += "<<" + etudiant.getNotes().get(i).getName(); message += ", " + etudiant.getNotes().get(i).getValue()+">> "; } System.out.println(message); System.out.println("-----------------------------------------------------"); } /** * Pour affichier la liste des étudiants qu'en un non validé dans des module avec un moyenne < 7 */ System.out.println("-----------------------------------------------------"); System.out.println("La liste des étudiant qu'en non validé"); System.out.println("-----------------------------------------------------"); for (Etudiant etudiant: ((GestionDesNotesInterface) gestionDesNotes).getNonValidation()) { String message = "L'étudiant : "+etudiant.getNom(); message += " à eu une non validé dans les modules "; for (int i = 0; i < etudiant.getNotes().size() ;i++) { message 
+= "<<" + etudiant.getNotes().get(i).getName(); message += ", " + etudiant.getNotes().get(i).getValue()+">> "; } System.out.println(message); System.out.println("-----------------------------------------------------"); } } catch (NotBoundException e) { e.printStackTrace(); } catch (MalformedURLException e) { e.printStackTrace(); } catch (RemoteException e) { e.printStackTrace(); } } }
geekdos/Personal_Labs
MidelwarLab/GestionDesNotes/src/com/geekdos/app/App.java
Java
apache-2.0
9,128
/// Refly License /// /// Copyright (c) 2004 Jonathan de Halleux, http://www.dotnetwiki.org /// /// This software is provided 'as-is', without any express or implied warranty. /// In no event will the authors be held liable for any damages arising from /// the use of this software. /// /// Permission is granted to anyone to use this software for any purpose, /// including commercial applications, and to alter it and redistribute it /// freely, subject to the following restrictions: /// /// 1. The origin of this software must not be misrepresented; /// you must not claim that you wrote the original software. /// If you use this software in a product, an acknowledgment in the product /// documentation would be appreciated but is not required. /// /// 2. Altered source versions must be plainly marked as such, /// and must not be misrepresented as being the original software. /// ///3. This notice may not be removed or altered from any source distribution. using System; using System.CodeDom; namespace Refly.CodeDom { using Refly.CodeDom.Expressions; /// <summary> /// A field declaration /// </summary> public class FieldDeclaration : MemberDeclaration { private ITypeDeclaration type; private Expression initExpression = null; internal FieldDeclaration(string name, Declaration declaringType, ITypeDeclaration type) :base(name,declaringType) { if (type==null) throw new ArgumentNullException("type"); this.type = type; } public ITypeDeclaration Type { get { return this.type; } } public Expression InitExpression { get { return this.initExpression; } set { this.initExpression = value; } } public override CodeTypeMember ToCodeDom() { CodeMemberField f = new CodeMemberField( this.Type.TypeReference, this.Name ); if (this.initExpression!=null) { f.InitExpression = this.initExpression.ToCodeDom(); } // comments base.ToCodeDom(f); return f; } } }
Gallio/mbunit-v2
src/refly/Refly/CodeDom/FieldDeclaration.cs
C#
apache-2.0
2,091
<?php class Graphite_Relation extends Graphite_Resource { function nodeType() { return "#relation"; } }
lyndonnixon/annotationtool
libraries/graphite/Graphite/Relation.php
PHP
apache-2.0
106
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.scenariosimulation.backend.runner; import java.util.List; import org.drools.scenariosimulation.api.model.ScenarioWithIndex; import org.drools.scenariosimulation.api.model.Simulation; import org.drools.scenariosimulation.api.model.SimulationDescriptor; import org.drools.scenariosimulation.backend.expression.DMNFeelExpressionEvaluator; import org.kie.api.runtime.KieContainer; public class DMNScenarioRunner extends AbstractScenarioRunner { public DMNScenarioRunner(KieContainer kieContainer, Simulation simulation) { this(kieContainer, simulation, null); } public DMNScenarioRunner(KieContainer kieContainer, Simulation simulation, String fileName) { this(kieContainer, simulation.getSimulationDescriptor(), simulation.getScenarioWithIndex(), fileName); } public DMNScenarioRunner(KieContainer kieContainer, SimulationDescriptor simulationDescriptor, List<ScenarioWithIndex> scenarios) { this(kieContainer, simulationDescriptor, scenarios, null); } public DMNScenarioRunner(KieContainer kieContainer, SimulationDescriptor simulationDescriptor, List<ScenarioWithIndex> scenarios, String fileName) { super(kieContainer, simulationDescriptor, scenarios, fileName, DMNFeelExpressionEvaluator::new); } @Override protected AbstractRunnerHelper newRunnerHelper() { return new DMNScenarioRunnerHelper(); } }
etirelli/drools
drools-scenario-simulation/drools-scenario-simulation-backend/src/main/java/org/drools/scenariosimulation/backend/runner/DMNScenarioRunner.java
Java
apache-2.0
2,033
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.testing;

import static com.google.common.base.Preconditions.checkState;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;

/**
 * JUnit extension for overriding {@code private static} fields during a test.
 *
 * <p>This extension uses reflection to change the value of a field while your test is running and
 * then restore it to its original value after it's done (even if the test fails). The injection
 * will work even if the field is marked {@code private} (but not if it's {@code final}). The
 * downside is that if you rename the field in the future, IDE refactoring won't be smart enough to
 * update the injection site.
 *
 * <p>We encourage you to consider using {@link google.registry.util.NonFinalForTesting
 * &#064;NonFinalForTesting} to document your injected fields.
 *
 * <p>This class is a horrible evil hack, but it alleviates you of the toil of having to break
 * encapsulation by making your fields non-{@code private}, using the {@link
 * com.google.common.annotations.VisibleForTesting &#064;VisibleForTesting} annotation to document
 * why you've reduced visibility, creating a temporary field to store the old value, and then
 * writing an {@link org.junit.After &#064;After} method to restore it. So sometimes it feels good
 * to be evil; but hopefully one day we'll be able to delete this class and do things
 * <i>properly</i> with <a href="http://square.github.io/dagger/">Dagger</a> dependency injection.
 *
 * <p>You use this class by declaring it as an {@link
 * org.junit.jupiter.api.extension.RegisterExtension &#064;RegisterExtension} field and then call
 * {@link #setStaticField} from either your {@link org.junit.jupiter.api.Test &#064;Test} or {@link
 * org.junit.jupiter.api.BeforeEach &#064;BeforeEach} methods. For example:
 *
 * <pre>
 * // Doomsday.java
 * public class Doomsday {
 *
 *   private static Clock clock = new SystemClock();
 *
 *   public long getTime() {
 *     return clock.currentTimeMillis();
 *   }
 * }
 *
 * // DoomsdayTest.java
 * public class DoomsdayTest {
 *
 *   &#064;RegisterExtension
 *   public InjectExtension inject = new InjectExtension();
 *
 *   private final FakeClock clock = new FakeClock();
 *
 *   &#064;BeforeEach
 *   public void beforeEach() {
 *     inject.setStaticField(Doomsday.class, "clock", clock);
 *   }
 *
 *   &#064;Test
 *   public void test() {
 *     clock.advanceBy(666L);
 *     Doomsday doom = new Doomsday();
 *     assertEquals(666L, doom.getTime());
 *   }
 * }
 * </pre>
 *
 * @see google.registry.util.NonFinalForTesting
 */
public class InjectExtension implements AfterEachCallback, BeforeEachCallback {

  private static class Change {
    private final Field field;
    @Nullable private Object oldValue;
    @Nullable private final Object newValue;
    private boolean active;

    Change(Field field, @Nullable Object oldValue, @Nullable Object newValue, boolean active) {
      this.field = field;
      this.oldValue = oldValue;
      this.newValue = newValue;
      this.active = active;
    }
  }

  private final List<Change> changes = new ArrayList<>();
  private final Set<Field> injected = new HashSet<>();

  /** Adds the specified field override to those set by the extension. */
  public InjectExtension withStaticFieldOverride(
      Class<?> clazz, String fieldName, @Nullable Object newValue) {
    changes.add(new Change(getField(clazz, fieldName), null, newValue, false));
    return this;
  }

  /**
   * Sets a static field and restores its current value after the test completes.
   *
   * <p>Prefer to use withStaticFieldOverride(), which is more consistent with the extension
   * pattern.
   *
   * <p>The field is allowed to be {@code private}, but it must not be {@code final}.
   *
   * <p>This method may be called from either your {@link
   * org.junit.jupiter.api.BeforeEach @BeforeEach} method or from the {@link
   * org.junit.jupiter.api.Test @Test} method itself. However, you may not inject the same field
   * multiple times during the same test.
   *
   * @throws IllegalArgumentException if the static field could not be found or modified.
   * @throws IllegalStateException if the field has already been injected during this test.
*/ public void setStaticField(Class<?> clazz, String fieldName, @Nullable Object newValue) { Field field = getField(clazz, fieldName); Change change = new Change(field, null, newValue, true); activateChange(change); changes.add(change); injected.add(field); } @Override public void beforeEach(ExtensionContext context) { for (Change change : changes) { if (!change.active) { activateChange(change); } } } @Override public void afterEach(ExtensionContext context) { RuntimeException thrown = null; for (Change change : changes) { if (change.active) { try { checkState( change.field.get(null).equals(change.newValue), "Static field value was changed post-injection: %s.%s", change.field.getDeclaringClass().getSimpleName(), change.field.getName()); change.field.set(null, change.oldValue); } catch (IllegalArgumentException | IllegalStateException | IllegalAccessException e) { if (thrown == null) { thrown = new RuntimeException(e); } else { thrown.addSuppressed(e); } } } } changes.clear(); injected.clear(); if (thrown != null) { throw thrown; } } private Field getField(Class<?> clazz, String fieldName) { try { return clazz.getDeclaredField(fieldName); } catch (SecurityException | NoSuchFieldException e) { throw new IllegalArgumentException( String.format("Static field not found: %s.%s", clazz.getSimpleName(), fieldName), e); } } private void activateChange(Change change) { Class<?> clazz = change.field.getDeclaringClass(); try { change.field.setAccessible(true); change.oldValue = change.field.get(null); } catch (IllegalArgumentException | IllegalAccessException e) { throw new IllegalArgumentException( String.format( "Static field not gettable: %s.%s", clazz.getSimpleName(), change.field.getName()), e); } checkState( !injected.contains(change.field), "Static field already injected: %s.%s", clazz.getSimpleName(), change.field.getName()); try { change.field.set(null, change.newValue); } catch (IllegalArgumentException | IllegalAccessException e) { throw new IllegalArgumentException( String.format( "Static field not settable: %s.%s", clazz.getSimpleName(), change.field.getName()), e); } change.active = true; } }
google/nomulus
core/src/test/java/google/registry/testing/InjectExtension.java
Java
apache-2.0
7,738
(function() { 'use strict'; // set up margins var el = d3.select('.geomap'), elWidth = parseInt(el.style('width'), 10), elHeight = parseInt(el.style('height'), 10), margin = {top: 20, right: 20, bottom: 30, left: 50}, width = elWidth - margin.left - margin.right, height = elHeight - margin.top - margin.bottom; // create svg element var svg = el.append("svg") .attr("width", elWidth) .attr("height", elHeight) .append("g") .attr('transform', 'translate(' + margin.left + "," + margin.top + ')'); d3.json("/data/us-states.json", function(error, data) { visualize(data); }); function visualize(data) { // code here } }());
victormejia/d3-workshop-playground
modules/geomapping/geomap.js
JavaScript
apache-2.0
703
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.types; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.table.catalog.ObjectIdentifier; import org.apache.flink.table.types.logical.ArrayType; import org.apache.flink.table.types.logical.BigIntType; import org.apache.flink.table.types.logical.BinaryType; import org.apache.flink.table.types.logical.BooleanType; import org.apache.flink.table.types.logical.CharType; import org.apache.flink.table.types.logical.DateType; import org.apache.flink.table.types.logical.DayTimeIntervalType; import org.apache.flink.table.types.logical.DecimalType; import org.apache.flink.table.types.logical.DistinctType; import org.apache.flink.table.types.logical.DoubleType; import org.apache.flink.table.types.logical.FloatType; import org.apache.flink.table.types.logical.IntType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.MapType; import org.apache.flink.table.types.logical.MultisetType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.RowType.RowField; import org.apache.flink.table.types.logical.SmallIntType; import org.apache.flink.table.types.logical.StructuredType; import org.apache.flink.table.types.logical.TimeType; import org.apache.flink.table.types.logical.TimestampKind; import org.apache.flink.table.types.logical.TimestampType; import org.apache.flink.table.types.logical.TinyIntType; import org.apache.flink.table.types.logical.TypeInformationRawType; import org.apache.flink.table.types.logical.VarBinaryType; import org.apache.flink.table.types.logical.VarCharType; import org.apache.flink.table.types.logical.YearMonthIntervalType; import org.apache.flink.table.types.logical.ZonedTimestampType; import org.apache.flink.table.types.logical.utils.LogicalTypeCasts; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** Tests for {@link LogicalTypeCasts#supportsAvoidingCast(LogicalType, LogicalType)}. 
*/ @RunWith(Parameterized.class) public class LogicalTypeCastAvoidanceTest { @Parameters(name = "{index}: [{0} COMPATIBLE {1} => {2}") public static List<Object[]> testData() { return Arrays.asList( new Object[][] { {new CharType(), new CharType(5), false}, {new VarCharType(30), new VarCharType(10), false}, {new VarCharType(10), new VarCharType(30), true}, {new CharType(10), new VarCharType(30), true}, {new BinaryType(10), new VarBinaryType(30), true}, {new CharType(false, 10), new VarCharType(30), true}, {new BinaryType(false, 10), new VarBinaryType(30), true}, {new VarCharType(30), new CharType(10), false}, {new VarBinaryType(30), new BinaryType(10), false}, {new BooleanType(), new BooleanType(false), false}, {new BinaryType(10), new BinaryType(30), false}, {new VarBinaryType(10), new VarBinaryType(30), true}, {new VarBinaryType(30), new VarBinaryType(10), false}, {new DecimalType(), new DecimalType(10, 2), false}, {new TinyIntType(), new TinyIntType(false), false}, {new SmallIntType(), new SmallIntType(false), false}, {new IntType(), new IntType(false), false}, {new IntType(false), new IntType(), true}, {new BigIntType(), new BigIntType(false), false}, {new FloatType(), new FloatType(false), false}, {new DoubleType(), new DoubleType(false), false}, {new DateType(), new DateType(false), false}, {new TimeType(), new TimeType(9), false}, {new TimestampType(9), new TimestampType(3), false}, {new ZonedTimestampType(9), new ZonedTimestampType(3), false}, { new ZonedTimestampType(false, TimestampKind.ROWTIME, 9), new ZonedTimestampType(3), false }, { new YearMonthIntervalType( YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH, 2), new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.MONTH), false }, { new DayTimeIntervalType( DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND, 2, 6), new DayTimeIntervalType( DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND, 2, 7), false }, { new ArrayType(new TimestampType()), new ArrayType(new SmallIntType()), false, }, { new MultisetType(new TimestampType()), new MultisetType(new SmallIntType()), false }, { new MapType(new VarCharType(10), new TimestampType()), new MapType(new VarCharType(30), new TimestampType()), true }, { new MapType(new VarCharType(30), new TimestampType()), new MapType(new VarCharType(10), new TimestampType()), false }, { new RowType( Arrays.asList( new RowType.RowField("a", new VarCharType()), new RowType.RowField("b", new VarCharType()), new RowType.RowField("c", new VarCharType()), new RowType.RowField("d", new TimestampType()))), new RowType( Arrays.asList( new RowType.RowField("_a", new VarCharType()), new RowType.RowField("_b", new VarCharType()), new RowType.RowField("_c", new VarCharType()), new RowType.RowField("_d", new TimestampType()))), // field name doesn't matter true }, { new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new VarCharType()))), new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new BooleanType()))), false }, { new ArrayType( new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new IntType())))), new ArrayType( new RowType( Arrays.asList( new RowField("f3", new IntType()), new RowField("f4", new IntType())))), true }, { new MapType( new IntType(), new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new IntType())))), new MapType( new IntType(), new RowType( Arrays.asList( new RowField("f3", new IntType()), new RowField("f4", new IntType())))), true }, { new 
MultisetType( new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new IntType())))), new MultisetType( new RowType( Arrays.asList( new RowField("f1", new IntType()), new RowField("f2", new IntType())))), true }, { new TypeInformationRawType<>(Types.GENERIC(LogicalTypesTest.class)), new TypeInformationRawType<>(Types.GENERIC(Object.class)), false }, { createUserType("User", new IntType(), new VarCharType()), createUserType("User", new IntType(), new VarCharType()), true }, { createUserType("User", new IntType(), new VarCharType()), createUserType("User2", new IntType(), new VarCharType()), false }, { createDistinctType("Money", new DecimalType(10, 2)), createDistinctType("Money", new DecimalType(10, 2)), true }, { createDistinctType("Money", new DecimalType(10, 2)), createDistinctType("Money2", new DecimalType(10, 2)), true }, // row and structure type { RowType.of(new IntType(), new VarCharType()), createUserType("User2", new IntType(), new VarCharType()), true }, { RowType.of(new BigIntType(), new VarCharType()), createUserType("User2", new IntType(), new VarCharType()), false }, { createUserType("User2", new IntType(), new VarCharType()), RowType.of(new IntType(), new VarCharType()), true }, { createUserType("User2", new IntType(), new VarCharType()), RowType.of(new BigIntType(), new VarCharType()), false }, }); } @Parameter public LogicalType sourceType; @Parameter(1) public LogicalType targetType; @Parameter(2) public boolean equals; @Test public void testSupportsAvoidingCast() { assertThat(supportsAvoidingCast(sourceType, targetType), equalTo(equals)); assertTrue(supportsAvoidingCast(sourceType, sourceType.copy())); assertTrue(supportsAvoidingCast(targetType, targetType.copy())); } private static DistinctType createDistinctType(String name, LogicalType sourceType) { return DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", name), sourceType) .description("Money type desc.") .build(); } private static StructuredType createUserType(String name, LogicalType... children) { return StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", name), User.class) .attributes( Arrays.stream(children) .map(lt -> new StructuredType.StructuredAttribute("field", lt)) .collect(Collectors.toList())) .description("User type desc.") .setFinal(true) .setInstantiable(true) .build(); } private static final class User { public int setting; } }
clarkyzl/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/LogicalTypeCastAvoidanceTest.java
Java
apache-2.0
14,234
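The parameterized data above encodes an asymmetric "no cast needed" relation. Below is a minimal standalone sketch of those semantics, using only the Flink types and the supportsAvoidingCast call that already appear in the test file; the demo class name and main method are assumptions, not part of the repository.

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.VarCharType;

import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast;

// Hypothetical demo class; only the Flink types and the static
// supportsAvoidingCast call are taken from the test file above.
public class AvoidingCastDemo {
    public static void main(String[] args) {
        // Widening VARCHAR(10) -> VARCHAR(30): every value still fits, no cast needed.
        System.out.println(supportsAvoidingCast(new VarCharType(10), new VarCharType(30))); // true
        // Narrowing VARCHAR(30) -> VARCHAR(10): a cast would be required.
        System.out.println(supportsAvoidingCast(new VarCharType(30), new VarCharType(10))); // false
        // Relaxing INT NOT NULL -> INT (nullable) is safe in the same way.
        System.out.println(supportsAvoidingCast(new IntType(false), new IntType())); // true
    }
}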
/*
 * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.andes.mqtt;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dna.mqtt.wso2.QOSLevel;
import org.wso2.andes.kernel.*;
import org.wso2.andes.mqtt.utils.MQTTUtils;
import org.wso2.andes.subscription.OutboundSubscription;

import java.nio.ByteBuffer;
import java.util.UUID;

/**
 * Cluster wide subscriptions relevant per topic will be maintained through this class.
 * Per topic there will be only one subscription, just to indicate that the subscription relies
 * on the specific node. Each time a message is published to a specific node the Andes kernel
 * will call this subscription object. The subscriber will contain a reference to the relevant
 * bridge connection where the bridge will notify the protocol engine to inform the relevant
 * subscriptions which are channel bound.
 */
public class MQTTLocalSubscription implements OutboundSubscription {

    //Will log the flows relevant to this class
    private static Log log = LogFactory.getLog(MQTTLocalSubscription.class);
    //The reference to the bridge object
    private MQTTopicManager mqqtServerChannel;
    //Will store the MQTT channel id
    private String mqttSubscriptionID;
    //Will set unique uuid as the channel of the subscription this will be used to track the delivery of messages
    private UUID channelID;
    //The QOS level the subscription is bound to
    private int subscriberQOS;

    private String wildcardDestination;

    //keep if the underlying subscription is active
    private boolean isActive;

    /**
     * Track messages sent as retained messages
     */
    private ConcurrentTrackingList<Long> retainedMessageList = new ConcurrentTrackingList<Long>();

    /**
     * Will allow retrieval of the qos the subscription is bound to
     *
     * @return the level of qos the subscription is bound to
     */
    public int getSubscriberQOS() {
        return subscriberQOS;
    }

    /**
     * Will specify the level of the qos the subscription is bound to
     *
     * @param subscriberQOS the qos could be either 0,1 or 2
     */
    public void setSubscriberQOS(int subscriberQOS) {
        this.subscriberQOS = subscriberQOS;
    }

    /**
     * Retrieval of the subscription id
     *
     * @return the id of the subscriber
     */
    public String getMqttSubscriptionID() {
        return mqttSubscriptionID;
    }

    /**
     * Sets an id to the subscriber which will be unique
     *
     * @param mqttSubscriptionID the unique id of the subscriber
     */
    public void setMqttSubscriptionID(String mqttSubscriptionID) {
        this.mqttSubscriptionID = mqttSubscriptionID;
    }

    /**
     * The relevant subscription will be registered
     *
     * @param wildCardDestination the wildcard topic destination the subscription is bound to
     * @param channelID           ID of the underlying subscription channel
     * @param isActive            true if subscription is active (TCP connection is live)
     */
    public MQTTLocalSubscription(String wildCardDestination, UUID channelID, boolean isActive) {
        this.channelID = channelID;
        this.isActive = isActive;
        this.wildcardDestination = wildCardDestination;
    }

    /**
     * Will set the server channel that will maintain the connectivity between the MQTT protocol
     * realm and Andes.
     *
     * @param mqqtServerChannel the bridge connection that will be maintained between the protocol and andes
     */
    public void setMqqtServerChannel(MQTTopicManager mqqtServerChannel) {
        this.mqqtServerChannel = mqqtServerChannel;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean sendMessageToSubscriber(ProtocolMessage protocolMessage, AndesContent content)
            throws AndesException {
        boolean sendSuccess;

        DeliverableAndesMetadata messageMetadata = protocolMessage.getMessage();

        if (messageMetadata.isRetain()) {
            recordRetainedMessage(messageMetadata.getMessageID());
        }

        //Should get the message from the list
        ByteBuffer message = MQTTUtils.getContentFromMetaInformation(content);

        //Will publish the message to the respective queue
        if (null != mqqtServerChannel) {
            try {
                //TODO:review - instead of getSubscribedDestination() used message destination
                mqqtServerChannel.distributeMessageToSubscriber(
                        wildcardDestination, message,
                        messageMetadata.getMessageID(), messageMetadata.getQosLevel(),
                        messageMetadata.isPersistent(), getMqttSubscriptionID(),
                        getSubscriberQOS(), messageMetadata);

                //We will indicate the ack to the kernel at this stage
                //For MQTT QOS 0 we do not get ack from subscriber, hence will be implicitly creating an ack
                if (QOSLevel.AT_MOST_ONCE.getValue() == getSubscriberQOS() ||
                        QOSLevel.AT_MOST_ONCE.getValue() == messageMetadata.getQosLevel()) {
                    mqqtServerChannel.implicitAck(messageMetadata.getMessageID(), getChannelID());
                }
                sendSuccess = true;
            } catch (MQTTException e) {
                final String error = "Error occurred while delivering message to the subscriber for message :" +
                        messageMetadata.getMessageID();
                log.error(error, e);
                throw new AndesException(error, e);
            }
        } else {
            sendSuccess = false;
        }

        return sendSuccess;
    }

    /**
     * Record the given message ID as a retained message in the tracker.
     *
     * @param messageID Message ID of the retained message
     */
    public void recordRetainedMessage(long messageID) {
        retainedMessageList.add(messageID);
    }

    @Override
    public boolean isActive() {
        //Report whether the underlying subscription's TCP connection is live
        return isActive;
    }

    @Override
    public UUID getChannelID() {
        return channelID;
    }

    //TODO: decide how to call this
    public void ackReceived(long messageID) {
        // Remove if received acknowledgment message id contains in retained message list.
        retainedMessageList.remove(messageID);
    }
}
ThilankaBowala/andes
modules/andes-core/broker/src/main/java/org/wso2/andes/mqtt/MQTTLocalSubscription.java
Java
apache-2.0
6,839
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jp.co.yahoo.dataplatform.mds.stats; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.Arguments; import static org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.params.provider.Arguments.arguments; public class TestSpreadSummaryStats { @Test public void T_newInstance_1(){ SpreadSummaryStats stats = new SpreadSummaryStats(); assertEquals( 0 , stats.getLineCount() ); SummaryStats summary = stats.getSummaryStats(); assertEquals( 0 , summary.getRowCount() ); assertEquals( 0 , summary.getRawDataSize() ); assertEquals( 0 , summary.getRealDataSize() ); } @Test public void T_newInstance_2(){ SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ); assertEquals( 5 , stats.getLineCount() ); SummaryStats summary = stats.getSummaryStats(); assertEquals( 10 , summary.getRowCount() ); assertEquals( 100 , summary.getRawDataSize() ); assertEquals( 50 , summary.getRealDataSize() ); System.out.println( stats.toString() ); } @Test public void T_merge_1(){ SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ); stats.merge( new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ) ); assertEquals( 10 , stats.getLineCount() ); SummaryStats summary = stats.getSummaryStats(); assertEquals( 20 , summary.getRowCount() ); assertEquals( 200 , summary.getRawDataSize() ); assertEquals( 100 , summary.getRealDataSize() ); } @Test public void T_merge_2(){ SpreadSummaryStats stats = new SpreadSummaryStats(); stats.merge( new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ) ); assertEquals( 5 , stats.getLineCount() ); SummaryStats summary = stats.getSummaryStats(); assertEquals( 10 , summary.getRowCount() ); assertEquals( 100 , summary.getRawDataSize() ); assertEquals( 50 , summary.getRealDataSize() ); } @Test public void T_average_1(){ SpreadSummaryStats stats = new SpreadSummaryStats( 5 , new SummaryStats( 10 , 100 , 50 , 100 , 10 ) ); assertEquals( 5 , stats.getLineCount() ); SummaryStats summary = stats.getSummaryStats(); assertEquals( 10 , summary.getRowCount() ); assertEquals( 100 , summary.getRawDataSize() ); assertEquals( 50 , summary.getRealDataSize() ); assertEquals( (double)20 , stats.getAverageRecordSize() ); assertEquals( (double)10 , stats.getAverageRecordRealSize() ); assertEquals( (double)2 , stats.getAverageRecordPerField() ); } }
yahoojapan/multiple-dimension-spread
src/common/src/test/java/jp/co/yahoo/dataplatform/mds/stats/TestSpreadSummaryStats.java
Java
apache-2.0
3,580
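A short standalone sketch of the merge semantics the tests above assert: line counts and summary fields accumulate additively. Only the constructors and getters already exercised by T_merge_1 are used; the demo class itself is hypothetical, not part of the repository.

import jp.co.yahoo.dataplatform.mds.stats.SpreadSummaryStats;
import jp.co.yahoo.dataplatform.mds.stats.SummaryStats;

// Hypothetical demo class; the stats API calls mirror T_merge_1 above.
public class SpreadSummaryStatsMergeDemo {
    public static void main(String[] args) {
        SpreadSummaryStats stats =
            new SpreadSummaryStats(5, new SummaryStats(10, 100, 50, 100, 10));
        stats.merge(new SpreadSummaryStats(5, new SummaryStats(10, 100, 50, 100, 10)));
        System.out.println(stats.getLineCount());                      // 10  (5 + 5)
        System.out.println(stats.getSummaryStats().getRowCount());     // 20  (10 + 10)
        System.out.println(stats.getSummaryStats().getRawDataSize());  // 200 (100 + 100)
        System.out.println(stats.getSummaryStats().getRealDataSize()); // 100 (50 + 50)
    }
}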
/* * Copyright 2013-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.netflix.rx; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.test.IntegrationTest; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.boot.test.TestRestTemplate; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import rx.Single; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * Tests the {@link SingleReturnValueHandler} class. * * @author Spencer Gibb * @author Jakub Narloch */ @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(classes = SingleReturnValueHandlerTest.Application.class) @WebAppConfiguration @IntegrationTest({"server.port=0"}) @DirtiesContext public class SingleReturnValueHandlerTest { @Value("${local.server.port}") private int port = 0; private TestRestTemplate restTemplate = new TestRestTemplate(); @Configuration @EnableAutoConfiguration @RestController protected static class Application { // tag::rx_single[] @RequestMapping(method = RequestMethod.GET, value = "/single") public Single<String> single() { return Single.just("single value"); } @RequestMapping(method = RequestMethod.GET, value = "/singleWithResponse") public ResponseEntity<Single<String>> singleWithResponse() { return new ResponseEntity<>(Single.just("single value"), HttpStatus.NOT_FOUND); } @RequestMapping(method = RequestMethod.GET, value = "/throw") public Single<Object> error() { return Single.error(new RuntimeException("Unexpected")); } // end::rx_single[] } @Test public void shouldRetrieveSingleValue() { // when ResponseEntity<String> response = restTemplate.getForEntity(path("/single"), String.class); // then assertNotNull(response); assertEquals(HttpStatus.OK, response.getStatusCode()); assertEquals("single value", response.getBody()); } @Test public void shouldRetrieveSingleValueWithStatusCode() { // when ResponseEntity<String> response = restTemplate.getForEntity(path("/singleWithResponse"), String.class); // then assertNotNull(response); assertEquals(HttpStatus.NOT_FOUND, response.getStatusCode()); assertEquals("single value", response.getBody()); } @Test public void shouldRetrieveErrorResponse() { // when ResponseEntity<Object> response = restTemplate.getForEntity(path("/throw"), Object.class); // then 
assertNotNull(response); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, response.getStatusCode()); } private String path(String context) { return String.format("http://localhost:%d%s", port, context); } }
daniellavoie/spring-cloud-netflix
spring-cloud-netflix-core/src/test/java/org/springframework/cloud/netflix/rx/SingleReturnValueHandlerTest.java
Java
apache-2.0
4,114
<?php if (__FILE__ == $_SERVER['SCRIPT_FILENAME']) { die(); }
extract(itx_get_option('header'));?>
<div id="itx-header">
	<h3>Custom Header</h3>
	<p><b>Preview</b>: <small>(might not be accurate)</small></p>
	<p>The dotted line indicates the scope. (defined in: header options item number 6).</p>
	<div class="outerwrap">
		<?php itx_header()?>
	</div>
	<div class="form-table">
		<h4>1. Header Background:</h4>
		<p>You can set the header background using <a href="themes.php?page=custom-header">WordPress Custom Header</a>
		<?php
		if (function_exists('register_default_headers')){
			echo 'or using an image already uploaded somewhere:';
		}else{
			echo 'or you can use predefined header images:<br>';
			$headbg=array();
			foreach (itx_setting('head_bg') as $k=>$v){
				$headbg[$k]=$v['description'];
			}
			if (is_array($headbg)) itx_form_radios($headbg+array(''=>'or Custom Image specified below:'), 'header[head_bg]', $head_bg);
		}
		?>
		</p>
		<div class="radiopad">
			<input type="text" name="<?php echo get_stylesheet();?>_header[image]" value="<?php echo $image;?>" size="70" /><br />
			Enter the URL of the header background image (don't forget the http://). Use this only if you have already uploaded the image somewhere; otherwise use <a href="themes.php?page=custom-header">WordPress Custom Header</a>.
		</div>
		<p>FYI: If you're using <a href="themes.php?page=custom-header">WordPress Custom Header</a> you have to crop the header image to certain dimensions. The x value is the same as the wrapper width set in <a href="#itx-layout">Layout Options</a> (equal to the max width in a fluid layout, or the wrapper width in a fixed layout). The y value defaults to 200 px. You can change the y value here:
		<input type="text" name="<?php echo get_stylesheet();?>_header[bg_height]" value="<?php echo $bg_height;?>" size="4" >px
		</p>
		<h4>2. Show in header:</h4>
		<?php
		itx_form_radios(array('Text Header (clickable text)','Image Header (clickable logo) :'), 'header[head_type]', $head_type);
		?>
		<div class="radiopad">
			<input type="text" name="<?php echo get_stylesheet();?>_header[logo]" value="<?php echo $logo;?>" size="70" /><br />
			Enter the logo URL. Don't forget the http://
		</div>
		<h4>3. Repeat The Background Image</h4>
		<?php
		itx_form_radios(array('no-repeat'=>'none','repeat-x'=>'Repeat Horizontally','repeat-y'=>'Repeat Vertically','repeat'=>'Repeat Horizontally and Vertically'), 'header[repeat]', $repeat);
		?>
		<h4>4. Background Image Horizontal Alignment</h4>
		if option 3A is selected<br />
		<?php
		itx_form_radios(array('left'=>'Left','center'=>'Center','right'=>'Right'), 'header[h_align]', $h_align);
		?>
		<h4>5. Background Image Vertical Alignment</h4>
		if option 3A is selected<br />
		<?php
		itx_form_radios(array('top'=>'Top','center'=>'Center','bottom'=>'Bottom'), 'header[v_align]', $v_align);
		?>
		<h4>6. Background Image Scope</h4>
		<?php
		$wrapper=itx_get_option('layout');
		if ($wrapper['wrapping']=='fixed')
			$wrapping=$wrapper['wrap'].'px';
		else
			$wrapping='98% + margin';
		itx_form_radios(array('As wide as the wrapper width ('.$wrapping.')','Full Width'), 'header[scope]', $scope);
		?>
		<h4>7. Text/logo alignment</h4>
		<?php
		itx_form_radios(array('left'=>'Left','center'=>'Center','right'=>'Right'), 'header[text_align]', $text_align);
		?>
		<h4>Colors and Sizes</h4>
		<table>
			<tr>
				<td>Header height</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[height]" value="<?php echo $height;?>" size="9" > size in pt,px,em,etc.
				<br> Leave it empty to make the size follow the normal flow.</td>
			</tr>
			<tr>
				<td>Blog Title font size</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[font_size]" value="<?php echo $font_size;?>" size="9" > size in pt,px,em,etc. </td>
			</tr>
			<tr>
				<td>Tagline font size</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[span_font_size]" value="<?php echo $span_font_size;?>" size="9" ></td>
			</tr>
			<tr>
				<td>Header background color</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[bgcolor]" value="<?php echo $bgcolor;?>" size="9" >
				<br> tip: use <em>transparent</em> to make it the same as the body background</td>
			</tr>
			<tr>
				<td>Blog Title color</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[color]" value="<?php echo $color;?>" size="9" >
				<br> tip: you may use <em>black</em> instead of <em>#000000</em></td>
			</tr>
			<tr>
				<td>Blog Title hover color</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[hover_color]" value="<?php echo $hover_color;?>" size="9" ></td>
			</tr>
			<tr>
				<td>Tagline color</td>
				<td><input type="text" name="<?php echo get_stylesheet();?>_header[span_color]" value="<?php echo $span_color;?>" size="9" ></td>
			</tr>
		</table>
	</div>
	<div class="clear"></div>
	<?php do_action('itx_admin_header');?>
	<button type="submit" name="<?php echo get_stylesheet();?>_reset" class="button-secondary" value="header" onclick="if (!confirm('Do you want to reset Header Options to Default?')) return false;">Reset to default Header Options</button>
</div>
wangjingfei/now-code
php/wp-content/themes/bombax/admin/template/header.php
PHP
apache-2.0
5,125
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.gamelift.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.gamelift.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.protocol.*; import com.amazonaws.protocol.Protocol; import com.amazonaws.annotation.SdkInternalApi; /** * UpdateGameSessionRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class UpdateGameSessionRequestProtocolMarshaller implements Marshaller<Request<UpdateGameSessionRequest>, UpdateGameSessionRequest> { private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/") .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true).operationIdentifier("GameLift.UpdateGameSession") .serviceName("AmazonGameLift").build(); private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory; public UpdateGameSessionRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<UpdateGameSessionRequest> marshall(UpdateGameSessionRequest updateGameSessionRequest) { if (updateGameSessionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { final ProtocolRequestMarshaller<UpdateGameSessionRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING, updateGameSessionRequest); protocolMarshaller.startMarshalling(); UpdateGameSessionRequestMarshaller.getInstance().marshall(updateGameSessionRequest, protocolMarshaller); return protocolMarshaller.finishMarshalling(); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
aws/aws-sdk-java
aws-java-sdk-gamelift/src/main/java/com/amazonaws/services/gamelift/model/transform/UpdateGameSessionRequestProtocolMarshaller.java
Java
apache-2.0
2,719
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. #include "producerconsumer.h" namespace vespalib { Consumer::Consumer(uint32_t maxQueue, bool inverse) : _queue(NULL, maxQueue), _inverse(inverse), _operations(0) { } Consumer::~Consumer() { } Producer::Producer(uint32_t cnt, Consumer &target) : _target(target), _cnt(cnt), _operations(0) { } Producer::~Producer() { } ProducerConsumer::ProducerConsumer(uint32_t cnt, bool inverse) : _cnt(cnt), _inverse(inverse), _operationsConsumed(0), _operationsProduced(0) { } ProducerConsumer::~ProducerConsumer() { } void Consumer::Run(FastOS_ThreadInterface *, void *) { for (;;) { MemList ml = _queue.dequeue(); if (ml == NULL) { return; } if (_inverse) { for (uint32_t i = ml->size(); i > 0; --i) { consume((*ml)[i - 1]); _operations++; } } else { for (uint32_t i = 0; i < ml->size(); ++i) { consume((*ml)[i]); _operations++; } } delete ml; } } void Producer::Run(FastOS_ThreadInterface *t, void *) { while (!t->GetBreakFlag()) { MemList ml = new MemListImpl(); for (uint32_t i = 0; i < _cnt; ++i) { ml->push_back(produce()); _operations++; } _target.enqueue(ml); } _target.close(); } void ProducerConsumer::Run(FastOS_ThreadInterface *t, void *) { while (!t->GetBreakFlag()) { MemListImpl ml; for (uint32_t i = 0; i < _cnt; ++i) { ml.push_back(produce()); _operationsProduced++; } if (_inverse) { for (uint32_t i = ml.size(); i > 0; --i) { consume(ml[i - 1]); _operationsConsumed++; } } else { for (uint32_t i = 0; i < ml.size(); ++i) { consume(ml[i]); _operationsConsumed++; } } } } }
vespa-engine/vespa
vespamalloc/src/tests/allocfree/producerconsumer.cpp
C++
apache-2.0
2,082
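For comparison, a minimal Java analogue of the batched producer/consumer handshake above; it is not part of the vespamalloc sources. A bounded BlockingQueue stands in for the fixed-capacity _queue, and an empty batch plays the role of the NULL returned after close(), since BlockingQueue rejects null elements.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Hypothetical sketch of the same pattern: the producer enqueues
// fixed-size batches and finishes with a sentinel batch; the consumer
// drains batches until it sees the sentinel.
public class BatchedProducerConsumer {
    private static final List<Integer> CLOSE = List.of(); // sentinel, mirrors close()

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<List<Integer>> queue = new ArrayBlockingQueue<>(16);

        Thread producer = new Thread(() -> {
            try {
                for (int batch = 0; batch < 100; batch++) {
                    List<Integer> ml = new ArrayList<>();
                    for (int i = 0; i < 64; i++) {
                        ml.add(batch * 64 + i); // "produce" one element
                    }
                    queue.put(ml); // blocks when the queue is full
                }
                queue.put(CLOSE); // equivalent of _target.close()
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });

        Thread consumer = new Thread(() -> {
            long operations = 0;
            try {
                for (;;) {
                    List<Integer> ml = queue.take();
                    if (ml.isEmpty()) {
                        break; // sentinel seen, mirrors the NULL dequeue
                    }
                    operations += ml.size(); // "consume" each element
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            System.out.println("consumed " + operations + " items");
        });

        producer.start();
        consumer.start();
        producer.join();
        consumer.join();
    }
}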
/*
 * Copyright (c) 2021, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.segmentation.slic;

import boofcv.alg.misc.GImageMiscOps;
import boofcv.alg.segmentation.ImageSegmentationOps;
import boofcv.core.image.GeneralizedImageOps;
import boofcv.struct.ConnectRule;
import boofcv.struct.feature.ColorQueue_F32;
import boofcv.struct.image.GrayS32;
import boofcv.struct.image.ImageBase;
import boofcv.struct.image.ImageType;
import boofcv.testing.BoofStandardJUnit;
import org.ddogleg.struct.DogArray;
import org.ddogleg.struct.DogArray_I32;
import org.junit.jupiter.api.Test;

import java.util.Arrays;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * @author Peter Abeles
 */
public abstract class GeneralSegmentSlicColorChecks<T extends ImageBase<T>> extends BoofStandardJUnit {

	ImageType<T> imageType;

	protected GeneralSegmentSlicColorChecks( ImageType<T> imageType ) {
		this.imageType = imageType;
	}

	public abstract SegmentSlic<T> createAlg( int numberOfRegions, float m, int totalIterations, ConnectRule rule );

	/**
	 * Give it an easy image to segment and see how well it does.
	 */
	@Test void easyTest() {
		T input = imageType.createImage(30, 40);
		GrayS32 output = new GrayS32(30, 40);

		GImageMiscOps.fillRectangle(input, 100, 0, 0, 15, 40);

		SegmentSlic<T> alg = createAlg(12, 200, 10, ConnectRule.EIGHT);

		alg.process(input, output);

		DogArray_I32 memberCount = alg.getRegionMemberCount();
		checkUnique(alg, output, memberCount.size);

		// see if the member count is correctly computed
		DogArray_I32 foundCount = new DogArray_I32(memberCount.size);
		foundCount.resize(memberCount.size);
		ImageSegmentationOps.countRegionPixels(output, foundCount.size, foundCount.data);
		for (int i = 0; i < memberCount.size; i++) {
			assertEquals(memberCount.get(i), foundCount.get(i));
		}
	}

	@Test void setColor() {
		T input = imageType.createImage(30, 40);
		GImageMiscOps.fillUniform(input, rand, 0, 200);

		SegmentSlic<T> alg = createAlg(12, 200, 10, ConnectRule.EIGHT);

		float[] found = new float[imageType.getNumBands()];
		alg.input = input;
		// check every pixel in the image
		for (int y = 0; y < input.height; y++) {
			for (int x = 0; x < input.width; x++) {
				alg.setColor(found, x, y);
				for (int i = 0; i < imageType.getNumBands(); i++) {
					double expected = GeneralizedImageOps.get(input, x, y, i);
					assertEquals(expected, found[i], 1e-4);
				}
			}
		}
	}

	@Test void addColor() {
		T input = imageType.createImage(30, 40);
		GImageMiscOps.fillUniform(input, rand, 0, 200);

		SegmentSlic<T> alg = createAlg(12, 200, 10, ConnectRule.EIGHT);
		alg.input = input;

		float[] expected = new float[imageType.getNumBands()];
		float[] found = new float[imageType.getNumBands()];
		float w = 1.4f;

		for (int i = 0; i < imageType.getNumBands(); i++) {
			expected[i] = found[i] = i + 0.4f;
		}

		int x = 4, y = 5;
		for (int i = 0; i < imageType.getNumBands(); i++) {
			expected[i] += (float)GeneralizedImageOps.get(input, x, y, i)*w;
		}

		alg.addColor(found, input.getIndex(x, y), w);

		for (int i = 0; i < imageType.getNumBands(); i++) {
			assertEquals(expected[i], found[i], 1e-4f);
		}
	}

	@Test void colorDistance() {
		T input = imageType.createImage(30, 40);
		GImageMiscOps.fillUniform(input, rand, 0, 200);

		SegmentSlic<T> alg = createAlg(12, 200, 10, ConnectRule.EIGHT);
		alg.input = input;

		float[] color = new float[imageType.getNumBands()];
		for (int i = 0; i < imageType.getNumBands(); i++) {
			color[i] = i*20.56f + 1.6f;
		}

		float[] pixel = new float[imageType.getNumBands()];
		alg.setColor(pixel, 6, 8);

		float expected = 0;
		for (int i = 0; i < imageType.getNumBands(); i++) {
			float d = color[i] - (float)GeneralizedImageOps.get(input, 6, 8, i);
			expected += d*d;
		}

		assertEquals(expected, alg.colorDistance(color, input.getIndex(6, 8)), 1e-4);
	}

	@Test void getIntensity() {
		T input = imageType.createImage(30, 40);
		GImageMiscOps.fillUniform(input, rand, 0, 200);

		SegmentSlic<T> alg = createAlg(12, 200, 10, ConnectRule.EIGHT);
		alg.input = input;

		float[] color = new float[imageType.getNumBands()];
		alg.setColor(color, 6, 8);

		float expected = 0;
		for (int i = 0; i < imageType.getNumBands(); i++) {
			expected += color[i];
		}
		expected /= imageType.getNumBands();

		assertEquals(expected, alg.getIntensity(6, 8), 1e-4);
	}

	/**
	 * Each region is assumed to be filled with a single color
	 */
	private void checkUnique( SegmentSlic<T> alg, GrayS32 output, int numRegions ) {
		boolean[] assigned = new boolean[numRegions];
		Arrays.fill(assigned, false);

		DogArray<float[]> colors = new ColorQueue_F32(imageType.getNumBands());
		colors.resize(numRegions);

		float[] found = new float[imageType.getNumBands()];

		// visit every pixel so each region's color is actually verified
		for (int y = 0; y < output.height; y++) {
			for (int x = 0; x < output.width; x++) {
				int regionid = output.get(x, y);

				if (assigned[regionid]) {
					float[] expected = colors.get(regionid);
					alg.setColor(found, x, y);
					for (int i = 0; i < imageType.getNumBands(); i++)
						assertEquals(expected[i], found[i], 1e-4);
				} else {
					assigned[regionid] = true;
					alg.setColor(colors.get(regionid), x, y);
				}
			}
		}
	}
}
lessthanoptimal/BoofCV
main/boofcv-feature/src/test/java/boofcv/alg/segmentation/slic/GeneralSegmentSlicColorChecks.java
Java
apache-2.0
5,813
package gudusoft.gsqlparser.sql2xml.model; import org.simpleframework.xml.Element; public class binary_factor { @Element private binary_primary binary_primary = new binary_primary( ); public binary_primary getBinary_primary( ) { return binary_primary; } }
sqlparser/sql2xml
sql2xml/src/gudusoft/gsqlparser/sql2xml/model/binary_factor.java
Java
apache-2.0
269
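A hedged sketch of how such a generated Simple XML model is typically deserialized. Persister is the standard org.simpleframework.xml serializer; the XML snippet, the demo class, and the assumption that binary_primary requires no further nested content are all illustrative, not taken from the repository.

import java.io.StringReader;

import org.simpleframework.xml.core.Persister;

// Illustrative only: element names are assumed to follow the field names
// of the generated model classes, and binary_primary is assumed to carry
// its own annotations elsewhere in the generated model.
public class BinaryFactorReadDemo {
    public static void main(String[] args) throws Exception {
        String xml =
            "<binary_factor>"
          + "  <binary_primary/>"
          + "</binary_factor>";
        Persister persister = new Persister();
        binary_factor factor = persister.read(binary_factor.class, new StringReader(xml));
        System.out.println(factor.getBinary_primary());
    }
}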