method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
@Test
public void testSerialization() {
Stroke stroke1 = new BasicStroke(2.0f);
XYPolygonAnnotation a1 = new XYPolygonAnnotation(new double[] {1.0,
2.0, 3.0, 4.0, 5.0, 6.0}, stroke1, Color.red, Color.blue);
XYPolygonAnnotation a2 = (XYPolygonAnnotation) TestUtilities.serialised(a1);
assertEquals(a1, a2);
}
|
void function() { Stroke stroke1 = new BasicStroke(2.0f); XYPolygonAnnotation a1 = new XYPolygonAnnotation(new double[] {1.0, 2.0, 3.0, 4.0, 5.0, 6.0}, stroke1, Color.red, Color.blue); XYPolygonAnnotation a2 = (XYPolygonAnnotation) TestUtilities.serialised(a1); assertEquals(a1, a2); }
|
/**
* Serialize an instance, restore it, and check for equality.
*/
|
Serialize an instance, restore it, and check for equality
|
testSerialization
|
{
"repo_name": "simon04/jfreechart",
"path": "src/test/java/org/jfree/chart/annotations/XYPolygonAnnotationTest.java",
"license": "lgpl-2.1",
"size": 6424
}
|
[
"java.awt.BasicStroke",
"java.awt.Color",
"java.awt.Stroke",
"org.jfree.chart.TestUtilities",
"org.junit.Assert"
] |
import java.awt.BasicStroke; import java.awt.Color; import java.awt.Stroke; import org.jfree.chart.TestUtilities; import org.junit.Assert;
|
import java.awt.*; import org.jfree.chart.*; import org.junit.*;
|
[
"java.awt",
"org.jfree.chart",
"org.junit"
] |
java.awt; org.jfree.chart; org.junit;
| 2,832,624 |
@Deprecated
@JsonProperty("datetimeDataTypeRefsetIdentifier")
public void setDatetimeDatatypeRefsetIdentifier(String datetimeDatatypeRefsetIdentifier) {
this.datetimeDatatypeRefsetIdentifier = datetimeDatatypeRefsetIdentifier;
}
|
@JsonProperty(STR) void function(String datetimeDatatypeRefsetIdentifier) { this.datetimeDatatypeRefsetIdentifier = datetimeDatatypeRefsetIdentifier; }
|
/**
* Sets the ID of the datetime datatype reference set identifier concept
*
* @param datetimeDatatypeRefsetIdentifier the datetimeDatatypeRefsetIdentifier to set
*/
|
Sets the ID of the datetime datatype reference set identifier concept
|
setDatetimeDatatypeRefsetIdentifier
|
{
"repo_name": "b2ihealthcare/snow-owl",
"path": "snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/config/SnomedCoreConfiguration.java",
"license": "apache-2.0",
"size": 7645
}
|
[
"com.fasterxml.jackson.annotation.JsonProperty"
] |
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.*;
|
[
"com.fasterxml.jackson"
] |
com.fasterxml.jackson;
| 1,160,688 |
private void doQuarantineTest(TableName table, HBaseFsck hbck, int check,
int corrupt, int fail, int quar, int missing) throws Exception {
try {
setupTable(table);
assertEquals(ROWKEYS.length, countRows());
admin.flush(table); // flush is async.
// Mess it up by leaving a hole in the assignment, meta, and hdfs data
admin.disableTable(table);
String[] args = {"-sidelineCorruptHFiles", "-repairHoles", "-ignorePreCheckPermission",
table.getNameAsString()};
HBaseFsck res = hbck.exec(hbfsckExecutorService, args);
HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
assertEquals(hfcc.getHFilesChecked(), check);
assertEquals(hfcc.getCorrupted().size(), corrupt);
assertEquals(hfcc.getFailures().size(), fail);
assertEquals(hfcc.getQuarantined().size(), quar);
assertEquals(hfcc.getMissing().size(), missing);
// its been fixed, verify that we can enable
admin.enableTableAsync(table);
while (!admin.isTableEnabled(table)) {
try {
Thread.sleep(250);
} catch (InterruptedException e) {
e.printStackTrace();
fail("Interrupted when trying to enable table " + table);
}
}
} finally {
cleanupTable(table);
}
}
|
void function(TableName table, HBaseFsck hbck, int check, int corrupt, int fail, int quar, int missing) throws Exception { try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); admin.flush(table); admin.disableTable(table); String[] args = {STR, STR, STR, table.getNameAsString()}; HBaseFsck res = hbck.exec(hbfsckExecutorService, args); HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker(); assertEquals(hfcc.getHFilesChecked(), check); assertEquals(hfcc.getCorrupted().size(), corrupt); assertEquals(hfcc.getFailures().size(), fail); assertEquals(hfcc.getQuarantined().size(), quar); assertEquals(hfcc.getMissing().size(), missing); admin.enableTableAsync(table); while (!admin.isTableEnabled(table)) { try { Thread.sleep(250); } catch (InterruptedException e) { e.printStackTrace(); fail(STR + table); } } } finally { cleanupTable(table); } }
|
/**
* Test that use this should have a timeout, because this method could potentially wait forever.
*/
|
Test that use this should have a timeout, because this method could potentially wait forever
|
doQuarantineTest
|
{
"repo_name": "SeekerResource/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java",
"license": "apache-2.0",
"size": 103756
}
|
[
"org.apache.hadoop.hbase.TableName",
"org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker",
"org.junit.Assert"
] |
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; import org.junit.Assert;
|
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.hbck.*; import org.junit.*;
|
[
"org.apache.hadoop",
"org.junit"
] |
org.apache.hadoop; org.junit;
| 1,788,139 |
public ReadableStringSet debugGetSuppressedUpdateEventNames() {
return suppressedEventNames;
}
|
ReadableStringSet function() { return suppressedEventNames; }
|
/**
* Gets the class names of suppressed event listeners. For debugging only.
*
* @return a live readable view of the currently suppressed events.
*/
|
Gets the class names of suppressed event listeners. For debugging only
|
debugGetSuppressedUpdateEventNames
|
{
"repo_name": "gburd/wave",
"path": "src/org/waveprotocol/wave/client/editor/EditorUpdateEventImpl.java",
"license": "apache-2.0",
"size": 8480
}
|
[
"org.waveprotocol.wave.model.util.ReadableStringSet"
] |
import org.waveprotocol.wave.model.util.ReadableStringSet;
|
import org.waveprotocol.wave.model.util.*;
|
[
"org.waveprotocol.wave"
] |
org.waveprotocol.wave;
| 568,359 |
private void pressPin(final PinView thumb) {
if (mFirstSetTickCount) {
mFirstSetTickCount = false;
}
if (mArePinsTemporary) {
ValueAnimator animator = ValueAnimator.ofFloat(0, mExpandedPinRadius);
animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
|
void function(final PinView thumb) { if (mFirstSetTickCount) { mFirstSetTickCount = false; } if (mArePinsTemporary) { ValueAnimator animator = ValueAnimator.ofFloat(0, mExpandedPinRadius); animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
|
/**
* Set the thumb to be in the pressed state and calls invalidate() to redraw
* the canvas to reflect the updated state.
*
* @param thumb the thumb to press
*/
|
Set the thumb to be in the pressed state and calls invalidate() to redraw the canvas to reflect the updated state
|
pressPin
|
{
"repo_name": "MaTriXy/material-range-bar",
"path": "rangebar/src/com/appyvet/rangebar/RangeBar.java",
"license": "apache-2.0",
"size": 50986
}
|
[
"android.animation.ValueAnimator"
] |
import android.animation.ValueAnimator;
|
import android.animation.*;
|
[
"android.animation"
] |
android.animation;
| 2,064,511 |
public String upload_appender_file1(byte[] file_buff, String file_ext_name,
NameValuePair[] meta_list) throws IOException, MyException {
String parts[] = this.upload_appender_file(file_buff, file_ext_name, meta_list);
if (parts != null) {
return parts[0] + SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + parts[1];
} else {
return null;
}
}
|
String function(byte[] file_buff, String file_ext_name, NameValuePair[] meta_list) throws IOException, MyException { String parts[] = this.upload_appender_file(file_buff, file_ext_name, meta_list); if (parts != null) { return parts[0] + SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + parts[1]; } else { return null; } }
|
/**
* upload appender file to storage server (by file buff)
*
* @param file_buff file content/buff
* @param file_ext_name file ext name, do not include dot(.)
* @param meta_list meta info array
* @return file id(including group name and filename) if success, <br>
* return null if fail
*/
|
upload appender file to storage server (by file buff)
|
upload_appender_file1
|
{
"repo_name": "brucevsked/vskeddemolist",
"path": "vskeddemos/mavenproject/fastdfsclientdemo/src/main/java/org/csource/fastdfs/StorageClient1.java",
"license": "mit",
"size": 27070
}
|
[
"java.io.IOException",
"org.csource.common.MyException",
"org.csource.common.NameValuePair"
] |
import java.io.IOException; import org.csource.common.MyException; import org.csource.common.NameValuePair;
|
import java.io.*; import org.csource.common.*;
|
[
"java.io",
"org.csource.common"
] |
java.io; org.csource.common;
| 2,904,397 |
return ( SunCORBATransportManagerConfig)this.getModel();
}
|
return ( SunCORBATransportManagerConfig)this.getModel(); }
|
/**
* Returns this Config Model Object.
*/
|
Returns this Config Model Object
|
corbaManager
|
{
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "utils/eclipselink.utils.workbench/scplugin/source/org/eclipse/persistence/tools/workbench/scplugin/model/adapter/SunCORBATransportManagerAdapter.java",
"license": "epl-1.0",
"size": 1902
}
|
[
"org.eclipse.persistence.internal.sessions.factories.model.transport.SunCORBATransportManagerConfig"
] |
import org.eclipse.persistence.internal.sessions.factories.model.transport.SunCORBATransportManagerConfig;
|
import org.eclipse.persistence.internal.sessions.factories.model.transport.*;
|
[
"org.eclipse.persistence"
] |
org.eclipse.persistence;
| 1,589,833 |
@Command(shortDescription = "Opens the NUI editor for a ui skin", requiredPermission = PermissionManager.NO_PERMISSION)
public String editSkin(@CommandParam(value = "uri", suggester = SkinSuggester.class) String uri) {
if (!nuiSkinEditorSystem.isEditorActive()) {
nuiSkinEditorSystem.toggleEditor();
}
Set<ResourceUrn> urns = assetManager.resolve(uri, UISkin.class);
switch (urns.size()) {
case 0:
return String.format("No asset found for screen '%s'", uri);
case 1:
ResourceUrn urn = urns.iterator().next();
((NUISkinEditorScreen) nuiManager.getScreen(NUISkinEditorScreen.ASSET_URI)).selectAsset(urn);
return "Success";
default:
return String.format("Multiple matches for screen '%s': {%s}", uri, Arrays.toString(urns.toArray()));
}
}
|
@Command(shortDescription = STR, requiredPermission = PermissionManager.NO_PERMISSION) String function(@CommandParam(value = "uri", suggester = SkinSuggester.class) String uri) { if (!nuiSkinEditorSystem.isEditorActive()) { nuiSkinEditorSystem.toggleEditor(); } Set<ResourceUrn> urns = assetManager.resolve(uri, UISkin.class); switch (urns.size()) { case 0: return String.format(STR, uri); case 1: ResourceUrn urn = urns.iterator().next(); ((NUISkinEditorScreen) nuiManager.getScreen(NUISkinEditorScreen.ASSET_URI)).selectAsset(urn); return STR; default: return String.format(STR, uri, Arrays.toString(urns.toArray())); } }
|
/**
* Opens the NUI editor for a ui skin
* @param uri String containing name of ui skin
* @return String containing final message
*/
|
Opens the NUI editor for a ui skin
|
editSkin
|
{
"repo_name": "mertserezli/Terasology",
"path": "engine/src/main/java/org/terasology/logic/console/commands/CoreCommands.java",
"license": "apache-2.0",
"size": 28714
}
|
[
"java.util.Arrays",
"java.util.Set",
"org.terasology.assets.ResourceUrn",
"org.terasology.logic.console.commandSystem.annotations.Command",
"org.terasology.logic.console.commandSystem.annotations.CommandParam",
"org.terasology.logic.console.suggesters.SkinSuggester",
"org.terasology.logic.permission.PermissionManager",
"org.terasology.rendering.nui.editor.layers.NUISkinEditorScreen",
"org.terasology.rendering.nui.skin.UISkin"
] |
import java.util.Arrays; import java.util.Set; import org.terasology.assets.ResourceUrn; import org.terasology.logic.console.commandSystem.annotations.Command; import org.terasology.logic.console.commandSystem.annotations.CommandParam; import org.terasology.logic.console.suggesters.SkinSuggester; import org.terasology.logic.permission.PermissionManager; import org.terasology.rendering.nui.editor.layers.NUISkinEditorScreen; import org.terasology.rendering.nui.skin.UISkin;
|
import java.util.*; import org.terasology.assets.*; import org.terasology.logic.console.*; import org.terasology.logic.console.suggesters.*; import org.terasology.logic.permission.*; import org.terasology.rendering.nui.editor.layers.*; import org.terasology.rendering.nui.skin.*;
|
[
"java.util",
"org.terasology.assets",
"org.terasology.logic",
"org.terasology.rendering"
] |
java.util; org.terasology.assets; org.terasology.logic; org.terasology.rendering;
| 1,309,533 |
public BitSet rootDocs(LeafReaderContext ctx) throws IOException {
return rootFilter.getBitSet(ctx);
}
|
BitSet function(LeafReaderContext ctx) throws IOException { return rootFilter.getBitSet(ctx); }
|
/**
* Get a {@link BitDocIdSet} that matches the root documents.
*/
|
Get a <code>BitDocIdSet</code> that matches the root documents
|
rootDocs
|
{
"repo_name": "gingerwizard/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java",
"license": "apache-2.0",
"size": 10856
}
|
[
"java.io.IOException",
"org.apache.lucene.index.LeafReaderContext",
"org.apache.lucene.util.BitSet"
] |
import java.io.IOException; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BitSet;
|
import java.io.*; import org.apache.lucene.index.*; import org.apache.lucene.util.*;
|
[
"java.io",
"org.apache.lucene"
] |
java.io; org.apache.lucene;
| 947,740 |
public boolean menuItems(String startFromPath, int numberOfLevels, HttpServletRequest request) throws JspException
{
boolean fileExists = false;
try
{
//Create the Menu for this path and depthLevel
createMenu(startFromPath,numberOfLevels,request);
//Validate if the file has been created, if so the menu have items
String currentPath = request.getRequestURI();
Host host = WebAPILocator.getHostWebAPI().getCurrentHost(request);
String hostId = host.getIdentifier();
if ((startFromPath == null) || (startFromPath.length() == 0)) {
Logger.debug(NavigationWebAPI.class, "pagePath=" + currentPath);
int idx1 = currentPath.indexOf("/");
int idx2 = currentPath.indexOf("/", idx1 + 1);
startFromPath = currentPath.substring(idx1, idx2 + 1);
Logger.debug(NavigationWebAPI.class, "path=" + startFromPath);
}
boolean addSpans = false;
if(request.getAttribute("menu_spans") != null && (Boolean)request.getAttribute("menu_spans")){
addSpans = true;
}
String firstItemClass = "";
if(request.getAttribute("firstItemClass") != null){
firstItemClass = " class=\""+(String)request.getAttribute("firstItemClass")+"_";
}
String lastItemClass = "";
if(request.getAttribute("lastItemClass") != null ) {
lastItemClass=" class=\""+(String)request.getAttribute("lastItemClass")+"_";
}
String menuIdPrefix = "";
if(request.getAttribute("menuIdPrefix") != null ){
menuIdPrefix=(String)request.getAttribute("menuIdPrefix")+"_";
}
String paramsValues = ((Boolean)addSpans).toString() + firstItemClass.toString() + lastItemClass.toString() +
menuIdPrefix.toString();
String fileName = "";
java.io.File file = null;
if ("/".equals(startFromPath))
{
fileName = hostId + "_levels_" + numberOfLevels + paramsValues.hashCode() + "_static.vtl";
file = new java.io.File(MENU_VTL_PATH + fileName);
if (file.exists() && file.length() > 0)
{
fileExists = true;
}
}
else
{
Folder folder = APILocator.getFolderAPI().findFolderByPath(startFromPath, hostId, user, true);
fileName = folder.getInode() + "_levels_" + numberOfLevels + paramsValues.hashCode() + "_static.vtl";
file = new java.io.File(MENU_VTL_PATH + fileName);
if (file.exists() && file.length() > 0)
{
fileExists = true;
}
}
}
catch(Exception ex)
{
Logger.error(this,ex.toString(),ex);
}
return fileExists;
}
|
boolean function(String startFromPath, int numberOfLevels, HttpServletRequest request) throws JspException { boolean fileExists = false; try { createMenu(startFromPath,numberOfLevels,request); String currentPath = request.getRequestURI(); Host host = WebAPILocator.getHostWebAPI().getCurrentHost(request); String hostId = host.getIdentifier(); if ((startFromPath == null) (startFromPath.length() == 0)) { Logger.debug(NavigationWebAPI.class, STR + currentPath); int idx1 = currentPath.indexOf("/"); int idx2 = currentPath.indexOf("/", idx1 + 1); startFromPath = currentPath.substring(idx1, idx2 + 1); Logger.debug(NavigationWebAPI.class, "path=" + startFromPath); } boolean addSpans = false; if(request.getAttribute(STR) != null && (Boolean)request.getAttribute(STR)){ addSpans = true; } String firstItemClass = STRfirstItemClassSTR class=\STRfirstItemClass")+"_"; } String lastItemClass = STRlastItemClassSTR class=\STRlastItemClass")+"_"; } String menuIdPrefix = STRmenuIdPrefixSTRmenuIdPrefix")+"_STRSTR/STR_levels_STR_static.vtlSTR_levels_STR_static.vtl"; file = new java.io.File(MENU_VTL_PATH + fileName); if (file.exists() && file.length() > 0) { fileExists = true; } } } catch(Exception ex) { Logger.error(this,ex.toString(),ex); } return fileExists; }
|
/**
* Returns true if the menu is valid and contains items in it
* @param startFromPath
* @param maxDepth
* @param request
* @return
* @throws JspException
*/
|
Returns true if the menu is valid and contains items in it
|
menuItems
|
{
"repo_name": "ggonzales/ksl",
"path": "src/com/dotmarketing/viewtools/NavigationWebAPI.java",
"license": "gpl-3.0",
"size": 79879
}
|
[
"com.dotmarketing.beans.Host",
"com.dotmarketing.business.web.WebAPILocator",
"com.dotmarketing.util.Logger",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.jsp.JspException"
] |
import com.dotmarketing.beans.Host; import com.dotmarketing.business.web.WebAPILocator; import com.dotmarketing.util.Logger; import javax.servlet.http.HttpServletRequest; import javax.servlet.jsp.JspException;
|
import com.dotmarketing.beans.*; import com.dotmarketing.business.web.*; import com.dotmarketing.util.*; import javax.servlet.http.*; import javax.servlet.jsp.*;
|
[
"com.dotmarketing.beans",
"com.dotmarketing.business",
"com.dotmarketing.util",
"javax.servlet"
] |
com.dotmarketing.beans; com.dotmarketing.business; com.dotmarketing.util; javax.servlet;
| 942,397 |
public interface Versioner {
List<String> version(List<List<Uri>> resourceUris, String container,
List<String> resourceTags);
|
interface Versioner { List<String> function(List<List<Uri>> resourceUris, String container, List<String> resourceTags);
|
/**
* Generates a version for each of the provided resources.
* @param resourceUris List of resource "batches" to version.
* @param container Container making the request
* @param resourceTags Index-correlated list of html tags, one per list of resouceUris as only
* similar tags can be concat. Each entry in resourceTags corresponds to html tag of resources
* uris. Any older implementations can just ignore.
* @return Index-correlated list of version strings, one per input.
*/
|
Generates a version for each of the provided resources
|
version
|
{
"repo_name": "apparentlymart/shindig",
"path": "java/gadgets/src/main/java/org/apache/shindig/gadgets/uri/ConcatUriManager.java",
"license": "apache-2.0",
"size": 6676
}
|
[
"java.util.List",
"org.apache.shindig.common.uri.Uri"
] |
import java.util.List; import org.apache.shindig.common.uri.Uri;
|
import java.util.*; import org.apache.shindig.common.uri.*;
|
[
"java.util",
"org.apache.shindig"
] |
java.util; org.apache.shindig;
| 437,798 |
@Override public void enterPolicyargs(@NotNull PoCoParser.PolicyargsContext ctx) { }
|
@Override public void enterPolicyargs(@NotNull PoCoParser.PolicyargsContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
exitPocopol
|
{
"repo_name": "Corjuh/PoCo-Compiler",
"path": "Parser/gen/PoCoParserBaseListener.java",
"license": "lgpl-2.1",
"size": 18482
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 706,429 |
public int getAccessibleChildrenCount(JComponent a) {
int returnValue =
((ComponentUI) (uis.elementAt(0))).getAccessibleChildrenCount(a);
for (int i = 1; i < uis.size(); i++) {
((ComponentUI) (uis.elementAt(i))).getAccessibleChildrenCount(a);
}
return returnValue;
}
|
int function(JComponent a) { int returnValue = ((ComponentUI) (uis.elementAt(0))).getAccessibleChildrenCount(a); for (int i = 1; i < uis.size(); i++) { ((ComponentUI) (uis.elementAt(i))).getAccessibleChildrenCount(a); } return returnValue; }
|
/**
* Invokes the <code>getAccessibleChildrenCount</code> method on each UI handled by this object.
*
* @return the value obtained from the first UI, which is
* the UI obtained from the default <code>LookAndFeel</code>
*/
|
Invokes the <code>getAccessibleChildrenCount</code> method on each UI handled by this object
|
getAccessibleChildrenCount
|
{
"repo_name": "isaacl/openjdk-jdk",
"path": "src/share/classes/javax/swing/plaf/multi/MultiProgressBarUI.java",
"license": "gpl-2.0",
"size": 7599
}
|
[
"javax.swing.JComponent",
"javax.swing.plaf.ComponentUI"
] |
import javax.swing.JComponent; import javax.swing.plaf.ComponentUI;
|
import javax.swing.*; import javax.swing.plaf.*;
|
[
"javax.swing"
] |
javax.swing;
| 217,898 |
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof ChartEntity) {
ChartEntity that = (ChartEntity) obj;
if (!this.area.equals(that.area)) {
return false;
}
if (!ObjectUtilities.equal(this.toolTipText, that.toolTipText)) {
return false;
}
if (!ObjectUtilities.equal(this.urlText, that.urlText)) {
return false;
}
return true;
}
return false;
}
|
boolean function(Object obj) { if (obj == this) { return true; } if (obj instanceof ChartEntity) { ChartEntity that = (ChartEntity) obj; if (!this.area.equals(that.area)) { return false; } if (!ObjectUtilities.equal(this.toolTipText, that.toolTipText)) { return false; } if (!ObjectUtilities.equal(this.urlText, that.urlText)) { return false; } return true; } return false; }
|
/**
* Tests the entity for equality with an arbitrary object.
*
* @param obj the object to test against (<code>null</code> permitted).
*
* @return A boolean.
*/
|
Tests the entity for equality with an arbitrary object
|
equals
|
{
"repo_name": "nologic/nabs",
"path": "client/trunk/shared/libraries/jfreechart-1.0.5/source/org/jfree/chart/entity/ChartEntity.java",
"license": "gpl-2.0",
"size": 14127
}
|
[
"org.jfree.util.ObjectUtilities"
] |
import org.jfree.util.ObjectUtilities;
|
import org.jfree.util.*;
|
[
"org.jfree.util"
] |
org.jfree.util;
| 2,677,290 |
public static MozuClient<com.mozu.api.contracts.core.extensible.Attribute> createAttributeClient(com.mozu.api.contracts.core.extensible.Attribute attribute) throws Exception
{
return createAttributeClient( attribute, null);
}
|
static MozuClient<com.mozu.api.contracts.core.extensible.Attribute> function(com.mozu.api.contracts.core.extensible.Attribute attribute) throws Exception { return createAttributeClient( attribute, null); }
|
/**
* Create and save a new attribute. These attributes are used in products and product options.
* <p><pre><code>
* MozuClient<com.mozu.api.contracts.core.extensible.Attribute> mozuClient=CreateAttributeClient( attribute);
* client.setBaseAddress(url);
* client.executeRequest();
* Attribute attribute = client.Result();
* </code></pre></p>
* @param attribute Properties of an attribute used to describe customers or orders.
* @return Mozu.Api.MozuClient <com.mozu.api.contracts.core.extensible.Attribute>
* @see com.mozu.api.contracts.core.extensible.Attribute
* @see com.mozu.api.contracts.core.extensible.Attribute
*/
|
Create and save a new attribute. These attributes are used in products and product options. <code><code> MozuClient mozuClient=CreateAttributeClient( attribute); client.setBaseAddress(url); client.executeRequest(); Attribute attribute = client.Result(); </code></code>
|
createAttributeClient
|
{
"repo_name": "lakshmi-nair/mozu-java",
"path": "mozu-java-core/src/main/java/com/mozu/api/clients/commerce/orders/attributedefinition/AttributeClient.java",
"license": "mit",
"size": 12980
}
|
[
"com.mozu.api.MozuClient"
] |
import com.mozu.api.MozuClient;
|
import com.mozu.api.*;
|
[
"com.mozu.api"
] |
com.mozu.api;
| 169,975 |
public static void removeLog4jNoOpLogger(IgniteBiTuple<Object, Object> t) throws IgniteCheckedException {
Object rootLog = t.get1();
Object nullApp = t.get2();
if (nullApp == null)
return;
try {
Class appenderCls = Class.forName("org.apache.log4j.Appender");
rootLog.getClass().getMethod("removeAppender", appenderCls).invoke(rootLog, nullApp);
}
catch (Exception e) {
throw new IgniteCheckedException("Failed to remove previously added no-op logger for Log4j.", e);
}
}
|
static void function(IgniteBiTuple<Object, Object> t) throws IgniteCheckedException { Object rootLog = t.get1(); Object nullApp = t.get2(); if (nullApp == null) return; try { Class appenderCls = Class.forName(STR); rootLog.getClass().getMethod(STR, appenderCls).invoke(rootLog, nullApp); } catch (Exception e) { throw new IgniteCheckedException(STR, e); } }
|
/**
* Removes previously added no-op logger via method {@link #addLog4jNoOpLogger}.
*
* @param t Tuple with root log and null appender instances.
* @throws IgniteCheckedException In case of failure to remove previously added no-op logger for Log4j.
*/
|
Removes previously added no-op logger via method <code>#addLog4jNoOpLogger</code>
|
removeLog4jNoOpLogger
|
{
"repo_name": "pperalta/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 314980
}
|
[
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.lang.IgniteBiTuple"
] |
import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.lang.IgniteBiTuple;
|
import org.apache.ignite.*; import org.apache.ignite.lang.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 1,151,890 |
ServiceTicket getServiceTicket();
|
ServiceTicket getServiceTicket();
|
/**
* Retrieves the service ticket.
*
* @return the requested service ticket
*/
|
Retrieves the service ticket
|
getServiceTicket
|
{
"repo_name": "CenterForOpenScience/cas-overlay",
"path": "cas-server-support-oauth/src/main/java/org/jasig/cas/support/oauth/token/AccessToken.java",
"license": "apache-2.0",
"size": 1558
}
|
[
"org.jasig.cas.ticket.ServiceTicket"
] |
import org.jasig.cas.ticket.ServiceTicket;
|
import org.jasig.cas.ticket.*;
|
[
"org.jasig.cas"
] |
org.jasig.cas;
| 1,422,510 |
public Color getForegroundForTokenType(int type) {
Color fg = syntaxScheme.styles[type].foreground;
return fg != null ? fg : getForeground();
}
|
Color function(int type) { Color fg = syntaxScheme.styles[type].foreground; return fg != null ? fg : getForeground(); }
|
/**
* Returns the foreground color to use when painting a token. This does not take into account whether the token is a
* hyperlink.
*
* @param type
* The token type.
* @return The foreground color to use for that token. This value is never <code>null</code>.
* @see #getForegroundForToken(Token)
*/
|
Returns the foreground color to use when painting a token. This does not take into account whether the token is a hyperlink
|
getForegroundForTokenType
|
{
"repo_name": "kevinmcgoldrick/Tank",
"path": "tools/script_filter/src/main/java/org/fife/ui/rsyntaxtextarea/RSyntaxTextArea.java",
"license": "epl-1.0",
"size": 75671
}
|
[
"java.awt.Color"
] |
import java.awt.Color;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 50,376 |
public PrivateCloudUpdateProperties withAvailability(AvailabilityProperties availability) {
this.availability = availability;
return this;
}
|
PrivateCloudUpdateProperties function(AvailabilityProperties availability) { this.availability = availability; return this; }
|
/**
* Set the availability property: Properties describing how the cloud is distributed across availability zones.
*
* @param availability the availability value to set.
* @return the PrivateCloudUpdateProperties object itself.
*/
|
Set the availability property: Properties describing how the cloud is distributed across availability zones
|
withAvailability
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/avs/azure-resourcemanager-avs/src/main/java/com/azure/resourcemanager/avs/fluent/models/PrivateCloudUpdateProperties.java",
"license": "mit",
"size": 5531
}
|
[
"com.azure.resourcemanager.avs.models.AvailabilityProperties"
] |
import com.azure.resourcemanager.avs.models.AvailabilityProperties;
|
import com.azure.resourcemanager.avs.models.*;
|
[
"com.azure.resourcemanager"
] |
com.azure.resourcemanager;
| 658,513 |
private synchronized void moveNodes() {
lastMaxMotion = maxMotion;
final double[] maxMotionA = new double[1];
maxMotionA[0] = 0;
TGForEachNode fen = new TGForEachNode() {
|
synchronized void function() { lastMaxMotion = maxMotion; final double[] maxMotionA = new double[1]; maxMotionA[0] = 0; TGForEachNode fen = new TGForEachNode() {
|
/**
* Mode nodes.
*/
|
Mode nodes
|
moveNodes
|
{
"repo_name": "dvorka/mindraider",
"path": "mr7/src/main/java/com/touchgraph/graphlayout/TGLayout.java",
"license": "apache-2.0",
"size": 19687
}
|
[
"com.touchgraph.graphlayout.graphelements.TGForEachNode"
] |
import com.touchgraph.graphlayout.graphelements.TGForEachNode;
|
import com.touchgraph.graphlayout.graphelements.*;
|
[
"com.touchgraph.graphlayout"
] |
com.touchgraph.graphlayout;
| 2,257,383 |
private void runDERBY_2703(int db_index)
throws SQLException
{
setUpTable();
ResultSet rs =
getConnection().getMetaData().getColumns(null, "APP", "CUSTOMER", "%");
int rowCount = JDBC.assertDrainResults(rs);
Assert.assertTrue("catch bug where no rows are returned.", rowCount > 0);
dropTable();
}
|
void function(int db_index) throws SQLException { setUpTable(); ResultSet rs = getConnection().getMetaData().getColumns(null, "APP", STR, "%"); int rowCount = JDBC.assertDrainResults(rs); Assert.assertTrue(STR, rowCount > 0); dropTable(); }
|
/**
* Test simple call to DatabaseMetaData.getColumns()
* <p>
* This test is the same form of the getColumns() call that
* the IMPORT and EXPORT system procedures depend on.
* Currently on ibm and sun 1.4.2 jvm's this test fails.
**/
|
Test simple call to DatabaseMetaData.getColumns() This test is the same form of the getColumns() call that the IMPORT and EXPORT system procedures depend on. Currently on ibm and sun 1.4.2 jvm's this test fails
|
runDERBY_2703
|
{
"repo_name": "scnakandala/derby",
"path": "java/testing/org/apache/derbyTesting/functionTests/tests/lang/CollationTest2.java",
"license": "apache-2.0",
"size": 65788
}
|
[
"java.sql.ResultSet",
"java.sql.SQLException",
"junit.framework.Assert",
"org.apache.derbyTesting.junit.JDBC"
] |
import java.sql.ResultSet; import java.sql.SQLException; import junit.framework.Assert; import org.apache.derbyTesting.junit.JDBC;
|
import java.sql.*; import junit.framework.*; import org.apache.*;
|
[
"java.sql",
"junit.framework",
"org.apache"
] |
java.sql; junit.framework; org.apache;
| 2,882,186 |
public static void checkResponseDocForNumStringResponses(Response responseDoc, int expectedNumStringResponses) {
List<StringResponse> stringResponses = responseDoc.getStringResponses();
int actualNumResponses = stringResponses.size();
assertTrueWithPrefix("Wrong number of string responses: was " + actualNumResponses + " instead of " + expectedNumStringResponses,
actualNumResponses == expectedNumStringResponses);
}
|
static void function(Response responseDoc, int expectedNumStringResponses) { List<StringResponse> stringResponses = responseDoc.getStringResponses(); int actualNumResponses = stringResponses.size(); assertTrueWithPrefix(STR + actualNumResponses + STR + expectedNumStringResponses, actualNumResponses == expectedNumStringResponses); }
|
/**
* Checks if there is a certain number of string responses (XML tag 'response:string') within a response
* (XML tag 'response:response').
* Fails if not.
*
* @param responseDoc the response containing the string responses.
* @param expectedNumStringResponses the number of string responses to check for.
*/
|
Checks if there is a certain number of string responses (XML tag 'response:string') within a response (XML tag 'response:response'). Fails if not
|
checkResponseDocForNumStringResponses
|
{
"repo_name": "fraunhoferfokus/Fuzzino",
"path": "src/test/java/de/fraunhofer/fokus/fuzzing/fuzzino/TestUtil.java",
"license": "apache-2.0",
"size": 30135
}
|
[
"de.fraunhofer.fokus.fuzzing.fuzzino.response.Response",
"de.fraunhofer.fokus.fuzzing.fuzzino.response.StringResponse",
"java.util.List"
] |
import de.fraunhofer.fokus.fuzzing.fuzzino.response.Response; import de.fraunhofer.fokus.fuzzing.fuzzino.response.StringResponse; import java.util.List;
|
import de.fraunhofer.fokus.fuzzing.fuzzino.response.*; import java.util.*;
|
[
"de.fraunhofer.fokus",
"java.util"
] |
de.fraunhofer.fokus; java.util;
| 945,044 |
long addLocation(String locationSetting, String cityName, double lat, double lon) {
long locationId;
// First, check if the location with this city name exists in the db
Cursor locationCursor = getContext().getContentResolver().query(
WeatherContract.LocationEntry.CONTENT_URI,
new String[]{WeatherContract.LocationEntry._ID},
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
new String[]{locationSetting},
null);
if (locationCursor.moveToFirst()) {
int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
locationId = locationCursor.getLong(locationIdIndex);
} else {
// Now that the content provider is set up, inserting rows of data is pretty simple.
// First create a ContentValues object to hold the data you want to insert.
ContentValues locationValues = new ContentValues();
// Then add the data, along with the corresponding name of the data type,
// so the content provider knows what kind of value is being inserted.
locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
// Finally, insert location data into the database.
Uri insertedUri = getContext().getContentResolver().insert(
WeatherContract.LocationEntry.CONTENT_URI,
locationValues
);
// The resulting URI contains the ID for the row. Extract the locationId from the Uri.
locationId = ContentUris.parseId(insertedUri);
}
locationCursor.close();
// Wait, that worked? Yes!
return locationId;
}
|
long addLocation(String locationSetting, String cityName, double lat, double lon) { long locationId; Cursor locationCursor = getContext().getContentResolver().query( WeatherContract.LocationEntry.CONTENT_URI, new String[]{WeatherContract.LocationEntry._ID}, WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + STR, new String[]{locationSetting}, null); if (locationCursor.moveToFirst()) { int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID); locationId = locationCursor.getLong(locationIdIndex); } else { ContentValues locationValues = new ContentValues(); locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName); locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting); locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat); locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon); Uri insertedUri = getContext().getContentResolver().insert( WeatherContract.LocationEntry.CONTENT_URI, locationValues ); locationId = ContentUris.parseId(insertedUri); } locationCursor.close(); return locationId; }
|
/**
* Helper method to handle insertion of a new location in the weather database.
*
* @param locationSetting The location string used to request updates from the server.
* @param cityName A human-readable city name, e.g "Mountain View"
* @param lat the latitude of the city
* @param lon the longitude of the city
* @return the row ID of the added location.
*/
|
Helper method to handle insertion of a new location in the weather database
|
addLocation
|
{
"repo_name": "niteshgrg/Sunshine2",
"path": "app/src/main/java/com/example/android/sunshine/app/sync/SunshineSyncAdapter.java",
"license": "apache-2.0",
"size": 28539
}
|
[
"android.content.ContentUris",
"android.content.ContentValues",
"android.database.Cursor",
"android.net.Uri",
"com.example.android.sunshine.app.data.WeatherContract"
] |
import android.content.ContentUris; import android.content.ContentValues; import android.database.Cursor; import android.net.Uri; import com.example.android.sunshine.app.data.WeatherContract;
|
import android.content.*; import android.database.*; import android.net.*; import com.example.android.sunshine.app.data.*;
|
[
"android.content",
"android.database",
"android.net",
"com.example.android"
] |
android.content; android.database; android.net; com.example.android;
| 794,228 |
static public LbListener getInstance(@Nonnull LbProtocol protocol, int publicPort, int privatePort) {
return new LbListener(LbAlgorithm.ROUND_ROBIN, LbPersistence.NONE, protocol, publicPort, privatePort);
}
|
static LbListener function(@Nonnull LbProtocol protocol, int publicPort, int privatePort) { return new LbListener(LbAlgorithm.ROUND_ROBIN, LbPersistence.NONE, protocol, publicPort, privatePort); }
|
/**
* Constructs a listener that routes traffic of the specified protocol in a round-robin format from the specified
* public port to the specified private port.
* @param protocol the network protocol being load balanced
* @param publicPort the public port on which the load balancer is listening
* @param privatePort the private port on which endpoints are listening
* @return a newly constructed listener
*/
|
Constructs a listener that routes traffic of the specified protocol in a round-robin format from the specified public port to the specified private port
|
getInstance
|
{
"repo_name": "unwin/dasein-cloud-core",
"path": "src/main/java/org/dasein/cloud/network/LbListener.java",
"license": "apache-2.0",
"size": 10593
}
|
[
"javax.annotation.Nonnull"
] |
import javax.annotation.Nonnull;
|
import javax.annotation.*;
|
[
"javax.annotation"
] |
javax.annotation;
| 2,878,022 |
public Date getFinalFireTime() {
// FUTURE_TODO: implement QUARTZ-423
return null;
}
|
Date function() { return null; }
|
/**
* NOT YET IMPLEMENTED: Returns the final time that the
* <code>CronExpression</code> will match.
*/
|
<code>CronExpression</code> will match
|
getFinalFireTime
|
{
"repo_name": "SourceStudyNotes/log4j2",
"path": "src/main/java/org/apache/logging/log4j/core/util/CronExpression.java",
"license": "apache-2.0",
"size": 59620
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 561,903 |
protected MediaPlayer createMediaPlayer() {
return null;
}
|
MediaPlayer function() { return null; }
|
/**
* Subclass need to implement this method if audio need to be played when
* playAudio() method is invoked.
*
* @return
*/
|
Subclass need to implement this method if audio need to be played when playAudio() method is invoked
|
createMediaPlayer
|
{
"repo_name": "android-noobie/laolanguage",
"path": "src/org/laolanguage/LaoBaseActivity.java",
"license": "gpl-3.0",
"size": 7369
}
|
[
"android.media.MediaPlayer"
] |
import android.media.MediaPlayer;
|
import android.media.*;
|
[
"android.media"
] |
android.media;
| 102,277 |
void registerWorldGenHandler(Class<? extends IChunkGenerator> chunkGenerator, IWorldGen worldGen);
|
void registerWorldGenHandler(Class<? extends IChunkGenerator> chunkGenerator, IWorldGen worldGen);
|
/**This is to make it easier to support different chunk generators than the vanilla standard one,
* Simply register the chunk generator class to a worldgen and it will be called
*
* @param chunkGenerator the chunk generator to associate with this world generator
* @param worldGen the worldgen to register */
|
This is to make it easier to support different chunk generators than the vanilla standard one, Simply register the chunk generator class to a worldgen and it will be called
|
registerWorldGenHandler
|
{
"repo_name": "joshiejack/Mariculture",
"path": "src/main/java/joshie/mariculture/api/gen/WorldGen.java",
"license": "mit",
"size": 566
}
|
[
"net.minecraft.world.chunk.IChunkGenerator"
] |
import net.minecraft.world.chunk.IChunkGenerator;
|
import net.minecraft.world.chunk.*;
|
[
"net.minecraft.world"
] |
net.minecraft.world;
| 2,505,849 |
public void testTranslateJava2DToValue() {
NumberAxis axis = new NumberAxis();
axis.setRange(50.0, 100.0);
Rectangle2D dataArea = new Rectangle2D.Double(10.0, 50.0, 400.0, 300.0);
double y1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT);
assertEquals(y1, 95.8333333, EPSILON);
double y2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT);
assertEquals(y2, 95.8333333, EPSILON);
double x1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP);
assertEquals(x1, 58.125, EPSILON);
double x2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM);
assertEquals(x2, 58.125, EPSILON);
axis.setInverted(true);
double y3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT);
assertEquals(y3, 54.1666667, EPSILON);
double y4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT);
assertEquals(y4, 54.1666667, EPSILON);
double x3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP);
assertEquals(x3, 91.875, EPSILON);
double x4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM);
assertEquals(x4, 91.875, EPSILON);
}
|
void function() { NumberAxis axis = new NumberAxis(); axis.setRange(50.0, 100.0); Rectangle2D dataArea = new Rectangle2D.Double(10.0, 50.0, 400.0, 300.0); double y1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT); assertEquals(y1, 95.8333333, EPSILON); double y2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT); assertEquals(y2, 95.8333333, EPSILON); double x1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP); assertEquals(x1, 58.125, EPSILON); double x2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM); assertEquals(x2, 58.125, EPSILON); axis.setInverted(true); double y3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT); assertEquals(y3, 54.1666667, EPSILON); double y4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT); assertEquals(y4, 54.1666667, EPSILON); double x3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP); assertEquals(x3, 91.875, EPSILON); double x4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM); assertEquals(x4, 91.875, EPSILON); }
|
/**
* Test the translation of Java2D values to data values.
*/
|
Test the translation of Java2D values to data values
|
testTranslateJava2DToValue
|
{
"repo_name": "JSansalone/JFreeChart",
"path": "tests/org/jfree/chart/axis/junit/NumberAxisTests.java",
"license": "lgpl-2.1",
"size": 16698
}
|
[
"java.awt.geom.Rectangle2D",
"org.jfree.chart.axis.NumberAxis",
"org.jfree.ui.RectangleEdge"
] |
import java.awt.geom.Rectangle2D; import org.jfree.chart.axis.NumberAxis; import org.jfree.ui.RectangleEdge;
|
import java.awt.geom.*; import org.jfree.chart.axis.*; import org.jfree.ui.*;
|
[
"java.awt",
"org.jfree.chart",
"org.jfree.ui"
] |
java.awt; org.jfree.chart; org.jfree.ui;
| 2,479,682 |
public TrainTestInstances getTrainingAndTestProblemInstances(String experimentDirectory, long trainingSeed, long testingSeed, boolean deterministic, boolean trainingRequired, boolean testRequired, boolean trainingFeaturesRequired, boolean testingFeaturesRequired) throws IOException
{
InstanceListWithSeeds training = getTrainingProblemInstances(experimentDirectory, trainingSeed, deterministic, trainingRequired, trainingFeaturesRequired);
InstanceListWithSeeds testing = getTestingProblemInstances(experimentDirectory, testingSeed, deterministic, testRequired, testingFeaturesRequired);
return new TrainTestInstances(training, testing);
}
|
TrainTestInstances function(String experimentDirectory, long trainingSeed, long testingSeed, boolean deterministic, boolean trainingRequired, boolean testRequired, boolean trainingFeaturesRequired, boolean testingFeaturesRequired) throws IOException { InstanceListWithSeeds training = getTrainingProblemInstances(experimentDirectory, trainingSeed, deterministic, trainingRequired, trainingFeaturesRequired); InstanceListWithSeeds testing = getTestingProblemInstances(experimentDirectory, testingSeed, deterministic, testRequired, testingFeaturesRequired); return new TrainTestInstances(training, testing); }
|
/**
* Gets both the training and the test problem instances
*
* @param experimentDirectory Directory to search for instance files
* @param trainingSeed Seed to use for the training instances
* @param testingSeed Seed to use for the testing instances
* @param deterministic Whether or not the instances should be generated with deterministic (-1) seeds
* @param trainingRequired Whether the training instance file is required
* @param testRequired Whether the test instance file is required
* @return
* @throws IOException
*/
|
Gets both the training and the test problem instances
|
getTrainingAndTestProblemInstances
|
{
"repo_name": "fredizzimo/keyboardlayout",
"path": "smac/src/aeatk/ca/ubc/cs/beta/aeatk/probleminstance/ProblemInstanceOptions.java",
"license": "gpl-2.0",
"size": 17398
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,670,482 |
public static RevisionMetadata stripFromAllFields(
RevisionMetadata rm, List<String> words, String replacement, boolean wordAlone) {
String newId = rm.id();
String newAuthor = rm.author();
String newDescription = rm.description();
ListMultimap<String, String> newFields = LinkedListMultimap.create(rm.fields());
for (String word : words) {
String regex = (wordAlone) ? ("(?i)(\\b)" + word + "(\\b)") : ("(?i)" + word);
newId = newId.replaceAll(regex, replacement);
newAuthor = replaceAuthor(newAuthor, word, replacement);
newDescription = newDescription.replaceAll(regex, replacement);
for (Entry<String, String> entry : newFields.entries()) {
entry.setValue(entry.getValue().replaceAll(regex, replacement));
}
}
return rm.toBuilder()
.id(newId)
.author(newAuthor)
.description(newDescription)
.fields(ImmutableSetMultimap.copyOf(newFields))
.build();
}
|
static RevisionMetadata function( RevisionMetadata rm, List<String> words, String replacement, boolean wordAlone) { String newId = rm.id(); String newAuthor = rm.author(); String newDescription = rm.description(); ListMultimap<String, String> newFields = LinkedListMultimap.create(rm.fields()); for (String word : words) { String regex = (wordAlone) ? (STR + word + "(\\b)") : ("(?i)" + word); newId = newId.replaceAll(regex, replacement); newAuthor = replaceAuthor(newAuthor, word, replacement); newDescription = newDescription.replaceAll(regex, replacement); for (Entry<String, String> entry : newFields.entries()) { entry.setValue(entry.getValue().replaceAll(regex, replacement)); } } return rm.toBuilder() .id(newId) .author(newAuthor) .description(newDescription) .fields(ImmutableSetMultimap.copyOf(newFields)) .build(); }
|
/**
* A utility method that is useful for stripping a list of words from all the fields of the
* RevisionMetadata.
*
* @param rm the RevisionMetadata to scrub
* @param words the list of words to replace
* @param replacement the String to replace the target words with
* @param wordAlone true if the words to match must surrounded by word boundaries
* @return a copy representing the RevisionMetadata resulting from the scrub
*/
|
A utility method that is useful for stripping a list of words from all the fields of the RevisionMetadata
|
stripFromAllFields
|
{
"repo_name": "cgruber/MOE",
"path": "client/src/main/java/com/google/devtools/moe/client/repositories/MetadataScrubber.java",
"license": "apache-2.0",
"size": 5605
}
|
[
"com.google.common.collect.ImmutableSetMultimap",
"com.google.common.collect.LinkedListMultimap",
"com.google.common.collect.ListMultimap",
"java.util.List",
"java.util.Map"
] |
import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.ListMultimap; import java.util.List; import java.util.Map;
|
import com.google.common.collect.*; import java.util.*;
|
[
"com.google.common",
"java.util"
] |
com.google.common; java.util;
| 2,462,534 |
public String getActiveMenuTitle(Player player) {
SMSMenu activeMenu = getActiveMenu(player);
String prefix = activeMenu == getNativeMenu() ? "" : ScrollingMenuSign.getInstance().getConfigCache().getSubmenuTitlePrefix();
return prefix + activeMenu.getTitle();
}
|
String function(Player player) { SMSMenu activeMenu = getActiveMenu(player); String prefix = activeMenu == getNativeMenu() ? "" : ScrollingMenuSign.getInstance().getConfigCache().getSubmenuTitlePrefix(); return prefix + activeMenu.getTitle(); }
|
/**
* Get the title for the given player's currently active menu.
*
* @param player the player to check
* @return title of the active menu
*/
|
Get the title for the given player's currently active menu
|
getActiveMenuTitle
|
{
"repo_name": "desht/ScrollingMenuSign",
"path": "src/main/java/me/desht/scrollingmenusign/views/CommandTrigger.java",
"license": "lgpl-3.0",
"size": 4949
}
|
[
"me.desht.scrollingmenusign.SMSMenu",
"me.desht.scrollingmenusign.ScrollingMenuSign",
"org.bukkit.entity.Player"
] |
import me.desht.scrollingmenusign.SMSMenu; import me.desht.scrollingmenusign.ScrollingMenuSign; import org.bukkit.entity.Player;
|
import me.desht.scrollingmenusign.*; import org.bukkit.entity.*;
|
[
"me.desht.scrollingmenusign",
"org.bukkit.entity"
] |
me.desht.scrollingmenusign; org.bukkit.entity;
| 2,629,316 |
public static void setMockClock(long mockClockSeconds) {
mockTime = new Date(mockClockSeconds * 1000);
}
|
static void function(long mockClockSeconds) { mockTime = new Date(mockClockSeconds * 1000); }
|
/**
* Sets the mock clock to the given time (in seconds).
*/
|
Sets the mock clock to the given time (in seconds)
|
setMockClock
|
{
"repo_name": "bitcoinj/bitcoinj",
"path": "core/src/main/java/org/bitcoinj/core/Utils.java",
"license": "apache-2.0",
"size": 22176
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,539,306 |
public void onChangeAffinityMessage(final GridDhtPartitionsExchangeFuture exchFut,
boolean crd,
final CacheAffinityChangeMessage msg)
throws IgniteCheckedException {
assert msg.topologyVersion() != null && msg.exchangeId() == null : msg;
final AffinityTopologyVersion topVer = exchFut.initialVersion();
if (log.isDebugEnabled()) {
log.debug("Process affinity change message [exchVer=" + topVer +
", msgVer=" + msg.topologyVersion() + ']');
}
final Map<Integer, Map<Integer, List<UUID>>> affChange = msg.assignmentChange();
assert !F.isEmpty(affChange) : msg;
final Map<Integer, IgniteUuid> deploymentIds = msg.cacheDeploymentIds();
final Map<Object, List<List<ClusterNode>>> affCache = new HashMap<>();
|
void function(final GridDhtPartitionsExchangeFuture exchFut, boolean crd, final CacheAffinityChangeMessage msg) throws IgniteCheckedException { assert msg.topologyVersion() != null && msg.exchangeId() == null : msg; final AffinityTopologyVersion topVer = exchFut.initialVersion(); if (log.isDebugEnabled()) { log.debug(STR + topVer + STR + msg.topologyVersion() + ']'); } final Map<Integer, Map<Integer, List<UUID>>> affChange = msg.assignmentChange(); assert !F.isEmpty(affChange) : msg; final Map<Integer, IgniteUuid> deploymentIds = msg.cacheDeploymentIds(); final Map<Object, List<List<ClusterNode>>> affCache = new HashMap<>();
|
/**
* Called on exchange initiated by {@link CacheAffinityChangeMessage} which sent after rebalance finished.
*
* @param exchFut Exchange future.
* @param crd Coordinator flag.
* @param msg Message.
* @throws IgniteCheckedException If failed.
*/
|
Called on exchange initiated by <code>CacheAffinityChangeMessage</code> which sent after rebalance finished
|
onChangeAffinityMessage
|
{
"repo_name": "vladisav/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheAffinitySharedManager.java",
"license": "apache-2.0",
"size": 101317
}
|
[
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.cluster.ClusterNode",
"org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion",
"org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture",
"org.apache.ignite.internal.util.typedef.F",
"org.apache.ignite.lang.IgniteUuid"
] |
import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.lang.IgniteUuid;
|
import java.util.*; import org.apache.ignite.*; import org.apache.ignite.cluster.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.lang.*;
|
[
"java.util",
"org.apache.ignite"
] |
java.util; org.apache.ignite;
| 2,562,004 |
public final CustomColor findColor( String colorName )
{
StructureDefn defn = (StructureDefn) MetaDataDictionary.getInstance( )
.getStructure( CustomColor.CUSTOM_COLOR_STRUCT );
return (CustomColor) StructureRefUtil.findStructure( getModule( ),
defn, colorName );
}
|
final CustomColor function( String colorName ) { StructureDefn defn = (StructureDefn) MetaDataDictionary.getInstance( ) .getStructure( CustomColor.CUSTOM_COLOR_STRUCT ); return (CustomColor) StructureRefUtil.findStructure( getModule( ), defn, colorName ); }
|
/**
* Finds a custom color by name.
*
* @param colorName
* the custom color name
* @return the custom defined color that matches, or <code>null</code> if
* the color name was not found in the custom color palette.
*/
|
Finds a custom color by name
|
findColor
|
{
"repo_name": "sguan-actuate/birt",
"path": "model/org.eclipse.birt.report.model/src/org/eclipse/birt/report/model/core/ModuleImpl.java",
"license": "epl-1.0",
"size": 72136
}
|
[
"org.eclipse.birt.report.model.api.elements.structures.CustomColor",
"org.eclipse.birt.report.model.metadata.MetaDataDictionary",
"org.eclipse.birt.report.model.metadata.StructureDefn",
"org.eclipse.birt.report.model.util.StructureRefUtil"
] |
import org.eclipse.birt.report.model.api.elements.structures.CustomColor; import org.eclipse.birt.report.model.metadata.MetaDataDictionary; import org.eclipse.birt.report.model.metadata.StructureDefn; import org.eclipse.birt.report.model.util.StructureRefUtil;
|
import org.eclipse.birt.report.model.api.elements.structures.*; import org.eclipse.birt.report.model.metadata.*; import org.eclipse.birt.report.model.util.*;
|
[
"org.eclipse.birt"
] |
org.eclipse.birt;
| 650,727 |
interface WithNewPublicIPAddressNoDnsLabel<ReturnT> {
ReturnT withNewPublicIpAddress(Creatable<PublicIpAddress> creatable);
|
interface WithNewPublicIPAddressNoDnsLabel<ReturnT> { ReturnT withNewPublicIpAddress(Creatable<PublicIpAddress> creatable);
|
/**
* Creates a new public IP address to associate with the resource.
*
* @param creatable a creatable definition for a new public IP
* @return the next stage of the definition
*/
|
Creates a new public IP address to associate with the resource
|
withNewPublicIpAddress
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/models/HasPublicIpAddress.java",
"license": "mit",
"size": 12039
}
|
[
"com.azure.resourcemanager.resources.fluentcore.model.Creatable"
] |
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
|
import com.azure.resourcemanager.resources.fluentcore.model.*;
|
[
"com.azure.resourcemanager"
] |
com.azure.resourcemanager;
| 1,648,468 |
EReference getListType_Lists();
|
EReference getListType_Lists();
|
/**
* Returns the meta object for the containment reference list '{@link org.camunda.bpm.modeler.runtime.engine.model.ListType#getLists <em>Lists</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Lists</em>'.
* @see org.camunda.bpm.modeler.runtime.engine.model.ListType#getLists()
* @see #getListType()
* @generated
*/
|
Returns the meta object for the containment reference list '<code>org.camunda.bpm.modeler.runtime.engine.model.ListType#getLists Lists</code>'.
|
getListType_Lists
|
{
"repo_name": "camunda/camunda-eclipse-plugin",
"path": "org.camunda.bpm.modeler/src/org/camunda/bpm/modeler/runtime/engine/model/ModelPackage.java",
"license": "epl-1.0",
"size": 231785
}
|
[
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 171,559 |
public synchronized HBaseAdmin getHBaseAdmin()
throws IOException {
if (hbaseAdmin == null){
hbaseAdmin = new HBaseAdmin(getConfiguration());
}
return hbaseAdmin;
}
private HBaseAdmin hbaseAdmin = null;
|
synchronized HBaseAdmin function() throws IOException { if (hbaseAdmin == null){ hbaseAdmin = new HBaseAdmin(getConfiguration()); } return hbaseAdmin; } private HBaseAdmin hbaseAdmin = null;
|
/**
* Returns a HBaseAdmin instance.
* This instance is shared between HBaseTestingUtility instance users.
* Don't close it, it will be closed automatically when the
* cluster shutdowns
*
* @return The HBaseAdmin instance.
* @throws IOException
*/
|
Returns a HBaseAdmin instance. This instance is shared between HBaseTestingUtility instance users. Don't close it, it will be closed automatically when the cluster shutdowns
|
getHBaseAdmin
|
{
"repo_name": "matteobertozzi/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 79792
}
|
[
"java.io.IOException",
"org.apache.hadoop.hbase.client.HBaseAdmin"
] |
import java.io.IOException; import org.apache.hadoop.hbase.client.HBaseAdmin;
|
import java.io.*; import org.apache.hadoop.hbase.client.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,571,857 |
AccumT mergeAccumulators(K key, Iterable<AccumT> accumulators, PipelineOptions options,
SideInputReader sideInputReader, Collection<? extends BoundedWindow> windows);
|
AccumT mergeAccumulators(K key, Iterable<AccumT> accumulators, PipelineOptions options, SideInputReader sideInputReader, Collection<? extends BoundedWindow> windows);
|
/**
* Forwards the call to a {@link PerKeyCombineFn} to merge accumulators.
*
* <p>It constructs a {@code CombineWithContext.Context} from
* {@link PipelineOptions} and {@link SideInputReader} if it is required.
*/
|
Forwards the call to a <code>PerKeyCombineFn</code> to merge accumulators. It constructs a CombineWithContext.Context from <code>PipelineOptions</code> and <code>SideInputReader</code> if it is required
|
mergeAccumulators
|
{
"repo_name": "joshualitt/incubator-beam",
"path": "runners/core-java/src/main/java/org/apache/beam/runners/core/PerKeyCombineFnRunner.java",
"license": "apache-2.0",
"size": 6151
}
|
[
"java.util.Collection",
"org.apache.beam.sdk.options.PipelineOptions",
"org.apache.beam.sdk.transforms.windowing.BoundedWindow",
"org.apache.beam.sdk.util.SideInputReader"
] |
import java.util.Collection; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.SideInputReader;
|
import java.util.*; import org.apache.beam.sdk.options.*; import org.apache.beam.sdk.transforms.windowing.*; import org.apache.beam.sdk.util.*;
|
[
"java.util",
"org.apache.beam"
] |
java.util; org.apache.beam;
| 2,795,404 |
Collection<TestCase> processTestSpecification(Collection<TestSpecification> testCaseSpecification);
|
Collection<TestCase> processTestSpecification(Collection<TestSpecification> testCaseSpecification);
|
/**
* Converts test specifications into test cases.
*
* @param testCaseSpecification The blueprint for creating the test cases
* @return A collection of Test Cases
*/
|
Converts test specifications into test cases
|
processTestSpecification
|
{
"repo_name": "google/polymorphicDSL",
"path": "src/main/java/com/pdsl/testcases/TestCaseFactory.java",
"license": "apache-2.0",
"size": 603
}
|
[
"com.pdsl.specifications.TestSpecification",
"java.util.Collection"
] |
import com.pdsl.specifications.TestSpecification; import java.util.Collection;
|
import com.pdsl.specifications.*; import java.util.*;
|
[
"com.pdsl.specifications",
"java.util"
] |
com.pdsl.specifications; java.util;
| 1,295,514 |
@Override
public Response apisPost(APIDetailedDTO body, String contentType){
URI createdApiUri;
APIDetailedDTO createdApiDTO;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String username = RestApiUtil.getLoggedInUsername();
boolean isWSAPI = APIDetailedDTO.TypeEnum.WS == body.getType();
boolean isSoapToRestConvertedApi = APIDetailedDTO.TypeEnum.SOAPTOREST == body.getType();
// validate web socket api endpoint configurations
if (isWSAPI) {
if (!RestApiPublisherUtils.isValidWSAPI(body)) {
RestApiUtil.handleBadRequest("Endpoint URLs should be valid web socket URLs", log);
}
} else {
if (body.getApiDefinition() == null) {
RestApiUtil.handleBadRequest("Parameter: \"apiDefinition\" cannot be null", log);
}
}
String apiSecurity = body.getApiSecurity();
if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecurity != null && apiSecurity
.contains(APIConstants.API_SECURITY_MUTUAL_SSL)) {
RestApiUtil.handleBadRequest("Mutual SSL Based authentication is not supported in this server", log);
}
if (body.getAccessControlRoles() != null) {
String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
if (body.getAdditionalProperties() != null) {
String errorMessage = RestApiPublisherUtils
.validateAdditionalProperties(body.getAdditionalProperties());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
if (body.getContext() == null) {
RestApiUtil.handleBadRequest("Parameter: \"context\" cannot be null", log);
} else if (body.getContext().endsWith("/")) {
RestApiUtil.handleBadRequest("Context cannot end with '/' character", log);
}
if (apiProvider.isApiNameWithDifferentCaseExist(body.getName())) {
RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " + body.getName()
+ " already exists.", log);
}
//Get all existing versions of api been adding
List<String> apiVersions = apiProvider.getApiVersionsMatchingApiName(body.getName(), username);
if (apiVersions.size() > 0) {
//If any previous version exists
for (String version : apiVersions) {
if (version.equalsIgnoreCase(body.getVersion())) {
//If version already exists
if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
RestApiUtil.handleResourceAlreadyExistsError("Error occurred while " +
"adding the API. A duplicate API already exists for "
+ body.getName() + "-" + body.getVersion(), log);
} else {
RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " +
body.getName() + " already exists with different " +
"context", log);
}
}
}
} else {
//If no any previous version exists
if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
RestApiUtil.handleBadRequest("Error occurred while adding the API. A duplicate API context " +
"already exists for " + body.getContext(), log);
}
}
//Check if the user has admin permission before applying a different provider than the current user
String provider = body.getProvider();
if (!StringUtils.isBlank(provider) && !provider.equals(username)) {
if (!APIUtil.hasPermission(username, APIConstants.Permissions.APIM_ADMIN)) {
if (log.isDebugEnabled()) {
log.debug("User " + username + " does not have admin permission ("
+ APIConstants.Permissions.APIM_ADMIN + ") hence provider (" +
provider + ") overridden with current user (" + username + ")");
}
provider = username;
} else {
if (!MultitenantUtils.getTenantDomain(username).equals(MultitenantUtils
.getTenantDomain(provider))) {
String errorMessage = "Error while adding new API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + ". The tenant " +
"domain '" + MultitenantUtils.getTenantDomain(provider) + "' of provider '" + provider
+ "' is not compatible with admin's('" + username + "') tenant domain '" +
MultitenantUtils.getTenantDomain(username) + "'";
RestApiUtil.handleBadRequest(errorMessage, log);
} else {
//When tenant domain contains upper case characters, this will convert those to lowercase
provider = MultitenantUtils.getTenantAwareUsername(provider) + "@" +
MultitenantUtils.getTenantDomain(provider);
}
}
} else {
//Set username in case provider is null or empty
provider = username;
}
List<String> tiersFromDTO = body.getTiers();
//If tiers are not defined, the api should be a PROTOTYPED one,
if (!APIConstants.PROTOTYPED.equals(body.getStatus()) &&
(tiersFromDTO == null || tiersFromDTO.isEmpty())) {
RestApiUtil.handleBadRequest("No tier defined for the API", log);
}
//check whether the added API's tiers are all valid
Set<Tier> definedTiers = apiProvider.getTiers();
List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO);
if (invalidTiers.size() > 0) {
RestApiUtil.handleBadRequest(
"Specified tier(s) " + Arrays.toString(invalidTiers.toArray()) + " are invalid", log);
}
APIPolicy apiPolicy = apiProvider.getAPIPolicy(username, body.getApiLevelPolicy());
if (apiPolicy == null && body.getApiLevelPolicy() != null) {
RestApiUtil.handleBadRequest(
"Specified policy " + body.getApiLevelPolicy() + " is invalid", log);
}
if (isSoapToRestConvertedApi && StringUtils.isNotBlank(body.getWsdlUri())) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri());
body.setApiDefinition(swaggerStr);
}
API apiToAdd = APIMappingUtil.fromDTOtoAPI(body, provider);
//Overriding some properties:
//only allow CREATED as the starting state for the new api if the status is not PROTOTYPED
if (!APIConstants.PROTOTYPED.equals(apiToAdd.getStatus())) {
apiToAdd.setStatus(APIConstants.CREATED);
}
//we are setting the api owner as the logged in user until we support checking admin privileges and assigning
// the owner as a different user
apiToAdd.setApiOwner(provider);
//attach micro-gateway labels
apiToAdd = assignLabelsToDTO(body,apiToAdd);
//adding the api
apiProvider.addAPI(apiToAdd);
if (isSoapToRestConvertedApi) {
if (StringUtils.isNotBlank(apiToAdd.getWsdlUrl())) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri());
apiProvider.saveSwagger20Definition(apiToAdd.getId(), swaggerStr);
SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(body));
} else {
String errorMessage =
"Error while generating the swagger since the wsdl url is null for: " + body.getProvider()
+ "-" + body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, log);
}
} else if (!isWSAPI) {
apiProvider.saveSwagger20Definition(apiToAdd.getId(), body.getApiDefinition());
}
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (JSONException e) {
String errorMessage = "Error while validating endpoint configurations : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + "-" + body.getEndpointConfig();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
|
Response function(APIDetailedDTO body, String contentType){ URI createdApiUri; APIDetailedDTO createdApiDTO; try { APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider(); String username = RestApiUtil.getLoggedInUsername(); boolean isWSAPI = APIDetailedDTO.TypeEnum.WS == body.getType(); boolean isSoapToRestConvertedApi = APIDetailedDTO.TypeEnum.SOAPTOREST == body.getType(); if (isWSAPI) { if (!RestApiPublisherUtils.isValidWSAPI(body)) { RestApiUtil.handleBadRequest(STR, log); } } else { if (body.getApiDefinition() == null) { RestApiUtil.handleBadRequest(STRapiDefinition\STR, log); } } String apiSecurity = body.getApiSecurity(); if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecurity != null && apiSecurity .contains(APIConstants.API_SECURITY_MUTUAL_SSL)) { RestApiUtil.handleBadRequest(STR, log); } if (body.getAccessControlRoles() != null) { String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles()); if (!errorMessage.isEmpty()) { RestApiUtil.handleBadRequest(errorMessage, log); } } if (body.getAdditionalProperties() != null) { String errorMessage = RestApiPublisherUtils .validateAdditionalProperties(body.getAdditionalProperties()); if (!errorMessage.isEmpty()) { RestApiUtil.handleBadRequest(errorMessage, log); } } if (body.getContext() == null) { RestApiUtil.handleBadRequest(STRcontext\STR, log); } else if (body.getContext().endsWith("/")) { RestApiUtil.handleBadRequest(STR, log); } if (apiProvider.isApiNameWithDifferentCaseExist(body.getName())) { RestApiUtil.handleBadRequest(STR + body.getName() + STR, log); } List<String> apiVersions = apiProvider.getApiVersionsMatchingApiName(body.getName(), username); if (apiVersions.size() > 0) { for (String version : apiVersions) { if (version.equalsIgnoreCase(body.getVersion())) { if (apiProvider.isDuplicateContextTemplate(body.getContext())) { RestApiUtil.handleResourceAlreadyExistsError(STR + STR + body.getName() + "-" + body.getVersion(), log); } else { 
RestApiUtil.handleBadRequest(STR + body.getName() + STR + STR, log); } } } } else { if (apiProvider.isDuplicateContextTemplate(body.getContext())) { RestApiUtil.handleBadRequest(STR + STR + body.getContext(), log); } } String provider = body.getProvider(); if (!StringUtils.isBlank(provider) && !provider.equals(username)) { if (!APIUtil.hasPermission(username, APIConstants.Permissions.APIM_ADMIN)) { if (log.isDebugEnabled()) { log.debug(STR + username + STR + APIConstants.Permissions.APIM_ADMIN + STR + provider + STR + username + ")"); } provider = username; } else { if (!MultitenantUtils.getTenantDomain(username).equals(MultitenantUtils .getTenantDomain(provider))) { String errorMessage = STR + body.getProvider() + "-" + body.getName() + "-" + body.getVersion() + STR + STR + MultitenantUtils.getTenantDomain(provider) + STR + provider + STR + username + STR + MultitenantUtils.getTenantDomain(username) + "'"; RestApiUtil.handleBadRequest(errorMessage, log); } else { provider = MultitenantUtils.getTenantAwareUsername(provider) + "@" + MultitenantUtils.getTenantDomain(provider); } } } else { provider = username; } List<String> tiersFromDTO = body.getTiers(); if (!APIConstants.PROTOTYPED.equals(body.getStatus()) && (tiersFromDTO == null tiersFromDTO.isEmpty())) { RestApiUtil.handleBadRequest(STR, log); } Set<Tier> definedTiers = apiProvider.getTiers(); List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO); if (invalidTiers.size() > 0) { RestApiUtil.handleBadRequest( STR + Arrays.toString(invalidTiers.toArray()) + STR, log); } APIPolicy apiPolicy = apiProvider.getAPIPolicy(username, body.getApiLevelPolicy()); if (apiPolicy == null && body.getApiLevelPolicy() != null) { RestApiUtil.handleBadRequest( STR + body.getApiLevelPolicy() + STR, log); } if (isSoapToRestConvertedApi && StringUtils.isNotBlank(body.getWsdlUri())) { String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri()); 
body.setApiDefinition(swaggerStr); } API apiToAdd = APIMappingUtil.fromDTOtoAPI(body, provider); if (!APIConstants.PROTOTYPED.equals(apiToAdd.getStatus())) { apiToAdd.setStatus(APIConstants.CREATED); } apiToAdd.setApiOwner(provider); apiToAdd = assignLabelsToDTO(body,apiToAdd); apiProvider.addAPI(apiToAdd); if (isSoapToRestConvertedApi) { if (StringUtils.isNotBlank(apiToAdd.getWsdlUrl())) { String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri()); apiProvider.saveSwagger20Definition(apiToAdd.getId(), swaggerStr); SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(body)); } else { String errorMessage = STR + body.getProvider() + "-" + body.getName() + "-" + body.getVersion(); RestApiUtil.handleInternalServerError(errorMessage, log); } } else if (!isWSAPI) { apiProvider.saveSwagger20Definition(apiToAdd.getId(), body.getApiDefinition()); } APIIdentifier createdApiId = apiToAdd.getId(); API createdApi = apiProvider.getAPI(createdApiId); createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi); createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId()); return Response.created(createdApiUri).entity(createdApiDTO).build(); } catch (APIManagementException e) { String errorMessage = STR + body.getProvider() + "-" + body.getName() + "-" + body.getVersion() + STR + e.getMessage(); RestApiUtil.handleInternalServerError(errorMessage, e, log); } catch (URISyntaxException e) { String errorMessage = STR + body.getProvider() + "-" + body.getName() + "-" + body.getVersion(); RestApiUtil.handleInternalServerError(errorMessage, e, log); } catch (JSONException e) { String errorMessage = STR + body.getProvider() + "-" + body.getName() + "-" + body.getVersion() + "-" + body.getEndpointConfig(); RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; }
|
/**
* Create new API
*
* @param body DTO model of new API to be created
* @param contentType content type of the payload
* @return created API
*/
|
Create new API
|
apisPost
|
{
"repo_name": "pubudu538/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/impl/ApisApiServiceImpl.java",
"license": "apache-2.0",
"size": 106588
}
|
[
"com.google.gson.Gson",
"java.net.URISyntaxException",
"java.util.Arrays",
"java.util.List",
"java.util.Set",
"javax.ws.rs.core.Response",
"org.apache.commons.lang3.StringUtils",
"org.json.JSONException",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.APIProvider",
"org.wso2.carbon.apimgt.api.model.APIIdentifier",
"org.wso2.carbon.apimgt.api.model.Tier",
"org.wso2.carbon.apimgt.api.model.policy.APIPolicy",
"org.wso2.carbon.apimgt.impl.APIConstants",
"org.wso2.carbon.apimgt.impl.soaptorest.SequenceGenerator",
"org.wso2.carbon.apimgt.impl.soaptorest.util.SOAPOperationBindingUtils",
"org.wso2.carbon.apimgt.impl.utils.APIUtil",
"org.wso2.carbon.apimgt.rest.api.publisher.dto.APIDetailedDTO",
"org.wso2.carbon.apimgt.rest.api.publisher.utils.RestApiPublisherUtils",
"org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.APIMappingUtil",
"org.wso2.carbon.apimgt.rest.api.util.RestApiConstants",
"org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil",
"org.wso2.carbon.utils.multitenancy.MultitenantUtils"
] |
import com.google.gson.Gson; import java.net.URISyntaxException; import java.util.Arrays; import java.util.List; import java.util.Set; import javax.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.json.JSONException; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.soaptorest.SequenceGenerator; import org.wso2.carbon.apimgt.impl.soaptorest.util.SOAPOperationBindingUtils; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.rest.api.publisher.dto.APIDetailedDTO; import org.wso2.carbon.apimgt.rest.api.publisher.utils.RestApiPublisherUtils; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.APIMappingUtil; import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil; import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
|
import com.google.gson.*; import java.net.*; import java.util.*; import javax.ws.rs.core.*; import org.apache.commons.lang3.*; import org.json.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.api.model.policy.*; import org.wso2.carbon.apimgt.impl.*; import org.wso2.carbon.apimgt.impl.soaptorest.*; import org.wso2.carbon.apimgt.impl.soaptorest.util.*; import org.wso2.carbon.apimgt.impl.utils.*; import org.wso2.carbon.apimgt.rest.api.publisher.dto.*; import org.wso2.carbon.apimgt.rest.api.publisher.utils.*; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.*; import org.wso2.carbon.apimgt.rest.api.util.*; import org.wso2.carbon.apimgt.rest.api.util.utils.*; import org.wso2.carbon.utils.multitenancy.*;
|
[
"com.google.gson",
"java.net",
"java.util",
"javax.ws",
"org.apache.commons",
"org.json",
"org.wso2.carbon"
] |
com.google.gson; java.net; java.util; javax.ws; org.apache.commons; org.json; org.wso2.carbon;
| 685,284 |
/**
 * Processes a single branch of refinement values for one pivot and returns the
 * resulting pivot response keyed by the parsed facet key.
 *
 * @param pivotFields the ordered list of fields in this pivot
 * @param refinements the encoded refinement-value path constraining each field,
 *        or {@code null} when no refinements apply
 * @param statsFields the {@link StatsField} instances to compute per pivot value
 * @param parsed the parsed request parameters (doc set, key, etc.)
 * @param facetQueries the facet queries hung under this pivot
 * @param facetRanges the facet ranges hung under this pivot
 */
private SimpleOrderedMap<List<NamedList<Object>>> processSingle(
    List<String> pivotFields,
    String refinements,
    List<StatsField> statsFields,
    final ParsedParams parsed,
    List<FacetComponent.FacetBase> facetQueries,
    List<RangeFacetRequest> facetRanges)
    throws IOException {
  SolrIndexSearcher searcher = rb.req.getSearcher();
  SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<>();

  String topField = pivotFields.get(0);
  SchemaField topSchemaField = searcher.getSchema().getField(topField);

  // Pivot fields beyond the first two, pushed so the shallowest is popped first.
  Deque<String> remainingFields = new LinkedList<>();
  for (int idx = pivotFields.size() - 1; idx > 1; idx--) {
    remainingFields.push(pivotFields.get(idx));
  }

  Deque<String> remainingValues = new LinkedList<>();
  NamedList<Integer> facetCounts;
  if (refinements == null) {
    // No refinements needed: facet over every term of the top field.
    facetCounts = this.getTermCountsForPivots(topField, parsed);
  } else {
    // All refinement values, split by the field they constrain.
    List<String> valuesPerField = PivotFacetHelper.decodeRefinementValuePath(refinements);
    for (int idx = valuesPerField.size() - 1; idx > 0; idx--) {
      remainingValues.push(valuesPerField.get(idx)); // only index [1] and on
    }
    // The first value constrains the top field directly; count its subset.
    String topFieldsValue = valuesPerField.get(0);
    facetCounts = new NamedList<>();
    facetCounts.add(topFieldsValue, getSubsetSize(parsed.docs, topSchemaField, topFieldsValue));
  }

  // The second pivot field (when present) becomes the sub-facet of the first.
  String subField = (pivotFields.size() > 1) ? pivotFields.get(1) : null;
  pivotResponse.add(
      parsed.key,
      doPivots(
          facetCounts,
          topField,
          subField,
          remainingFields,
          remainingValues,
          parsed,
          statsFields,
          facetQueries,
          facetRanges));
  return pivotResponse;
}
|
SimpleOrderedMap<List<NamedList<Object>>> function( List<String> pivotFields, String refinements, List<StatsField> statsFields, final ParsedParams parsed, List<FacetComponent.FacetBase> facetQueries, List<RangeFacetRequest> facetRanges) throws IOException { SolrIndexSearcher searcher = rb.req.getSearcher(); SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<>(); String field = pivotFields.get(0); SchemaField sfield = searcher.getSchema().getField(field); Deque<String> fnames = new LinkedList<>(); for (int i = pivotFields.size() - 1; i > 1; i--) { fnames.push(pivotFields.get(i)); } NamedList<Integer> facetCounts; Deque<String> vnames = new LinkedList<>(); if (null != refinements) { List<String> refinementValuesByField = PivotFacetHelper.decodeRefinementValuePath(refinements); for (int i = refinementValuesByField.size() - 1; i > 0; i--) { vnames.push(refinementValuesByField.get(i)); } String firstFieldsValues = refinementValuesByField.get(0); facetCounts = new NamedList<>(); facetCounts.add(firstFieldsValues, getSubsetSize(parsed.docs, sfield, firstFieldsValues)); } else { facetCounts = this.getTermCountsForPivots(field, parsed); } if (pivotFields.size() > 1) { String subField = pivotFields.get(1); pivotResponse.add( parsed.key, doPivots( facetCounts, field, subField, fnames, vnames, parsed, statsFields, facetQueries, facetRanges)); } else { pivotResponse.add( parsed.key, doPivots( facetCounts, field, null, fnames, vnames, parsed, statsFields, facetQueries, facetRanges)); } return pivotResponse; }
|
/**
* Process a single branch of refinement values for a specific pivot
*
* @param pivotFields the ordered list of fields in this pivot
* @param refinements the comma separate list of refinement values corresponding to each field in
* the pivot, or null if there are no refinements
* @param statsFields List of {@link StatsField} instances to compute for each pivot value
* @param facetQueries the list of facet queries hung under this pivot
* @param facetRanges the list of facet ranges hung under this pivot
*/
|
Process a single branch of refinement values for a specific pivot
|
processSingle
|
{
"repo_name": "apache/solr",
"path": "solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java",
"license": "apache-2.0",
"size": 19747
}
|
[
"java.io.IOException",
"java.util.Deque",
"java.util.LinkedList",
"java.util.List",
"org.apache.solr.common.util.NamedList",
"org.apache.solr.common.util.SimpleOrderedMap",
"org.apache.solr.schema.SchemaField",
"org.apache.solr.search.SolrIndexSearcher"
] |
import java.io.IOException; import java.util.Deque; import java.util.LinkedList; import java.util.List; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.SolrIndexSearcher;
|
import java.io.*; import java.util.*; import org.apache.solr.common.util.*; import org.apache.solr.schema.*; import org.apache.solr.search.*;
|
[
"java.io",
"java.util",
"org.apache.solr"
] |
java.io; java.util; org.apache.solr;
| 2,551,292 |
/**
 * Returns the locale currently set on this model.
 *
 * @return the configured {@link Locale}
 */
public Locale getLocale()
{
    return this.locale;
}
|
Locale function() { return locale; }
|
/**
* Retrieve the set locale.
*
* @return the locale
*/
|
Retrieve the set locale
|
getLocale
|
{
"repo_name": "kingkybel/utilities",
"path": "GUIComponents/src/com/kybelksties/gui/controls/DateChooserModel.java",
"license": "gpl-2.0",
"size": 14891
}
|
[
"java.util.Locale"
] |
import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,824,370 |
/**
 * Provides sorted iteration over the current buffer contents.
 *
 * @return an iterable view of the buffered entries
 */
public Iterable<Entry<byte[], byte[]>> entries() {
  final Iterable<Entry<byte[], byte[]>> sortedView = buffer.entrySet();
  return sortedView;
}
|
Iterable<Entry<byte[], byte[]>> function() { return buffer.entrySet(); }
|
/**
* Allows sorted iteration over the buffer contents.
* @return the buffer entries
*/
|
Allows sorted iteration over the buffer contents
|
entries
|
{
"repo_name": "sshcherbakov/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/persistence/soplog/SortedBuffer.java",
"license": "apache-2.0",
"size": 9298
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,630,214 |
/**
 * {@inheritDoc}
 *
 * <p>Delegates to {@link #visitChildren} on {@code ctx}, i.e. the default
 * child-aggregating behavior.</p>
 */
@Override
public T visitCompilationUnit(@NotNull ScalaParser.CompilationUnitContext ctx) {
    return visitChildren(ctx);
}
|
@Override public T visitCompilationUnit(@NotNull ScalaParser.CompilationUnitContext ctx) { return visitChildren(ctx); }
|
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
|
The default implementation returns the result of calling <code>#visitChildren</code> on ctx
|
visitCompilationUnit
|
{
"repo_name": "IsThisThePayneResidence/intellidots",
"path": "src/main/java/ua/edu/hneu/ast/parsers/ScalaBaseVisitor.java",
"license": "gpl-3.0",
"size": 26845
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 1,020,827 |
/**
 * In-order transformation of the leaf constraints of a constraint tree; the
 * result is empty if the given function fails (returns empty) on any leaf.
 *
 * <p>NOTE(review): the positional {@code cases(...)} dispatch below is
 * order-sensitive — each lambda handles one concrete constraint kind, so the
 * argument order must not be changed.</p>
 *
 * @param f transformation applied to each leaf constraint; an empty
 *        {@link Optional} result makes the overall result empty
 * @param recurseInLogicalScopes whether to also descend into {@code CTry}
 *        bodies (logical scopes)
 * @return a function mapping a constraint to its transformed form, or empty
 *         when {@code f} failed on a visited leaf
 */
public static Function1<IConstraint, Optional<IConstraint>> filter(Function1<IConstraint, Optional<IConstraint>> f,
        boolean recurseInLogicalScopes) {
    // @formatter:off
    return cases(
        c -> f.apply(c),
        // conjunction: transform both operands, then rebuild keeping the cause
        c -> {
            final Optional<IConstraint> left = filter(f, recurseInLogicalScopes).apply(c.left());
            final Optional<IConstraint> right = filter(f, recurseInLogicalScopes).apply(c.right());
            return Optionals.lift(left, right, (l, r) -> new CConj(l, r, c.cause().orElse(null)));
        },
        c -> f.apply(c),
        // existential: transform the body, preserving the bound variables
        c -> {
            final Optional<IConstraint> body = filter(f, recurseInLogicalScopes).apply(c.constraint());
            return body.map(b -> new CExists(c.vars(), b, c.cause().orElse(null)));
        },
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        c -> f.apply(c),
        // try scope: only descend when recurseInLogicalScopes is set,
        // otherwise the CTry node is kept untouched
        c -> {
            if(recurseInLogicalScopes) {
                final Optional<IConstraint> body = filter(f, recurseInLogicalScopes).apply(c.constraint());
                return body.map(b -> new CTry(b, c.cause().orElse(null), c.message().orElse(null)));
            } else {
                return Optional.of(c);
            }
        },
        c -> f.apply(c)
    );
    // @formatter:on
}
|
static Function1<IConstraint, Optional<IConstraint>> function(Function1<IConstraint, Optional<IConstraint>> f, boolean recurseInLogicalScopes) { return cases( c -> f.apply(c), c -> { final Optional<IConstraint> left = filter(f, recurseInLogicalScopes).apply(c.left()); final Optional<IConstraint> right = filter(f, recurseInLogicalScopes).apply(c.right()); return Optionals.lift(left, right, (l, r) -> new CConj(l, r, c.cause().orElse(null))); }, c -> f.apply(c), c -> { final Optional<IConstraint> body = filter(f, recurseInLogicalScopes).apply(c.constraint()); return body.map(b -> new CExists(c.vars(), b, c.cause().orElse(null))); }, c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> f.apply(c), c -> { if(recurseInLogicalScopes) { final Optional<IConstraint> body = filter(f, recurseInLogicalScopes).apply(c.constraint()); return body.map(b -> new CTry(b, c.cause().orElse(null), c.message().orElse(null))); } else { return Optional.of(c); } }, c -> f.apply(c) ); }
|
/**
* In order transformation of the leaf constraints, fail if the given function fails on any of the leaves.
*/
|
In order transformation of the leaf constraints, fail if the given function fails on any of the leaves
|
filter
|
{
"repo_name": "metaborg/nabl",
"path": "statix.solver/src/main/java/mb/statix/constraints/Constraints.java",
"license": "apache-2.0",
"size": 27027
}
|
[
"java.util.Optional",
"org.metaborg.util.functions.Function1",
"org.metaborg.util.optionals.Optionals"
] |
import java.util.Optional; import org.metaborg.util.functions.Function1; import org.metaborg.util.optionals.Optionals;
|
import java.util.*; import org.metaborg.util.functions.*; import org.metaborg.util.optionals.*;
|
[
"java.util",
"org.metaborg.util"
] |
java.util; org.metaborg.util;
| 2,188,124 |
/**
 * Returns desired system/hadoop properties to execute scripts using this handler.
 *
 * @param context the current execution context
 * @param script the target script (nullable)
 * @return desired system or hadoop properties
 * @throws InterruptedException if this operation is interrupted
 * @throws IOException if failed to set up the target environment
 */
Map<String, String> getProperties(
        ExecutionContext context,
        ExecutionScript script) throws InterruptedException, IOException;
|
Map<String, String> getProperties( ExecutionContext context, ExecutionScript script) throws InterruptedException, IOException;
|
/**
* Returns desired system/hadoop properties to execute scripts using this handler.
* @param context the current execution context
* @param script the target script (nullable)
* @return desired system or hadoop properties
* @throws InterruptedException if this operation is interrupted
* @throws IOException if failed to setup the target environment
* @since 0.2.6
*/
|
Returns desired system/hadoop properties to execute scripts using this handler
|
getProperties
|
{
"repo_name": "akirakw/asakusafw",
"path": "yaess-project/asakusa-yaess-core/src/main/java/com/asakusafw/yaess/core/ExecutionScriptHandler.java",
"license": "apache-2.0",
"size": 5419
}
|
[
"java.io.IOException",
"java.util.Map"
] |
import java.io.IOException; import java.util.Map;
|
import java.io.*; import java.util.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 1,828,539 |
/**
 * Removes stale recovering regions under /hbase/recovering-regions/[encoded region name]
 * during the master initialization phase.
 *
 * @param failedServers the set of known failed servers, may be {@code null}
 * @throws IOException if the coordination layer fails
 * @throws InterruptedIOException if the operation is interrupted
 */
void removeStaleRecoveringRegions(final Set<ServerName> failedServers) throws IOException,
    InterruptedIOException {
  // Collect the plain server-name strings of all known failed servers.
  final Set<String> deadServerNames = new HashSet<String>();
  if (failedServers != null) {
    for (ServerName failedServer : failedServers) {
      deadServerNames.add(failedServer.getServerName());
    }
  }
  // Serialize with other recovering-region updates while delegating the
  // actual cleanup to the coordination layer.
  this.recoveringRegionLock.lock();
  try {
    ((BaseCoordinatedStateManager) server.getCoordinatedStateManager())
        .getSplitLogManagerCoordination().removeStaleRecoveringRegions(deadServerNames);
  } finally {
    this.recoveringRegionLock.unlock();
  }
}
|
void removeStaleRecoveringRegions(final Set<ServerName> failedServers) throws IOException, InterruptedIOException { Set<String> knownFailedServers = new HashSet<String>(); if (failedServers != null) { for (ServerName tmpServerName : failedServers) { knownFailedServers.add(tmpServerName.getServerName()); } } this.recoveringRegionLock.lock(); try { ((BaseCoordinatedStateManager) server.getCoordinatedStateManager()) .getSplitLogManagerCoordination().removeStaleRecoveringRegions(knownFailedServers); } finally { this.recoveringRegionLock.unlock(); } }
|
/**
* It removes stale recovering regions under /hbase/recovering-regions/[encoded region name]
* during master initialization phase.
* @param failedServers A set of known failed servers
* @throws IOException
*/
|
It removes stale recovering regions under /hbase/recovering-regions/[encoded region name] during master initialization phase
|
removeStaleRecoveringRegions
|
{
"repo_name": "juwi/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java",
"license": "apache-2.0",
"size": 33433
}
|
[
"java.io.IOException",
"java.io.InterruptedIOException",
"java.util.HashSet",
"java.util.Set",
"org.apache.hadoop.hbase.ServerName",
"org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager"
] |
import java.io.IOException; import java.io.InterruptedIOException; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
|
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.coordination.*;
|
[
"java.io",
"java.util",
"org.apache.hadoop"
] |
java.io; java.util; org.apache.hadoop;
| 2,469,129 |
/**
 * Load the JMeter properties file; if it cannot be read, fall back to the
 * bundled "org/apache/jmeter/jmeter.properties" resource from the classpath.
 *
 * <p>
 * c.f. loadProperties
 *
 * @param file Name of the file from which the JMeter properties should be loaded
 */
public static void loadJMeterProperties(String file) {
    Properties p = new Properties(System.getProperties());
    InputStream is = null;
    try {
        File f = new File(file);
        is = new FileInputStream(f);
        p.load(is);
    } catch (IOException e) {
        // Close the (possibly partially read) file stream before reassigning
        // 'is' below, otherwise the FileInputStream would leak: the finally
        // block only closes whatever 'is' references at that point.
        JOrphanUtils.closeQuietly(is);
        try {
            is =
                ClassLoader.getSystemResourceAsStream("org/apache/jmeter/jmeter.properties"); // $NON-NLS-1$
            if (is == null) {
                throw new RuntimeException("Could not read JMeter properties file:"+file);
            }
            p.load(is);
        } catch (IOException ex) {
            // Deliberately best-effort: keep whatever was loaded so far.
            // JMeter.fail("Could not read internal resource. " +
            // "Archive is broken.");
        }
    } finally {
        JOrphanUtils.closeQuietly(is);
    }
    appProperties = p;
}
|
static void function(String file) { Properties p = new Properties(System.getProperties()); InputStream is = null; try { File f = new File(file); is = new FileInputStream(f); p.load(is); } catch (IOException e) { try { is = ClassLoader.getSystemResourceAsStream(STR); if (is == null) { throw new RuntimeException(STR+file); } p.load(is); } catch (IOException ex) { } } finally { JOrphanUtils.closeQuietly(is); } appProperties = p; }
|
/**
* Load the JMeter properties file; if not found, then
* default to "org/apache/jmeter/jmeter.properties" from the classpath
*
* <p>
* c.f. loadProperties
*
* @param file Name of the file from which the JMeter properties should be loaded
*/
|
Load the JMeter properties file; if not found, then default to "org/apache/jmeter/jmeter.properties" from the classpath c.f. loadProperties
|
loadJMeterProperties
|
{
"repo_name": "hizhangqi/jmeter-1",
"path": "src/core/org/apache/jmeter/util/JMeterUtils.java",
"license": "apache-2.0",
"size": 47312
}
|
[
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStream",
"java.util.Properties",
"org.apache.jorphan.util.JOrphanUtils"
] |
import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Properties; import org.apache.jorphan.util.JOrphanUtils;
|
import java.io.*; import java.util.*; import org.apache.jorphan.util.*;
|
[
"java.io",
"java.util",
"org.apache.jorphan"
] |
java.io; java.util; org.apache.jorphan;
| 2,679,018 |
/**
 * Clears the text written in the input field located by the given locator.
 *
 * @param by method used for finding the element
 * @throws InterruptedException declared but not thrown by this implementation
 *         (presumably kept for signature consistency with sibling helpers —
 *         TODO confirm)
 */
protected void clearText(By by) throws InterruptedException {
    driver.findElement(by).clear();
}
|
void function(By by) throws InterruptedException { driver.findElement(by).clear(); }
|
/**
* Clear the text written in an input field by xpath of an element
*
* @param by method used for finding the element
*/
|
Clear the text written in an input field by xpath of an element
|
clearText
|
{
"repo_name": "bhutchinson/rice",
"path": "rice-tools-test/src/main/java/org/kuali/rice/testtools/selenium/WebDriverITBase.java",
"license": "apache-2.0",
"size": 15648
}
|
[
"org.openqa.selenium.By"
] |
import org.openqa.selenium.By;
|
import org.openqa.selenium.*;
|
[
"org.openqa.selenium"
] |
org.openqa.selenium;
| 1,455,790 |
/**
 * Verifies that SCMTrigger does not trigger another build when a build has
 * just started but has not yet completed its SCM update.
 */
@Test
@Issue("JENKINS-2671")
public void simultaneousPollAndBuild() throws Exception {
    FreeStyleProject project = j.createFreeStyleProject();
    // Event used to coordinate between polling and checkout.
    final OneShotEvent checkoutStarted = new OneShotEvent();
    project.setScm(new TestSCM(checkoutStarted));
    Future<FreeStyleBuild> buildFuture = project.scheduleBuild2(0, new Cause.UserCause());
    // Wait until the build has actually entered its SCM checkout.
    checkoutStarted.block();
    assertFalse("SCM-poll after build has started should wait until that build finishes SCM-update", project.pollSCMChanges(StreamTaskListener.fromStdout()));
    // Let the mock build run to completion.
    buildFuture.get();
}
private static class TestSCM extends NullSCM {
private volatile int myRev = 1;
private final OneShotEvent checkoutStarted;
public TestSCM(OneShotEvent checkoutStarted) {
this.checkoutStarted = checkoutStarted;
}
|
@Issue(STR) void function() throws Exception { FreeStyleProject p = j.createFreeStyleProject(); final OneShotEvent checkoutStarted = new OneShotEvent(); p.setScm(new TestSCM(checkoutStarted)); Future<FreeStyleBuild> build = p.scheduleBuild2(0, new Cause.UserCause()); checkoutStarted.block(); assertFalse(STR, p.pollSCMChanges(StreamTaskListener.fromStdout())); build.get(); } private static class TestSCM extends NullSCM { private volatile int myRev = 1; private final OneShotEvent checkoutStarted; public TestSCM(OneShotEvent checkoutStarted) { this.checkoutStarted = checkoutStarted; }
|
/**
* Make sure that SCMTrigger doesn't trigger another build when a build has just started,
* but not yet completed its SCM update.
*/
|
Make sure that SCMTrigger doesn't trigger another build when a build has just started, but not yet completed its SCM update
|
simultaneousPollAndBuild
|
{
"repo_name": "lindzh/jenkins",
"path": "test/src/test/java/hudson/triggers/SCMTriggerTest.java",
"license": "mit",
"size": 5721
}
|
[
"hudson.model.Cause",
"hudson.model.FreeStyleBuild",
"hudson.model.FreeStyleProject",
"hudson.scm.NullSCM",
"hudson.util.OneShotEvent",
"hudson.util.StreamTaskListener",
"java.util.concurrent.Future",
"org.junit.Assert",
"org.jvnet.hudson.test.Issue"
] |
import hudson.model.Cause; import hudson.model.FreeStyleBuild; import hudson.model.FreeStyleProject; import hudson.scm.NullSCM; import hudson.util.OneShotEvent; import hudson.util.StreamTaskListener; import java.util.concurrent.Future; import org.junit.Assert; import org.jvnet.hudson.test.Issue;
|
import hudson.model.*; import hudson.scm.*; import hudson.util.*; import java.util.concurrent.*; import org.junit.*; import org.jvnet.hudson.test.*;
|
[
"hudson.model",
"hudson.scm",
"hudson.util",
"java.util",
"org.junit",
"org.jvnet.hudson"
] |
hudson.model; hudson.scm; hudson.util; java.util; org.junit; org.jvnet.hudson;
| 486,783 |
void addType(JSType type, JSType relatedType) {
checkState(!skipRenaming, "Attempt to record skipped property: %s", name);
JSType top = getTypeWithProperty(this.name, type);
if (invalidatingTypes.isInvalidating(top)) {
invalidate();
return;
}
if (isTypeToSkip(top)) {
addTypeToSkip(top);
}
if (relatedType == null) {
getTypes().add(top);
} else {
getTypes().union(top, relatedType);
}
FunctionType constructor = getConstructor(type);
if (constructor != null && recordInterfacesCache.add(type)) {
recordInterfaces(constructor, top, this);
}
}
|
void addType(JSType type, JSType relatedType) { checkState(!skipRenaming, STR, name); JSType top = getTypeWithProperty(this.name, type); if (invalidatingTypes.isInvalidating(top)) { invalidate(); return; } if (isTypeToSkip(top)) { addTypeToSkip(top); } if (relatedType == null) { getTypes().add(top); } else { getTypes().union(top, relatedType); } FunctionType constructor = getConstructor(type); if (constructor != null && recordInterfacesCache.add(type)) { recordInterfaces(constructor, top, this); } }
|
/**
* Record that this property is referenced from this type.
*/
|
Record that this property is referenced from this type
|
addType
|
{
"repo_name": "tiobe/closure-compiler",
"path": "src/com/google/javascript/jscomp/DisambiguateProperties.java",
"license": "apache-2.0",
"size": 34882
}
|
[
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.jstype.FunctionType",
"com.google.javascript.rhino.jstype.JSType"
] |
import com.google.common.base.Preconditions; import com.google.javascript.rhino.jstype.FunctionType; import com.google.javascript.rhino.jstype.JSType;
|
import com.google.common.base.*; import com.google.javascript.rhino.jstype.*;
|
[
"com.google.common",
"com.google.javascript"
] |
com.google.common; com.google.javascript;
| 476,621 |
private static org.jdom.Element toJdom(final Element e) {
return new DOMBuilder().build(e);
}
|
static org.jdom.Element function(final Element e) { return new DOMBuilder().build(e); }
|
/**
* Convert to a jdom element.
*
* @param e the e
* @return the element
*/
|
Convert to a jdom element
|
toJdom
|
{
"repo_name": "Unicon/cas",
"path": "support/cas-server-support-saml/src/main/java/org/apereo/cas/support/saml/util/AbstractSamlObjectBuilder.java",
"license": "apache-2.0",
"size": 15941
}
|
[
"org.jdom.input.DOMBuilder",
"org.w3c.dom.Element"
] |
import org.jdom.input.DOMBuilder; import org.w3c.dom.Element;
|
import org.jdom.input.*; import org.w3c.dom.*;
|
[
"org.jdom.input",
"org.w3c.dom"
] |
org.jdom.input; org.w3c.dom;
| 2,912,751 |
interface WithDelegations {
WithCreate withDelegations(List<Delegation> delegations);
}
|
interface WithDelegations { WithCreate withDelegations(List<Delegation> delegations); }
|
/**
* Specifies delegations.
* @param delegations An array of references to the delegations on the subnet
* @return the next definition stage
*/
|
Specifies delegations
|
withDelegations
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/Subnet.java",
"license": "mit",
"size": 16679
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 760,827 |
void setAnnotationLayers(List<AnnotationLayer> aAnnotationLayers);
|
void setAnnotationLayers(List<AnnotationLayer> aAnnotationLayers);
|
/**
* Set the annotation layers which are usable by the annotator (i.e. enabled, visible according
* to the user preferences , etc.)
*
* @param aAnnotationLayers
* usable layers
*/
|
Set the annotation layers which are usable by the annotator (i.e. enabled, visible according to the user preferences , etc.)
|
setAnnotationLayers
|
{
"repo_name": "webanno/webanno",
"path": "webanno-api-annotation/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/annotation/model/AnnotatorState.java",
"license": "apache-2.0",
"size": 8551
}
|
[
"de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer",
"java.util.List"
] |
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer; import java.util.List;
|
import de.tudarmstadt.ukp.clarin.webanno.model.*; import java.util.*;
|
[
"de.tudarmstadt.ukp",
"java.util"
] |
de.tudarmstadt.ukp; java.util;
| 1,400,777 |
public static void move(IProgressMonitor monitor, File src, File dst) throws IOException {
IFileSystem fs = EFS.getLocalFileSystem();
IFileStore from = fs.fromLocalFile(src);
IFileStore to = fs.fromLocalFile(dst);
try {
from.move(to, EFS.OVERWRITE, monitor);
} catch (CoreException e) {
LogUtil.log(e.getStatus());
throw new IOException(MessageFormat.format(
Messages.IoUtils_errorFailedToMoveFile,
src, dst));
}
}
|
static void function(IProgressMonitor monitor, File src, File dst) throws IOException { IFileSystem fs = EFS.getLocalFileSystem(); IFileStore from = fs.fromLocalFile(src); IFileStore to = fs.fromLocalFile(dst); try { from.move(to, EFS.OVERWRITE, monitor); } catch (CoreException e) { LogUtil.log(e.getStatus()); throw new IOException(MessageFormat.format( Messages.IoUtils_errorFailedToMoveFile, src, dst)); } }
|
/**
* Moves a file or folder.
* @param monitor the current progress monitor
* @param src the source file or folder
* @param dst the target file or folder
* @throws IOException if the operation was failed
*/
|
Moves a file or folder
|
move
|
{
"repo_name": "asakusafw/asakusafw-shafu",
"path": "com.asakusafw.shafu.core/src/com/asakusafw/shafu/core/util/IoUtils.java",
"license": "apache-2.0",
"size": 12832
}
|
[
"com.asakusafw.shafu.internal.core.LogUtil",
"java.io.File",
"java.io.IOException",
"java.text.MessageFormat",
"org.eclipse.core.filesystem.EFS",
"org.eclipse.core.filesystem.IFileStore",
"org.eclipse.core.filesystem.IFileSystem",
"org.eclipse.core.runtime.CoreException",
"org.eclipse.core.runtime.IProgressMonitor"
] |
import com.asakusafw.shafu.internal.core.LogUtil; import java.io.File; import java.io.IOException; import java.text.MessageFormat; import org.eclipse.core.filesystem.EFS; import org.eclipse.core.filesystem.IFileStore; import org.eclipse.core.filesystem.IFileSystem; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor;
|
import com.asakusafw.shafu.internal.core.*; import java.io.*; import java.text.*; import org.eclipse.core.filesystem.*; import org.eclipse.core.runtime.*;
|
[
"com.asakusafw.shafu",
"java.io",
"java.text",
"org.eclipse.core"
] |
com.asakusafw.shafu; java.io; java.text; org.eclipse.core;
| 2,168,055 |
boolean canBeSynchronized(LOTLInfo listOfTrustedList);
|
boolean canBeSynchronized(LOTLInfo listOfTrustedList);
|
/**
* Returns true if the certificates from the list of trusted lists and its
* trusted list can be synchronized
*
* @param listOfTrustedList
* the list of trusted lists to be tested
* @return true if the list of trusted lists can be synchronized
*/
|
Returns true if the certificates from the list of trusted lists and its trusted list can be synchronized
|
canBeSynchronized
|
{
"repo_name": "openlimit-signcubes/dss",
"path": "dss-tsl-validation/src/main/java/eu/europa/esig/dss/tsl/sync/SynchronizationStrategy.java",
"license": "lgpl-2.1",
"size": 1738
}
|
[
"eu.europa.esig.dss.spi.tsl.LOTLInfo"
] |
import eu.europa.esig.dss.spi.tsl.LOTLInfo;
|
import eu.europa.esig.dss.spi.tsl.*;
|
[
"eu.europa.esig"
] |
eu.europa.esig;
| 890,968 |
switch (type) {
case VAR_INTS:
case FIXED_INTS_8:
case FIXED_INTS_16:
case FIXED_INTS_32:
case FIXED_INTS_64:
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new Lng(groupSort, topNGroups, groupField, diskResident, type);
case FLOAT_32:
case FLOAT_64:
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new Dbl(groupSort, topNGroups, groupField, diskResident, type);
case BYTES_FIXED_STRAIGHT:
case BYTES_FIXED_DEREF:
case BYTES_VAR_STRAIGHT:
case BYTES_VAR_DEREF:
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new BR(groupSort, topNGroups, groupField, diskResident, type);
case BYTES_VAR_SORTED:
case BYTES_FIXED_SORTED:
// Type erasure b/c otherwise we have inconvertible types...
return (DVFirstPassGroupingCollector) new SortedBR(groupSort, topNGroups, groupField, diskResident, type);
default:
throw new IllegalArgumentException(String.format(Locale.ROOT, "ValueType %s not supported", type));
}
}
DVFirstPassGroupingCollector(Sort groupSort, int topNGroups, String groupField, boolean diskResident, DocValues.Type valueType) throws IOException {
super(groupSort, topNGroups);
this.groupField = groupField;
this.diskResident = diskResident;
this.valueType = valueType;
}
|
switch (type) { case VAR_INTS: case FIXED_INTS_8: case FIXED_INTS_16: case FIXED_INTS_32: case FIXED_INTS_64: return (DVFirstPassGroupingCollector) new Lng(groupSort, topNGroups, groupField, diskResident, type); case FLOAT_32: case FLOAT_64: return (DVFirstPassGroupingCollector) new Dbl(groupSort, topNGroups, groupField, diskResident, type); case BYTES_FIXED_STRAIGHT: case BYTES_FIXED_DEREF: case BYTES_VAR_STRAIGHT: case BYTES_VAR_DEREF: return (DVFirstPassGroupingCollector) new BR(groupSort, topNGroups, groupField, diskResident, type); case BYTES_VAR_SORTED: case BYTES_FIXED_SORTED: return (DVFirstPassGroupingCollector) new SortedBR(groupSort, topNGroups, groupField, diskResident, type); default: throw new IllegalArgumentException(String.format(Locale.ROOT, STR, type)); } } DVFirstPassGroupingCollector(Sort groupSort, int topNGroups, String groupField, boolean diskResident, DocValues.Type valueType) throws IOException { super(groupSort, topNGroups); this.groupField = groupField; this.diskResident = diskResident; this.valueType = valueType; }
|
/**
* Constructs a {@link DVFirstPassGroupingCollector}.
* Selects and constructs the most optimal first pass collector implementation for grouping by {@link DocValues}.
*
* @param groupField The field to group by
* @param topNGroups The maximum top number of groups to return. Typically this equals to offset + rows.
* @param diskResident Whether the values to group by should be disk resident
* @param type The {@link Type} which is used to select a concrete implementation.
* @param groupSort The sort used for the groups
* @return the most optimal first pass collector implementation for grouping by {@link DocValues}
* @throws IOException If I/O related errors occur
*/
|
Constructs a <code>DVFirstPassGroupingCollector</code>. Selects and constructs the most optimal first pass collector implementation for grouping by <code>DocValues</code>
|
create
|
{
"repo_name": "terrancesnyder/solr-analytics",
"path": "lucene/grouping/src/java/org/apache/lucene/search/grouping/dv/DVFirstPassGroupingCollector.java",
"license": "apache-2.0",
"size": 8292
}
|
[
"java.io.IOException",
"java.util.Locale",
"org.apache.lucene.index.DocValues",
"org.apache.lucene.search.Sort"
] |
import java.io.IOException; import java.util.Locale; import org.apache.lucene.index.DocValues; import org.apache.lucene.search.Sort;
|
import java.io.*; import java.util.*; import org.apache.lucene.index.*; import org.apache.lucene.search.*;
|
[
"java.io",
"java.util",
"org.apache.lucene"
] |
java.io; java.util; org.apache.lucene;
| 953,878 |
private void insertData() {
PodamFactory factory = new PodamFactoryImpl();
for (int i = 0; i < 3; i++) {
ClientEntity entity = factory.manufacturePojo(ClientEntity.class);
em.persist(entity);
data.add(entity);
}
}
|
void function() { PodamFactory factory = new PodamFactoryImpl(); for (int i = 0; i < 3; i++) { ClientEntity entity = factory.manufacturePojo(ClientEntity.class); em.persist(entity); data.add(entity); } }
|
/**
* Inserta los datos iniciales para el correcto funcionamiento de las pruebas.
*
* @generated
*/
|
Inserta los datos iniciales para el correcto funcionamiento de las pruebas
|
insertData
|
{
"repo_name": "Uniandes-MISO4203/artwork-201620-1",
"path": "artwork-logic/src/test/java/co/edu/uniandes/csw/artwork/test/persistence/ClientPersistenceTest.java",
"license": "mit",
"size": 7198
}
|
[
"co.edu.uniandes.csw.artwork.entities.ClientEntity",
"uk.co.jemos.podam.api.PodamFactory",
"uk.co.jemos.podam.api.PodamFactoryImpl"
] |
import co.edu.uniandes.csw.artwork.entities.ClientEntity; import uk.co.jemos.podam.api.PodamFactory; import uk.co.jemos.podam.api.PodamFactoryImpl;
|
import co.edu.uniandes.csw.artwork.entities.*; import uk.co.jemos.podam.api.*;
|
[
"co.edu.uniandes",
"uk.co.jemos"
] |
co.edu.uniandes; uk.co.jemos;
| 1,784,877 |
public Collection<String> getAllLoggerNames() {
Set<String> loggerNames = new TreeSet<>();
for (Logger logger : getParentLoggers()) {
loggerNames.add(logger.getName());
}
for (Logger logger : getLoggers()) {
loggerNames.add(logger.getName());
}
if (!customLoggers.isEmpty()) {
for (Entry<LoggerConfig, String> entry : customLoggers.entrySet()) {
loggerNames.add(entry.getKey().getName());
}
}
if (!customParentLoggers.isEmpty()) {
for (Entry<LoggerConfig, String> entry : customParentLoggers.entrySet()) {
loggerNames.add(entry.getKey().getName());
}
}
return loggerNames;
}
|
Collection<String> function() { Set<String> loggerNames = new TreeSet<>(); for (Logger logger : getParentLoggers()) { loggerNames.add(logger.getName()); } for (Logger logger : getLoggers()) { loggerNames.add(logger.getName()); } if (!customLoggers.isEmpty()) { for (Entry<LoggerConfig, String> entry : customLoggers.entrySet()) { loggerNames.add(entry.getKey().getName()); } } if (!customParentLoggers.isEmpty()) { for (Entry<LoggerConfig, String> entry : customParentLoggers.entrySet()) { loggerNames.add(entry.getKey().getName()); } } return loggerNames; }
|
/**
* returns all logger names including custom loggers
*
* @since 1.1.1
* @return
*/
|
returns all logger names including custom loggers
|
getAllLoggerNames
|
{
"repo_name": "andrehertwig/admintool",
"path": "admin-tools-log4j2/src/main/java/de/chandre/admintool/log4j2/AdminToolLog4j2Util.java",
"license": "mit",
"size": 18425
}
|
[
"java.util.Collection",
"java.util.Map",
"java.util.Set",
"java.util.TreeSet",
"org.apache.logging.log4j.core.Logger",
"org.apache.logging.log4j.core.config.LoggerConfig"
] |
import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.apache.logging.log4j.core.Logger; import org.apache.logging.log4j.core.config.LoggerConfig;
|
import java.util.*; import org.apache.logging.log4j.core.*; import org.apache.logging.log4j.core.config.*;
|
[
"java.util",
"org.apache.logging"
] |
java.util; org.apache.logging;
| 1,140,856 |
@Override
public boolean addAll(Collection<? extends T> paramCollection) {
boolean result = false;
if (paramCollection.size() > 4) {
result = super.addAll(paramCollection);
Collections.sort(this, comparator);
}
else {
for (T paramT:paramCollection) {
result |= add(paramT);
}
}
return result;
}
|
boolean function(Collection<? extends T> paramCollection) { boolean result = false; if (paramCollection.size() > 4) { result = super.addAll(paramCollection); Collections.sort(this, comparator); } else { for (T paramT:paramCollection) { result = add(paramT); } } return result; }
|
/**
* Adds all elements in the specified collection to the list. Each element
* will be inserted at the correct position to keep the list sorted.
*
* @param paramCollection
*/
|
Adds all elements in the specified collection to the list. Each element will be inserted at the correct position to keep the list sorted
|
addAll
|
{
"repo_name": "Limeth/CustomItemLibrary",
"path": "src/main/java/cz/creeper/customitemlibrary/util/SortedList.java",
"license": "mit",
"size": 3201
}
|
[
"java.util.Collection",
"java.util.Collections"
] |
import java.util.Collection; import java.util.Collections;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 347,563 |
List<Ptg> temp = new ArrayList<Ptg>(4 + size / 2);
int pos = 0;
boolean hasArrayPtgs = false;
while (pos < size) {
Ptg ptg = Ptg.createPtg(in);
if (ptg instanceof ArrayPtg.Initial) {
hasArrayPtgs = true;
}
pos += ptg.getSize();
temp.add(ptg);
}
if(pos != size) {
throw new RuntimeException("Ptg array size mismatch");
}
if (hasArrayPtgs) {
Ptg[] result = toPtgArray(temp);
for (int i=0;i<result.length;i++) {
if (result[i] instanceof ArrayPtg.Initial) {
result[i] = ((ArrayPtg.Initial) result[i]).finishReading(in);
}
}
return result;
}
return toPtgArray(temp);
}
|
List<Ptg> temp = new ArrayList<Ptg>(4 + size / 2); int pos = 0; boolean hasArrayPtgs = false; while (pos < size) { Ptg ptg = Ptg.createPtg(in); if (ptg instanceof ArrayPtg.Initial) { hasArrayPtgs = true; } pos += ptg.getSize(); temp.add(ptg); } if(pos != size) { throw new RuntimeException(STR); } if (hasArrayPtgs) { Ptg[] result = toPtgArray(temp); for (int i=0;i<result.length;i++) { if (result[i] instanceof ArrayPtg.Initial) { result[i] = ((ArrayPtg.Initial) result[i]).finishReading(in); } } return result; } return toPtgArray(temp); }
|
/**
* Reads <tt>size</tt> bytes of the input stream, to create an array of <tt>Ptg</tt>s.
* Extra data (beyond <tt>size</tt>) may be read if and <tt>ArrayPtg</tt>s are present.
*/
|
Reads size bytes of the input stream, to create an array of Ptgs. Extra data (beyond size) may be read if and ArrayPtgs are present
|
readTokens
|
{
"repo_name": "tobyclemson/msci-project",
"path": "vendor/poi-3.6/src/java/org/apache/poi/hssf/record/formula/Ptg.java",
"license": "mit",
"size": 11240
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,080,417 |
public Object oneTransition (QName elementName, int[] currentState, SubstitutionGroupHandler subGroupHandler) {
// error state
if (currentState[0] < 0) {
currentState[0] = XSCMValidator.SUBSEQUENT_ERROR;
return findMatchingDecl(elementName, subGroupHandler);
}
// seen child
currentState[0] = STATE_CHILD;
Object matchingDecl = null;
for (int i = 0; i < fNumElements; i++) {
// we only try to look for a matching decl if we have not seen
// this element yet.
if (currentState[i+1] != STATE_START)
continue;
matchingDecl = subGroupHandler.getMatchingElemDecl(elementName, fAllElements[i]);
if (matchingDecl != null) {
// found the decl, mark this element as "seen".
currentState[i+1] = STATE_VALID;
return matchingDecl;
}
}
// couldn't find the decl, change to error state.
currentState[0] = XSCMValidator.FIRST_ERROR;
return findMatchingDecl(elementName, subGroupHandler);
}
|
Object function (QName elementName, int[] currentState, SubstitutionGroupHandler subGroupHandler) { if (currentState[0] < 0) { currentState[0] = XSCMValidator.SUBSEQUENT_ERROR; return findMatchingDecl(elementName, subGroupHandler); } currentState[0] = STATE_CHILD; Object matchingDecl = null; for (int i = 0; i < fNumElements; i++) { if (currentState[i+1] != STATE_START) continue; matchingDecl = subGroupHandler.getMatchingElemDecl(elementName, fAllElements[i]); if (matchingDecl != null) { currentState[i+1] = STATE_VALID; return matchingDecl; } } currentState[0] = XSCMValidator.FIRST_ERROR; return findMatchingDecl(elementName, subGroupHandler); }
|
/**
* The method corresponds to one transition in the content model.
*
* @param elementName
* @param currentState Current state
* @return an element decl object
*/
|
The method corresponds to one transition in the content model
|
oneTransition
|
{
"repo_name": "BIORIMP/biorimp",
"path": "BIO-RIMP/test_data/code/xerces/src/org/apache/xerces/impl/xs/models/XSAllCM.java",
"license": "gpl-2.0",
"size": 7033
}
|
[
"org.apache.xerces.impl.xs.SubstitutionGroupHandler",
"org.apache.xerces.xni.QName"
] |
import org.apache.xerces.impl.xs.SubstitutionGroupHandler; import org.apache.xerces.xni.QName;
|
import org.apache.xerces.impl.xs.*; import org.apache.xerces.xni.*;
|
[
"org.apache.xerces"
] |
org.apache.xerces;
| 1,880,396 |
@SuppressWarnings("unchecked")
public <T extends Serializable> T stringToObject(String str, Class<T> clazz) {
byte[] bytes = Base64.decodeBase64(str.getBytes());
T object = null;
try {
ObjectInputStream objectInputStream = new ObjectInputStream(new ByteArrayInputStream(bytes));
object = (T)objectInputStream.readObject();
} catch (IOException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (ClassCastException e) {
e.printStackTrace();
}
return object;
}
|
@SuppressWarnings(STR) <T extends Serializable> T function(String str, Class<T> clazz) { byte[] bytes = Base64.decodeBase64(str.getBytes()); T object = null; try { ObjectInputStream objectInputStream = new ObjectInputStream(new ByteArrayInputStream(bytes)); object = (T)objectInputStream.readObject(); } catch (IOException e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (ClassCastException e) { e.printStackTrace(); } return object; }
|
/**
* Convert String to Object
*
* @param str
* @param clazz
* @return T
*/
|
Convert String to Object
|
stringToObject
|
{
"repo_name": "ankitbaderiya/code-samples",
"path": "ETRAMInquiryControl.java",
"license": "mit",
"size": 96362
}
|
[
"java.io.ByteArrayInputStream",
"java.io.IOException",
"java.io.ObjectInputStream",
"java.io.Serializable",
"org.apache.commons.codec.binary.Base64"
] |
import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import org.apache.commons.codec.binary.Base64;
|
import java.io.*; import org.apache.commons.codec.binary.*;
|
[
"java.io",
"org.apache.commons"
] |
java.io; org.apache.commons;
| 1,429,245 |
boolean afterInsert(SharedSessionContractImplementor session, Object key, Object value, Object version);
|
boolean afterInsert(SharedSessionContractImplementor session, Object key, Object value, Object version);
|
/**
* Called afterQuery an item has been inserted (afterQuery the transaction completes),
* instead of calling release().
* This method is used by "asynchronous" concurrency strategies.
*
* @param session Current session
* @param key The item key
* @param value The item
* @param version The item's version value
* @return Were the contents of the cache actual changed by this operation?
* @throws CacheException Propagated from underlying cache provider
*/
|
Called afterQuery an item has been inserted (afterQuery the transaction completes), instead of calling release(). This method is used by "asynchronous" concurrency strategies
|
afterInsert
|
{
"repo_name": "lamsfoundation/lams",
"path": "3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/EntityDataAccess.java",
"license": "gpl-2.0",
"size": 5004
}
|
[
"org.hibernate.engine.spi.SharedSessionContractImplementor"
] |
import org.hibernate.engine.spi.SharedSessionContractImplementor;
|
import org.hibernate.engine.spi.*;
|
[
"org.hibernate.engine"
] |
org.hibernate.engine;
| 1,470,082 |
void doFinalize(StorageDirectory sd) throws IOException {
File prevDir = sd.getPreviousDir();
if (!prevDir.exists())
return; // already discarded
final String dataDirPath = sd.getRoot().getCanonicalPath();
LOG.info("Finalizing upgrade for storage directory "
+ dataDirPath
+ ".\n cur LV = " + this.getLayoutVersion()
+ "; cur CTime = " + this.getCTime());
assert sd.getCurrentDir().exists() : "Current directory must exist.";
final File tmpDir = sd.getFinalizedTmp();//finalized.tmp directory
final File bbwDir = new File(sd.getRoot(), Storage.STORAGE_1_BBW);
// 1. rename previous to finalized.tmp
rename(prevDir, tmpDir);
|
void doFinalize(StorageDirectory sd) throws IOException { File prevDir = sd.getPreviousDir(); if (!prevDir.exists()) return; final String dataDirPath = sd.getRoot().getCanonicalPath(); LOG.info(STR + dataDirPath + STR + this.getLayoutVersion() + STR + this.getCTime()); assert sd.getCurrentDir().exists() : STR; final File tmpDir = sd.getFinalizedTmp(); final File bbwDir = new File(sd.getRoot(), Storage.STORAGE_1_BBW); rename(prevDir, tmpDir);
|
/**
* Finalize procedure deletes an existing snapshot.
* <ol>
* <li>Rename previous to finalized.tmp directory</li>
* <li>Fully delete the finalized.tmp directory</li>
* </ol>
*
* Do nothing, if previous directory does not exist
*/
|
Finalize procedure deletes an existing snapshot. Rename previous to finalized.tmp directory Fully delete the finalized.tmp directory Do nothing, if previous directory does not exist
|
doFinalize
|
{
"repo_name": "jaypatil/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java",
"license": "gpl-3.0",
"size": 51183
}
|
[
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.Storage"
] |
import java.io.File; import java.io.IOException; import org.apache.hadoop.hdfs.server.common.Storage;
|
import java.io.*; import org.apache.hadoop.hdfs.server.common.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 541,792 |
public ActionListener[] getActionListeners ()
{
return (ActionListener[]) getListeners (ActionListener.class);
}
|
ActionListener[] function () { return (ActionListener[]) getListeners (ActionListener.class); }
|
/**
* Return all ActionListeners register to this <code>TextField</code> object
* as an array.
*
* @since 1.4
*/
|
Return all ActionListeners register to this <code>TextField</code> object as an array
|
getActionListeners
|
{
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/java/awt/TextField.java",
"license": "bsd-3-clause",
"size": 12914
}
|
[
"java.awt.event.ActionListener"
] |
import java.awt.event.ActionListener;
|
import java.awt.event.*;
|
[
"java.awt"
] |
java.awt;
| 923,230 |
public void warn(Throwable throwable, String msg) {
logIfEnabled(Level.WARNING, throwable, msg, UNKNOWN_ARG, UNKNOWN_ARG, UNKNOWN_ARG, null);
}
|
void function(Throwable throwable, String msg) { logIfEnabled(Level.WARNING, throwable, msg, UNKNOWN_ARG, UNKNOWN_ARG, UNKNOWN_ARG, null); }
|
/**
* Log a warning message with a throwable.
*/
|
Log a warning message with a throwable
|
warn
|
{
"repo_name": "yswang0927/simplemagic",
"path": "src/main/java/com/j256/simplemagic/logger/Logger.java",
"license": "isc",
"size": 17390
}
|
[
"com.j256.simplemagic.logger.Log"
] |
import com.j256.simplemagic.logger.Log;
|
import com.j256.simplemagic.logger.*;
|
[
"com.j256.simplemagic"
] |
com.j256.simplemagic;
| 1,787,144 |
public List<ParameterDescription> getParametersDescription(String checkerId)
{
if (checkerId == null)
{
return new ArrayList<ParameterDescription>(getParametersDescriptionAsMap().values());
}
else
{
CheckerExtension checkerExt = getCheckerExtension(checkerId);
if (checkerExt != null)
{
return checkerExt.getChecker().getParameterDescriptions();
}
else
{
return null;
}
}
}
/**
* @return Map from {@link ParameterDescription#getName()} to {@link ParameterDescription}
|
List<ParameterDescription> function(String checkerId) { if (checkerId == null) { return new ArrayList<ParameterDescription>(getParametersDescriptionAsMap().values()); } else { CheckerExtension checkerExt = getCheckerExtension(checkerId); if (checkerExt != null) { return checkerExt.getChecker().getParameterDescriptions(); } else { return null; } } } /** * @return Map from {@link ParameterDescription#getName()} to {@link ParameterDescription}
|
/**
* Get the description for the parameters of the given checker.
* @param checkerId
* @return List of parameter descriptions.
*/
|
Get the description for the parameters of the given checker
|
getParametersDescription
|
{
"repo_name": "rex-xxx/mt6572_x201",
"path": "tools/motodev/src/plugins/preflighting.core/src/com/motorolamobility/preflighting/core/validation/ValidationManager.java",
"license": "gpl-2.0",
"size": 84748
}
|
[
"com.motorolamobility.preflighting.core.checker.CheckerExtension",
"java.util.ArrayList",
"java.util.List",
"java.util.Map"
] |
import com.motorolamobility.preflighting.core.checker.CheckerExtension; import java.util.ArrayList; import java.util.List; import java.util.Map;
|
import com.motorolamobility.preflighting.core.checker.*; import java.util.*;
|
[
"com.motorolamobility.preflighting",
"java.util"
] |
com.motorolamobility.preflighting; java.util;
| 2,753,762 |
private long pendingVideoMessages() {
OOBControlMessage pendingRequest = new OOBControlMessage();
pendingRequest.setTarget("ConnectionConsumer");
pendingRequest.setServiceName("pendingVideoCount");
msgOut.sendOOBControlMessage(this, pendingRequest);
if (pendingRequest.getResult() != null) {
return (Long) pendingRequest.getResult();
} else {
return 0;
}
}
|
long function() { OOBControlMessage pendingRequest = new OOBControlMessage(); pendingRequest.setTarget(STR); pendingRequest.setServiceName(STR); msgOut.sendOOBControlMessage(this, pendingRequest); if (pendingRequest.getResult() != null) { return (Long) pendingRequest.getResult(); } else { return 0; } }
|
/**
* Get number of pending video messages
* @return Number of pending video messages
*/
|
Get number of pending video messages
|
pendingVideoMessages
|
{
"repo_name": "OpenCorrelate/red5load",
"path": "red5/src/main/java/org/red5/server/stream/PlayEngine.java",
"license": "lgpl-3.0",
"size": 47873
}
|
[
"org.red5.server.messaging.OOBControlMessage"
] |
import org.red5.server.messaging.OOBControlMessage;
|
import org.red5.server.messaging.*;
|
[
"org.red5.server"
] |
org.red5.server;
| 1,813,468 |
public void setRange(Date lower, Date upper) {
if (lower.getTime() >= upper.getTime()) {
throw new IllegalArgumentException("Requires 'lower' < 'upper'.");
}
setRange(new DateRange(lower, upper));
}
|
void function(Date lower, Date upper) { if (lower.getTime() >= upper.getTime()) { throw new IllegalArgumentException(STR); } setRange(new DateRange(lower, upper)); }
|
/**
* Sets the axis range and sends an {@link AxisChangeEvent} to all
* registered listeners.
*
* @param lower the lower bound for the axis.
* @param upper the upper bound for the axis.
*/
|
Sets the axis range and sends an <code>AxisChangeEvent</code> to all registered listeners
|
setRange
|
{
"repo_name": "integrated/jfreechart",
"path": "source/org/jfree/chart/axis/DateAxis.java",
"license": "lgpl-2.1",
"size": 74495
}
|
[
"java.util.Date",
"org.jfree.data.time.DateRange"
] |
import java.util.Date; import org.jfree.data.time.DateRange;
|
import java.util.*; import org.jfree.data.time.*;
|
[
"java.util",
"org.jfree.data"
] |
java.util; org.jfree.data;
| 1,184,358 |
public ConfigParams getPlatformConfig() {
PlatformConfig advertisement = (PlatformConfig) AdvertisementFactory.newAdvertisement(
PlatformConfig.getAdvertisementType());
advertisement.setName(name);
advertisement.setDescription(description);
if (tcpConfig != null) {
boolean enabled = tcpEnabled && (tcpConfig.isServerEnabled() || tcpConfig.isClientEnabled());
advertisement.putServiceParam(PeerGroup.tcpProtoClassID, getParmDoc(enabled, tcpConfig));
}
if (multicastConfig != null) {
boolean enabled = multicastConfig.getMulticastState();
advertisement.putServiceParam(PeerGroup.multicastProtoClassID, getParmDoc(enabled, multicastConfig));
}
if (httpConfig != null) {
boolean enabled = httpEnabled && (httpConfig.isServerEnabled() || httpConfig.isClientEnabled());
advertisement.putServiceParam(PeerGroup.httpProtoClassID, getParmDoc(enabled, httpConfig));
}
if (http2Config != null) {
boolean enabled = http2Enabled && (http2Config.isServerEnabled() || http2Config.isClientEnabled());
advertisement.putServiceParam(PeerGroup.http2ProtoClassID, getParmDoc(enabled, http2Config));
}
if (relayConfig != null) {
boolean isOff = ((mode & RELAY_OFF) == RELAY_OFF) || (relayConfig.isServerEnabled() && relayConfig.isClientEnabled());
XMLDocument relayDoc = (XMLDocument) relayConfig.getDocument(MimeMediaType.XMLUTF8);
if (isOff) {
relayDoc.appendChild(relayDoc.createElement("isOff"));
}
advertisement.putServiceParam(PeerGroup.relayProtoClassID, relayDoc);
}
if (rdvConfig != null) {
XMLDocument rdvDoc = (XMLDocument) rdvConfig.getDocument(MimeMediaType.XMLUTF8);
advertisement.putServiceParam(PeerGroup.rendezvousClassID, rdvDoc);
}
if (principal == null) {
principal = System.getProperty("impl.membership.pse.authentication.principal", "JxtaCN");
}
if (password == null) {
password = System.getProperty("impl.membership.pse.authentication.password", "the!one!password");
}
if (cert != null) {
pseConf = createPSEAdv(cert);
} else {
pseConf = createPSEAdv(principal, password);
cert = pseConf.getCertificateChain();
}
if (pseConf != null) {
if (keyStoreLocation != null) {
if (keyStoreLocation.isAbsolute()) {
pseConf.setKeyStoreLocation(keyStoreLocation);
} else {
Logging.logCheckedWarning(LOG, "Keystore location set, but is not absolute: ", keyStoreLocation);
}
}
XMLDocument pseDoc = (XMLDocument) pseConf.getDocument(MimeMediaType.XMLUTF8);
advertisement.putServiceParam(PeerGroup.membershipClassID, pseDoc);
}
if (authenticationType == null) {
authenticationType = System.getProperty("impl.membership.pse.authentication.type", "StringAuthentication");
}
StdPeerGroup.setPSEMembershipServiceKeystoreInfoFactory(new StdPeerGroup.DefaultPSEMembershipServiceKeystoreInfoFactory(authenticationType, password));
if (peerid == null) {
peerid = IDFactory.newPeerID(PeerGroupID.worldPeerGroupID, cert[0].getPublicKey().getEncoded());
}
advertisement.setPeerID(peerid);
// if (proxyConfig != null && ((mode & PROXY_SERVER) == PROXY_SERVER)) {
// advertisement.putServiceParam(PeerGroup.proxyClassID, proxyConfig);
// }
if ((null != infraPeerGroupConfig) && (null != infraPeerGroupConfig.getPeerGroupID())
&& (ID.nullID != infraPeerGroupConfig.getPeerGroupID())
&& (PeerGroupID.defaultNetPeerGroupID != infraPeerGroupConfig.getPeerGroupID())) {
advertisement.setSvcConfigAdvertisement(PeerGroup.peerGroupClassID, infraPeerGroupConfig);
}
return advertisement;
}
|
ConfigParams function() { PlatformConfig advertisement = (PlatformConfig) AdvertisementFactory.newAdvertisement( PlatformConfig.getAdvertisementType()); advertisement.setName(name); advertisement.setDescription(description); if (tcpConfig != null) { boolean enabled = tcpEnabled && (tcpConfig.isServerEnabled() tcpConfig.isClientEnabled()); advertisement.putServiceParam(PeerGroup.tcpProtoClassID, getParmDoc(enabled, tcpConfig)); } if (multicastConfig != null) { boolean enabled = multicastConfig.getMulticastState(); advertisement.putServiceParam(PeerGroup.multicastProtoClassID, getParmDoc(enabled, multicastConfig)); } if (httpConfig != null) { boolean enabled = httpEnabled && (httpConfig.isServerEnabled() httpConfig.isClientEnabled()); advertisement.putServiceParam(PeerGroup.httpProtoClassID, getParmDoc(enabled, httpConfig)); } if (http2Config != null) { boolean enabled = http2Enabled && (http2Config.isServerEnabled() http2Config.isClientEnabled()); advertisement.putServiceParam(PeerGroup.http2ProtoClassID, getParmDoc(enabled, http2Config)); } if (relayConfig != null) { boolean isOff = ((mode & RELAY_OFF) == RELAY_OFF) (relayConfig.isServerEnabled() && relayConfig.isClientEnabled()); XMLDocument relayDoc = (XMLDocument) relayConfig.getDocument(MimeMediaType.XMLUTF8); if (isOff) { relayDoc.appendChild(relayDoc.createElement("isOff")); } advertisement.putServiceParam(PeerGroup.relayProtoClassID, relayDoc); } if (rdvConfig != null) { XMLDocument rdvDoc = (XMLDocument) rdvConfig.getDocument(MimeMediaType.XMLUTF8); advertisement.putServiceParam(PeerGroup.rendezvousClassID, rdvDoc); } if (principal == null) { principal = System.getProperty(STR, STR); } if (password == null) { password = System.getProperty(STR, STR); } if (cert != null) { pseConf = createPSEAdv(cert); } else { pseConf = createPSEAdv(principal, password); cert = pseConf.getCertificateChain(); } if (pseConf != null) { if (keyStoreLocation != null) { if (keyStoreLocation.isAbsolute()) { 
pseConf.setKeyStoreLocation(keyStoreLocation); } else { Logging.logCheckedWarning(LOG, STR, keyStoreLocation); } } XMLDocument pseDoc = (XMLDocument) pseConf.getDocument(MimeMediaType.XMLUTF8); advertisement.putServiceParam(PeerGroup.membershipClassID, pseDoc); } if (authenticationType == null) { authenticationType = System.getProperty(STR, STR); } StdPeerGroup.setPSEMembershipServiceKeystoreInfoFactory(new StdPeerGroup.DefaultPSEMembershipServiceKeystoreInfoFactory(authenticationType, password)); if (peerid == null) { peerid = IDFactory.newPeerID(PeerGroupID.worldPeerGroupID, cert[0].getPublicKey().getEncoded()); } advertisement.setPeerID(peerid); if ((null != infraPeerGroupConfig) && (null != infraPeerGroupConfig.getPeerGroupID()) && (ID.nullID != infraPeerGroupConfig.getPeerGroupID()) && (PeerGroupID.defaultNetPeerGroupID != infraPeerGroupConfig.getPeerGroupID())) { advertisement.setSvcConfigAdvertisement(PeerGroup.peerGroupClassID, infraPeerGroupConfig); } return advertisement; }
|
/**
* Returns a PlatformConfig which represents a platform configuration.
* <p/>Fine tuning is achieved through accessing each configured advertisement
* and achieved through accessing each configured advertisement and modifying
* each object directly.
*
* @return the PeerPlatformConfig Advertisement
*/
|
Returns a PlatformConfig which represents a platform configuration. Fine tuning is achieved through accessing each configured advertisement and achieved through accessing each configured advertisement and modifying each object directly
|
getPlatformConfig
|
{
"repo_name": "johnjianfang/jxse",
"path": "src/main/java/net/jxta/platform/NetworkConfigurator.java",
"license": "apache-2.0",
"size": 82829
}
|
[
"net.jxta.document.AdvertisementFactory",
"net.jxta.document.MimeMediaType",
"net.jxta.document.XMLDocument",
"net.jxta.id.IDFactory",
"net.jxta.impl.peergroup.StdPeerGroup",
"net.jxta.impl.protocol.PlatformConfig",
"net.jxta.logging.Logging",
"net.jxta.peergroup.PeerGroup",
"net.jxta.peergroup.PeerGroupID",
"net.jxta.protocol.ConfigParams"
] |
import net.jxta.document.AdvertisementFactory; import net.jxta.document.MimeMediaType; import net.jxta.document.XMLDocument; import net.jxta.id.IDFactory; import net.jxta.impl.peergroup.StdPeerGroup; import net.jxta.impl.protocol.PlatformConfig; import net.jxta.logging.Logging; import net.jxta.peergroup.PeerGroup; import net.jxta.peergroup.PeerGroupID; import net.jxta.protocol.ConfigParams;
|
import net.jxta.document.*; import net.jxta.id.*; import net.jxta.impl.peergroup.*; import net.jxta.impl.protocol.*; import net.jxta.logging.*; import net.jxta.peergroup.*; import net.jxta.protocol.*;
|
[
"net.jxta.document",
"net.jxta.id",
"net.jxta.impl",
"net.jxta.logging",
"net.jxta.peergroup",
"net.jxta.protocol"
] |
net.jxta.document; net.jxta.id; net.jxta.impl; net.jxta.logging; net.jxta.peergroup; net.jxta.protocol;
| 1,284,819 |
protected BufferedImage getBufferedImageDXT1(final ByteBuffer bb, final boolean alpha) {
final int[] pixels = new int[16];
final BufferedImage result =
new BufferedImage(this.width, this.height, BufferedImage.TYPE_INT_ARGB_PRE);
final int numTilesWide = this.width / 4;
final int numTilesHigh = this.height / 4;
for (int i = 0; i < numTilesHigh; i++) {
for (int j = 0; j < numTilesWide; j++) {
final short c0 = bb.getShort();
final short c1 = bb.getShort();
int uC0 = c0;
int uC1 = c1;
if (uC0 < 0) {
uC0 = 65536 + c0;
}
if (uC1 < 0) {
uC1 = 65536 + c1;
}
final Color[] lookupTable = expandLookupTableDXT1(c0, c1, alpha);
final int colorData = bb.getInt();
for (int k = pixels.length - 1; k >= 0; k--) {
final int colorCode = (colorData >>> (k * 2)) & 0x03;
int alphaValue = 255;
if (alpha && (colorCode == 3) && (uC0 < uC1)) {
alphaValue = 0;
}
pixels[k] = (alphaValue << 24) | getPixel888(lookupTable[colorCode]);
}
result.setRGB(j * 4, i * 4, 4, 4, pixels, 0, 4);
}
}
return result;
}
|
BufferedImage function(final ByteBuffer bb, final boolean alpha) { final int[] pixels = new int[16]; final BufferedImage result = new BufferedImage(this.width, this.height, BufferedImage.TYPE_INT_ARGB_PRE); final int numTilesWide = this.width / 4; final int numTilesHigh = this.height / 4; for (int i = 0; i < numTilesHigh; i++) { for (int j = 0; j < numTilesWide; j++) { final short c0 = bb.getShort(); final short c1 = bb.getShort(); int uC0 = c0; int uC1 = c1; if (uC0 < 0) { uC0 = 65536 + c0; } if (uC1 < 0) { uC1 = 65536 + c1; } final Color[] lookupTable = expandLookupTableDXT1(c0, c1, alpha); final int colorData = bb.getInt(); for (int k = pixels.length - 1; k >= 0; k--) { final int colorCode = (colorData >>> (k * 2)) & 0x03; int alphaValue = 255; if (alpha && (colorCode == 3) && (uC0 < uC1)) { alphaValue = 0; } pixels[k] = (alphaValue << 24) getPixel888(lookupTable[colorCode]); } result.setRGB(j * 4, i * 4, 4, 4, pixels, 0, 4); } } return result; }
|
/**
* DXT1 doesn't look right. Alpha is odd (INV_Misc_Bag_09_Blue.blp)
*
* @param bb
* @param alpha
* @return
*/
|
DXT1 doesn't look right. Alpha is odd (INV_Misc_Bag_09_Blue.blp)
|
getBufferedImageDXT1
|
{
"repo_name": "JMaNGOS/JMaNGOS",
"path": "Tools/src/main/java/org/jmangos/tools/blp/BLP.java",
"license": "gpl-2.0",
"size": 20219
}
|
[
"java.awt.image.BufferedImage",
"java.nio.ByteBuffer"
] |
import java.awt.image.BufferedImage; import java.nio.ByteBuffer;
|
import java.awt.image.*; import java.nio.*;
|
[
"java.awt",
"java.nio"
] |
java.awt; java.nio;
| 118,544 |
private static boolean compareArrays(Object array1, Object array2, List<String> path, Deque<DualKey> toCompare,
Set<DualKey> visited) {
int len = Array.getLength(array1);
if (len != Array.getLength(array2)) {
return false;
}
for (int i = 0; i < len; i++) {
DualKey dk = new DualKey(path, Array.get(array1, i), Array.get(array2, i));
if (!visited.contains(dk)) {
toCompare.addFirst(dk);
}
}
return true;
}
|
static boolean function(Object array1, Object array2, List<String> path, Deque<DualKey> toCompare, Set<DualKey> visited) { int len = Array.getLength(array1); if (len != Array.getLength(array2)) { return false; } for (int i = 0; i < len; i++) { DualKey dk = new DualKey(path, Array.get(array1, i), Array.get(array2, i)); if (!visited.contains(dk)) { toCompare.addFirst(dk); } } return true; }
|
/**
* Deeply compare to Arrays []. Both arrays must be of the same type, same
* length, and all elements within the arrays must be deeply equal in order
* to return true.
*
* @param array1 [] type (Object[], String[], etc.)
* @param array2 [] type (Object[], String[], etc.)
* @param path the path to the arrays to compare
* @param toCompare add items to compare to the Stack (Stack versus recursion)
* @param visited Set of objects already compared (prevents cycles)
* @return true if the two arrays are the same length and contain deeply
* equivalent items.
*/
|
Deeply compare to Arrays []. Both arrays must be of the same type, same length, and all elements within the arrays must be deeply equal in order to return true
|
compareArrays
|
{
"repo_name": "xasx/assertj-core",
"path": "src/main/java/org/assertj/core/internal/DeepDifference.java",
"license": "apache-2.0",
"size": 28959
}
|
[
"java.lang.reflect.Array",
"java.util.Deque",
"java.util.List",
"java.util.Set"
] |
import java.lang.reflect.Array; import java.util.Deque; import java.util.List; import java.util.Set;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 1,568,100 |
void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
byte[] qualifier, Long timestamp, Object inputValue,
Attribute... attributes) throws IOException;
|
void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator, byte[] qualifier, Long timestamp, Object inputValue, Attribute... attributes) throws IOException;
|
/**
* Sends a Mutation to the table. The mutations will be buffered and sent over
* the wire as part of a batch.
*
* @param rowKey identifying the row to write. Nothing gets written when null.
* @param tableMutator used to modify the underlying HBase table. Caller is
* responsible to pass a mutator for the table that actually has this
* column.
* @param qualifier column qualifier. Nothing gets written when null.
* @param timestamp version timestamp. When null the server timestamp will be
* used.
* @param attributes attributes for the mutation that are used by the
* coprocessor to set/read the cell tags.
* @param inputValue the value to write to the rowKey and column qualifier.
* Nothing gets written when null.
* @throws IOException if there is any exception encountered while doing
* store operation(sending mutation to the table).
*/
|
Sends a Mutation to the table. The mutations will be buffered and sent over the wire as part of a batch
|
store
|
{
"repo_name": "jaypatil/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java",
"license": "gpl-3.0",
"size": 6397
}
|
[
"java.io.IOException",
"org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute"
] |
import java.io.IOException; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
|
import java.io.*; import org.apache.hadoop.yarn.server.timelineservice.storage.flow.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,201,598 |
private void __renderDocument( OutputStream out ) throws Exception
{
HttpServletRequest request = (HttpServletRequest) pageContext
.getRequest( );
// Get document file path
String documentFile = ParameterAccessor.getReportDocument( request,
viewer.getReportDocument( ), false );
IReportDocument doc = ReportEngineService.getInstance( )
.openReportDocument( null, documentFile,
BirtTagUtil.getModuleOptions( viewer ) );
try
{
String realReportletId = viewer.getReportletId( );
if ( realReportletId == null )
{
if ( viewer.getBookmark( ) != null
&& "true".equalsIgnoreCase( viewer.getIsReportlet( ) ) ) //$NON-NLS-1$
{
realReportletId = viewer.getBookmark( );
}
}
if ( realReportletId != null )
{
// Render the reportlet
ReportEngineService.getInstance( ).renderReportlet( out, doc,
this.options, realReportletId, null );
}
else
{
// Render the report document file
ReportEngineService.getInstance( ).renderReport( out, doc,
viewer.getPageNum( ), viewer.getPageRange( ),
this.options, null );
}
}
finally
{
if ( doc != null )
doc.close( );
}
}
|
void function( OutputStream out ) throws Exception { HttpServletRequest request = (HttpServletRequest) pageContext .getRequest( ); String documentFile = ParameterAccessor.getReportDocument( request, viewer.getReportDocument( ), false ); IReportDocument doc = ReportEngineService.getInstance( ) .openReportDocument( null, documentFile, BirtTagUtil.getModuleOptions( viewer ) ); try { String realReportletId = viewer.getReportletId( ); if ( realReportletId == null ) { if ( viewer.getBookmark( ) != null && "true".equalsIgnoreCase( viewer.getIsReportlet( ) ) ) { realReportletId = viewer.getBookmark( ); } } if ( realReportletId != null ) { ReportEngineService.getInstance( ).renderReportlet( out, doc, this.options, realReportletId, null ); } else { ReportEngineService.getInstance( ).renderReport( out, doc, viewer.getPageNum( ), viewer.getPageRange( ), this.options, null ); } } finally { if ( doc != null ) doc.close( ); } }
|
/**
* Render context from document file
*
* @param out
* @throws Exception
*/
|
Render context from document file
|
__renderDocument
|
{
"repo_name": "sguan-actuate/birt",
"path": "viewer/org.eclipse.birt.report.viewer/birt/WEB-INF/classes/org/eclipse/birt/report/taglib/ReportTag.java",
"license": "epl-1.0",
"size": 20389
}
|
[
"java.io.OutputStream",
"javax.servlet.http.HttpServletRequest",
"org.eclipse.birt.report.engine.api.IReportDocument",
"org.eclipse.birt.report.service.ReportEngineService",
"org.eclipse.birt.report.taglib.util.BirtTagUtil",
"org.eclipse.birt.report.utility.ParameterAccessor"
] |
import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import org.eclipse.birt.report.engine.api.IReportDocument; import org.eclipse.birt.report.service.ReportEngineService; import org.eclipse.birt.report.taglib.util.BirtTagUtil; import org.eclipse.birt.report.utility.ParameterAccessor;
|
import java.io.*; import javax.servlet.http.*; import org.eclipse.birt.report.engine.api.*; import org.eclipse.birt.report.service.*; import org.eclipse.birt.report.taglib.util.*; import org.eclipse.birt.report.utility.*;
|
[
"java.io",
"javax.servlet",
"org.eclipse.birt"
] |
java.io; javax.servlet; org.eclipse.birt;
| 2,107,216 |
protected boolean isEligibleAspectBean(String beanName) {
if (this.includePatterns == null) {
return true;
}
else {
for (Pattern pattern : this.includePatterns) {
if (pattern.matcher(beanName).matches()) {
return true;
}
}
return false;
}
}
private class BeanFactoryAspectJAdvisorsBuilderAdapter extends BeanFactoryAspectJAdvisorsBuilder {
public BeanFactoryAspectJAdvisorsBuilderAdapter(
ListableBeanFactory beanFactory, AspectJAdvisorFactory advisorFactory) {
super(beanFactory, advisorFactory);
}
|
boolean function(String beanName) { if (this.includePatterns == null) { return true; } else { for (Pattern pattern : this.includePatterns) { if (pattern.matcher(beanName).matches()) { return true; } } return false; } } private class BeanFactoryAspectJAdvisorsBuilderAdapter extends BeanFactoryAspectJAdvisorsBuilder { public BeanFactoryAspectJAdvisorsBuilderAdapter( ListableBeanFactory beanFactory, AspectJAdvisorFactory advisorFactory) { super(beanFactory, advisorFactory); }
|
/**
* Check whether the given aspect bean is eligible for auto-proxying.
* <p>If no <aop:include> elements were used then "includePatterns" will be
* <code>null</code> and all beans are included. If "includePatterns" is non-null,
* then one of the patterns must match.
*/
|
Check whether the given aspect bean is eligible for auto-proxying. If no <aop:include> elements were used then "includePatterns" will be <code>null</code> and all beans are included. If "includePatterns" is non-null, then one of the patterns must match
|
isEligibleAspectBean
|
{
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "tiger/src/org/springframework/aop/aspectj/annotation/AnnotationAwareAspectJAutoProxyCreator.java",
"license": "apache-2.0",
"size": 5309
}
|
[
"java.util.regex.Pattern",
"org.springframework.beans.factory.ListableBeanFactory"
] |
import java.util.regex.Pattern; import org.springframework.beans.factory.ListableBeanFactory;
|
import java.util.regex.*; import org.springframework.beans.factory.*;
|
[
"java.util",
"org.springframework.beans"
] |
java.util; org.springframework.beans;
| 1,398,627 |
public static void setHideOfflineContacts(ContentResolver contentResolver,
long providerId, boolean hideOfflineContacts) {
putBooleanValue(contentResolver, providerId, HIDE_OFFLINE_CONTACTS,
hideOfflineContacts);
}
|
static void function(ContentResolver contentResolver, long providerId, boolean hideOfflineContacts) { putBooleanValue(contentResolver, providerId, HIDE_OFFLINE_CONTACTS, hideOfflineContacts); }
|
/**
* A convenience method to set whether or not the offline contacts should be hided
*
* @param contentResolver The ContentResolver to use to access the setting table
* @param hideOfflineContacts Whether the offline contacts should be hided
*/
|
A convenience method to set whether or not the offline contacts should be hided
|
setHideOfflineContacts
|
{
"repo_name": "joechen2010/Gibberbot",
"path": "src/info/guardianproject/otr/app/im/provider/Imps.java",
"license": "apache-2.0",
"size": 92941
}
|
[
"android.content.ContentResolver"
] |
import android.content.ContentResolver;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 2,783,472 |
public CustomProcessorBolt config (Map<String, Object> config) {
this.config = config;
return this;
}
|
CustomProcessorBolt function (Map<String, Object> config) { this.config = config; return this; }
|
/**
* Associate config as a Map of String to Object
* @param config
* @return
*/
|
Associate config as a Map of String to Object
|
config
|
{
"repo_name": "hortonworks/streamline",
"path": "streams/runners/storm/runtime/src/main/java/com/hortonworks/streamline/streams/runtime/storm/bolt/CustomProcessorBolt.java",
"license": "apache-2.0",
"size": 11517
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,000,790 |
public void toPNG(ElementDataset dataset, File directory,
ElementDataImageConfiguration configuration,
ProgressHandler progressHandler) throws IOException;
|
void function(ElementDataset dataset, File directory, ElementDataImageConfiguration configuration, ProgressHandler progressHandler) throws IOException;
|
/**
* Writes all the {@code ElementData} into PNG files at {@code directory}.
*
* @param dataset the dataset to export.
* @param directory the directory to save the data.
* @param configuration the image configuration.
* @param progressHandler the callback object to notify progress.
* @throws IOException if an error occurs during the operation.
*/
|
Writes all the ElementData into PNG files at directory
|
toPNG
|
{
"repo_name": "sing-group/la-images",
"path": "la-images-gui/src/main/java/es/uvigo/ei/sing/laimages/gui/export/ElementDatasetToPngExporter.java",
"license": "gpl-3.0",
"size": 2692
}
|
[
"es.uvigo.ei.sing.laimages.core.entities.datasets.ElementDataset",
"es.uvigo.ei.sing.laimages.core.util.ProgressHandler",
"es.uvigo.ei.sing.laimages.gui.jzy3d.ElementDataImageConfiguration",
"java.io.File",
"java.io.IOException"
] |
import es.uvigo.ei.sing.laimages.core.entities.datasets.ElementDataset; import es.uvigo.ei.sing.laimages.core.util.ProgressHandler; import es.uvigo.ei.sing.laimages.gui.jzy3d.ElementDataImageConfiguration; import java.io.File; import java.io.IOException;
|
import es.uvigo.ei.sing.laimages.core.entities.datasets.*; import es.uvigo.ei.sing.laimages.core.util.*; import es.uvigo.ei.sing.laimages.gui.jzy3d.*; import java.io.*;
|
[
"es.uvigo.ei",
"java.io"
] |
es.uvigo.ei; java.io;
| 1,406,453 |
if (firstArg.isVariable()) {
if ((firstArg.toString().charAt(0) >= 'A') && (firstArg.toString().charAt(0) <= 'Z')) {
if (((ISymbol) firstArg).hasOrderlessAttribute()) {
return F.NIL;
}
if (firstArg.equals(S.Print)) {
// Print function has "side-effects"
return F.NIL;
}
// probably a built-in function
return firstArg;
}
if (Config.SERVER_MODE && (firstArg.toString().charAt(0) == '$')) {
// a user-modifiable variable in server mode is not allowed
return F.NIL;
}
} else {
// not a symbol
return F.NIL;
}
// a variable which could be replaced by a slot:
IExpr result = fMap.get(firstArg);
if (result == null) {
result = Slot(F.ZZ(++fSlotCounter));
fMap.put(firstArg, result);
fVariableList.add(firstArg);
}
return result != null ? result : F.NIL;
}
|
if (firstArg.isVariable()) { if ((firstArg.toString().charAt(0) >= 'A') && (firstArg.toString().charAt(0) <= 'Z')) { if (((ISymbol) firstArg).hasOrderlessAttribute()) { return F.NIL; } if (firstArg.equals(S.Print)) { return F.NIL; } return firstArg; } if (Config.SERVER_MODE && (firstArg.toString().charAt(0) == '$')) { return F.NIL; } } else { return F.NIL; } IExpr result = fMap.get(firstArg); if (result == null) { result = Slot(F.ZZ(++fSlotCounter)); fMap.put(firstArg, result); fVariableList.add(firstArg); } return result != null ? result : F.NIL; }
|
/**
* For every given argument return the associated unique slot from the internal Map
*
* @return <code>F.NIL</code>
*/
|
For every given argument return the associated unique slot from the internal Map
|
apply
|
{
"repo_name": "axkr/symja_android_library",
"path": "symja_android_library/matheclipse-core/src/main/java/org/matheclipse/core/generic/UnaryVariable2Slot.java",
"license": "gpl-3.0",
"size": 1890
}
|
[
"org.matheclipse.core.basic.Config",
"org.matheclipse.core.expression.F",
"org.matheclipse.core.interfaces.IExpr",
"org.matheclipse.core.interfaces.ISymbol"
] |
import org.matheclipse.core.basic.Config; import org.matheclipse.core.expression.F; import org.matheclipse.core.interfaces.IExpr; import org.matheclipse.core.interfaces.ISymbol;
|
import org.matheclipse.core.basic.*; import org.matheclipse.core.expression.*; import org.matheclipse.core.interfaces.*;
|
[
"org.matheclipse.core"
] |
org.matheclipse.core;
| 2,194,035 |
public void afterRegionLoss(SystemMemberRegionEvent event) {
adminDS.handleRegionLossEvent(event);
}
|
void function(SystemMemberRegionEvent event) { adminDS.handleRegionLossEvent(event); }
|
/**
* See SystemMemberCacheListener#afterRegionLoss(SystemMemberCacheEvent)
*/
|
See SystemMemberCacheListener#afterRegionLoss(SystemMemberCacheEvent)
|
afterRegionLoss
|
{
"repo_name": "robertgeiger/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/admin/jmx/internal/AdminDistributedSystemJmxImpl.java",
"license": "apache-2.0",
"size": 85628
}
|
[
"com.gemstone.gemfire.admin.SystemMemberRegionEvent"
] |
import com.gemstone.gemfire.admin.SystemMemberRegionEvent;
|
import com.gemstone.gemfire.admin.*;
|
[
"com.gemstone.gemfire"
] |
com.gemstone.gemfire;
| 2,298,295 |
private static void respond(Request request, List<LocationSuggestion> suggestions, Callback callback) {
callback.onSuggestionsReady(request, new Response(suggestions));
}
|
static void function(Request request, List<LocationSuggestion> suggestions, Callback callback) { callback.onSuggestionsReady(request, new Response(suggestions)); }
|
/**
* Executes the suggestions callback.<p>
*
* @param request the suggestions request
* @param suggestions the suggestions
* @param callback the callback
*/
|
Executes the suggestions callback
|
respond
|
{
"repo_name": "victos/opencms-core",
"path": "src-gwt/org/opencms/gwt/client/ui/input/location/CmsLocationSuggestOracle.java",
"license": "lgpl-2.1",
"size": 8251
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,216,441 |
public CounterLocalService getCounterLocalService() {
return counterLocalService;
}
|
CounterLocalService function() { return counterLocalService; }
|
/**
* Returns the counter local service.
*
* @return the counter local service
*/
|
Returns the counter local service
|
getCounterLocalService
|
{
"repo_name": "fraunhoferfokus/govapps",
"path": "data-portlet/src/main/java/de/fraunhofer/fokus/movepla/service/base/CategoryLocalServiceBaseImpl.java",
"license": "bsd-3-clause",
"size": 42077
}
|
[
"com.liferay.counter.service.CounterLocalService"
] |
import com.liferay.counter.service.CounterLocalService;
|
import com.liferay.counter.service.*;
|
[
"com.liferay.counter"
] |
com.liferay.counter;
| 2,022,533 |
PropertyRecord getPropertyFromCache( long id );
|
PropertyRecord getPropertyFromCache( long id );
|
/**
* Gets a cached {@link PropertyRecord} of a specific {@code id}, see {@link #putPropertiesToCache(Collection)}.
*
* @param id the property record id to look for.
* @return cached {@link PropertyRecord} {@link PropertyRecord#getId() id}, or {@code null} if not found.
*/
|
Gets a cached <code>PropertyRecord</code> of a specific id, see <code>#putPropertiesToCache(Collection)</code>
|
getPropertyFromCache
|
{
"repo_name": "HuangLS/neo4j",
"path": "community/consistency-check/src/main/java/org/neo4j/consistency/checking/cache/CacheAccess.java",
"license": "apache-2.0",
"size": 7713
}
|
[
"org.neo4j.kernel.impl.store.record.PropertyRecord"
] |
import org.neo4j.kernel.impl.store.record.PropertyRecord;
|
import org.neo4j.kernel.impl.store.record.*;
|
[
"org.neo4j.kernel"
] |
org.neo4j.kernel;
| 559,859 |
@WebMethod
@WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202102")
@RequestWrapper(localName = "createMakegoods", targetNamespace = "https://www.google.com/apis/ads/publisher/v202102", className = "com.google.api.ads.admanager.jaxws.v202102.ProposalLineItemServiceInterfacecreateMakegoods")
@ResponseWrapper(localName = "createMakegoodsResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202102", className = "com.google.api.ads.admanager.jaxws.v202102.ProposalLineItemServiceInterfacecreateMakegoodsResponse")
public List<ProposalLineItem> createMakegoods(
@WebParam(name = "makegoodInfos", targetNamespace = "https://www.google.com/apis/ads/publisher/v202102")
List<ProposalLineItemMakegoodInfo> makegoodInfos)
throws ApiException_Exception
;
|
@WebResult(name = "rval", targetNamespace = STRcreateMakegoodsSTRhttps: @ResponseWrapper(localName = "createMakegoodsResponseSTRhttps: List<ProposalLineItem> function( @WebParam(name = "makegoodInfosSTRhttps: List<ProposalLineItemMakegoodInfo> makegoodInfos) throws ApiException_Exception ;
|
/**
*
* Creates makegood proposal line items given the specifications provided.
*
*
* @param makegoodInfos
* @return
* returns java.util.List<com.google.api.ads.admanager.jaxws.v202102.ProposalLineItem>
* @throws ApiException_Exception
*/
|
Creates makegood proposal line items given the specifications provided
|
createMakegoods
|
{
"repo_name": "googleads/googleads-java-lib",
"path": "modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202102/ProposalLineItemServiceInterface.java",
"license": "apache-2.0",
"size": 11224
}
|
[
"java.util.List",
"javax.jws.WebParam",
"javax.jws.WebResult",
"javax.xml.ws.ResponseWrapper"
] |
import java.util.List; import javax.jws.WebParam; import javax.jws.WebResult; import javax.xml.ws.ResponseWrapper;
|
import java.util.*; import javax.jws.*; import javax.xml.ws.*;
|
[
"java.util",
"javax.jws",
"javax.xml"
] |
java.util; javax.jws; javax.xml;
| 1,837,340 |
private void mockDAOForQuery() throws Exception {
when(getDbFacadeMockInstance().getVmDao()).thenReturn(vmDAO);
when(getQueryParameters().getId()).thenReturn(vmId);
}
|
void function() throws Exception { when(getDbFacadeMockInstance().getVmDao()).thenReturn(vmDAO); when(getQueryParameters().getId()).thenReturn(vmId); }
|
/**
* Initialize DAO to be used in query.
*
* @throws Exception
*/
|
Initialize DAO to be used in query
|
mockDAOForQuery
|
{
"repo_name": "halober/ovirt-engine",
"path": "backend/manager/modules/bll/src/test/java/org/ovirt/engine/core/bll/GetNextAvailableDiskAliasNameByVMIdQueryTest.java",
"license": "apache-2.0",
"size": 4344
}
|
[
"org.mockito.Mockito"
] |
import org.mockito.Mockito;
|
import org.mockito.*;
|
[
"org.mockito"
] |
org.mockito;
| 44,664 |
CacheBuilder<K, V> strongValues() {
return setValueStrength(Strength.STRONG);
}
|
CacheBuilder<K, V> strongValues() { return setValueStrength(Strength.STRONG); }
|
/**
* Specifies that each value (not key) stored in the cache should be strongly referenced.
*
* @throws IllegalStateException if the value strength was already set
*/
|
Specifies that each value (not key) stored in the cache should be strongly referenced
|
strongValues
|
{
"repo_name": "hceylan/guava",
"path": "guava/src/com/google/common/cache/CacheBuilder.java",
"license": "apache-2.0",
"size": 37542
}
|
[
"com.google.common.cache.LocalCache"
] |
import com.google.common.cache.LocalCache;
|
import com.google.common.cache.*;
|
[
"com.google.common"
] |
com.google.common;
| 1,639,005 |
public void setExternalVariable(String name, String value) {
eval.setExternalVariable(new QName(name), new XdmAtomicValue(value));
}
|
void function(String name, String value) { eval.setExternalVariable(new QName(name), new XdmAtomicValue(value)); }
|
/**
* Set the external variable. The value should be a String.
*
* @param name Name of the external variable in the XQuery
* @param value Value for the external variable
*/
|
Set the external variable. The value should be a String
|
setExternalVariable
|
{
"repo_name": "dmcbeath/spark-xml-utils",
"path": "src/main/java/com/elsevier/spark_xml_utils/xquery/XQueryProcessor.java",
"license": "apache-2.0",
"size": 10869
}
|
[
"net.sf.saxon.s9api.QName",
"net.sf.saxon.s9api.XdmAtomicValue"
] |
import net.sf.saxon.s9api.QName; import net.sf.saxon.s9api.XdmAtomicValue;
|
import net.sf.saxon.s9api.*;
|
[
"net.sf.saxon"
] |
net.sf.saxon;
| 1,792,492 |
public JRVariable[] getVariables();
/**
* Retrieves the run direction of this crosstab
* @return a value representing one of the run direction constants in {@link RunDirectionEnum}
|
JRVariable[] function(); /** * Retrieves the run direction of this crosstab * @return a value representing one of the run direction constants in {@link RunDirectionEnum}
|
/**
* Returns the variables defined for the crosstab.
*
* @return variables defined for the crosstab
* @see JRCrosstabGroup#getVariable()
* @see JRCrosstabMeasure#getVariable()
* @see #VARIABLE_ROW_COUNT
* @see #VARIABLE_COLUMN_COUNT
*/
|
Returns the variables defined for the crosstab
|
getVariables
|
{
"repo_name": "MHTaleb/Encologim",
"path": "lib/JasperReport/src/net/sf/jasperreports/crosstabs/JRCrosstab.java",
"license": "gpl-3.0",
"size": 11531
}
|
[
"net.sf.jasperreports.engine.JRVariable",
"net.sf.jasperreports.engine.type.RunDirectionEnum"
] |
import net.sf.jasperreports.engine.JRVariable; import net.sf.jasperreports.engine.type.RunDirectionEnum;
|
import net.sf.jasperreports.engine.*; import net.sf.jasperreports.engine.type.*;
|
[
"net.sf.jasperreports"
] |
net.sf.jasperreports;
| 2,005,768 |
@DELETE
@Path("events")
public Response deleteEvents() throws Exception {
final Lock writeLock = eventsLock.writeLock();
writeLock.lock();
try {
EVENTS.clear();
return Response.ok().build();
} finally {
writeLock.unlock();
}
}
|
@Path(STR) Response function() throws Exception { final Lock writeLock = eventsLock.writeLock(); writeLock.lock(); try { EVENTS.clear(); return Response.ok().build(); } finally { writeLock.unlock(); } }
|
/**
* Delete all stored events.
*
* @return A {@link Response} indicating the status of the deletion attempt.
* @throws Exception
* If any errors occur during the deletion.
*/
|
Delete all stored events
|
deleteEvents
|
{
"repo_name": "jrh3k5/flume-http-server-sink",
"path": "src/main/java/com/github/jrh3k5/flume/sink/http/server/FlumeSinkServerResource.java",
"license": "apache-2.0",
"size": 4073
}
|
[
"java.util.concurrent.locks.Lock",
"javax.ws.rs.Path",
"javax.ws.rs.core.Response"
] |
import java.util.concurrent.locks.Lock; import javax.ws.rs.Path; import javax.ws.rs.core.Response;
|
import java.util.concurrent.locks.*; import javax.ws.rs.*; import javax.ws.rs.core.*;
|
[
"java.util",
"javax.ws"
] |
java.util; javax.ws;
| 2,409,644 |
public void setRatedAt(Date ratedAt)
{
this.ratedAt = ratedAt;
}
|
void function(Date ratedAt) { this.ratedAt = ratedAt; }
|
/**
* Set the date time the current authenticated user rated the item of content.
*
* @param ratedAt
*/
|
Set the date time the current authenticated user rated the item of content
|
setRatedAt
|
{
"repo_name": "Alfresco/spring-social-alfresco",
"path": "src/main/java/org/springframework/social/alfresco/api/entities/Rating.java",
"license": "apache-2.0",
"size": 3985
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,049,011 |
public static DataBuffer createBuffer(int dataType, int size, int numBanks)
{
switch (dataType)
{
case DataBuffer.TYPE_BYTE:
return new DataBufferByte(size, numBanks);
case DataBuffer.TYPE_SHORT:
return new DataBufferShort(size, numBanks);
case DataBuffer.TYPE_USHORT:
return new DataBufferUShort(size, numBanks);
case DataBuffer.TYPE_INT:
return new DataBufferInt(size, numBanks);
case DataBuffer.TYPE_FLOAT:
return new DataBufferFloat(size, numBanks);
case DataBuffer.TYPE_DOUBLE:
return new DataBufferDouble(size, numBanks);
default:
throw new UnsupportedOperationException();
}
}
|
static DataBuffer function(int dataType, int size, int numBanks) { switch (dataType) { case DataBuffer.TYPE_BYTE: return new DataBufferByte(size, numBanks); case DataBuffer.TYPE_SHORT: return new DataBufferShort(size, numBanks); case DataBuffer.TYPE_USHORT: return new DataBufferUShort(size, numBanks); case DataBuffer.TYPE_INT: return new DataBufferInt(size, numBanks); case DataBuffer.TYPE_FLOAT: return new DataBufferFloat(size, numBanks); case DataBuffer.TYPE_DOUBLE: return new DataBufferDouble(size, numBanks); default: throw new UnsupportedOperationException(); } }
|
/**
* Create a data buffer of a particular type.
*
* @param dataType the desired data type of the buffer.
* @param size the size of the data buffer bank
* @param numBanks the number of banks the buffer should have
*/
|
Create a data buffer of a particular type
|
createBuffer
|
{
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/gnu/java/awt/Buffers.java",
"license": "gpl-2.0",
"size": 7459
}
|
[
"java.awt.image.DataBuffer",
"java.awt.image.DataBufferByte",
"java.awt.image.DataBufferDouble",
"java.awt.image.DataBufferFloat",
"java.awt.image.DataBufferInt",
"java.awt.image.DataBufferShort",
"java.awt.image.DataBufferUShort"
] |
import java.awt.image.DataBuffer; import java.awt.image.DataBufferByte; import java.awt.image.DataBufferDouble; import java.awt.image.DataBufferFloat; import java.awt.image.DataBufferInt; import java.awt.image.DataBufferShort; import java.awt.image.DataBufferUShort;
|
import java.awt.image.*;
|
[
"java.awt"
] |
java.awt;
| 39,228 |
public int processPacket(byte[] in, int inOff, int inLen, byte[] output, int outOff)
throws IllegalStateException, InvalidCipherTextException, DataLengthException
{
// TODO: handle null keyParam (e.g. via RepeatedKeySpec)
// Need to keep the CTR and CBC Mac parts around and reset
if (keyParam == null)
{
throw new IllegalStateException("CCM cipher unitialized.");
}
int n = nonce.length;
int q = 15 - n;
if (q < 4)
{
int limitLen = 1 << (8 * q);
if (inLen >= limitLen)
{
throw new IllegalStateException("CCM packet too large for choice of q.");
}
}
byte[] iv = new byte[blockSize];
iv[0] = (byte)((q - 1) & 0x7);
System.arraycopy(nonce, 0, iv, 1, nonce.length);
BlockCipher ctrCipher = new SICBlockCipher(cipher);
ctrCipher.init(forEncryption, new ParametersWithIV(keyParam, iv));
int outputLen;
int inIndex = inOff;
int outIndex = outOff;
if (forEncryption)
{
outputLen = inLen + macSize;
if (output.length < (outputLen + outOff))
{
throw new OutputLengthException("Output buffer too short.");
}
calculateMac(in, inOff, inLen, macBlock);
ctrCipher.processBlock(macBlock, 0, macBlock, 0); // S0
while (inIndex < (inOff + inLen - blockSize)) // S1...
{
ctrCipher.processBlock(in, inIndex, output, outIndex);
outIndex += blockSize;
inIndex += blockSize;
}
byte[] block = new byte[blockSize];
System.arraycopy(in, inIndex, block, 0, inLen + inOff - inIndex);
ctrCipher.processBlock(block, 0, block, 0);
System.arraycopy(block, 0, output, outIndex, inLen + inOff - inIndex);
System.arraycopy(macBlock, 0, output, outOff + inLen, macSize);
}
else
{
if (inLen < macSize)
{
throw new InvalidCipherTextException("data too short");
}
outputLen = inLen - macSize;
if (output.length < (outputLen + outOff))
{
throw new OutputLengthException("Output buffer too short.");
}
System.arraycopy(in, inOff + outputLen, macBlock, 0, macSize);
ctrCipher.processBlock(macBlock, 0, macBlock, 0);
for (int i = macSize; i != macBlock.length; i++)
{
macBlock[i] = 0;
}
while (inIndex < (inOff + outputLen - blockSize))
{
ctrCipher.processBlock(in, inIndex, output, outIndex);
outIndex += blockSize;
inIndex += blockSize;
}
byte[] block = new byte[blockSize];
System.arraycopy(in, inIndex, block, 0, outputLen - (inIndex - inOff));
ctrCipher.processBlock(block, 0, block, 0);
System.arraycopy(block, 0, output, outIndex, outputLen - (inIndex - inOff));
byte[] calculatedMacBlock = new byte[blockSize];
calculateMac(output, outOff, outputLen, calculatedMacBlock);
if (!Arrays.constantTimeAreEqual(macBlock, calculatedMacBlock))
{
throw new InvalidCipherTextException("mac check in CCM failed");
}
}
return outputLen;
}
|
int function(byte[] in, int inOff, int inLen, byte[] output, int outOff) throws IllegalStateException, InvalidCipherTextException, DataLengthException { if (keyParam == null) { throw new IllegalStateException(STR); } int n = nonce.length; int q = 15 - n; if (q < 4) { int limitLen = 1 << (8 * q); if (inLen >= limitLen) { throw new IllegalStateException(STR); } } byte[] iv = new byte[blockSize]; iv[0] = (byte)((q - 1) & 0x7); System.arraycopy(nonce, 0, iv, 1, nonce.length); BlockCipher ctrCipher = new SICBlockCipher(cipher); ctrCipher.init(forEncryption, new ParametersWithIV(keyParam, iv)); int outputLen; int inIndex = inOff; int outIndex = outOff; if (forEncryption) { outputLen = inLen + macSize; if (output.length < (outputLen + outOff)) { throw new OutputLengthException(STR); } calculateMac(in, inOff, inLen, macBlock); ctrCipher.processBlock(macBlock, 0, macBlock, 0); while (inIndex < (inOff + inLen - blockSize)) { ctrCipher.processBlock(in, inIndex, output, outIndex); outIndex += blockSize; inIndex += blockSize; } byte[] block = new byte[blockSize]; System.arraycopy(in, inIndex, block, 0, inLen + inOff - inIndex); ctrCipher.processBlock(block, 0, block, 0); System.arraycopy(block, 0, output, outIndex, inLen + inOff - inIndex); System.arraycopy(macBlock, 0, output, outOff + inLen, macSize); } else { if (inLen < macSize) { throw new InvalidCipherTextException(STR); } outputLen = inLen - macSize; if (output.length < (outputLen + outOff)) { throw new OutputLengthException(STR); } System.arraycopy(in, inOff + outputLen, macBlock, 0, macSize); ctrCipher.processBlock(macBlock, 0, macBlock, 0); for (int i = macSize; i != macBlock.length; i++) { macBlock[i] = 0; } while (inIndex < (inOff + outputLen - blockSize)) { ctrCipher.processBlock(in, inIndex, output, outIndex); outIndex += blockSize; inIndex += blockSize; } byte[] block = new byte[blockSize]; System.arraycopy(in, inIndex, block, 0, outputLen - (inIndex - inOff)); ctrCipher.processBlock(block, 0, block, 0); 
System.arraycopy(block, 0, output, outIndex, outputLen - (inIndex - inOff)); byte[] calculatedMacBlock = new byte[blockSize]; calculateMac(output, outOff, outputLen, calculatedMacBlock); if (!Arrays.constantTimeAreEqual(macBlock, calculatedMacBlock)) { throw new InvalidCipherTextException(STR); } } return outputLen; }
|
/**
* Process a packet of data for either CCM decryption or encryption.
*
* @param in data for processing.
* @param inOff offset at which data starts in the input array.
* @param inLen length of the data in the input array.
* @param output output array.
* @param outOff offset into output array to start putting processed bytes.
* @return the number of bytes added to output.
* @throws IllegalStateException if the cipher is not appropriately set up.
* @throws InvalidCipherTextException if the input data is truncated or the mac check fails.
* @throws DataLengthException if output buffer too short.
*/
|
Process a packet of data for either CCM decryption or encryption
|
processPacket
|
{
"repo_name": "alphallc/connectbot",
"path": "src/org/bouncycastle/crypto/modes/CCMBlockCipher.java",
"license": "apache-2.0",
"size": 13280
}
|
[
"org.bouncycastle.crypto.BlockCipher",
"org.bouncycastle.crypto.DataLengthException",
"org.bouncycastle.crypto.InvalidCipherTextException",
"org.bouncycastle.crypto.OutputLengthException",
"org.bouncycastle.crypto.params.ParametersWithIV",
"org.bouncycastle.util.Arrays"
] |
import org.bouncycastle.crypto.BlockCipher; import org.bouncycastle.crypto.DataLengthException; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.OutputLengthException; import org.bouncycastle.crypto.params.ParametersWithIV; import org.bouncycastle.util.Arrays;
|
import org.bouncycastle.crypto.*; import org.bouncycastle.crypto.params.*; import org.bouncycastle.util.*;
|
[
"org.bouncycastle.crypto",
"org.bouncycastle.util"
] |
org.bouncycastle.crypto; org.bouncycastle.util;
| 2,839,748 |
MapFactory getMapFactory();
interface Literals {
EClass STRING_TO_INTEGER_MAP = eINSTANCE.getStringToIntegerMap();
EAttribute STRING_TO_INTEGER_MAP__KEY = eINSTANCE.getStringToIntegerMap_Key();
EAttribute STRING_TO_INTEGER_MAP__VALUE = eINSTANCE.getStringToIntegerMap_Value();
EClass ACTOR_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActorToStatisticalDataMap();
EReference ACTOR_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActorToStatisticalDataMap_Key();
EReference ACTOR_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getActorToStatisticalDataMap_Value();
EClass ACTION_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActionToStatisticalDataMap();
EReference ACTION_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActionToStatisticalDataMap_Key();
EReference ACTION_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getActionToStatisticalDataMap_Value();
EClass BUFFER_TO_STATISTICAL_DATA_MAP = eINSTANCE.getBufferToStatisticalDataMap();
EReference BUFFER_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getBufferToStatisticalDataMap_Key();
EReference BUFFER_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getBufferToStatisticalDataMap_Value();
EClass PROCEDURE_TO_STATISTICAL_DATA_MAP = eINSTANCE.getProcedureToStatisticalDataMap();
EReference PROCEDURE_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getProcedureToStatisticalDataMap_Key();
EReference PROCEDURE_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getProcedureToStatisticalDataMap_Value();
EClass VARIABLE_TO_STATISTICAL_DATA_MAP = eINSTANCE.getVariableToStatisticalDataMap();
EReference VARIABLE_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getVariableToStatisticalDataMap_Key();
EReference VARIABLE_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getVariableToStatisticalDataMap_Value();
EClass ACTOR_CLASS_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActorClassToStatisticalDataMap();
EReference ACTOR_CLASS_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActorClassToStatisticalDataMap_Key();
EReference ACTOR_CLASS_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getActorClassToStatisticalDataMap_Value();
EClass EOPERATOR_TO_STATISTICAL_DATA_MAP = eINSTANCE.getEOperatorToStatisticalDataMap();
EAttribute EOPERATOR_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getEOperatorToStatisticalDataMap_Key();
EReference EOPERATOR_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getEOperatorToStatisticalDataMap_Value();
EClass ACTION_TO_LONG_MAP = eINSTANCE.getActionToLongMap();
EReference ACTION_TO_LONG_MAP__KEY = eINSTANCE.getActionToLongMap_Key();
EAttribute ACTION_TO_LONG_MAP__VALUE = eINSTANCE.getActionToLongMap_Value();
EClass ACTOR_TO_LONG_MAP = eINSTANCE.getActorToLongMap();
EReference ACTOR_TO_LONG_MAP__KEY = eINSTANCE.getActorToLongMap_Key();
EAttribute ACTOR_TO_LONG_MAP__VALUE = eINSTANCE.getActorToLongMap_Value();
EClass BUFFER_TO_LONG_MAP = eINSTANCE.getBufferToLongMap();
EReference BUFFER_TO_LONG_MAP__KEY = eINSTANCE.getBufferToLongMap_Key();
EAttribute BUFFER_TO_LONG_MAP__VALUE = eINSTANCE.getBufferToLongMap_Value();
EClass STRING_TO_LONG_MAP = eINSTANCE.getStringToLongMap();
EAttribute STRING_TO_LONG_MAP__KEY = eINSTANCE.getStringToLongMap_Key();
EAttribute STRING_TO_LONG_MAP__VALUE = eINSTANCE.getStringToLongMap_Value();
EClass DOUBLE_TO_DOUBLE_MAP = eINSTANCE.getDoubleToDoubleMap();
EAttribute DOUBLE_TO_DOUBLE_MAP__KEY = eINSTANCE.getDoubleToDoubleMap_Key();
EAttribute DOUBLE_TO_DOUBLE_MAP__VALUE = eINSTANCE.getDoubleToDoubleMap_Value();
EClass VARIABLE_TO_LONG_MAP = eINSTANCE.getVariableToLongMap();
EReference VARIABLE_TO_LONG_MAP__KEY = eINSTANCE.getVariableToLongMap_Key();
EAttribute VARIABLE_TO_LONG_MAP__VALUE = eINSTANCE.getVariableToLongMap_Value();
EClass GUARD_TO_LONG_MAP = eINSTANCE.getGuardToLongMap();
EReference GUARD_TO_LONG_MAP__KEY = eINSTANCE.getGuardToLongMap_Key();
EAttribute GUARD_TO_LONG_MAP__VALUE = eINSTANCE.getGuardToLongMap_Value();
EClass PORT_TO_LONG_MAP = eINSTANCE.getPortToLongMap();
EReference PORT_TO_LONG_MAP__KEY = eINSTANCE.getPortToLongMap_Key();
EAttribute PORT_TO_LONG_MAP__VALUE = eINSTANCE.getPortToLongMap_Value();
EClass STRING_TO_DOUBLE_MAP = eINSTANCE.getStringToDoubleMap();
EAttribute STRING_TO_DOUBLE_MAP__KEY = eINSTANCE.getStringToDoubleMap_Key();
EAttribute STRING_TO_DOUBLE_MAP__VALUE = eINSTANCE.getStringToDoubleMap_Value();
EClass ACTION_TO_DOUBLE_MAP = eINSTANCE.getActionToDoubleMap();
EReference ACTION_TO_DOUBLE_MAP__KEY = eINSTANCE.getActionToDoubleMap_Key();
EAttribute ACTION_TO_DOUBLE_MAP__VALUE = eINSTANCE.getActionToDoubleMap_Value();
EClass BUFFER_TO_INTEGER_MAP = eINSTANCE.getBufferToIntegerMap();
EReference BUFFER_TO_INTEGER_MAP__KEY = eINSTANCE.getBufferToIntegerMap_Key();
EAttribute BUFFER_TO_INTEGER_MAP__VALUE = eINSTANCE.getBufferToIntegerMap_Value();
EClass BUFFER_TO_DOUBLE_MAP = eINSTANCE.getBufferToDoubleMap();
EReference BUFFER_TO_DOUBLE_MAP__KEY = eINSTANCE.getBufferToDoubleMap_Key();
EAttribute BUFFER_TO_DOUBLE_MAP__VALUE = eINSTANCE.getBufferToDoubleMap_Value();
EClass PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP = eINSTANCE.getPartitionToActorSelectionScheduleMap();
EAttribute PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP__KEY = eINSTANCE.getPartitionToActorSelectionScheduleMap_Key();
EReference PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP__VALUE = eINSTANCE.getPartitionToActorSelectionScheduleMap_Value();
EClass STRING_TO_STRING_MAP = eINSTANCE.getStringToStringMap();
EAttribute STRING_TO_STRING_MAP__KEY = eINSTANCE.getStringToStringMap_Key();
EAttribute STRING_TO_STRING_MAP__VALUE = eINSTANCE.getStringToStringMap_Value();
}
|
MapFactory getMapFactory(); interface Literals { EClass STRING_TO_INTEGER_MAP = eINSTANCE.getStringToIntegerMap(); EAttribute STRING_TO_INTEGER_MAP__KEY = eINSTANCE.getStringToIntegerMap_Key(); EAttribute STRING_TO_INTEGER_MAP__VALUE = eINSTANCE.getStringToIntegerMap_Value(); EClass ACTOR_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActorToStatisticalDataMap(); EReference ACTOR_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActorToStatisticalDataMap_Key(); EReference ACTOR_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getActorToStatisticalDataMap_Value(); EClass ACTION_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActionToStatisticalDataMap(); EReference ACTION_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActionToStatisticalDataMap_Key(); EReference ACTION_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getActionToStatisticalDataMap_Value(); EClass BUFFER_TO_STATISTICAL_DATA_MAP = eINSTANCE.getBufferToStatisticalDataMap(); EReference BUFFER_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getBufferToStatisticalDataMap_Key(); EReference BUFFER_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getBufferToStatisticalDataMap_Value(); EClass PROCEDURE_TO_STATISTICAL_DATA_MAP = eINSTANCE.getProcedureToStatisticalDataMap(); EReference PROCEDURE_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getProcedureToStatisticalDataMap_Key(); EReference PROCEDURE_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getProcedureToStatisticalDataMap_Value(); EClass VARIABLE_TO_STATISTICAL_DATA_MAP = eINSTANCE.getVariableToStatisticalDataMap(); EReference VARIABLE_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getVariableToStatisticalDataMap_Key(); EReference VARIABLE_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getVariableToStatisticalDataMap_Value(); EClass ACTOR_CLASS_TO_STATISTICAL_DATA_MAP = eINSTANCE.getActorClassToStatisticalDataMap(); EReference ACTOR_CLASS_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getActorClassToStatisticalDataMap_Key(); EReference ACTOR_CLASS_TO_STATISTICAL_DATA_MAP__VALUE = 
eINSTANCE.getActorClassToStatisticalDataMap_Value(); EClass EOPERATOR_TO_STATISTICAL_DATA_MAP = eINSTANCE.getEOperatorToStatisticalDataMap(); EAttribute EOPERATOR_TO_STATISTICAL_DATA_MAP__KEY = eINSTANCE.getEOperatorToStatisticalDataMap_Key(); EReference EOPERATOR_TO_STATISTICAL_DATA_MAP__VALUE = eINSTANCE.getEOperatorToStatisticalDataMap_Value(); EClass ACTION_TO_LONG_MAP = eINSTANCE.getActionToLongMap(); EReference ACTION_TO_LONG_MAP__KEY = eINSTANCE.getActionToLongMap_Key(); EAttribute ACTION_TO_LONG_MAP__VALUE = eINSTANCE.getActionToLongMap_Value(); EClass ACTOR_TO_LONG_MAP = eINSTANCE.getActorToLongMap(); EReference ACTOR_TO_LONG_MAP__KEY = eINSTANCE.getActorToLongMap_Key(); EAttribute ACTOR_TO_LONG_MAP__VALUE = eINSTANCE.getActorToLongMap_Value(); EClass BUFFER_TO_LONG_MAP = eINSTANCE.getBufferToLongMap(); EReference BUFFER_TO_LONG_MAP__KEY = eINSTANCE.getBufferToLongMap_Key(); EAttribute BUFFER_TO_LONG_MAP__VALUE = eINSTANCE.getBufferToLongMap_Value(); EClass STRING_TO_LONG_MAP = eINSTANCE.getStringToLongMap(); EAttribute STRING_TO_LONG_MAP__KEY = eINSTANCE.getStringToLongMap_Key(); EAttribute STRING_TO_LONG_MAP__VALUE = eINSTANCE.getStringToLongMap_Value(); EClass DOUBLE_TO_DOUBLE_MAP = eINSTANCE.getDoubleToDoubleMap(); EAttribute DOUBLE_TO_DOUBLE_MAP__KEY = eINSTANCE.getDoubleToDoubleMap_Key(); EAttribute DOUBLE_TO_DOUBLE_MAP__VALUE = eINSTANCE.getDoubleToDoubleMap_Value(); EClass VARIABLE_TO_LONG_MAP = eINSTANCE.getVariableToLongMap(); EReference VARIABLE_TO_LONG_MAP__KEY = eINSTANCE.getVariableToLongMap_Key(); EAttribute VARIABLE_TO_LONG_MAP__VALUE = eINSTANCE.getVariableToLongMap_Value(); EClass GUARD_TO_LONG_MAP = eINSTANCE.getGuardToLongMap(); EReference GUARD_TO_LONG_MAP__KEY = eINSTANCE.getGuardToLongMap_Key(); EAttribute GUARD_TO_LONG_MAP__VALUE = eINSTANCE.getGuardToLongMap_Value(); EClass PORT_TO_LONG_MAP = eINSTANCE.getPortToLongMap(); EReference PORT_TO_LONG_MAP__KEY = eINSTANCE.getPortToLongMap_Key(); EAttribute PORT_TO_LONG_MAP__VALUE = 
eINSTANCE.getPortToLongMap_Value(); EClass STRING_TO_DOUBLE_MAP = eINSTANCE.getStringToDoubleMap(); EAttribute STRING_TO_DOUBLE_MAP__KEY = eINSTANCE.getStringToDoubleMap_Key(); EAttribute STRING_TO_DOUBLE_MAP__VALUE = eINSTANCE.getStringToDoubleMap_Value(); EClass ACTION_TO_DOUBLE_MAP = eINSTANCE.getActionToDoubleMap(); EReference ACTION_TO_DOUBLE_MAP__KEY = eINSTANCE.getActionToDoubleMap_Key(); EAttribute ACTION_TO_DOUBLE_MAP__VALUE = eINSTANCE.getActionToDoubleMap_Value(); EClass BUFFER_TO_INTEGER_MAP = eINSTANCE.getBufferToIntegerMap(); EReference BUFFER_TO_INTEGER_MAP__KEY = eINSTANCE.getBufferToIntegerMap_Key(); EAttribute BUFFER_TO_INTEGER_MAP__VALUE = eINSTANCE.getBufferToIntegerMap_Value(); EClass BUFFER_TO_DOUBLE_MAP = eINSTANCE.getBufferToDoubleMap(); EReference BUFFER_TO_DOUBLE_MAP__KEY = eINSTANCE.getBufferToDoubleMap_Key(); EAttribute BUFFER_TO_DOUBLE_MAP__VALUE = eINSTANCE.getBufferToDoubleMap_Value(); EClass PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP = eINSTANCE.getPartitionToActorSelectionScheduleMap(); EAttribute PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP__KEY = eINSTANCE.getPartitionToActorSelectionScheduleMap_Key(); EReference PARTITION_TO_ACTOR_SELECTION_SCHEDULE_MAP__VALUE = eINSTANCE.getPartitionToActorSelectionScheduleMap_Value(); EClass STRING_TO_STRING_MAP = eINSTANCE.getStringToStringMap(); EAttribute STRING_TO_STRING_MAP__KEY = eINSTANCE.getStringToStringMap_Key(); EAttribute STRING_TO_STRING_MAP__VALUE = eINSTANCE.getStringToStringMap_Value(); }
|
/**
* Returns the factory that creates the instances of the model.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the factory that creates the instances of the model.
* @generated
*/
|
Returns the factory that creates the instances of the model.
|
getMapFactory
|
{
"repo_name": "turnus/turnus",
"path": "turnus.model/src/turnus/model/analysis/map/MapPackage.java",
"license": "gpl-3.0",
"size": 77072
}
|
[
"org.eclipse.emf.ecore.EAttribute",
"org.eclipse.emf.ecore.EClass",
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,534,014 |
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
// Sync the toggle state after onRestoreInstanceState has occurred.
if (mDrawerToggle != null) {
mDrawerToggle.syncState();
}
}
|
void function(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); if (mDrawerToggle != null) { mDrawerToggle.syncState(); } }
|
/**
* When using the ActionBarDrawerToggle, you must call it during
* onPostCreate() and onConfigurationChanged()...
*/
|
When using the ActionBarDrawerToggle, you must call it during onPostCreate() and onConfigurationChanged()..
|
onPostCreate
|
{
"repo_name": "jakeprobst/Anki-Android",
"path": "AnkiDroid/src/main/java/com/ichi2/anki/NavigationDrawerActivity.java",
"license": "gpl-3.0",
"size": 14520
}
|
[
"android.os.Bundle"
] |
import android.os.Bundle;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 1,889,870 |
private void addMarketplaceToReview() {
Marketplace mp = new Marketplace();
mp.setOrganization(org);
Set<Marketplace> mps = new HashSet<Marketplace>();
mps.add(mp);
CatalogEntry catalogEntry = new CatalogEntry();
catalogEntry.setMarketplace(mp);
List<CatalogEntry> catalogEntries = new ArrayList<CatalogEntry>();
catalogEntries.add(catalogEntry);
productReview.getProductFeedback().getProduct()
.setCatalogEntries(catalogEntries);
}
|
void function() { Marketplace mp = new Marketplace(); mp.setOrganization(org); Set<Marketplace> mps = new HashSet<Marketplace>(); mps.add(mp); CatalogEntry catalogEntry = new CatalogEntry(); catalogEntry.setMarketplace(mp); List<CatalogEntry> catalogEntries = new ArrayList<CatalogEntry>(); catalogEntries.add(catalogEntry); productReview.getProductFeedback().getProduct() .setCatalogEntries(catalogEntries); }
|
/**
* to mock ProductReview.getPublishedMarketplaces method
*/
|
to mock ProductReview.getPublishedMarketplaces method
|
addMarketplaceToReview
|
{
"repo_name": "opetrovski/development",
"path": "oscm-review-unittests/javasrc/org/oscm/reviewservice/bean/ReviewServiceLocalBeanTest.java",
"license": "apache-2.0",
"size": 16007
}
|
[
"java.util.ArrayList",
"java.util.HashSet",
"java.util.List",
"java.util.Set",
"org.oscm.domobjects.CatalogEntry",
"org.oscm.domobjects.Marketplace"
] |
import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.oscm.domobjects.CatalogEntry; import org.oscm.domobjects.Marketplace;
|
import java.util.*; import org.oscm.domobjects.*;
|
[
"java.util",
"org.oscm.domobjects"
] |
java.util; org.oscm.domobjects;
| 1,744,929 |
@Test
public void testMax_withNullOldMin() throws Exception {
Assert.assertEquals(Double.valueOf(9.87), new DoubleContentAggregateFunction().max(null, Double.valueOf(9.87)));
}
|
void function() throws Exception { Assert.assertEquals(Double.valueOf(9.87), new DoubleContentAggregateFunction().max(null, Double.valueOf(9.87))); }
|
/**
* Test case for {@link DoubleContentAggregateFunction#max(Double, Double)} being
* provided null as input to old max parameter
*/
|
Test case for <code>DoubleContentAggregateFunction#max(Double, Double)</code> being provided null as input to old max parameter
|
testMax_withNullOldMin
|
{
"repo_name": "ottogroup/flink-operator-library",
"path": "src/test/java/com/ottogroup/bi/streaming/operator/json/aggregate/functions/DoubleContentAggregateFunctionTest.java",
"license": "apache-2.0",
"size": 7655
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 926,203 |
private void transferData(State state, InnerState innerState, byte[] data,
InputStream entityStream) throws StopRequest {
for (;;) {
int bytesRead = readFromResponse(state, innerState, data, entityStream);
if (bytesRead == -1) { // success, end of stream already reached
handleEndOfStream(state, innerState);
return;
}
state.mGotData = true;
writeDataToDestination(state, data, bytesRead);
innerState.mBytesSoFar += bytesRead;
innerState.mBytesThisSession += bytesRead;
reportProgress(state, innerState);
checkPausedOrCanceled(state);
}
}
|
void function(State state, InnerState innerState, byte[] data, InputStream entityStream) throws StopRequest { for (;;) { int bytesRead = readFromResponse(state, innerState, data, entityStream); if (bytesRead == -1) { handleEndOfStream(state, innerState); return; } state.mGotData = true; writeDataToDestination(state, data, bytesRead); innerState.mBytesSoFar += bytesRead; innerState.mBytesThisSession += bytesRead; reportProgress(state, innerState); checkPausedOrCanceled(state); } }
|
/**
* Transfer as much data as possible from the HTTP response to the
* destination file.
*
* @param data buffer to use to read data
* @param entityStream stream for reading the HTTP response entity
*/
|
Transfer as much data as possible from the HTTP response to the destination file
|
transferData
|
{
"repo_name": "reven86/dfg-gameplay",
"path": "client/source/_android/extras/market_apk_expansion/downloader_library/src/com/google/android/vending/expansion/downloader/impl/DownloadThread.java",
"license": "apache-2.0",
"size": 38480
}
|
[
"java.io.InputStream"
] |
import java.io.InputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 160,211 |
public DiskInner withOsType(OperatingSystemTypes osType) {
this.osType = osType;
return this;
}
|
DiskInner function(OperatingSystemTypes osType) { this.osType = osType; return this; }
|
/**
* Set the Operating System type. Possible values include: 'Windows', 'Linux'.
*
* @param osType the osType value to set
* @return the DiskInner object itself.
*/
|
Set the Operating System type. Possible values include: 'Windows', 'Linux'
|
withOsType
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/compute/mgmt-v2018_09_30/src/main/java/com/microsoft/azure/management/compute/v2018_09_30/implementation/DiskInner.java",
"license": "mit",
"size": 10989
}
|
[
"com.microsoft.azure.management.compute.v2018_09_30.OperatingSystemTypes"
] |
import com.microsoft.azure.management.compute.v2018_09_30.OperatingSystemTypes;
|
import com.microsoft.azure.management.compute.v2018_09_30.*;
|
[
"com.microsoft.azure"
] |
com.microsoft.azure;
| 149,018 |
@Override
public void writeMetaData(WebResponse response)
{
for (Action action : actions)
{
if (action instanceof MetaDataAction)
action.invoke(response);
}
}
|
void function(WebResponse response) { for (Action action : actions) { if (action instanceof MetaDataAction) action.invoke(response); } }
|
/**
* transfer cookie operations (add, clear) to given web response
*
* @param response
* web response that should receive the current cookie operation
*/
|
transfer cookie operations (add, clear) to given web response
|
writeMetaData
|
{
"repo_name": "astrapi69/wicket",
"path": "wicket-core/src/main/java/org/apache/wicket/protocol/http/BufferedWebResponse.java",
"license": "apache-2.0",
"size": 13163
}
|
[
"org.apache.wicket.request.http.WebResponse"
] |
import org.apache.wicket.request.http.WebResponse;
|
import org.apache.wicket.request.http.*;
|
[
"org.apache.wicket"
] |
org.apache.wicket;
| 2,568,927 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.