/*
* PackageCmd.java --
*
* This class implements the built-in "package" command in Tcl.
*
* Copyright (c) 1997 by Sun Microsystems, Inc.
*
* See the file "license.terms" for information on usage and redistribution
* of this file, and for a DISCLAIMER OF ALL WARRANTIES.
*
* RCS: @(#) $Id: PackageCmd.java,v 1.4 2002/04/12 21:00:26 mdejong Exp $
*/
package tcl.lang;
import java.util.*;
class PackageCmd implements Command {
private static final String[] validCmds = {
"forget",
"ifneeded",
"names",
"present",
"provide",
"require",
"unknown",
"vcompare",
"versions",
"vsatisfies"
};
static final private int OPT_FORGET = 0;
static final private int OPT_IFNEEDED = 1;
static final private int OPT_NAMES = 2;
static final private int OPT_PRESENT = 3;
static final private int OPT_PROVIDE = 4;
static final private int OPT_REQUIRE = 5;
static final private int OPT_UNKNOWN = 6;
static final private int OPT_VCOMPARE = 7;
static final private int OPT_VERSIONS = 8;
static final private int OPT_VSATISFIES = 9;
/*
*----------------------------------------------------------------------
*
* pkgProvide --
*
* This procedure is invoked to declare that a particular version
* of a particular package is now present in an interpreter. There
* must not be any other version of this package already
* provided in the interpreter.
*
* Results:
* Normally does nothing; if there is already another version
* of the package loaded then an error is raised.
*
* Side effects:
* The interpreter remembers that this package is available,
* so that no other version of the package may be provided for
* the interpreter.
*
*----------------------------------------------------------------------
*/
static void
pkgProvide(
Interp interp, // Interpreter in which package is now
// available.
String pkgName, // Name of package.
String version) // Version string for package.
throws
TclException
{
Package pkg;
// Validate the version string that was passed in.
checkVersion(interp, version);
pkg = findPackage(interp, pkgName);
if (pkg.version == null) {
pkg.version = version;
return;
}
if (compareVersions(pkg.version, version, null) != 0) {
throw new TclException(interp,
"conflicting versions provided for package \"" + pkgName
+ "\": " + pkg.version + ", then " + version);
}
}
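// Editor's sketch (not part of the original Jacl source): roughly how the
// Tcl command "package provide Foo 1.2" reaches this helper. "interp" is
// assumed to be a live interpreter.
private static void pkgProvideExample(Interp interp) throws TclException {
pkgProvide(interp, "Foo", "1.2");    // records Foo 1.2 in this interpreter
pkgProvide(interp, "Foo", "1.2");    // providing the same version again is accepted silently
// pkgProvide(interp, "Foo", "1.3"); // would throw: conflicting versions provided
}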
/*
*----------------------------------------------------------------------
*
* pkgRequire --
*
* This procedure is called by code that depends on a particular
* version of a particular package. If the package is not already
* provided in the interpreter, this procedure invokes a Tcl script
* to provide it. If the package is already provided, this
* procedure makes sure that the caller's needs don't conflict with
* the version that is present.
*
* Results:
* If successful, returns the version string for the currently
* provided version of the package, which may be different from
* the "version" argument. If the caller's requirements
* cannot be met (e.g. the version requested conflicts with
* a currently provided version, or the required version cannot
* be found, or the script to provide the required version
* generates an error), a TclException is raised.
*
* Side effects:
* The script from some previous "package ifneeded" command may
* be invoked to provide the package.
*
*----------------------------------------------------------------------
*/
static String
pkgRequire(
Interp interp, // Interpreter in which package is now
// available.
String pkgName, // Name of desired package.
String version, // Version string for desired version;
// null means use the latest version
// available.
boolean exact) // true means that only the particular
// version given is acceptable. false means
// use the latest compatible version.
throws
TclException
{
VersionSatisfiesResult vsres;
Package pkg;
PkgAvail avail, best;
String script;
StringBuffer sbuf;
int pass, result;
// Do extra check to make sure that version is not
// null when the exact flag is set to true.
if (version == null && exact) {
throw new TclException(interp,
"conflicting arguments : version == null and exact == true");
}
// Before we can compare versions the version string
// must be verified but if it is null we are just looking
// for the latest version so skip the check in this case.
if (version != null) {
checkVersion(interp, version);
}
// It can take up to three passes to find the package: one pass to
// run the "package unknown" script, one to run the "package ifneeded"
// script for a specific version, and a final pass to lookup the
// package loaded by the "package ifneeded" script.
vsres = new VersionSatisfiesResult();
for (pass = 1; ; pass++) {
pkg = findPackage(interp, pkgName);
if (pkg.version != null) {
break;
}
// The package isn't yet present. Search the list of available
// versions and invoke the script for the best available version.
best = null;
for (avail = pkg.avail; avail != null; avail = avail.next) {
if ((best != null) && (compareVersions(avail.version,
best.version, null) <= 0)) {
continue;
}
if (version != null) {
result = compareVersions(avail.version, version, vsres);
if ((result != 0) && exact) {
continue;
}
if (!vsres.satisfies) {
continue;
}
}
best = avail;
}
if (best != null) {
// We found an ifneeded script for the package. Be careful while
// executing it: this could cause reentrancy, so (a) protect the
// script itself from deletion and (b) don't assume that best
// will still exist when the script completes.
script = best.script;
try {
interp.eval(script, TCL.EVAL_GLOBAL);
} catch (TclException e) {
interp.addErrorInfo("\n (\"package ifneeded\" script)");
// Throw the error with new info added to errorInfo.
throw e;
}
interp.resetResult();
pkg = findPackage(interp, pkgName);
break;
}
// Package not in the database. If there is a "package unknown"
// command, invoke it (but only on the first pass; after that,
// we should not get here in the first place).
if (pass > 1) {
break;
}
script = interp.packageUnknown;
if (script != null) {
sbuf = new StringBuffer();
try {
Util.appendElement(interp, sbuf, script);
Util.appendElement(interp, sbuf, pkgName);
if (version == null) {
Util.appendElement(interp, sbuf, "");
} else {
Util.appendElement(interp, sbuf, version);
}
if (exact) {
Util.appendElement(interp, sbuf, "-exact");
}
} catch (TclException e) {
throw new TclRuntimeError("unexpected TclException: " + e);
}
try {
interp.eval(sbuf.toString(), TCL.EVAL_GLOBAL);
} catch (TclException e) {
interp.addErrorInfo("\n (\"package unknown\" script)");
// Throw the first exception.
throw e;
}
interp.resetResult();
}
}
if (pkg.version == null) {
sbuf = new StringBuffer();
sbuf.append("can't find package " + pkgName);
if (version != null) {
sbuf.append(" " + version);
}
throw new TclException(interp, sbuf.toString());
}
// At this point we know that the package is present. Make sure that the
// provided version meets the current requirement.
if (version == null) {
return pkg.version;
}
result = compareVersions(pkg.version, version, vsres);
if ((vsres.satisfies && !exact) || (result == 0)) {
return pkg.version;
}
// If we have a version conflict we throw a TclException.
throw new TclException(interp, "version conflict for package \"" + pkgName
+ "\": have " + pkg.version + ", need " + version);
}
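// Editor's sketch (not part of the original Jacl source): the Tcl-level
// "package require Demo 1.0" maps onto this helper roughly as below. It
// assumes an ifneeded script for Demo has already been registered.
private static void pkgRequireExample(Interp interp) throws TclException {
// Runs the "package ifneeded" (or "package unknown") script if necessary
// and returns the version actually provided, which may be newer than 1.0.
String provided = pkgRequire(interp, "Demo", "1.0", false);
// With exact == true only a provided version equal to "1.0" is accepted.
}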
/*
*----------------------------------------------------------------------
*
* Tcl_PkgPresent -> pkgPresent
*
* Checks whether the specified package is already present in the
* interpreter. If it is not, no attempt is made to load it (no
* "package ifneeded" or "package unknown" scripts are run).
*
* Results:
* If successful, returns the version string for the currently
* provided version of the package, which may be different from
* the "version" argument. If the caller's requirements
* cannot be met (e.g. the version requested conflicts with
* a currently provided version), a TclException is raised.
*
* Side effects:
* None.
*
*----------------------------------------------------------------------
*/
static String
pkgPresent(
Interp interp, // Interpreter in which package is now
// available.
String pkgName, // Name of desired package.
String version, // Version string for desired version;
// null means use the latest version
// available.
boolean exact) // true means that only the particular
// version given is acceptable. false means
// use the latest compatible version.
throws
TclException
{
Package pkg;
VersionSatisfiesResult vsres = new VersionSatisfiesResult();
int result;
pkg = (Package) interp.packageTable.get(pkgName);
if (pkg != null) {
if (pkg.version != null) {
// At this point we know that the package is present. Make sure
// that the provided version meets the current requirement.
if (version == null) {
return pkg.version;
}
result = compareVersions(pkg.version, version, vsres);
if ((vsres.satisfies && !exact) || (result == 0)) {
return pkg.version;
}
throw new TclException(interp,
"version conflict for package \"" +
pkgName + "\": have " + pkg.version + ", need " + version);
}
}
if (version != null) {
throw new TclException(interp,
"package " + pkgName + " " + version + " is not present");
} else {
throw new TclException(interp,
"package " + pkgName + " is not present");
}
}
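// Editor's sketch (not part of the original Jacl source): unlike pkgRequire,
// pkgPresent never evaluates any scripts; it only reports what has already
// been provided and throws a TclException otherwise.
private static void pkgPresentExample(Interp interp) throws TclException {
pkgProvide(interp, "Demo", "1.1");
String v = pkgPresent(interp, "Demo", "1.0", false);  // returns "1.1"
// pkgPresent(interp, "Other", null, false);          // would throw: package Other is not present
}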
/*
*----------------------------------------------------------------------
*
* cmdProc --
*
* This procedure is invoked to process the "package" Tcl command.
* See the user documentation for details on what it does.
*
* Side effects:
* See the user documentation.
*
*----------------------------------------------------------------------
*/
public void cmdProc(
Interp interp, // The current interpreter.
TclObject[] objv) // Command arguments.
throws
TclException // Thrown if an error occurs.
{
VersionSatisfiesResult vsres;
Package pkg;
PkgAvail avail;
PkgAvail prev;
String version;
String pkgName;
String key;
String cmd;
String ver1, ver2;
StringBuffer sbuf;
Enumeration enume;
int i, opt, exact;
boolean once;
if (objv.length < 2) {
throw new TclNumArgsException(interp, 1, objv,
"option ?arg arg ...?");
}
opt = TclIndex.get(interp, objv[1], validCmds, "option", 0);
switch (opt) {
case OPT_FORGET: {
// Forget takes 0 or more arguments.
for (i = 2; i < objv.length ; i++) {
// No need to reject an empty package name here: an empty
// name can never be in the hash table, so it is simply
// ignored.
pkgName = objv[i].toString();
pkg = (Package) interp.packageTable.get(pkgName);
// If this package does not exist, go to next one.
if (pkg == null) {
continue;
}
interp.packageTable.remove(pkgName);
while (pkg.avail != null) {
avail = pkg.avail;
pkg.avail = avail.next;
avail = null;
}
pkg = null;
}
return;
}
case OPT_IFNEEDED: {
if ((objv.length < 4) || (objv.length > 5)) {
throw new TclNumArgsException(interp, 1, objv,
"ifneeded package version ?script?");
}
pkgName = objv[2].toString();
version = objv[3].toString();
// Verify that this version string is valid.
checkVersion(interp, version);
if (objv.length == 4) {
pkg = (Package) interp.packageTable.get(pkgName);
if (pkg == null)
return;
} else {
pkg = findPackage(interp, pkgName);
}
for (avail = pkg.avail, prev = null; avail != null;
prev = avail, avail = avail.next) {
if (compareVersions(avail.version, version, null) == 0) {
if (objv.length == 4) {
// If doing a query return current script.
interp.setResult(avail.script);
return;
}
// We matched so we must be setting the script.
break;
}
}
// When we do not match on a query return nothing.
if (objv.length == 4) {
return;
}
if (avail == null) {
avail = new PkgAvail();
avail.version = version;
if (prev == null) {
avail.next = pkg.avail;
pkg.avail = avail;
} else {
avail.next = prev.next;
prev.next = avail;
}
}
avail.script = objv[4].toString();
return;
}
case OPT_NAMES: {
if (objv.length != 2) {
throw new TclNumArgsException(interp, 1, objv,
"names");
}
try {
sbuf = new StringBuffer();
enume = interp.packageTable.keys();
once = false;
while (enume.hasMoreElements()) {
once = true;
key = (String) enume.nextElement();
pkg = (Package) interp.packageTable.get(key);
if ((pkg.version != null) || (pkg.avail != null)) {
Util.appendElement(interp, sbuf, key);
}
}
if (once) {
interp.setResult(sbuf.toString());
}
} catch (TclException e) {
throw new TclRuntimeError("unexpected TclException: " + e);
}
return;
}
case OPT_PRESENT: {
if (objv.length < 3) {
throw new TclNumArgsException(interp, 2, objv,
"?-exact? package ?version?");
}
if (objv[2].toString().equals("-exact")) {
exact = 1;
} else {
exact = 0;
}
version = null;
if (objv.length == (4 + exact)) {
version = objv[3 + exact].toString();
checkVersion(interp, version);
} else if ((objv.length != 3) || (exact == 1)) {
throw new TclNumArgsException(interp, 2, objv,
"?-exact? package ?version?");
}
if (exact == 1) {
version = pkgPresent(interp, objv[3].toString(), version, true);
} else {
version = pkgPresent(interp, objv[2].toString(), version, false);
}
interp.setResult(version);
break;
}
case OPT_PROVIDE: {
if ((objv.length < 3) || (objv.length > 4)) {
throw new TclNumArgsException(interp, 1, objv,
"provide package ?version?");
}
if (objv.length == 3) {
pkg = (Package)interp.packageTable.get( objv[2].toString() );
if (pkg != null) {
if (pkg.version != null) {
interp.setResult(pkg.version);
}
}
return;
}
pkgProvide(interp, objv[2].toString(), objv[3].toString());
return;
}
case OPT_REQUIRE: {
if ((objv.length < 3) || (objv.length > 5)) {
throw new TclNumArgsException(interp, 1, objv,
"require ?-exact? package ?version?");
}
if (objv[2].toString().equals("-exact")) {
exact = 1;
} else {
exact = 0;
}
version = null;
if (objv.length == (4 + exact)) {
version = objv[3 + exact].toString();
checkVersion(interp, version);
} else if ((objv.length != 3) || (exact == 1)) {
throw new TclNumArgsException(interp, 1, objv,
"require ?-exact? package ?version?");
}
if (exact == 1) {
version = pkgRequire(interp, objv[3].toString(), version, true);
} else {
version = pkgRequire(interp, objv[2].toString(), version, false);
}
interp.setResult(version);
return;
}
case OPT_UNKNOWN: {
if (objv.length > 3) {
throw new TclNumArgsException(interp, 1, objv,
"unknown ?command?");
}
if (objv.length == 2) {
if (interp.packageUnknown != null) {
interp.setResult(interp.packageUnknown);
}
} else if (objv.length == 3) {
interp.packageUnknown = null;
cmd = objv[2].toString();
if (cmd.length() > 0) {
interp.packageUnknown = cmd;
}
}
return;
}
case OPT_VCOMPARE: {
if (objv.length != 4) {
throw new TclNumArgsException(interp, 1, objv,
"vcompare version1 version2");
}
ver1 = objv[2].toString();
ver2 = objv[3].toString();
checkVersion(interp, ver1);
checkVersion(interp, ver2);
interp.setResult(compareVersions(ver1, ver2, null));
return;
}
case OPT_VERSIONS: {
if (objv.length != 3) {
throw new TclNumArgsException(interp, 1, objv,
"versions package");
}
pkg = (Package)interp.packageTable.get(objv[2].toString());
if (pkg != null) {
try {
sbuf = new StringBuffer();
once = false;
for (avail = pkg.avail; avail != null; avail = avail.next) {
once = true;
Util.appendElement(interp, sbuf, avail.version);
}
if (once) {
interp.setResult(sbuf.toString());
}
} catch (TclException e) {
throw new TclRuntimeError("unexpected TclException: " + e);
}
}
return;
}
case OPT_VSATISFIES: {
if (objv.length != 4) {
throw new TclNumArgsException(interp, 1, objv,
"vsatisfies version1 version2");
}
ver1 = objv[2].toString();
ver2 = objv[3].toString();
checkVersion(interp, ver1);
checkVersion(interp, ver2);
vsres = new VersionSatisfiesResult();
compareVersions(ver1,ver2,vsres);
interp.setResult(vsres.satisfies);
return;
}
default: {
throw new TclRuntimeError("TclIndex.get() error");
}
} // end switch(opt)
}
/*
*----------------------------------------------------------------------
*
* findPackage --
*
* This procedure finds the Package record for a particular package
* in a particular interpreter, creating a record if one doesn't
* already exist.
*
* Results:
* The return value is a ref to the Package record for the
* package.
*
* Side effects:
* A new Package record may be created.
*
*----------------------------------------------------------------------
*/
private static Package
findPackage(
Interp interp, // Interpreter to use for package lookup.
String pkgName) // Name of package to find.
throws
TclException
{
Package pkg;
// check package name to make sure it is not null or "".
if (pkgName == null || pkgName.length() == 0) {
throw new TclException(interp,
"expected package name but got \"\"");
}
pkg = (Package) interp.packageTable.get(pkgName);
if (pkg == null) {
// We should add a package with this name.
pkg = new Package();
interp.packageTable.put(pkgName, pkg);
}
return pkg;
}
/*
*----------------------------------------------------------------------
*
* checkVersion --
*
* This procedure checks to see whether a version number has
* valid syntax.
*
* Results:
* If the string is not a properly formed version number, a TclException
* is raised.
*
* Side effects:
* None.
*
*----------------------------------------------------------------------
*/
private static void
checkVersion(
Interp interp, // Used for error reporting.
String version) // Supposedly a version number, which is
// groups of decimal digits separated
// by dots.
throws
TclException
{
int i, len;
char c;
boolean error = true;
try {
if ((version == null) || (version.length() == 0)) {
version = "";
return;
}
if (!Character.isDigit(version.charAt(0))) {
return;
}
len = version.length();
for (i = 1; i < len; i++) {
c = version.charAt(i);
if (!Character.isDigit(c) && (c != '.')) {
return;
}
}
if (version.charAt(len - 1) == '.') {
return;
}
error = false;
} finally {
if (error) {
throw new TclException(interp,
"expected version number but got \"" + version + "\"");
}
}
}
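// Editor's sketch of the version syntax accepted above: one or more groups
// of decimal digits separated by dots. "interp" is assumed to be a live interpreter.
private static void checkVersionExample(Interp interp) throws TclException {
checkVersion(interp, "2.1.3");  // ok
checkVersion(interp, "7");      // ok: a single group is allowed
// Each of the following would throw "expected version number but got ...":
// checkVersion(interp, "");  checkVersion(interp, "1.");  checkVersion(interp, "1.2a");
}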
/*
*----------------------------------------------------------------------
*
* compareVersions --
*
* This procedure compares two version numbers.
*
* Results:
* Returns -1 if v1 is less than v2, 0 if the two version numbers
* are the same, and 1 if v1 is greater than v2. If the vsres
* argument is not null, its VersionSatisfiesResult.satisfies field
* is set to true if v1 >= v2 and both versions have the same major
* number, and to false otherwise.
*
* Side effects:
* None.
*
*----------------------------------------------------------------------
*/
private static int
compareVersions(
String v1, // Version strings (e.g. 2.1.3).
String v2,
VersionSatisfiesResult vsres)
{
int i;
int max;
int n1 = 0;
int n2 = 0;
boolean thisIsMajor = true;
String[] v1ns;
String[] v2ns;
// Each iteration of the following loop processes one number from
// each string, terminated by a ".". If those numbers don't match
// then the comparison is over; otherwise, we loop back for the
// next number.
// This should never happen because null strings would not
// have gotten past the version verify.
if ((v1 == null) || (v2 == null)) {
throw new TclRuntimeError("null version in package version compare");
}
v1ns = split(v1, '.');
v2ns = split(v2, '.');
// We are sure there is at least one string in each array so
// this should never happen.
if (v1ns.length == 0 || v2ns.length == 0) {
throw new TclRuntimeError("version length is 0");
}
if (v1ns.length > v2ns.length) {
max = v1ns.length;
} else {
max = v2ns.length;
}
for (i=0; i < max ; i++) {
n1 = n2 = 0;
// Grab the next number from each version string; if a version
// has run out of components, use 0 as its value.
try {
if (i < v1ns.length) {
n1 = Integer.parseInt( v1ns[i] );
}
if (i < v2ns.length) {
n2 = Integer.parseInt( v2ns[i] );
}
} catch (NumberFormatException ex) {
throw new TclRuntimeError(
"NumberFormatException for package versions \"" + v1
+ "\" or \"" + v2 + "\"");
}
// Compare and go on to the next version number if the
// current numbers match.
if (n1 != n2) {
break;
}
thisIsMajor = false;
}
if (vsres != null) {
vsres.satisfies = ((n1 == n2) || ((n1 > n2) && !thisIsMajor));
}
if (n1 > n2) {
return 1;
} else if (n1 == n2) {
return 0;
} else {
return -1;
}
}
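// Editor's sketch of the comparison semantics: components are compared
// numerically, left to right.
private static void compareVersionsExample() {
compareVersions("1.2", "1.10", null);  // -1, since 2 < 10 numerically
VersionSatisfiesResult vs = new VersionSatisfiesResult();
compareVersions("1.3", "1.1", vs);     // returns 1, vs.satisfies == true
compareVersions("2.0", "1.5", vs);     // returns 1, vs.satisfies == false (major number differs)
}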
/*
*----------------------------------------------------------------------
*
* split --
*
* Utility function that splits a string on a single character.
* It is only used by the version comparison code.
*
* Results:
* Returns an array of the non-empty substrings of the input string,
* split on splitchar.
*
* Side effects:
* None.
*
*----------------------------------------------------------------------
*/
static String[]
split(
String in,
char splitchar)
{
Vector words;
String[] ret;
int i;
int len;
char[] str;
int wordstart = 0;
// Create an array that is as big as the input
// str plus one for an extra split char.
len = in.length();
str = new char[len + 1];
in.getChars(0,len,str,0);
str[len++] = splitchar;
words = new Vector(5);
for (i=0; i < len; i++) {
// Compare this char to the split char;
// if they are the same, add the word
// that just ended to the list.
if (str[i] == splitchar) {
if (wordstart <= (i - 1)) {
words.addElement( new String(str, wordstart, i - wordstart) );
}
wordstart = (i + 1);
}
}
// Create an array that is as big as the number
// of elements in the vector, copy over and return.
ret = new String[words.size()];
words.copyInto(ret);
return ret;
}
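// Editor's sketch: empty segments are dropped, so repeated split chars do
// not produce empty strings in the result.
private static void splitExample() {
String[] a = split("2.1.3", '.');   // {"2", "1", "3"}
String[] b = split("1..2", '.');    // {"1", "2"}
}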
// If compareVersions is called with a non-null third argument, one of
// these objects must be created and passed in; its satisfies field is
// filled in by the comparison.
static class VersionSatisfiesResult {
boolean satisfies = false;
}
// Each invocation of the "package ifneeded" command creates an object
// of the following type, which is used to load the package into the
// interpreter if it is requested with a "package require" command.
static class PkgAvail {
String version = null; // Version string.
String script = null; // Script to invoke to provide this package version
PkgAvail next = null; // Next in list of available package versions
}
// For each package that is known in any way to an interpreter, there
// is one record of the following type. These records are stored in
// the "packageTable" hash table in the interpreter, keyed by
// package name such as "Tk" (no version number).
static class Package {
String version = null; // Version that has been supplied in this
// interpreter via "package provide"
// null means the package doesn't
// exist in this interpreter yet.
PkgAvail avail = null; // First in list of all available package versions
}
} //end of class PackageCmd
/*******************************************************************************
* Copyright (c) 2010 Haifeng Li
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package smile.math.matrix;
import smile.math.Math;
/**
* Singular Value Decomposition.
* <p>
* For an m-by-n matrix A with m ≥ n, the singular value decomposition is
* an m-by-n orthogonal matrix U, an n-by-n diagonal matrix Σ, and
* an n-by-n orthogonal matrix V so that A = U*Σ*V'.
* <p>
* For m < n, only the first m columns of V are computed and Σ is m-by-m.
* <p>
* The singular values, σ<sub>k</sub> = Σ<sub>kk</sub>, are ordered
* so that σ<sub>0</sub> ≥ σ<sub>1</sub> ≥ ... ≥ σ<sub>n-1</sub>.
* <p>
* The singular value decomposition always exists. The matrix condition number
* and the effective numerical rank can be computed from this decomposition.
* <p>
* SVD is a very powerful technique for dealing with sets of equations or matrices
* that are either singular or else numerically very close to singular. In many
* cases where Gaussian elimination and LU decomposition fail to give satisfactory
* results, SVD will diagnose precisely what the problem is. SVD is also the
* method of choice for solving most linear least squares problems.
* <p>
* Applications which employ the SVD include computing the pseudoinverse, least
* squares fitting of data, matrix approximation, and determining the rank,
* range and null space of a matrix. The SVD is also applied extensively to
* the study of linear inverse problems, and is useful in the analysis of
* regularization methods such as that of Tikhonov. It is widely used in
* statistics where it is related to principal component analysis. Yet another
* usage is latent semantic indexing in natural language text processing.
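* <p>
* A minimal usage sketch (editor's illustration; the matrix values are made up):
* <pre>{@code
* double[][] A = {{4.0, 0.0}, {3.0, -5.0}};
* SingularValueDecomposition svd = SingularValueDecomposition.decompose(A);
* double[] sigma = svd.getSingularValues(); // ordered from largest to smallest
* double[][] U = svd.getU();                // decompose(double[][]) overwrites A with U
* double[][] V = svd.getV();
* double cond = svd.condition();            // sigma[0] / sigma[n-1]
* }</pre>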
*
* @author Haifeng Li
*/
public class SingularValueDecomposition {
/**
* Arrays for internal storage of left singular vectors U.
*/
private double[][] U;
/**
* Arrays for internal storage of right singular vectors V.
*/
private double[][] V;
/**
* Array for internal storage of singular values.
*/
private double[] s;
/**
* Is this a full decomposition?
*/
private boolean full;
/**
* The number of rows.
*/
private int m;
/**
* The number of columns.
*/
private int n;
/**
* Threshold of estimated roundoff.
*/
private double tol;
/**
* Private constructor. Use factory method decompose() to get
* the decomposition.
*/
private SingularValueDecomposition(double[][] U, double[][] V, double[] s) {
this(U, V, s, true);
}
/**
* Private constructor. Use factory method decompose() to get
* the decomposition.
*/
private SingularValueDecomposition(double[][] U, double[][] V, double[] s, boolean full) {
this.U = U;
this.V = V;
this.s = s;
this.full = full;
m = U.length;
n = V.length;
tol = 0.5 * Math.sqrt(U.length + V.length + 1.0) * s[0] * Math.EPSILON;
}
/**
* Returns the left singular vectors
*/
public double[][] getU() {
return U;
}
/**
* Returns the right singular vectors
*/
public double[][] getV() {
return V;
}
/**
* Returns the one-dimensional array of singular values, ordered
* from largest to smallest.
*/
public double[] getSingularValues() {
return s;
}
/**
* Returns the diagonal matrix of singular values
*/
public double[][] getS() {
double[][] S = new double[V.length][V.length];
for (int i = 0; i < s.length; i++) {
S[i][i] = s[i];
}
return S;
}
/**
* Returns the L2 matrix norm. The largest singular value.
*/
public double norm() {
return s[0];
}
/**
* Returns the effective numerical matrix rank. The number of nonnegligible
* singular values.
*/
public int rank() {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
int r = 0;
for (int i = 0; i < s.length; i++) {
if (s[i] > tol) {
r++;
}
}
return r;
}
/**
* Returns the dimension of null space. The number of negligible
* singular values.
*/
public int nullity() {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
int r = 0;
for (int i = 0; i < s.length; i++) {
if (s[i] <= tol) {
r++;
}
}
return r;
}
/**
* Returns the L<sub>2</sub> norm condition number, which is max(S) / min(S).
* A system of equations is considered to be well-conditioned if a small
* change in the coefficient matrix or a small change in the right hand
* side results in a small change in the solution vector. Otherwise, it is
* called ill-conditioned. Condition number is defined as the product of
* the norm of A and the norm of A<sup>-1</sup>. If we use the usual
* L<sub>2</sub> norm on vectors and the associated matrix norm, then the
* condition number is the ratio of the largest singular value of matrix
* A to the smallest. Condition number depends on the underlying norm.
* However, regardless of the norm, it is always greater than or equal to 1.
* If it is close to one, the matrix is well conditioned. If the condition
* number is large, then the matrix is said to be ill-conditioned. A matrix
* that is not invertible has the condition number equal to infinity.
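* For example, singular values {10, 2, 1e-12} give a condition number of
* about 10<sup>13</sup>, which marks the matrix as effectively singular.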
*/
public double condition() {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
return (s[0] <= 0.0 || s[n - 1] <= 0.0) ? Double.POSITIVE_INFINITY : s[0] / s[n - 1];
}
/**
* Returns a matrix whose columns give an orthonormal basis for the range space.
*/
public double[][] range() {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
int nr = 0;
double[][] rnge = new double[m][rank()];
for (int j = 0; j < n; j++) {
if (s[j] > tol) {
for (int i = 0; i < m; i++) {
rnge[i][nr] = U[i][j];
}
nr++;
}
}
return rnge;
}
/**
* Returns a matrix whose columns give an orthonormal basis for the null space.
*/
public double[][] nullspace() {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
int nn = 0;
double[][] nullsp = new double[n][nullity()];
for (int j = 0; j < n; j++) {
if (s[j] <= tol) {
for (int jj = 0; jj < n; jj++) {
nullsp[jj][nn] = V[jj][j];
}
nn++;
}
}
return nullsp;
}
/**
* Solve A * x = b using the pseudoinverse of A as obtained by SVD.
*/
public void solve(double[] b, double[] x) {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
if (b.length != m || x.length != n) {
throw new IllegalArgumentException("Dimensions do not agree.");
}
double[] tmp = new double[n];
for (int j = 0; j < n; j++) {
double r = 0.0;
if (s[j] > tol) {
for (int i = 0; i < m; i++) {
r += U[i][j] * b[i];
}
r /= s[j];
}
tmp[j] = r;
}
for (int j = 0; j < n; j++) {
double r = 0.0;
for (int jj = 0; jj < n; jj++) {
r += V[j][jj] * tmp[jj];
}
x[j] = r;
}
}
/**
* Solve A * X = B using the pseudoinverse of A as obtained by SVD.
*/
public void solve(double[][] B, double[][] X) {
if (!full) {
throw new IllegalStateException("This is not a FULL singular value decomposition.");
}
if (B.length != n || X.length != n || B[0].length != X[0].length) {
throw new IllegalArgumentException("Dimensions do not agree.");
}
double[] xx = new double[n];
int p = B[0].length;
for (int j = 0; j < p; j++) {
for (int i = 0; i < n; i++) {
xx[i] = B[i][j];
}
solve(xx, xx);
for (int i = 0; i < n; i++) {
X[i][j] = xx[i];
}
}
}
private static class ATA implements IMatrix {
IMatrix A;
double[] buf;
public ATA(IMatrix A) {
this.A = A;
if (A.nrows() >= A.ncols()) {
buf = new double[A.nrows()];
} else {
buf = new double[A.ncols()];
}
}
@Override
public int nrows() {
if (A.nrows() >= A.ncols()) {
return A.ncols();
} else {
return A.nrows();
}
}
@Override
public int ncols() {
return nrows();
}
@Override
public void ax(double[] x, double[] y) {
if (A.nrows() >= A.ncols()) {
A.ax(x, buf);
A.atx(buf, y);
} else {
A.atx(x, buf);
A.ax(buf, y);
}
}
@Override
public void atx(double[] x, double[] y) {
ax(x, y);
}
@Override
public void axpy(double[] x, double[] y) {
throw new UnsupportedOperationException();
}
@Override
public void axpy(double[] x, double[] y, double b) {
throw new UnsupportedOperationException();
}
@Override
public double get(int i, int j) {
throw new UnsupportedOperationException();
}
@Override
public ATA set(int i, int j, double x) {
throw new UnsupportedOperationException();
}
@Override
public void asolve(double[] b, double[] x) {
throw new UnsupportedOperationException();
}
@Override
public void atxpy(double[] x, double[] y) {
throw new UnsupportedOperationException();
}
@Override
public void atxpy(double[] x, double[] y, double b) {
throw new UnsupportedOperationException();
}
};
/**
* Find k largest approximate singular triples of a matrix by the
* Lanczos algorithm.
*
* @param A the matrix supporting matrix vector multiplication operation.
* @param k the number of singular triples we wish to compute for the input matrix.
* This number cannot exceed the size of A.
*/
public static SingularValueDecomposition decompose(IMatrix A, int k) {
return decompose(A, k, 1.0E-6);
}
/**
* Find k largest approximate singular triples of a matrix by the
* Lanczos algorithm.
*
* @param A the matrix supporting matrix vector multiplication operation.
* @param k the number of singular triples we wish to compute for the input matrix.
* This number cannot exceed the size of A.
* @param kappa relative accuracy of Ritz values acceptable as singular values.
*/
public static SingularValueDecomposition decompose(IMatrix A, int k, double kappa) {
ATA B = new ATA(A);
EigenValueDecomposition eigen = EigenValueDecomposition.decompose(B, k, kappa);
double[] s = eigen.getEigenValues();
for (int i = 0; i < s.length; i++) {
s[i] = Math.sqrt(s[i]);
}
if (A.nrows() >= A.ncols()) {
double[][] V = eigen.getEigenVectors();
double[] tmp = new double[A.nrows()];
double[] vi = new double[A.ncols()];
double[][] U = new double[A.nrows()][s.length];
for (int i = 0; i < s.length; i++) {
for (int j = 0; j < A.ncols(); j++) {
vi[j] = V[j][i];
}
A.ax(vi, tmp);
for (int j = 0; j < A.nrows(); j++) {
U[j][i] = tmp[j] / s[i];
}
}
return new SingularValueDecomposition(U, V, s, false);
} else {
double[][] U = eigen.getEigenVectors();
double[] tmp = new double[A.ncols()];
double[] ui = new double[A.nrows()];
double[][] V = new double[A.ncols()][s.length];
for (int i = 0; i < s.length; i++) {
for (int j = 0; j < A.nrows(); j++) {
ui[j] = U[j][i];
}
A.atx(ui, tmp);
for (int j = 0; j < A.ncols(); j++) {
V[j][i] = tmp[j] / s[i];
}
}
return new SingularValueDecomposition(U, V, s, false);
}
}
/**
* Returns the singular value decomposition. Note that the input matrix
* A is overwritten with U on output.
* @param A a rectangular matrix. The number of rows must be greater than
* or equal to the number of columns in the current implementation.
*/
public static SingularValueDecomposition decompose(double[][] A) {
int m = A.length;
int n = A[0].length;
boolean flag;
int i, its, j, jj, k, l = 0, nm = 0;
double anorm, c, f, g, h, s, scale, x, y, z;
g = scale = anorm = 0.0;
double[][] u = A;
double[][] v = new double[n][n];
double[] w = new double[n];
double[] rv1 = new double[n];
for (i = 0; i < n; i++) {
l = i + 2;
rv1[i] = scale * g;
g = s = scale = 0.0;
if (i < m) {
for (k = i; k < m; k++) {
scale += Math.abs(u[k][i]);
}
if (scale != 0.0) {
for (k = i; k < m; k++) {
u[k][i] /= scale;
s += u[k][i] * u[k][i];
}
f = u[i][i];
g = -Math.copySign(Math.sqrt(s), f);
h = f * g - s;
u[i][i] = f - g;
for (j = l - 1; j < n; j++) {
for (s = 0.0, k = i; k < m; k++) {
s += u[k][i] * u[k][j];
}
f = s / h;
for (k = i; k < m; k++) {
u[k][j] += f * u[k][i];
}
}
for (k = i; k < m; k++) {
u[k][i] *= scale;
}
}
}
w[i] = scale * g;
g = s = scale = 0.0;
if (i + 1 <= m && i + 1 != n) {
for (k = l - 1; k < n; k++) {
scale += Math.abs(u[i][k]);
}
if (scale != 0.0) {
for (k = l - 1; k < n; k++) {
u[i][k] /= scale;
s += u[i][k] * u[i][k];
}
f = u[i][l - 1];
g = -Math.copySign(Math.sqrt(s), f);
h = f * g - s;
u[i][l - 1] = f - g;
for (k = l - 1; k < n; k++) {
rv1[k] = u[i][k] / h;
}
for (j = l - 1; j < m; j++) {
for (s = 0.0, k = l - 1; k < n; k++) {
s += u[j][k] * u[i][k];
}
for (k = l - 1; k < n; k++) {
u[j][k] += s * rv1[k];
}
}
for (k = l - 1; k < n; k++) {
u[i][k] *= scale;
}
}
}
anorm = Math.max(anorm, (Math.abs(w[i]) + Math.abs(rv1[i])));
}
for (i = n - 1; i >= 0; i--) {
if (i < n - 1) {
if (g != 0.0) {
for (j = l; j < n; j++) {
v[j][i] = (u[i][j] / u[i][l]) / g;
}
for (j = l; j < n; j++) {
for (s = 0.0, k = l; k < n; k++) {
s += u[i][k] * v[k][j];
}
for (k = l; k < n; k++) {
v[k][j] += s * v[k][i];
}
}
}
for (j = l; j < n; j++) {
v[i][j] = v[j][i] = 0.0;
}
}
v[i][i] = 1.0;
g = rv1[i];
l = i;
}
for (i = Math.min(m, n) - 1; i >= 0; i--) {
l = i + 1;
g = w[i];
for (j = l; j < n; j++) {
u[i][j] = 0.0;
}
if (g != 0.0) {
g = 1.0 / g;
for (j = l; j < n; j++) {
for (s = 0.0, k = l; k < m; k++) {
s += u[k][i] * u[k][j];
}
f = (s / u[i][i]) * g;
for (k = i; k < m; k++) {
u[k][j] += f * u[k][i];
}
}
for (j = i; j < m; j++) {
u[j][i] *= g;
}
} else {
for (j = i; j < m; j++) {
u[j][i] = 0.0;
}
}
++u[i][i];
}
for (k = n - 1; k >= 0; k--) {
for (its = 0; its < 30; its++) {
flag = true;
for (l = k; l >= 0; l--) {
nm = l - 1;
if (l == 0 || Math.abs(rv1[l]) <= Math.EPSILON * anorm) {
flag = false;
break;
}
if (Math.abs(w[nm]) <= Math.EPSILON * anorm) {
break;
}
}
if (flag) {
c = 0.0;
s = 1.0;
for (i = l; i < k + 1; i++) {
f = s * rv1[i];
rv1[i] = c * rv1[i];
if (Math.abs(f) <= Math.EPSILON * anorm) {
break;
}
g = w[i];
h = Math.hypot(f, g);
w[i] = h;
h = 1.0 / h;
c = g * h;
s = -f * h;
for (j = 0; j < m; j++) {
y = u[j][nm];
z = u[j][i];
u[j][nm] = y * c + z * s;
u[j][i] = z * c - y * s;
}
}
}
z = w[k];
if (l == k) {
if (z < 0.0) {
w[k] = -z;
for (j = 0; j < n; j++) {
v[j][k] = -v[j][k];
}
}
break;
}
if (its == 29) {
throw new IllegalStateException("no convergence in 30 iterations");
}
x = w[l];
nm = k - 1;
y = w[nm];
g = rv1[nm];
h = rv1[k];
f = ((y - z) * (y + z) + (g - h) * (g + h)) / (2.0 * h * y);
g = Math.hypot(f, 1.0);
f = ((x - z) * (x + z) + h * ((y / (f + Math.copySign(g, f))) - h)) / x;
c = s = 1.0;
for (j = l; j <= nm; j++) {
i = j + 1;
g = rv1[i];
y = w[i];
h = s * g;
g = c * g;
z = Math.hypot(f, h);
rv1[j] = z;
c = f / z;
s = h / z;
f = x * c + g * s;
g = g * c - x * s;
h = y * s;
y *= c;
for (jj = 0; jj < n; jj++) {
x = v[jj][j];
z = v[jj][i];
v[jj][j] = x * c + z * s;
v[jj][i] = z * c - x * s;
}
z = Math.hypot(f, h);
w[j] = z;
if (z != 0.0) {
z = 1.0 / z;
c = f * z;
s = h * z;
}
f = c * g + s * y;
x = c * y - s * g;
for (jj = 0; jj < m; jj++) {
y = u[jj][j];
z = u[jj][i];
u[jj][j] = y * c + z * s;
u[jj][i] = z * c - y * s;
}
}
rv1[l] = 0.0;
rv1[k] = f;
w[k] = x;
}
}
// order singular values
int inc = 1;
double sw;
double[] su = new double[m], sv = new double[n];
do {
inc *= 3;
inc++;
} while (inc <= n);
do {
inc /= 3;
for (i = inc; i < n; i++) {
sw = w[i];
for (k = 0; k < m; k++) {
su[k] = u[k][i];
}
for (k = 0; k < n; k++) {
sv[k] = v[k][i];
}
j = i;
while (w[j - inc] < sw) {
w[j] = w[j - inc];
for (k = 0; k < m; k++) {
u[k][j] = u[k][j - inc];
}
for (k = 0; k < n; k++) {
v[k][j] = v[k][j - inc];
}
j -= inc;
if (j < inc) {
break;
}
}
w[j] = sw;
for (k = 0; k < m; k++) {
u[k][j] = su[k];
}
for (k = 0; k < n; k++) {
v[k][j] = sv[k];
}
}
} while (inc > 1);
for (k = 0; k < n; k++) {
s = 0;
for (i = 0; i < m; i++) {
if (u[i][k] < 0.) {
s++;
}
}
for (j = 0; j < n; j++) {
if (v[j][k] < 0.) {
s++;
}
}
if (s > (m + n) / 2) {
for (i = 0; i < m; i++) {
u[i][k] = -u[i][k];
}
for (j = 0; j < n; j++) {
v[j][k] = -v[j][k];
}
}
}
return new SingularValueDecomposition(u, v, w);
}
}
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.adapter.servlet;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.graphene.page.Page;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.keycloak.admin.client.resource.ClientResource;
import org.keycloak.admin.client.resource.ProtocolMappersResource;
import org.keycloak.common.util.KeyUtils;
import org.keycloak.common.util.PemUtils;
import org.keycloak.keys.Attributes;
import org.keycloak.keys.KeyProvider;
import org.keycloak.keys.RsaKeyProviderFactory;
import org.keycloak.representations.idm.ComponentRepresentation;
import org.keycloak.protocol.saml.mappers.AttributeStatementHelper;
import org.keycloak.protocol.saml.mappers.RoleListMapper;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.saml.BaseSAML2BindingBuilder;
import org.keycloak.saml.SAML2ErrorResponseBuilder;
import org.keycloak.saml.common.constants.JBossSAMLURIConstants;
import org.keycloak.testsuite.adapter.AbstractServletsAdapterTest;
import org.keycloak.testsuite.adapter.page.BadAssertionSalesPostSig;
import org.keycloak.testsuite.adapter.page.BadClientSalesPostSigServlet;
import org.keycloak.testsuite.adapter.page.BadRealmSalesPostSigServlet;
import org.keycloak.testsuite.adapter.page.Employee2Servlet;
import org.keycloak.testsuite.adapter.page.EmployeeServlet;
import org.keycloak.testsuite.adapter.page.EmployeeSigFrontServlet;
import org.keycloak.testsuite.adapter.page.EmployeeSigPostNoIdpKeyServlet;
import org.keycloak.testsuite.adapter.page.EmployeeSigRedirNoIdpKeyServlet;
import org.keycloak.testsuite.adapter.page.EmployeeSigRedirOptNoIdpKeyServlet;
import org.keycloak.testsuite.adapter.page.EmployeeSigServlet;
import org.keycloak.testsuite.adapter.page.InputPortal;
import org.keycloak.testsuite.adapter.page.MissingAssertionSig;
import org.keycloak.testsuite.adapter.page.SAMLServlet;
import org.keycloak.testsuite.adapter.page.SalesMetadataServlet;
import org.keycloak.testsuite.adapter.page.SalesPost2Servlet;
import org.keycloak.testsuite.adapter.page.SalesPostAssertionAndResponseSig;
import org.keycloak.testsuite.adapter.page.SalesPostEncServlet;
import org.keycloak.testsuite.adapter.page.SalesPostPassiveServlet;
import org.keycloak.testsuite.adapter.page.SalesPostServlet;
import org.keycloak.testsuite.adapter.page.SalesPostSigEmailServlet;
import org.keycloak.testsuite.adapter.page.SalesPostSigPersistentServlet;
import org.keycloak.testsuite.adapter.page.SalesPostSigServlet;
import org.keycloak.testsuite.adapter.page.SalesPostSigTransientServlet;
import org.keycloak.testsuite.admin.ApiUtil;
import org.keycloak.testsuite.auth.page.login.Login;
import org.keycloak.testsuite.auth.page.login.SAMLIDPInitiatedLogin;
import org.keycloak.testsuite.page.AbstractPage;
import org.keycloak.testsuite.util.IOUtil;
import org.openqa.selenium.By;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.security.KeyPair;
import java.security.PublicKey;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.keycloak.testsuite.auth.page.AuthRealm.SAMLSERVLETDEMO;
import static org.keycloak.testsuite.util.IOUtil.loadRealm;
import static org.keycloak.testsuite.util.IOUtil.loadXML;
import static org.keycloak.testsuite.util.IOUtil.modifyDocElementAttribute;
import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith;
import static org.keycloak.testsuite.util.WaitUtils.waitUntilElement;
/**
* @author mhajas
*/
public abstract class AbstractSAMLServletsAdapterTest extends AbstractServletsAdapterTest {
@Page
protected BadClientSalesPostSigServlet badClientSalesPostSigServletPage;
@Page
protected BadRealmSalesPostSigServlet badRealmSalesPostSigServletPage;
@Page
protected Employee2Servlet employee2ServletPage;
@Page
protected EmployeeSigServlet employeeSigServletPage;
@Page
protected EmployeeSigPostNoIdpKeyServlet employeeSigPostNoIdpKeyServletPage;
@Page
protected EmployeeSigRedirNoIdpKeyServlet employeeSigRedirNoIdpKeyServletPage;
@Page
protected EmployeeSigRedirOptNoIdpKeyServlet employeeSigRedirOptNoIdpKeyServletPage;
@Page
protected EmployeeSigFrontServlet employeeSigFrontServletPage;
@Page
protected SalesMetadataServlet salesMetadataServletPage;
@Page
protected SalesPostServlet salesPostServletPage;
@Page
private SalesPost2Servlet salesPost2ServletPage;
@Page
protected SalesPostEncServlet salesPostEncServletPage;
@Page
protected SalesPostPassiveServlet salesPostPassiveServletPage;
@Page
protected SalesPostSigServlet salesPostSigServletPage;
@Page
protected SalesPostSigEmailServlet salesPostSigEmailServletPage;
@Page
protected SalesPostSigPersistentServlet salesPostSigPersistentServletPage;
@Page
protected SalesPostSigTransientServlet salesPostSigTransientServletPage;
@Page
protected SAMLIDPInitiatedLogin samlidpInitiatedLogin;
protected boolean forbiddenIfNotAuthenticated = true;
@Page
protected SalesPostAssertionAndResponseSig salesPostAssertionAndResponseSigPage;
@Page
protected BadAssertionSalesPostSig badAssertionSalesPostSigPage;
@Page
protected MissingAssertionSig missingAssertionSigPage;
@Page
protected EmployeeServlet employeeServletPage;
@Page
private InputPortal inputPortalPage;
@Page
private SAMLIDPInitiatedLogin samlidpInitiatedLoginPage;
public static final String FORBIDDEN_TEXT = "HTTP status code: 403";
@Deployment(name = BadClientSalesPostSigServlet.DEPLOYMENT_NAME)
protected static WebArchive badClientSalesPostSig() {
return samlServletDeployment(BadClientSalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = BadRealmSalesPostSigServlet.DEPLOYMENT_NAME)
protected static WebArchive badRealmSalesPostSig() {
return samlServletDeployment(BadRealmSalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = Employee2Servlet.DEPLOYMENT_NAME)
protected static WebArchive employee2() {
return samlServletDeployment(Employee2Servlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeSigServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeSig() {
return samlServletDeployment(EmployeeSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeSigPostNoIdpKeyServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeSigPostNoIdpKeyServlet() {
return samlServletDeployment(EmployeeSigPostNoIdpKeyServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeSigRedirNoIdpKeyServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeSigRedirNoIdpKeyServlet() {
return samlServletDeployment(EmployeeSigRedirNoIdpKeyServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeSigRedirOptNoIdpKeyServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeSigRedirOptNoIdpKeyServlet() {
return samlServletDeployment(EmployeeSigRedirOptNoIdpKeyServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeSigFrontServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeSigFront() {
return samlServletDeployment(EmployeeSigFrontServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesMetadataServlet.DEPLOYMENT_NAME)
protected static WebArchive salesMetadata() {
return samlServletDeployment(SalesMetadataServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPost() {
return samlServletDeployment(SalesPostServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostEncServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostEnc() {
return samlServletDeployment(SalesPostEncServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostPassiveServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostPassive() {
return samlServletDeployment(SalesPostPassiveServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostSigServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostSig() {
return samlServletDeployment(SalesPostSigServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostSigEmailServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostSigEmail() {
return samlServletDeployment(SalesPostSigEmailServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostSigPersistentServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostSigPersistent() {
return samlServletDeployment(SalesPostSigPersistentServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostSigTransientServlet.DEPLOYMENT_NAME)
protected static WebArchive salesPostSigTransient() {
return samlServletDeployment(SalesPostSigTransientServlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = InputPortal.DEPLOYMENT_NAME)
protected static WebArchive inputPortal() {
return samlServletDeployment(InputPortal.DEPLOYMENT_NAME, "input-portal/WEB-INF/web.xml" , InputServlet.class);
}
@Deployment(name = SalesPost2Servlet.DEPLOYMENT_NAME)
protected static WebArchive salesPost2() {
return samlServletDeployment(SalesPost2Servlet.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = SalesPostAssertionAndResponseSig.DEPLOYMENT_NAME)
protected static WebArchive salesPostAssertionAndResponseSig() {
return samlServletDeployment(SalesPostAssertionAndResponseSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = BadAssertionSalesPostSig.DEPLOYMENT_NAME)
protected static WebArchive badAssertionSalesPostSig() {
return samlServletDeployment(BadAssertionSalesPostSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = MissingAssertionSig.DEPLOYMENT_NAME)
protected static WebArchive missingAssertionSig() {
return samlServletDeployment(MissingAssertionSig.DEPLOYMENT_NAME, SendUsernameServlet.class);
}
@Deployment(name = EmployeeServlet.DEPLOYMENT_NAME)
protected static WebArchive employeeServlet() {
return samlServletDeployment(EmployeeServlet.DEPLOYMENT_NAME, "employee/WEB-INF/web.xml", SamlSPFacade.class);
}
@Override
public void addAdapterTestRealms(List<RealmRepresentation> testRealms) {
testRealms.add(loadRealm("/adapter-test/keycloak-saml/testsaml.json"));
}
@Override
public void setDefaultPageUriParameters() {
super.setDefaultPageUriParameters();
testRealmPage.setAuthRealm(SAMLSERVLETDEMO);
testRealmSAMLRedirectLoginPage.setAuthRealm(SAMLSERVLETDEMO);
testRealmSAMLPostLoginPage.setAuthRealm(SAMLSERVLETDEMO);
}
private void assertForbidden(AbstractPage page, String expectedNotContains) {
page.navigateTo();
waitUntilElement(By.xpath("//body")).text().not().contains(expectedNotContains);
assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
}
private void assertSuccessfullyLoggedIn(AbstractPage page, String expectedText) {
page.navigateTo();
waitUntilElement(By.xpath("//body")).text().contains(expectedText);
}
private void assertForbiddenLogin(AbstractPage page, String username, String password, Login loginPage, String expectedNotContains) {
page.navigateTo();
assertCurrentUrlStartsWith(loginPage);
loginPage.form().login(username, password);
waitUntilElement(By.xpath("//body")).text().not().contains(expectedNotContains);
//Different 403 status page on EAP and Wildfly
assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
}
private void assertSuccessfulLogin(AbstractPage page, UserRepresentation user, Login loginPage, String expectedString) {
page.navigateTo();
assertCurrentUrlStartsWith(loginPage);
loginPage.form().login(user);
waitUntilElement(By.xpath("//body")).text().contains(expectedString);
}
private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage) {
testSuccessfulAndUnauthorizedLogin(page, loginPage, "principal=bburke");
}
private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage, String expectedText) {
testSuccessfulAndUnauthorizedLogin(page, loginPage, expectedText, "principal=");
}
private void testSuccessfulAndUnauthorizedLogin(SAMLServlet page, Login loginPage, String expectedText, String expectedNotContains) {
assertSuccessfulLogin(page, bburkeUser, loginPage, expectedText);
page.logout();
checkLoggedOut(page, loginPage);
assertForbiddenLogin(page, "unauthorized", "password", loginPage, expectedNotContains);
page.logout();
checkLoggedOut(page, loginPage);
}
private void checkLoggedOut(AbstractPage page, Login loginPage) {
page.navigateTo();
waitUntilElement(By.xpath("//body")).is().present();
assertCurrentUrlStartsWith(loginPage);
}
@Test
public void disabledClientTest() {
ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/sales-post-sig/");
ClientRepresentation client = clientResource.toRepresentation();
client.setEnabled(false);
clientResource.update(client);
salesPostSigServletPage.navigateTo();
waitUntilElement(By.xpath("//body")).text().contains("Login requester not enabled");
client.setEnabled(true);
clientResource.update(client);
}
@Test
public void unauthorizedSSOTest() {
assertForbiddenLogin(salesPostServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
assertForbidden(employee2ServletPage, "principal=");
assertForbidden(employeeSigFrontServletPage, "principal=");
assertForbidden(salesPostSigPersistentServletPage, "principal=");
salesPostServletPage.logout();
checkLoggedOut(salesPostServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void singleLoginAndLogoutSAMLTest() {
assertSuccessfulLogin(salesPostServletPage, bburkeUser, testRealmSAMLPostLoginPage, "principal=bburke");
assertSuccessfullyLoggedIn(salesPostSigServletPage, "principal=bburke");
assertSuccessfullyLoggedIn(employee2ServletPage, "principal=bburke");
assertSuccessfullyLoggedIn(salesPostEncServletPage, "principal=bburke");
employeeSigFrontServletPage.logout();
checkLoggedOut(employeeSigFrontServletPage, testRealmSAMLRedirectLoginPage);
checkLoggedOut(employeeSigServletPage, testRealmSAMLRedirectLoginPage);
salesPostPassiveServletPage.navigateTo();
if (forbiddenIfNotAuthenticated) {
assertOnForbiddenPage();
} else {
waitUntilElement(By.xpath("//body")).text().contains("principal=null");
}
checkLoggedOut(salesPostSigEmailServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void badClientSalesPostSigTest() {
badClientSalesPostSigServletPage.navigateTo();
waitUntilElement(By.xpath("//body")).text().contains("Invalid requester");
}
@Test
public void badRealmSalesPostSigTest() {
badRealmSalesPostSigServletPage.navigateTo();
testRealmSAMLRedirectLoginPage.form().login(bburkeUser);
waitUntilElement(By.xpath("//body")).text().not().contains("principal=");
//Different 403 status page on EAP and Wildfly
assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
}
@Test
public void employee2Test() {
testSuccessfulAndUnauthorizedLogin(employee2ServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void employeeSigTest() {
testSuccessfulAndUnauthorizedLogin(employeeSigServletPage, testRealmSAMLRedirectLoginPage);
}
private PublicKey createKeys(String priority) throws Exception {
KeyPair keyPair = KeyUtils.generateRsaKeyPair(1024);
String privateKeyPem = PemUtils.encodeKey(keyPair.getPrivate());
PublicKey publicKey = keyPair.getPublic();
ComponentRepresentation rep = new ComponentRepresentation();
rep.setName("mycomponent");
rep.setParentId("demo");
rep.setProviderId(RsaKeyProviderFactory.ID);
rep.setProviderType(KeyProvider.class.getName());
org.keycloak.common.util.MultivaluedHashMap config = new org.keycloak.common.util.MultivaluedHashMap();
config.addFirst("priority", priority);
config.addFirst(Attributes.PRIVATE_KEY_KEY, privateKeyPem);
rep.setConfig(config);
testRealmResource().components().add(rep);
return publicKey;
}
private void dropKeys(String priority) {
for (ComponentRepresentation c : testRealmResource().components().query("demo", KeyProvider.class.getName())) {
if (c.getConfig().getFirst("priority").equals(priority)) {
testRealmResource().components().component(c.getId()).remove();
return;
}
}
throw new RuntimeException("Failed to find keys");
}
private void testRotatedKeysPropagated(SAMLServlet servletPage, Login loginPage) throws Exception {
boolean keyDropped = false;
try {
log.info("Creating new key");
createKeys("1000");
testSuccessfulAndUnauthorizedLogin(servletPage, loginPage);
log.info("Dropping new key");
dropKeys("1000");
keyDropped = true;
testSuccessfulAndUnauthorizedLogin(servletPage, loginPage);
} finally {
if (! keyDropped) {
dropKeys("1000");
}
}
}
@Test
public void employeeSigPostNoIdpKeyTest() throws Exception {
testRotatedKeysPropagated(employeeSigPostNoIdpKeyServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void employeeSigRedirNoIdpKeyTest() throws Exception {
testRotatedKeysPropagated(employeeSigRedirNoIdpKeyServletPage, testRealmSAMLRedirectLoginPage);
}
@Test
public void employeeSigRedirOptNoIdpKeyTest() throws Exception {
testRotatedKeysPropagated(employeeSigRedirOptNoIdpKeyServletPage, testRealmSAMLRedirectLoginPage);
}
@Test
public void employeeSigFrontTest() {
testSuccessfulAndUnauthorizedLogin(employeeSigFrontServletPage, testRealmSAMLRedirectLoginPage);
}
@Test
public void salesMetadataTest() throws Exception {
Document doc = loadXML(AbstractSAMLServletsAdapterTest.class.getResourceAsStream("/adapter-test/keycloak-saml/sp-metadata.xml"));
modifyDocElementAttribute(doc, "SingleLogoutService", "Location", "8080", System.getProperty("app.server.http.port", null));
modifyDocElementAttribute(doc, "AssertionConsumerService", "Location", "8080", System.getProperty("app.server.http.port", null));
ClientRepresentation clientRep = testRealmResource().convertClientDescription(IOUtil.documentToString(doc));
String appServerUrl;
if (Boolean.parseBoolean(System.getProperty("app.server.ssl.required"))) {
appServerUrl = "https://localhost:" + System.getProperty("app.server.https.port", "8543") + "/";
} else {
appServerUrl = "http://localhost:" + System.getProperty("app.server.http.port", "8280") + "/";
}
clientRep.setAdminUrl(appServerUrl + "sales-metadata/saml");
Response response = testRealmResource().clients().create(clientRep);
assertEquals(201, response.getStatus());
response.close();
testSuccessfulAndUnauthorizedLogin(salesMetadataServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostTest() {
testSuccessfulAndUnauthorizedLogin(salesPostServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostEncTest() {
testSuccessfulAndUnauthorizedLogin(salesPostEncServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostPassiveTest() {
salesPostPassiveServletPage.navigateTo();
if (forbiddenIfNotAuthenticated) {
assertOnForbiddenPage();
} else {
waitUntilElement(By.xpath("//body")).text().contains("principal=null");
}
assertSuccessfulLogin(salesPostServletPage, bburkeUser, testRealmSAMLPostLoginPage, "principal=bburke");
assertSuccessfullyLoggedIn(salesPostPassiveServletPage, "principal=bburke");
salesPostPassiveServletPage.logout();
salesPostPassiveServletPage.navigateTo();
if (forbiddenIfNotAuthenticated) {
assertOnForbiddenPage();
} else {
waitUntilElement(By.xpath("//body")).text().contains("principal=null");
}
assertForbiddenLogin(salesPostServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
assertForbidden(salesPostPassiveServletPage, "principal=");
salesPostPassiveServletPage.logout();
}
@Test
public void salesPostSigTest() {
testSuccessfulAndUnauthorizedLogin(salesPostSigServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostSigEmailTest() {
testSuccessfulAndUnauthorizedLogin(salesPostSigEmailServletPage, testRealmSAMLPostLoginPage, "principal=bburke@redhat.com");
}
@Test
public void salesPostSigPersistentTest() {
salesPostSigPersistentServletPage.navigateTo();
testRealmSAMLPostLoginPage.form().login(bburkeUser);
waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
waitUntilElement(By.xpath("//body")).text().contains("principal=G-");
salesPostSigPersistentServletPage.logout();
checkLoggedOut(salesPostSigPersistentServletPage, testRealmSAMLPostLoginPage);
assertForbiddenLogin(salesPostSigPersistentServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
salesPostSigPersistentServletPage.logout();
checkLoggedOut(salesPostSigPersistentServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostSigTransientTest() {
salesPostSigTransientServletPage.navigateTo();
testRealmSAMLPostLoginPage.form().login(bburkeUser);
waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
waitUntilElement(By.xpath("//body")).text().contains("principal=G-");
salesPostSigTransientServletPage.logout();
checkLoggedOut(salesPostSigTransientServletPage, testRealmSAMLPostLoginPage);
assertForbiddenLogin(salesPostSigTransientServletPage, "unauthorized", "password", testRealmSAMLPostLoginPage, "principal=");
salesPostSigTransientServletPage.logout();
checkLoggedOut(salesPostSigTransientServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void idpInitiatedLogin() {
samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
samlidpInitiatedLoginPage.setUrlName("employee2");
samlidpInitiatedLoginPage.navigateTo();
samlidpInitiatedLoginPage.form().login(bburkeUser);
waitUntilElement(By.xpath("//body")).text().contains("principal=bburke");
assertSuccessfullyLoggedIn(salesPostSigServletPage, "principal=bburke");
employee2ServletPage.logout();
checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void idpInitiatedUnauthorizedLoginTest() {
samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
samlidpInitiatedLoginPage.setUrlName("employee2");
samlidpInitiatedLoginPage.navigateTo();
samlidpInitiatedLoginPage.form().login("unauthorized", "password");
waitUntilElement(By.xpath("//body")).text().not().contains("bburke");
assertTrue(driver.getPageSource().contains("Forbidden") || driver.getPageSource().contains(FORBIDDEN_TEXT));
assertForbidden(employee2ServletPage, "principal=");
employee2ServletPage.logout();
checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void testSavedPostRequest() {
inputPortalPage.navigateTo();
assertCurrentUrlStartsWith(inputPortalPage);
inputPortalPage.execute("hello");
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmLoginPage.form().login("bburke@redhat.com", "password");
Assert.assertEquals(inputPortalPage + "/secured/post", driver.getCurrentUrl());
waitUntilElement(By.xpath("//body")).text().contains("parameter=hello");
// test that user principal and KeycloakSecurityContext available
driver.navigate().to(inputPortalPage + "/insecure");
waitUntilElement(By.xpath("//body")).text().contains("Insecure Page");
if (System.getProperty("insecure.user.principal.unsupported") == null) waitUntilElement(By.xpath("//body")).text().contains("UserPrincipal");
// test logout
inputPortalPage.logout();
// test unsecured POST KEYCLOAK-901
Client client = ClientBuilder.newClient();
Form form = new Form();
form.param("parameter", "hello");
String text = client.target(inputPortalPage + "/unsecured").request().post(Entity.form(form), String.class);
Assert.assertTrue(text.contains("parameter=hello"));
client.close();
}
@Test
public void testPostSimpleLoginLogoutIdpInitiatedRedirectTo() {
samlidpInitiatedLoginPage.setAuthRealm(SAMLSERVLETDEMO);
samlidpInitiatedLoginPage.setUrlName("sales-post2");
samlidpInitiatedLoginPage.navigateTo();
samlidpInitiatedLoginPage.form().login(bburkeUser);
assertCurrentUrlStartsWith(salesPost2ServletPage);
assertTrue(driver.getCurrentUrl().endsWith("/foo"));
waitUntilElement(By.xpath("//body")).text().contains("principal=bburke");
salesPost2ServletPage.logout();
checkLoggedOut(salesPost2ServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void salesPostAssertionAndResponseSigTest() {
testSuccessfulAndUnauthorizedLogin(salesPostAssertionAndResponseSigPage, testRealmSAMLPostLoginPage);
}
@Test
public void testPostBadAssertionSignature() {
badAssertionSalesPostSigPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login("bburke", "password");
waitUntilElement(By.xpath("//body")).text().contains("Error info: SamlAuthenticationError [reason=INVALID_SIGNATURE, status=null]");
assertEquals(badAssertionSalesPostSigPage + "/saml", driver.getCurrentUrl());
}
@Test
public void testMissingAssertionSignature() {
missingAssertionSigPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login("bburke", "password");
waitUntilElement(By.xpath("//body")).text().contains("Error info: SamlAuthenticationError [reason=INVALID_SIGNATURE, status=null]");
assertEquals(missingAssertionSigPage + "/saml", driver.getCurrentUrl());
}
@Test
public void testErrorHandling() throws Exception {
Client client = ClientBuilder.newClient();
// make sure the SP application responds before sending the crafted SAML error response
Response response = client.target(employeeSigServletPage.toString()).request().get();
response.close();
SAML2ErrorResponseBuilder builder = new SAML2ErrorResponseBuilder()
.destination(employeeSigServletPage.toString() + "/saml")
.issuer("http://localhost:" + System.getProperty("auth.server.http.port", "8180") + "/realms/demo")
.status(JBossSAMLURIConstants.STATUS_REQUEST_DENIED.get());
BaseSAML2BindingBuilder binding = new BaseSAML2BindingBuilder()
.relayState(null);
Document document = builder.buildDocument();
URI uri = binding.redirectBinding(document).generateURI(employeeSigServletPage.toString() + "/saml", false);
response = client.target(uri).request().get();
String errorPage = response.readEntity(String.class);
response.close();
Assert.assertTrue(errorPage.contains("Error info: SamlAuthenticationError [reason=ERROR_STATUS"));
Assert.assertFalse(errorPage.contains("status=null"));
client.close();
}
@Test
public void testRelayStateEncoding() throws Exception {
// this test uses a hardcoded SAMLRequest and an SP facade servlet (SamlSPFacade) to capture the
// SAMLResponse so we can inspect the relay state
employeeServletPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login("bburke", "password");
assertCurrentUrlStartsWith(employeeServletPage);
waitUntilElement(By.xpath("//body")).text().contains("Relay state: " + SamlSPFacade.RELAY_STATE);
waitUntilElement(By.xpath("//body")).text().not().contains("SAML response: null");
}
@Test
public void testAttributes() throws Exception {
ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/employee2/");
ProtocolMappersResource protocolMappersResource = clientResource.getProtocolMappers();
Map<String, String> config = new LinkedHashMap<>();
config.put("attribute.nameformat", "Basic");
config.put("user.attribute", "topAttribute");
config.put("attribute.name", "topAttribute");
createProtocolMapper(protocolMappersResource, "topAttribute", "saml", "saml-user-attribute-mapper", config);
config = new LinkedHashMap<>();
config.put("attribute.nameformat", "Basic");
config.put("user.attribute", "level2Attribute");
config.put("attribute.name", "level2Attribute");
createProtocolMapper(protocolMappersResource, "level2Attribute", "saml", "saml-user-attribute-mapper", config);
config = new LinkedHashMap<>();
config.put("attribute.nameformat", "Basic");
config.put("single", "true");
config.put("attribute.name", "group");
createProtocolMapper(protocolMappersResource, "groups", "saml", "saml-group-membership-mapper", config);
setRolesToCheck("manager,user");
employee2ServletPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login("level2GroupUser", "password");
driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
waitUntilElement(By.xpath("//body")).text().contains("topAttribute: true");
waitUntilElement(By.xpath("//body")).text().contains("level2Attribute: true");
waitUntilElement(By.xpath("//body")).text().contains("attribute email: level2@redhat.com");
waitUntilElement(By.xpath("//body")).text().not().contains("group: []");
waitUntilElement(By.xpath("//body")).text().not().contains("group: null");
waitUntilElement(By.xpath("//body")).text().contains("group: [level2]");
employee2ServletPage.logout();
checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
setRolesToCheck("manager,employee,user");
employee2ServletPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login(bburkeUser);
driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
waitUntilElement(By.xpath("//body")).text().contains("attribute email: bburke@redhat.com");
waitUntilElement(By.xpath("//body")).text().contains("friendlyAttribute email: bburke@redhat.com");
waitUntilElement(By.xpath("//body")).text().contains("phone: 617");
waitUntilElement(By.xpath("//body")).text().contains("friendlyAttribute phone: null");
employee2ServletPage.logout();
checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
config = new LinkedHashMap<>();
config.put("attribute.value", "hard");
config.put("attribute.nameformat", "Basic");
config.put("attribute.name", "hardcoded-attribute");
createProtocolMapper(protocolMappersResource, "hardcoded-attribute", "saml", "saml-hardcode-attribute-mapper", config);
config = new LinkedHashMap<>();
config.put("role", "hardcoded-role");
createProtocolMapper(protocolMappersResource, "hardcoded-role", "saml", "saml-hardcode-role-mapper", config);
config = new LinkedHashMap<>();
config.put("new.role.name", "pee-on");
config.put("role", "http://localhost:8081/employee/.employee");
createProtocolMapper(protocolMappersResource, "renamed-employee-role", "saml", "saml-role-name-mapper", config);
for (ProtocolMapperRepresentation mapper : clientResource.toRepresentation().getProtocolMappers()) {
if (mapper.getName().equals("role-list")) {
protocolMappersResource.delete(mapper.getId());
mapper.setId(null);
mapper.getConfig().put(RoleListMapper.SINGLE_ROLE_ATTRIBUTE, "true");
mapper.getConfig().put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "memberOf");
protocolMappersResource.createMapper(mapper);
}
}
setRolesToCheck("pee-on,el-jefe,manager,hardcoded-role");
config = new LinkedHashMap<>();
config.put("new.role.name", "el-jefe");
config.put("role", "user");
createProtocolMapper(protocolMappersResource, "renamed-role", "saml", "saml-role-name-mapper", config);
employee2ServletPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login(bburkeUser);
driver.navigate().to(employee2ServletPage.toString() + "/getAttributes");
waitUntilElement(By.xpath("//body")).text().contains("hardcoded-attribute: hard");
employee2ServletPage.checkRolesEndPoint(false);
employee2ServletPage.logout();
checkLoggedOut(employee2ServletPage, testRealmSAMLPostLoginPage);
}
@Test
public void idpMetadataValidation() throws Exception {
driver.navigate().to(authServerPage.toString() + "/realms/" + SAMLSERVLETDEMO + "/protocol/saml/descriptor");
validateXMLWithSchema(driver.getPageSource(), "/adapter-test/keycloak-saml/metadata-schema/saml-schema-metadata-2.0.xsd");
}
@Test
public void spMetadataValidation() throws Exception {
ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "http://localhost:8081/sales-post-sig/");
ClientRepresentation representation = clientResource.toRepresentation();
Client client = ClientBuilder.newClient();
WebTarget target = client.target(authServerPage.toString() + "/admin/realms/" + SAMLSERVLETDEMO + "/clients/" + representation.getId() + "/installation/providers/saml-sp-descriptor");
Response response = target.request().header(HttpHeaders.AUTHORIZATION, "Bearer " + adminClient.tokenManager().getAccessToken().getToken()).get();
validateXMLWithSchema(response.readEntity(String.class), "/adapter-test/keycloak-saml/metadata-schema/saml-schema-metadata-2.0.xsd");
response.close();
}
private void validateXMLWithSchema(String xml, String schemaFileName) throws SAXException, IOException {
URL schemaFile = getClass().getResource(schemaFileName);
Source xmlFile = new StreamSource(new ByteArrayInputStream(xml.getBytes()), xml);
SchemaFactory schemaFactory = SchemaFactory
.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = schemaFactory.newSchema(schemaFile);
Validator validator = schema.newValidator();
try {
validator.validate(xmlFile);
System.out.println(xmlFile.getSystemId() + " is valid");
} catch (SAXException e) {
System.out.println(xmlFile.getSystemId() + " is NOT valid");
System.out.println("Reason: " + e.getLocalizedMessage());
Assert.fail();
}
}
private void createProtocolMapper(ProtocolMappersResource resource, String name, String protocol, String protocolMapper, Map<String, String> config) {
ProtocolMapperRepresentation representation = new ProtocolMapperRepresentation();
representation.setName(name);
representation.setProtocol(protocol);
representation.setProtocolMapper(protocolMapper);
representation.setConfig(config);
resource.createMapper(representation);
}
private void setRolesToCheck(String roles) {
employee2ServletPage.navigateTo();
assertCurrentUrlStartsWith(testRealmSAMLPostLoginPage);
testRealmSAMLPostLoginPage.form().login(bburkeUser);
driver.navigate().to(employee2ServletPage.toString() + "/setCheckRoles?roles=" + roles);
employee2ServletPage.logout();
}
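// On EAP 6 the forbidden response comes back with an empty body, so assert on that; other app servers render the standard forbidden page text.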
private void assertOnForbiddenPage() {
switch (System.getProperty("app.server")) {
case "eap6":
waitUntilElement(By.xpath("//body")).text().not().contains("principal=");
String source = driver.getPageSource();
assertTrue(source.isEmpty() || source.contains("<body></body>"));
break;
default:
waitUntilElement(By.xpath("//body")).text().contains(FORBIDDEN_TEXT);
}
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.metadata;
import com.facebook.presto.operator.WindowFunctionDefinition;
import com.facebook.presto.operator.aggregation.InternalAggregationFunction;
import com.facebook.presto.operator.window.AggregateWindowFunction;
import com.facebook.presto.operator.window.WindowFunctionSupplier;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.sql.tree.QualifiedName;
import com.google.common.collect.ImmutableList;
import java.lang.invoke.MethodHandle;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.facebook.presto.operator.WindowFunctionDefinition.window;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
public final class FunctionInfo
implements ParametricFunction
{
private final Signature signature;
private final String description;
private final boolean hidden;
private final boolean nullable;
private final List<Boolean> nullableArguments;
private final boolean isAggregate;
private final TypeSignature intermediateType;
private final InternalAggregationFunction aggregationFunction;
private final boolean isApproximate;
private final MethodHandle methodHandle;
private final boolean deterministic;
private final boolean isWindow;
private final WindowFunctionSupplier windowFunctionSupplier;
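// Window function backed by a WindowFunctionSupplier.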
public FunctionInfo(Signature signature, String description, WindowFunctionSupplier windowFunctionSupplier)
{
this.signature = signature;
this.description = description;
this.hidden = false;
this.deterministic = true;
this.nullable = false;
this.nullableArguments = ImmutableList.copyOf(Collections.nCopies(signature.getArgumentTypes().size(), false));
this.isAggregate = false;
this.intermediateType = null;
this.aggregationFunction = null;
this.isApproximate = false;
this.methodHandle = null;
this.isWindow = true;
this.windowFunctionSupplier = checkNotNull(windowFunctionSupplier, "windowFunctionSupplier is null");
}
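// Aggregate function; also exposed as a window function through AggregateWindowFunction.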
public FunctionInfo(Signature signature, String description, InternalAggregationFunction function)
{
this.signature = signature;
this.description = description;
this.isApproximate = function.isApproximate();
this.hidden = false;
this.intermediateType = function.getIntermediateType().getTypeSignature();
this.aggregationFunction = function;
this.isAggregate = true;
this.methodHandle = null;
this.deterministic = true;
this.nullable = false;
this.nullableArguments = ImmutableList.copyOf(Collections.nCopies(signature.getArgumentTypes().size(), false));
this.isWindow = true;
this.windowFunctionSupplier = AggregateWindowFunction.supplier(signature, function);
}
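// Scalar function or operator backed by a MethodHandle.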
public FunctionInfo(Signature signature, String description, boolean hidden, MethodHandle function, boolean deterministic, boolean nullableResult, List<Boolean> nullableArguments)
{
this.signature = signature;
this.description = description;
this.hidden = hidden;
this.deterministic = deterministic;
this.nullable = nullableResult;
this.nullableArguments = ImmutableList.copyOf(checkNotNull(nullableArguments, "nullableArguments is null"));
checkArgument(nullableArguments.size() == signature.getArgumentTypes().size(), String.format("nullableArguments size (%d) does not match signature %s", nullableArguments.size(), signature));
this.isAggregate = false;
this.intermediateType = null;
this.aggregationFunction = null;
this.isApproximate = false;
this.isWindow = false;
this.windowFunctionSupplier = null;
this.methodHandle = checkNotNull(function, "function is null");
}
@Override
public Signature getSignature()
{
return signature;
}
public QualifiedName getName()
{
return QualifiedName.of(signature.getName());
}
@Override
public String getDescription()
{
return description;
}
@Override
public boolean isHidden()
{
return hidden;
}
@Override
public boolean isAggregate()
{
return isAggregate;
}
@Override
public boolean isWindow()
{
return isWindow;
}
@Override
public boolean isScalar()
{
return !isWindow && !isAggregate;
}
@Override
public boolean isUnbound()
{
return false;
}
@Override
public boolean isApproximate()
{
return isApproximate;
}
public TypeSignature getReturnType()
{
return signature.getReturnType();
}
public List<TypeSignature> getArgumentTypes()
{
return signature.getArgumentTypes();
}
public TypeSignature getIntermediateType()
{
return intermediateType;
}
@Override
public FunctionInfo specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
return this;
}
public WindowFunctionDefinition bindWindowFunction(Type type, List<Integer> inputs)
{
checkState(isWindow, "not a window function");
return window(windowFunctionSupplier, type, inputs);
}
public InternalAggregationFunction getAggregationFunction()
{
checkState(aggregationFunction != null, "not an aggregation function");
return aggregationFunction;
}
public MethodHandle getMethodHandle()
{
checkState(methodHandle != null, "not a scalar function or operator");
return methodHandle;
}
@Override
public boolean isDeterministic()
{
return deterministic;
}
public boolean isNullable()
{
return nullable;
}
public List<Boolean> getNullableArguments()
{
return nullableArguments;
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
FunctionInfo other = (FunctionInfo) obj;
return Objects.equals(this.signature, other.signature) &&
Objects.equals(this.isAggregate, other.isAggregate) &&
Objects.equals(this.isWindow, other.isWindow);
}
@Override
public int hashCode()
{
return Objects.hash(signature, isAggregate, isWindow);
}
@Override
public String toString()
{
return toStringHelper(this)
.add("signature", signature)
.add("isAggregate", isAggregate)
.add("isWindow", isWindow)
.toString();
}
}
|
|
package introspector;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.annotation.Annotation;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.logging.Logger;
/**
* Utility to find classes in the classpath.
*
* <pre>
* import introspector.ClassQuery;
*
* Set<Class> annotatedClasses = ClassQuery.
* from("any.package.you.want").
* searchClassesAnnotatedWith(SomeAnnotation.class);
*
* Set<Class> implementationClasses = ClassQuery.
* from("any.package.you.want").
* searchClassesImplementing(SomeInterface.class);
* </pre>
*
* @author Tiago Fernandez
* @since 1.0.0
*/
public final class ClassQuery {
private static final Logger logger = Logger.getLogger("introspector.ClassQuery");
private static final byte[] JAR_HEADER = {'P', 'K', 3, 4};
private final Set<Class> matches = new HashSet<Class>();
private String[] packageNames;
private ClassQuery(String... packageNames) {
this.packageNames = new String[packageNames.length];
System.arraycopy(packageNames, 0, this.packageNames, 0, packageNames.length);
}
public static ClassQuery from(String... packageNames) {
if (packageNames == null)
throw new IllegalArgumentException("The package names must be provided.");
else
return new ClassQuery(packageNames);
}
public Set<Class> searchClassesAnnotatedWith(Class<? extends Annotation> annotation) {
Matcher matcher = new AnnotationMatcher(annotation);
for (String packageName : packageNames) {
find(matcher, packageName);
}
return Collections.unmodifiableSet(matches);
}
public Set<Class> searchClassesImplementing(Class parent) {
Matcher matcher = new ImplementationMatcher(parent);
for (String packageName : packageNames) {
find(matcher, packageName);
}
return Collections.unmodifiableSet(matches);
}
private void find(Matcher matcher, String packageName) {
String path = getPackagePath(packageName);
try {
List<String> resources = listResources(path);
for (String resource : resources) {
if (isClass(resource))
examine(resource, matcher);
}
}
catch (IOException ex) {
logger.warning("Could not read package: " + packageName);
}
}
private void examine(String className, Matcher matcher) {
try {
String externalName = getExternalName(className);
Class type = getClassLoader().loadClass(externalName);
if (matcher.matches(type))
matches.add(type);
}
catch (Throwable ex) {
logger.warning("Could not examine class: " + className);
}
}
private List<String> listResources(String path) throws IOException {
List<String> resources = new ArrayList<String>();
for (URL url : listUrlResources(path)) {
List<String> packagedResources = listResources(url, path);
resources.addAll(packagedResources);
}
return resources;
}
private List<String> listResources(URL url, String path) throws IOException {
InputStream stream = null;
try {
List<String> resources = new ArrayList<String>();
URL jarUrl = findJarUrl(url);
if (jarUrl != null) {
stream = jarUrl.openStream();
resources.addAll(listPackagedResources(new JarInputStream(stream), path));
}
else {
stream = url.openStream();
String prefix = includeTrailingSlash(url.toExternalForm());
for (String resource : listChildrenResources(path, stream)) {
String resourcePath = path + '/' + resource;
resources.add(resourcePath);
URL childUrl = new URL(prefix + resource);
resources.addAll(listResources(childUrl, resourcePath));
}
}
return resources;
}
finally {
close(stream);
}
}
private List<String> listPackagedResources(JarInputStream jar, String path) throws IOException {
List<String> resources = new ArrayList<String>();
path = includeLeadingAndTrailingSlashes(path);
for (JarEntry entry; (entry = jar.getNextJarEntry()) != null;) {
if (!entry.isDirectory()) {
String name = includeLeadingSlash(entry.getName());
if (name.startsWith(path))
resources.add(name.substring(1)); // Trim leading slash
}
}
return resources;
}
private List<String> listChildrenResources(String path, InputStream stream) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
List<String> children = new ArrayList<String>();
for (String line; (line = reader.readLine()) != null;) {
children.add(line);
List<URL> urlResources = listUrlResources(path + '/' + line);
if (urlResources.isEmpty()) {
children.clear();
return children;
}
}
return children;
}
private List<URL> listUrlResources(String path) throws IOException {
return Collections.list(getClassLoader().getResources(path));
}
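// Resolves the URL of the enclosing JAR, if any, by truncating the external form at ".jar" and verifying the ZIP magic header.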
private URL findJarUrl(URL url) throws MalformedURLException {
String externalJarUrl = findExternalJarUrl(url);
StringBuilder externalUrl = new StringBuilder(externalJarUrl);
int indexOfJar = externalUrl.lastIndexOf(".jar");
if (indexOfJar >= 0) {
externalUrl.setLength(indexOfJar + 4);
URL jarUrl = new URL(externalUrl.toString());
if (isJar(jarUrl))
return jarUrl;
}
return null;
}
@SuppressWarnings({"InfiniteLoopStatement"})
private String findExternalJarUrl(URL url) {
try {
while (true) {
// If the file part of the URL is itself a URL, then that URL probably points to the Jar
String file = url.getFile();
url = new URL(file);
}
}
catch (MalformedURLException ex) {
// This will happen at some point and serves as a break in the loop
}
return url.toExternalForm();
}
@SuppressWarnings({"ResultOfMethodCallIgnored"})
private boolean isJar(URL url) {
InputStream inputStream = null;
try {
byte[] buffer = new byte[JAR_HEADER.length];
inputStream = url.openStream();
inputStream.read(buffer, 0, JAR_HEADER.length);
return Arrays.equals(buffer, JAR_HEADER);
}
catch (Exception ex) {
// Not a JAR
}
finally {
close(inputStream);
}
return false;
}
private String includeLeadingAndTrailingSlashes(String path) {
path = includeLeadingSlash(path);
path = includeTrailingSlash(path);
return path;
}
private String includeLeadingSlash(String path) {
return path.startsWith("/") ? path : '/' + path;
}
private String includeTrailingSlash(String path) {
return path.endsWith("/") ? path : path + '/';
}
private boolean isClass(String resource) {
return resource.endsWith(".class");
}
private String getPackagePath(String packageName) {
return packageName == null ? null : packageName.replace('.', '/');
}
private String getExternalName(String className) {
return className.substring(0, className.indexOf('.')).replace('/', '.');
}
private ClassLoader getClassLoader() {
return Thread.currentThread().getContextClassLoader();
}
private void close(InputStream inputStream) {
try {
if (inputStream != null)
inputStream.close();
}
catch (Exception ex) {
// Ignore
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ctakes.constituency.parser.treekernel;
import java.util.ArrayList;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.ctakes.typesystem.type.syntax.TerminalTreebankNode;
import org.apache.ctakes.typesystem.type.syntax.TopTreebankNode;
import org.apache.ctakes.typesystem.type.syntax.TreebankNode;
import org.apache.ctakes.utils.tree.SimpleTree;
/*
* This class extracts tree relations between two nodes in a tree (or from 2 separate trees).
* This has been used to great effect in relation extraction with the use of tree kernels.
* Two are implemented here: path trees, used in coreference (a tree whose nodes are simply the nodes in the
* path between the anaphor and antecedent), and the Path-enclosed Tree from Moschitti 2004 (ACL) on SRL and
* Zhang, Zhang, and Su 2006 (NAACL-HLT) for relation extraction, which in that paper got the best performance, though
* they also implemented several other tree types which might be worth pursuing for other
* tasks.
*/
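/*
 * Usage sketch (illustrative only; arg1Node, arg2Node and jcas are assumed to come from an
 * already-parsed sentence and are not defined in this file):
 *
 *   SimpleTree pathTree = TreeExtractor.extractPathTree(arg1Node, arg2Node);
 *   SimpleTree pet = TreeExtractor.extractPathEnclosedTree(arg1Node, arg2Node, jcas);
 *   TreeExtractor.lowercaseWords(pet);   // normalize leaf tokens before computing a tree kernel
 */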
public class TreeExtractor {
// public static TopTreebankNode extractPathTree(TreebankNode t1, TreebankNode t2, JCas jcas){
// TopTreebankNode node = new TopTreebankNode(jcas);
//
// // edge cases that don't really make sense....
// // 1) Same tree
// // 2) overlapping trees
// if(t1 == t2 || (t1.getBegin() >= t2.getBegin() && t1.getEnd() <= t2.getEnd()) || (t2.getBegin() >= t1.getBegin() && t2.getEnd() <= t1.getEnd())){
// return sameTree(t1, t2, jcas);
// }
// TreebankNode lca = getLCA(t1, t2);
//
// node.setNodeType(lca == null ? "TOP" : lca.getNodeType());
// FSArray children = new FSArray(jcas,2);
// node.setChildren(children);
//
// ArrayList<TreebankNode> antePath = getAnaphoraPath(lca, t1);
// TreebankNode parent = node;
// for(TreebankNode child : antePath){
// TreebankNode newChild = new TreebankNode(jcas);
// newChild.setNodeType(child.getNodeType());
// if(parent != node){
// FSArray tempChildren = new FSArray(jcas, 1);
// parent.setChildren(tempChildren);
// }
// parent.setChildren(0,newChild);
// parent = newChild;
// }
// TerminalTreebankNode fakeWord = new TerminalTreebankNode(jcas);
// fakeWord.setNodeType("antecedent");
// fakeWord.setLeaf(true);
// fakeWord.setChildren(new FSArray(jcas, 0));
// if(parent != node){
// FSArray termChildren = new FSArray(jcas, 1);
// parent.setChildren(termChildren);
// }
// parent.setChildren(0, fakeWord);
//
// parent = node;
// ArrayList<TreebankNode> anaphPath = getAnaphoraPath(lca, t2);
// for(TreebankNode child : anaphPath){
// TreebankNode newChild = new TreebankNode(jcas);
// newChild.setNodeType(child.getNodeType());
// if(parent != node){
// FSArray tempChildren = new FSArray(jcas, 1);
// parent.setChildren(tempChildren);
// parent.setChildren(0,newChild);
// }else{
// parent.setChildren(1,newChild);
// }
// parent = newChild;
// }
// fakeWord = new TerminalTreebankNode(jcas);
// fakeWord.setNodeType("anaphor");
// fakeWord.setLeaf(true);
// fakeWord.setChildren(new FSArray(jcas, 0));
// if(parent != node){
// FSArray termChildren = new FSArray(jcas, 1);
// parent.setChildren(termChildren);
// parent.setChildren(0, fakeWord);
// }else{
// parent.setChildren(1, fakeWord);
// }
// return node;
// }
public static SimpleTree extractPathTree(TreebankNode t1, TreebankNode t2){
// swap the ordering if necessary
if(t1.getBegin() > t2.getBegin()){
TreebankNode temp = t2;
t2 = t1;
t1 = temp;
}
// edge cases that don't really make sense....
// 1) Same tree
// 2) overlapping trees
if(t1 == t2 || (t1.getBegin() >= t2.getBegin() && t1.getEnd() <= t2.getEnd()) || (t2.getBegin() >= t1.getBegin() && t2.getEnd() <= t1.getEnd())){
return sameTree(t1, t2);
}
SimpleTree node = null;
TreebankNode lca = getLCA(t1, t2);
if(lca == null) node = new SimpleTree("TOP");
else node = new SimpleTree(lca.getNodeType());
ArrayList<TreebankNode> antePath = getUpwardPath(lca, t1);
SimpleTree parent = node;
for(TreebankNode child : antePath){
SimpleTree newChild = new SimpleTree(child.getNodeType());
parent.addChild(newChild);
parent = newChild;
}
parent.addChild(new SimpleTree("arg1"));
ArrayList<TreebankNode> anaPath = getUpwardPath(lca, t2);
parent = node;
for(TreebankNode child : anaPath){
SimpleTree newChild = new SimpleTree(child.getNodeType());
parent.addChild(newChild);
parent = newChild;
}
parent.addChild(new SimpleTree("arg2"));
return node;
}
public static SimpleTree extractPathEnclosedTree(TreebankNode t1, TreebankNode t2, JCas jcas){
SimpleTree node = null;
// swap them if wrong order:
if(t1.getBegin() > t2.getBegin()){
TreebankNode temp = t1;
t1 = t2;
t2 = temp;
}
if(t1 == t2 || (t1.getBegin() >= t2.getBegin() && t1.getEnd() <= t2.getEnd()) || (t2.getBegin() >= t1.getBegin() && t2.getEnd() <= t1.getEnd())){
node = sameTree(t1,t2);
}else{
TreebankNode lca = getLCA(t1,t2);
ArrayList<TreebankNode> l1 = getUpwardPath(lca, t1);
ArrayList<TreebankNode> l2 = getUpwardPath(lca, t2);
if(lca == null){
lca = new TopTreebankNode(jcas);
lca.setNodeType("TOP");
lca.setChildren(new FSArray(jcas,2));
if(l1.size()==0){
l1.add(t1);
}
if(l2.size() == 0){
l2.add(t2);
}
lca.setChildren(0, l1.get(0));
lca.setChildren(1, l2.get(0));
}
node = buildSimpleClonePET(lca, t1, t2);
}
return node;
}
private static SimpleTree buildSimpleClonePET(TreebankNode lca, TreebankNode t1, TreebankNode t2){
SimpleTree t = new SimpleTree(lca.getNodeType());
if(lca instanceof TerminalTreebankNode){
t.addChild(new SimpleTree(lca.getNodeValue()));
}else{
for(int i = 0; i < lca.getChildren().size(); i++){
TreebankNode tn = lca.getChildren(i);
if(tn.getEnd() > t1.getBegin() && tn.getBegin() < t2.getEnd()){
t.addChild(buildSimpleClonePET(lca.getChildren(i), t1, t2));
}
}
}
return t;
}
// Find the least common ancestor of two other nodes, or null (top node) if they are in different sentences
public static TreebankNode getLCA(TreebankNode t1, TreebankNode t2){
TreebankNode temp = null;
if(t2.getBegin() < t1.getBegin()){
temp = t1;
t1 = t2;
t2 = temp;
}
TreebankNode lca = t2;
while(lca != null && (lca.getBegin() > t1.getBegin() || lca.getEnd() < t1.getEnd())){
lca = lca.getParent();
}
return lca;
}
// private static ArrayList<TreebankNode> getAnaphoraPath(TreebankNode lca, TreebankNode t){
// return getUpwardPath(lca,t);
// }
//
// private static ArrayList<TreebankNode> getAntecedentPath(TreebankNode lca, TreebankNode t){
// return getUpwardPath(lca,t);
// }
private static ArrayList<TreebankNode> getUpwardPath(TreebankNode lca, TreebankNode t){
ArrayList<TreebankNode> list = new ArrayList<TreebankNode>();
while(t != null && t != lca && t.getParent() != null){
list.add(0,t);
t = t.getParent();
}
return list;
}
// private static TopTreebankNode sameTree(TreebankNode t1, TreebankNode t2, JCas jcas){
// TopTreebankNode node = new TopTreebankNode(jcas);
// node.setNodeType(t1.getNodeType());
// node.setChildren(new FSArray(jcas,2));
// TreebankNode pt1 = new TreebankNode(jcas);
// pt1.setNodeType("ANTECEDENT");
// pt1.setLeaf(false);
// pt1.setChildren(new FSArray(jcas,1));
// TerminalTreebankNode c1 = new TerminalTreebankNode(jcas);
// c1.setNodeType("antecedent");
// c1.setLeaf(true);
// c1.setChildren(new FSArray(jcas,0));
// pt1.setChildren(0, c1);
// node.setChildren(0,pt1);
//
// TreebankNode pt2 = new TreebankNode(jcas);
// pt2.setNodeType("ANAPHOR");
// pt2.setLeaf(false);
// pt2.setChildren(new FSArray(jcas,1));
// TerminalTreebankNode c2 = new TerminalTreebankNode(jcas);
// c2.setNodeType("anaphor");
// c2.setLeaf(true);
// c2.setChildren(new FSArray(jcas,0));
// pt2.setChildren(0,c2);
// node.setChildren(1,pt2);
// return node;
// }
private static SimpleTree sameTree(TreebankNode t1, TreebankNode t2){
SimpleTree node = new SimpleTree(t1.getNodeType());
node.addChild(new SimpleTree("ANTECEDENT"));
node.children.get(0).addChild(new SimpleTree("antecedent"));
node.addChild(new SimpleTree("ANAPHOR"));
node.children.get(1).addChild(new SimpleTree("anaphor"));
return node;
}
/* This method is used to extract trees for finding _properties_ rather than relations, or for
* finding relations where only one argument is known (and a label), and the other argument
* will be learned as an important tree fragment (e.g., relations like negation or uncertainty).
* The object returned is the largest subtree surrounding the context, with an extra node above it
* labeled with the argument in the variable string.
* For example, given the arguments:
* (NP (DT a) (NN dog)) from a tree representing the sentence "That is not a dog"
* and the string "NEGATION", this method will return the tree:
* (S (NP (DT that)) (VP (VBZ is) (RB not) (NEGATION (NP (DT a) (NN dog)))))
*
* It uses the method getSurroundingTree in a somewhat clever way to do the additional annotation
* on the output string instead of the tree object.
*/
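/*
 * Call sketch (npNode is a hypothetical TreebankNode, not defined in this file):
 *   SimpleTree scoped = TreeExtractor.getSurroundingTreeWithAnnotation(npNode, "NEGATION");
 */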
public static SimpleTree getSurroundingTreeWithAnnotation(TreebankNode node, String string) {
SimpleTree inner = getSimpleClone(node);
SimpleTree outer = getSurroundingTree(node);
String innerString = inner.toString();
String outerString = outer.toString();
String fullString = outerString.replace(innerString, "(" + string + " " + innerString + ")");
return SimpleTree.fromString(fullString);
}
public static SimpleTree getSurroundingTree(TreebankNode node){
SimpleTree tree = null;
while(node.getParent() != null){
node = node.getParent();
}
tree = getSimpleClone(node);
return tree;
}
public static SimpleTree getSimpleClone(TreebankNode node) {
SimpleTree t = new SimpleTree(node.getNodeType());
if(node instanceof TerminalTreebankNode){
t.addChild(new SimpleTree(node.getNodeValue()));
}else{
for(int i = 0; i < node.getChildren().size(); i++){
t.addChild(getSimpleClone(node.getChildren(i)));
}
}
return t;
}
public static void lowercaseWords(SimpleTree t){
if(t.children.size() == 0){
t.cat = t.cat.toLowerCase();
}else{
for(SimpleTree child : t.children){
lowercaseWords(child);
}
}
}
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: extensions_ext_same_package.proto
package polybuf.core.test;
public final class ExtensionsExtSamePackage {
private ExtensionsExtSamePackage() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registry.add(polybuf.core.test.ExtensionsExtSamePackage.Scope.id);
}
public interface ScopeOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
public static final class Scope extends
com.google.protobuf.GeneratedMessage
implements ScopeOrBuilder {
// Use Scope.newBuilder() to construct.
private Scope(Builder builder) {
super(builder);
}
private Scope(boolean noInit) {}
private static final Scope defaultInstance;
public static Scope getDefaultInstance() {
return defaultInstance;
}
public Scope getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return polybuf.core.test.ExtensionsExtSamePackage.internal_static_base_Scope_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return polybuf.core.test.ExtensionsExtSamePackage.internal_static_base_Scope_fieldAccessorTable;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static polybuf.core.test.ExtensionsExtSamePackage.Scope parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(polybuf.core.test.ExtensionsExtSamePackage.Scope prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements polybuf.core.test.ExtensionsExtSamePackage.ScopeOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return polybuf.core.test.ExtensionsExtSamePackage.internal_static_base_Scope_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return polybuf.core.test.ExtensionsExtSamePackage.internal_static_base_Scope_fieldAccessorTable;
}
// Construct using polybuf.core.test.ExtensionsExtSamePackage.Scope.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return polybuf.core.test.ExtensionsExtSamePackage.Scope.getDescriptor();
}
public polybuf.core.test.ExtensionsExtSamePackage.Scope getDefaultInstanceForType() {
return polybuf.core.test.ExtensionsExtSamePackage.Scope.getDefaultInstance();
}
public polybuf.core.test.ExtensionsExtSamePackage.Scope build() {
polybuf.core.test.ExtensionsExtSamePackage.Scope result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
private polybuf.core.test.ExtensionsExtSamePackage.Scope buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
polybuf.core.test.ExtensionsExtSamePackage.Scope result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public polybuf.core.test.ExtensionsExtSamePackage.Scope buildPartial() {
polybuf.core.test.ExtensionsExtSamePackage.Scope result = new polybuf.core.test.ExtensionsExtSamePackage.Scope(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof polybuf.core.test.ExtensionsExtSamePackage.Scope) {
return mergeFrom((polybuf.core.test.ExtensionsExtSamePackage.Scope)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(polybuf.core.test.ExtensionsExtSamePackage.Scope other) {
if (other == polybuf.core.test.ExtensionsExtSamePackage.Scope.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:base.Scope)
}
static {
defaultInstance = new Scope(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:base.Scope)
public static final int ID_FIELD_NUMBER = 100;
public static final
com.google.protobuf.GeneratedMessage.GeneratedExtension<
polybuf.core.test.ExtensionsBase.Message1,
java.lang.Integer> id = com.google.protobuf.GeneratedMessage
.newMessageScopedGeneratedExtension(
polybuf.core.test.ExtensionsExtSamePackage.Scope.getDefaultInstance(),
0,
java.lang.Integer.class,
null);
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_base_Scope_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_base_Scope_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n!extensions_ext_same_package.proto\022\004bas" +
"e\032\rpolybuf.proto\032\025extensions_base.proto\"" +
"#\n\005Scope2\032\n\002id\022\016.base.Message1\030d \002(\005BX\n\021" +
"polybuf.core.test\222\223!A\n?http://www.exampl" +
"e.org/polybuf-test/extensions/ext_same_p" +
"ackage"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_base_Scope_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_base_Scope_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_base_Scope_descriptor,
new java.lang.String[] { },
polybuf.core.test.ExtensionsExtSamePackage.Scope.class,
polybuf.core.test.ExtensionsExtSamePackage.Scope.Builder.class);
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registerAllExtensions(registry);
polybuf.core.proto.Polybuf.registerAllExtensions(registry);
polybuf.core.test.ExtensionsBase.registerAllExtensions(registry);
return registry;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
polybuf.core.proto.Polybuf.getDescriptor(),
polybuf.core.test.ExtensionsBase.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.actions.context;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupAdapter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.LocalTask;
import com.intellij.tasks.TaskManager;
import com.intellij.tasks.actions.BaseTaskAction;
import com.intellij.tasks.actions.SwitchTaskAction;
import com.intellij.tasks.context.ContextInfo;
import com.intellij.tasks.context.LoadContextUndoableAction;
import com.intellij.tasks.context.WorkingContextManager;
import com.intellij.tasks.impl.TaskUtil;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.util.Function;
import com.intellij.util.NullableFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.DateFormatUtil;
import icons.TasksIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.util.*;
/**
* @author Dmitry Avdeev
*/
public class LoadContextAction extends BaseTaskAction {
private static final int MAX_ROW_COUNT = 10;
@Override
public void actionPerformed(AnActionEvent e) {
final Project project = getProject(e);
assert project != null;
DefaultActionGroup group = new DefaultActionGroup();
final WorkingContextManager manager = WorkingContextManager.getInstance(project);
List<ContextInfo> history = manager.getContextHistory();
List<ContextHolder> infos =
new ArrayList<>(ContainerUtil.map2List(history, (Function<ContextInfo, ContextHolder>)info -> new ContextHolder() {
@Override
void load(final boolean clear) {
LoadContextUndoableAction undoableAction = LoadContextUndoableAction.createAction(manager, clear, info.name);
UndoableCommand.execute(project, undoableAction, "Load context " + info.comment, "Context");
}
@Override
void remove() {
manager.removeContext(info.name);
}
@Override
Date getDate() {
return new Date(info.date);
}
@Override
String getComment() {
return info.comment;
}
@Override
Icon getIcon() {
return TasksIcons.SavedContext;
}
}));
final TaskManager taskManager = TaskManager.getManager(project);
List<LocalTask> tasks = taskManager.getLocalTasks();
infos.addAll(ContainerUtil.mapNotNull(tasks, (NullableFunction<LocalTask, ContextHolder>)task -> {
if (task.isActive()) {
return null;
}
return new ContextHolder() {
@Override
void load(boolean clear) {
LoadContextUndoableAction undoableAction = LoadContextUndoableAction.createAction(manager, clear, task);
UndoableCommand.execute(project, undoableAction, "Load context " + TaskUtil.getTrimmedSummary(task), "Context");
}
@Override
void remove() {
SwitchTaskAction.removeTask(project, task, taskManager);
}
@Override
Date getDate() {
return task.getUpdated();
}
@Override
String getComment() {
return TaskUtil.getTrimmedSummary(task);
}
@Override
Icon getIcon() {
return task.getIcon();
}
};
}));
Collections.sort(infos, (o1, o2) -> o2.getDate().compareTo(o1.getDate()));
final Ref<Boolean> shiftPressed = Ref.create(false);
boolean today = true;
Calendar now = Calendar.getInstance();
for (int i = 0, historySize = Math.min(MAX_ROW_COUNT, infos.size()); i < historySize; i++) {
final ContextHolder info = infos.get(i);
Calendar calendar = Calendar.getInstance();
calendar.setTime(info.getDate());
if (today &&
(calendar.get(Calendar.YEAR) != now.get(Calendar.YEAR) ||
calendar.get(Calendar.DAY_OF_YEAR) != now.get(Calendar.DAY_OF_YEAR))) {
group.addSeparator();
today = false;
}
group.add(createItem(info, shiftPressed));
}
final ListPopupImpl popup = (ListPopupImpl)JBPopupFactory.getInstance()
.createActionGroupPopup("Load Context", group, e.getDataContext(), JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false, null,
MAX_ROW_COUNT);
popup.setAdText("Press SHIFT to merge with current context");
popup.registerAction("shiftPressed", KeyStroke.getKeyStroke("shift pressed SHIFT"), new AbstractAction() {
public void actionPerformed(ActionEvent e) {
shiftPressed.set(true);
popup.setCaption("Merge with Current Context");
}
});
popup.registerAction("shiftReleased", KeyStroke.getKeyStroke("released SHIFT"), new AbstractAction() {
public void actionPerformed(ActionEvent e) {
shiftPressed.set(false);
popup.setCaption("Load Context");
}
});
popup.registerAction("invoke", KeyStroke.getKeyStroke("shift ENTER"), new AbstractAction() {
public void actionPerformed(ActionEvent e) {
popup.handleSelect(true);
}
});
popup.addPopupListener(new JBPopupAdapter() {
@Override
public void onClosed(LightweightWindowEvent event) {
}
});
popup.showCenteredInCurrentWindow(project);
}
abstract static class ContextHolder {
abstract void load(boolean clear);
abstract void remove();
abstract Date getDate();
abstract String getComment();
abstract Icon getIcon();
}
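// Builds the popup item for one entry: the primary action loads it (clearing the current context
// unless SHIFT is held), with a nested "Remove" child action.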
private static ActionGroup createItem(final ContextHolder holder, final Ref<Boolean> shiftPressed) {
String text = DateFormatUtil.formatPrettyDateTime(holder.getDate());
String comment = holder.getComment();
if (!StringUtil.isEmpty(comment)) {
text = comment + " (" + text + ")";
}
final AnAction loadAction = new AnAction("Load") {
@Override
public void actionPerformed(AnActionEvent e) {
holder.load(!shiftPressed.get());
}
};
ActionGroup contextGroup = new ActionGroup(text, text, holder.getIcon()) {
@Override
public void actionPerformed(AnActionEvent e) {
loadAction.actionPerformed(e);
}
@NotNull
@Override
public AnAction[] getChildren(@Nullable AnActionEvent e) {
return new AnAction[]{loadAction,
new AnAction("Remove") {
@Override
public void actionPerformed(AnActionEvent e) {
holder.remove();
}
}};
}
@Override
public boolean canBePerformed(DataContext context) {
return true;
}
};
contextGroup.setPopup(true);
return contextGroup;
}
}
|
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* CreateKeyPairResponseType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
* CreateKeyPairResponseType bean class
*/
public class CreateKeyPairResponseType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = CreateKeyPairResponseType
Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
Namespace Prefix = ns1
*/
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* field for RequestId
*/
protected java.lang.String localRequestId ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getRequestId(){
return localRequestId;
}
/**
* Auto generated setter method
* @param param RequestId
*/
public void setRequestId(java.lang.String param){
this.localRequestId=param;
}
/**
* field for KeyName
*/
protected java.lang.String localKeyName ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getKeyName(){
return localKeyName;
}
/**
* Auto generated setter method
* @param param KeyName
*/
public void setKeyName(java.lang.String param){
this.localKeyName=param;
}
/**
* field for KeyFingerprint
*/
protected java.lang.String localKeyFingerprint ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getKeyFingerprint(){
return localKeyFingerprint;
}
/**
* Auto generated setter method
* @param param KeyFingerprint
*/
public void setKeyFingerprint(java.lang.String param){
this.localKeyFingerprint=param;
}
/**
* field for KeyMaterial
*/
protected java.lang.String localKeyMaterial ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getKeyMaterial(){
return localKeyMaterial;
}
/**
* Auto generated setter method
* @param param KeyMaterial
*/
public void setKeyMaterial(java.lang.String param){
this.localKeyMaterial=param;
}
/**
* isReaderMTOMAware
* @return true if the reader supports MTOM
*/
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
*
* @param parentQName
* @param factory
* @return org.apache.axiom.om.OMElement
*/
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
CreateKeyPairResponseType.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
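// Write the outer element, reusing an existing namespace prefix or declaring a new one, optionally add
// an xsi:type attribute, then write the four child elements; any required field that is still null
// causes an ADBException.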
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
if ((namespace != null) && (namespace.trim().length() > 0)) {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":CreateKeyPairResponseType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"CreateKeyPairResponseType",
xmlWriter);
}
}
namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"requestId", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"requestId");
}
} else {
xmlWriter.writeStartElement("requestId");
}
if (localRequestId==null){
// requestId is a required, non-nillable element; a missing value is an error
throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
}else{
xmlWriter.writeCharacters(localRequestId);
}
xmlWriter.writeEndElement();
namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"keyName", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"keyName");
}
} else {
xmlWriter.writeStartElement("keyName");
}
if (localKeyName==null){
// keyName is a required, non-nillable element; a missing value is an error
throw new org.apache.axis2.databinding.ADBException("keyName cannot be null!!");
}else{
xmlWriter.writeCharacters(localKeyName);
}
xmlWriter.writeEndElement();
namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"keyFingerprint", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"keyFingerprint");
}
} else {
xmlWriter.writeStartElement("keyFingerprint");
}
if (localKeyFingerprint==null){
// keyFingerprint is a required, non-nillable element; a missing value is an error
throw new org.apache.axis2.databinding.ADBException("keyFingerprint cannot be null!!");
}else{
xmlWriter.writeCharacters(localKeyFingerprint);
}
xmlWriter.writeEndElement();
namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"keyMaterial", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"keyMaterial");
}
} else {
xmlWriter.writeStartElement("keyMaterial");
}
if (localKeyMaterial==null){
// keyMaterial is a required, non-nillable element; a missing value is an error
throw new org.apache.axis2.databinding.ADBException("keyMaterial cannot be null!!");
}else{
xmlWriter.writeCharacters(localKeyMaterial);
}
xmlWriter.writeEndElement();
xmlWriter.writeEndElement();
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the charactor data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
* Register a namespace prefix
*/
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
* databinding method to get an XML representation of this object
*
*/
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
"requestId"));
if (localRequestId != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequestId));
} else {
throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
}
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
"keyName"));
if (localKeyName != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localKeyName));
} else {
throw new org.apache.axis2.databinding.ADBException("keyName cannot be null!!");
}
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
"keyFingerprint"));
if (localKeyFingerprint != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localKeyFingerprint));
} else {
throw new org.apache.axis2.databinding.ADBException("keyFingerprint cannot be null!!");
}
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
"keyMaterial"));
if (localKeyMaterial != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localKeyMaterial));
} else {
throw new org.apache.axis2.databinding.ADBException("keyMaterial cannot be null!!");
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
* Factory class that keeps the parse method
*/
public static class Factory{
/**
* static method to create the object
* Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
* If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
* Postcondition: If this object is an element, the reader is positioned at its end element
* If this object is a complex type, the reader is positioned at the end element of its outer element
*/
public static CreateKeyPairResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
CreateKeyPairResponseType object =
new CreateKeyPairResponseType();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
"type");
if (fullTypeName!=null){
java.lang.String nsPrefix = null;
if (fullTypeName.indexOf(":") > -1){
nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
}
nsPrefix = nsPrefix==null?"":nsPrefix;
java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
if (!"CreateKeyPairResponseType".equals(type)){
//find namespace for the prefix
java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
return (CreateKeyPairResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
nsUri,type,reader);
}
}
}
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
reader.next();
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","requestId").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setRequestId(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","keyName").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setKeyName(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","keyFingerprint").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setKeyFingerprint(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","keyMaterial").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setKeyMaterial(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isStartElement())
// A start element we are not expecting indicates a trailing invalid property
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
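// Usage sketch (illustrative only, not part of the generated code; the element QName and factory
// shown here are assumptions): populate all four fields, serialize with getOMElement(...), and
// rebuild the bean with Factory.parse(...):
//
//   CreateKeyPairResponseType resp = new CreateKeyPairResponseType();
//   resp.setRequestId("req-1");
//   resp.setKeyName("mykey");
//   resp.setKeyFingerprint("aa:bb:cc");
//   resp.setKeyMaterial("-----BEGIN RSA PRIVATE KEY-----...");
//   org.apache.axiom.om.OMElement om = resp.getOMElement(
//       new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "CreateKeyPairResponse"),
//       org.apache.axiom.om.OMAbstractFactory.getOMFactory());
//   CreateKeyPairResponseType copy =
//       CreateKeyPairResponseType.Factory.parse(om.getXMLStreamReader());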
}
|
|
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.extractor;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import org.apache.poi.POIOLE2TextExtractor;
import org.apache.poi.hssf.record.formula.eval.ErrorEval;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFComment;
import org.apache.poi.hssf.usermodel.HSSFRichTextString;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.HeaderFooter;
/**
* A text extractor for Excel files.
* <p>
* Returns the textual content of the file, suitable for
* indexing by something like Lucene, but not really
* intended for display to the user.
* </p>
* <p>
* To turn an excel file into a CSV or similar, then see
* the XLS2CSVmra example
* </p>
* @see <a href="http://svn.apache.org/repos/asf/poi/trunk/src/examples/src/org/apache/poi/hssf/eventusermodel/examples/XLS2CSVmra.java">XLS2CSVmra</a>
*/
public class ExcelExtractor extends POIOLE2TextExtractor implements org.apache.poi.ss.extractor.ExcelExtractor {
private HSSFWorkbook _wb;
private boolean _includeSheetNames = true;
private boolean _shouldEvaluateFormulas = true;
private boolean _includeCellComments = false;
private boolean _includeBlankCells = false;
private boolean _includeHeadersFooters = true;
public ExcelExtractor(HSSFWorkbook wb) {
super(wb);
_wb = wb;
}
public ExcelExtractor(POIFSFileSystem fs) throws IOException {
this(fs.getRoot(), fs);
}
public ExcelExtractor(DirectoryNode dir, POIFSFileSystem fs) throws IOException {
this(new HSSFWorkbook(dir, fs, true));
}
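// Usage sketch (assumes a readable .xls file at the given path), mirroring what main() does:
//
//   HSSFWorkbook wb = new HSSFWorkbook(new FileInputStream("input.xls"));
//   ExcelExtractor extractor = new ExcelExtractor(wb);
//   extractor.setIncludeSheetNames(false);
//   extractor.setIncludeCellComments(true);
//   String text = extractor.getText();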
private static final class CommandParseException extends Exception {
public CommandParseException(String msg) {
super(msg);
}
}
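// Parsed command-line options for the standalone extractor; see printUsageMessage for the accepted
// flags and their defaults.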
private static final class CommandArgs {
private final boolean _requestHelp;
private final File _inputFile;
private final boolean _showSheetNames;
private final boolean _evaluateFormulas;
private final boolean _showCellComments;
private final boolean _showBlankCells;
private final boolean _headersFooters;
public CommandArgs(String[] args) throws CommandParseException {
int nArgs = args.length;
File inputFile = null;
boolean requestHelp = false;
boolean showSheetNames = true;
boolean evaluateFormulas = true;
boolean showCellComments = false;
boolean showBlankCells = false;
boolean headersFooters = true;
for (int i=0; i<nArgs; i++) {
String arg = args[i];
if ("-help".equalsIgnoreCase(arg)) {
requestHelp = true;
break;
}
if ("-i".equals(arg)) {
// step to next arg
if (++i >= nArgs) {
throw new CommandParseException("Expected filename after '-i'");
}
arg = args[i];
if (inputFile != null) {
throw new CommandParseException("Only one input file can be supplied");
}
inputFile = new File(arg);
if (!inputFile.exists()) {
throw new CommandParseException("Specified input file '" + arg + "' does not exist");
}
if (inputFile.isDirectory()) {
throw new CommandParseException("Specified input file '" + arg + "' is a directory");
}
continue;
}
if ("--show-sheet-names".equals(arg)) {
showSheetNames = parseBoolArg(args, ++i);
continue;
}
if ("--evaluate-formulas".equals(arg)) {
evaluateFormulas = parseBoolArg(args, ++i);
continue;
}
if ("--show-comments".equals(arg)) {
showCellComments = parseBoolArg(args, ++i);
continue;
}
if ("--show-blanks".equals(arg)) {
showBlankCells = parseBoolArg(args, ++i);
continue;
}
if ("--headers-footers".equals(arg)) {
headersFooters = parseBoolArg(args, ++i);
continue;
}
throw new CommandParseException("Invalid argument '" + arg + "'");
}
_requestHelp = requestHelp;
_inputFile = inputFile;
_showSheetNames = showSheetNames;
_evaluateFormulas = evaluateFormulas;
_showCellComments = showCellComments;
_showBlankCells = showBlankCells;
_headersFooters = headersFooters;
}
private static boolean parseBoolArg(String[] args, int i) throws CommandParseException {
if (i >= args.length) {
throw new CommandParseException("Expected value after '" + args[i-1] + "'");
}
String value = args[i].toUpperCase();
if ("Y".equals(value) || "YES".equals(value) || "ON".equals(value) || "TRUE".equals(value)) {
return true;
}
if ("N".equals(value) || "NO".equals(value) || "OFF".equals(value) || "FALSE".equals(value)) {
return false;
}
throw new CommandParseException("Invalid value '" + args[i] + "' for '" + args[i-1] + "'. Expected 'Y' or 'N'");
}
public boolean isRequestHelp() {
return _requestHelp;
}
public File getInputFile() {
return _inputFile;
}
public boolean shouldShowSheetNames() {
return _showSheetNames;
}
public boolean shouldEvaluateFormulas() {
return _evaluateFormulas;
}
public boolean shouldShowCellComments() {
return _showCellComments;
}
public boolean shouldShowBlankCells() {
return _showBlankCells;
}
public boolean shouldIncludeHeadersFooters() {
return _headersFooters;
}
}
private static void printUsageMessage(PrintStream ps) {
ps.println("Use:");
ps.println(" " + ExcelExtractor.class.getName() + " [<flag> <value> [<flag> <value> [...]]] [-i <filename.xls>]");
ps.println(" -i <filename.xls> specifies input file (default is to use stdin)");
ps.println(" Flags can be set on or off by using the values 'Y' or 'N'.");
ps.println(" Following are available flags and their default values:");
ps.println(" --show-sheet-names Y");
ps.println(" --evaluate-formulas Y");
ps.println(" --show-comments N");
ps.println(" --show-blanks Y");
ps.println(" --headers-footers Y");
}
/**
* Command line extractor.
*/
public static void main(String[] args) {
CommandArgs cmdArgs;
try {
cmdArgs = new CommandArgs(args);
} catch (CommandParseException e) {
System.err.println(e.getMessage());
printUsageMessage(System.err);
System.exit(1);
return; // suppress compiler error
}
if (cmdArgs.isRequestHelp()) {
printUsageMessage(System.out);
return;
}
try {
InputStream is;
if(cmdArgs.getInputFile() == null) {
is = System.in;
} else {
is = new FileInputStream(cmdArgs.getInputFile());
}
HSSFWorkbook wb = new HSSFWorkbook(is);
ExcelExtractor extractor = new ExcelExtractor(wb);
extractor.setIncludeSheetNames(cmdArgs.shouldShowSheetNames());
extractor.setFormulasNotResults(!cmdArgs.shouldEvaluateFormulas());
extractor.setIncludeCellComments(cmdArgs.shouldShowCellComments());
extractor.setIncludeBlankCells(cmdArgs.shouldShowBlankCells());
extractor.setIncludeHeadersFooters(cmdArgs.shouldIncludeHeadersFooters());
System.out.println(extractor.getText());
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
/**
* Should sheet names be included? Default is true
*/
public void setIncludeSheetNames(boolean includeSheetNames) {
_includeSheetNames = includeSheetNames;
}
/**
* Should we return the formula itself, and not
* the result it produces? Default is false
*/
public void setFormulasNotResults(boolean formulasNotResults) {
_shouldEvaluateFormulas = !formulasNotResults;
}
/**
* Should cell comments be included? Default is false
*/
public void setIncludeCellComments(boolean includeCellComments) {
_includeCellComments = includeCellComments;
}
/**
* Should blank cells be output? Default is to only
* output cells that are present in the file and are
* non-blank.
*/
public void setIncludeBlankCells(boolean includeBlankCells) {
_includeBlankCells = includeBlankCells;
}
/**
* Should headers and footers be included in the output?
* Default is to include them.
*/
public void setIncludeHeadersFooters(boolean includeHeadersFooters) {
_includeHeadersFooters = includeHeadersFooters;
}
/**
* Retrieves the text contents of the file
*/
public String getText() {
StringBuffer text = new StringBuffer();
// We don't care about the difference between
// null (missing) and blank cells
_wb.setMissingCellPolicy(HSSFRow.RETURN_BLANK_AS_NULL);
// Process each sheet in turn
for(int i=0;i<_wb.getNumberOfSheets();i++) {
HSSFSheet sheet = _wb.getSheetAt(i);
if(sheet == null) { continue; }
if(_includeSheetNames) {
String name = _wb.getSheetName(i);
if(name != null) {
text.append(name);
text.append("\n");
}
}
// Header text, if there is any
if(_includeHeadersFooters) {
text.append(_extractHeaderFooter(sheet.getHeader()));
}
int firstRow = sheet.getFirstRowNum();
int lastRow = sheet.getLastRowNum();
for(int j=firstRow;j<=lastRow;j++) {
HSSFRow row = sheet.getRow(j);
if(row == null) { continue; }
// Check each cell in turn
int firstCell = row.getFirstCellNum();
int lastCell = row.getLastCellNum();
if(_includeBlankCells) {
firstCell = 0;
}
for(int k=firstCell;k<lastCell;k++) {
HSSFCell cell = row.getCell(k);
boolean outputContents = true;
if(cell == null) {
// Only output if requested
outputContents = _includeBlankCells;
} else {
switch(cell.getCellType()) {
case HSSFCell.CELL_TYPE_STRING:
text.append(cell.getRichStringCellValue().getString());
break;
case HSSFCell.CELL_TYPE_NUMERIC:
// Note - we don't apply any formatting!
text.append(cell.getNumericCellValue());
break;
case HSSFCell.CELL_TYPE_BOOLEAN:
text.append(cell.getBooleanCellValue());
break;
case HSSFCell.CELL_TYPE_ERROR:
text.append(ErrorEval.getText(cell.getErrorCellValue()));
break;
case HSSFCell.CELL_TYPE_FORMULA:
if(!_shouldEvaluateFormulas) {
text.append(cell.getCellFormula());
} else {
switch(cell.getCachedFormulaResultType()) {
case HSSFCell.CELL_TYPE_STRING:
HSSFRichTextString str = cell.getRichStringCellValue();
if(str != null && str.length() > 0) {
text.append(str.toString());
}
break;
case HSSFCell.CELL_TYPE_NUMERIC:
text.append(cell.getNumericCellValue());
break;
case HSSFCell.CELL_TYPE_BOOLEAN:
text.append(cell.getBooleanCellValue());
break;
case HSSFCell.CELL_TYPE_ERROR:
text.append(ErrorEval.getText(cell.getErrorCellValue()));
break;
}
}
break;
default:
throw new RuntimeException("Unexpected cell type (" + cell.getCellType() + ")");
}
// Output the comment, if requested and exists
HSSFComment comment = cell.getCellComment();
if(_includeCellComments && comment != null) {
// Replace any newlines with spaces, otherwise it
// breaks the output
String commentText = comment.getString().getString().replace('\n', ' ');
text.append(" Comment by "+comment.getAuthor()+": "+commentText);
}
}
// Output a tab if we're not on the last cell
if(outputContents && k < (lastCell-1)) {
text.append("\t");
}
}
// Finish off the row
text.append("\n");
}
// Finally Footer text, if there is any
if(_includeHeadersFooters) {
text.append(_extractHeaderFooter(sheet.getFooter()));
}
}
return text.toString();
}
public static String _extractHeaderFooter(HeaderFooter hf) {
StringBuffer text = new StringBuffer();
if(hf.getLeft() != null) {
text.append(hf.getLeft());
}
if(hf.getCenter() != null) {
if(text.length() > 0)
text.append("\t");
text.append(hf.getCenter());
}
if(hf.getRight() != null) {
if(text.length() > 0)
text.append("\t");
text.append(hf.getRight());
}
if(text.length() > 0)
text.append("\n");
return text.toString();
}
}
|
|
/*******************************************************************************
* Copyright 2016 Adobe Systems Incorporated.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.adobe.aem.demomachine.gui;
import java.awt.Desktop;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.TreeSet;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.DefaultListModel;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JRootPane;
import javax.swing.KeyStroke;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.log4j.Logger;
import org.apache.tools.ant.ProjectHelper;
public class AemDemoUtils {
static Logger logger = Logger.getLogger(AemDemoUtils.class);
public static void main(String[] args) {
}
public static DefaultListModel<String> listDemoMachines(String demoMachineRootFolder) {
DefaultListModel<String> demoMachines = new DefaultListModel<String>();
File folder = new File(demoMachineRootFolder + File.separator + "demos");
if (folder.exists()) {
File[] listOfFiles = folder.listFiles();
Arrays.sort(listOfFiles);
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isDirectory()) {
demoMachines.addElement(listOfFiles[i].getName());
}
}
}
return demoMachines;
}
// Retrieves the list of AEM/CQ .jar files in /dist/bin
public static AemDemoProperty[] listAEMjars(File buildFile) {
List<AemDemoProperty> aemJars = new ArrayList<AemDemoProperty>();
// First, loading the .jar files from the /dist/bin folder
File folder = new File(buildFile.getParentFile().getAbsolutePath() + File.separator + "dist" + File.separator + "bin");
File[] listOfFiles = folder.listFiles();
Arrays.sort(listOfFiles);
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile() && listOfFiles[i].getName().endsWith(".jar")) {
// Name value pair: aem.jar / aem
aemJars.add(new AemDemoProperty(listOfFiles[i].getName().substring(0, listOfFiles[i].getName().indexOf(".jar")),listOfFiles[i].getName()));
}
}
// Second, loading the docker images from the config files
Properties defaultProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "build.properties");
Properties personalProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "conf" + File.separator + "build-personal.properties");
Properties mergedProps = new Properties();
mergedProps.putAll(defaultProps);
mergedProps.putAll(personalProps);
Enumeration<?> e = mergedProps.keys();
while (e.hasMoreElements()) {
String key = (String) e.nextElement();
if (key.startsWith("demo.docker.images.") & !(key.endsWith("help") || key.endsWith("label"))) {
int pos = mergedProps.getProperty(key).indexOf(',');
if (pos>0) {
String keyValue = mergedProps.getProperty(key).substring(pos+1);
String keyName = mergedProps.getProperty(key).substring(0,pos);;
aemJars.add(new AemDemoProperty(keyValue, keyName));
}
}
}
AemDemoProperty[] aemPropertyArray = new AemDemoProperty[ aemJars.size() ];
aemJars.toArray( aemPropertyArray );
return aemPropertyArray;
}
// Retrieves the list of Demo Addons from build.properties
public static int[] getDemoAddons(File buildFile) {
List<Integer> listIndices = new ArrayList<Integer>();
Properties defaultProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "build.properties");
Properties personalProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "conf" + File.separator + "build-personal.properties");
@SuppressWarnings("serial")
Properties sortedProps = new Properties() {
@Override
public synchronized Enumeration<Object> keys() {
return Collections.enumeration(new TreeSet<Object>(super.keySet()));
}
};
sortedProps.putAll(defaultProps);
// Looping through all possible options
Enumeration<?> e = sortedProps.keys();
int currentIndice = 0;
while (e.hasMoreElements()) {
String key = (String) e.nextElement();
if (key.startsWith("demo.addons.") & !(key.endsWith("help") || key.endsWith("label"))) {
String value = sortedProps.getProperty(key);
if (personalProps.containsKey(key)) {
value = personalProps.getProperty(key);
}
if (value.equals("true")) {
listIndices.add(currentIndice);
}
currentIndice = currentIndice + 1;
}
}
int[] array = new int[listIndices.size()];
for(int i = 0; i < listIndices.size(); i++) array[i] = listIndices.get(i);
return array;
}
// Retrieves the list of Demo Addons from build.properties
public static AemDemoProperty[] listDemoAddons(File buildFile) {
List<AemDemoProperty> addons = new ArrayList<AemDemoProperty>();
Properties defaultProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "build.properties");
@SuppressWarnings("serial")
Properties sortedProps = new Properties() {
@Override
public synchronized Enumeration<Object> keys() {
return Collections.enumeration(new TreeSet<Object>(super.keySet()));
}
};
sortedProps.putAll(defaultProps);
// Looping through all possible options
Enumeration<?> e = sortedProps.keys();
// List of paths to demo packages
List<String[]> listPaths = Arrays.asList(AemDemoConstants.demoPaths);
while (e.hasMoreElements()) {
String key = (String) e.nextElement();
if (key.startsWith("demo.addons.") & !(key.endsWith("help") || key.endsWith("label"))) {
String newKey = key.substring(1 + key.lastIndexOf("."));
// Check if downloads are required
boolean downloadRequired = false;
for (String[] path:listPaths) {
if (path.length==5 && path[4]!=null && path[4].equals(newKey)) {
File pathFolder = new File(buildFile.getParentFile().getAbsolutePath() + (path[1].length()>0?(File.separator + path[1]):""));
if (!pathFolder.exists()) {
downloadRequired=true;
}
}
}
addons.add(new AemDemoProperty(newKey, sortedProps.getProperty(key + ".label") + (downloadRequired?" (*)":"")));
}
}
AemDemoProperty[] aemPropertyArray = new AemDemoProperty[ addons.size() ];
addons.toArray( aemPropertyArray );
return aemPropertyArray;
}
// Retrieves the list of Topologies from build.properties
public static AemDemoProperty[] listTopologies(File buildFile) {
return listOptions(buildFile,AemDemoConstants.OPTIONS_TOPOLOGIES);
}
// Retrieves the list of SRPs from build.properties
public static AemDemoProperty[] listSRPs(File buildFile) {
return listOptions(buildFile,AemDemoConstants.OPTIONS_SRPS);
}
// Retrieves the list of MKs from build.properties
public static AemDemoProperty[] listMKs(File buildFile) {
return listOptions(buildFile,AemDemoConstants.OPTIONS_STORES);
}
// Retrieves a particular property from build.properties
public static String getPropertyValue(File buildFile, String propertyName) {
String propertyValue = null;
Properties defaultProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "build.properties");
Properties personalProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "conf" + File.separator + "build-personal.properties");
if (personalProps.containsKey(propertyName)) {
propertyValue = personalProps.getProperty(propertyName);
} else {
propertyValue = defaultProps.getProperty(propertyName);
}
return propertyValue;
}
public static AemDemoProperty[] listOptions(File buildFile, String property) {
List<AemDemoProperty> aemMKs = new ArrayList<AemDemoProperty>();
Properties defaultProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "build.properties");
Properties personalProps = loadProperties (buildFile.getParentFile().getAbsolutePath() + File.separator + "conf" + File.separator + "build-personal.properties");
if (personalProps.containsKey(property)) {
addPropertyFromString(aemMKs,personalProps.getProperty(property));
} else {
addPropertyFromString(aemMKs,defaultProps.getProperty(property));
}
AemDemoProperty[] aemPropertyArray = new AemDemoProperty[ aemMKs.size() ];
aemMKs.toArray( aemPropertyArray );
return aemPropertyArray;
}
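// Loads a .properties file from disk; on any error the exception is logged and an empty Properties
// object is returned.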
public static Properties loadProperties(String path) {
Properties prop = new Properties();
try {
InputStream input = new FileInputStream(path);
prop.load(input);
input.close();
} catch (Exception e) {
logger.error(e.getMessage());
}
return prop;
}
private static void addPropertyFromString(List<AemDemoProperty> aemProperties, String propertyString) {
// Pattern is comma separated [ name / value ]
if (propertyString!=null) {
Pattern pattern = Pattern.compile("\\[(.*?)\\]");
Matcher matcher = pattern.matcher(propertyString);
while (matcher.find()) {
int sep = matcher.group().indexOf("/");
if (sep > 0) {
aemProperties.add(new AemDemoProperty(matcher.group().substring(1,sep),matcher.group().substring(sep+1,matcher.group().length()-1)));
}
}
}
}
public static int getSelectedIndex(@SuppressWarnings("rawtypes") JList list, Properties defaultProperties, Properties personalProperties, String propertyString) {
int index = 0;
String defaultProperty = defaultProperties.getProperty(propertyString);
String personalProperty = personalProperties.getProperty(propertyString);
String actualProperty = (personalProperty!=null)?personalProperty:defaultProperty;
for (int i=0;i<list.getModel().getSize();i++) {
if (list.getModel().getElementAt(i) instanceof AemDemoProperty) {
AemDemoProperty aemProperty = (AemDemoProperty) list.getModel().getElementAt(i);
if (aemProperty.getValue()!=null && actualProperty!=null && aemProperty.getValue().equals(actualProperty)) {
return i;
}
}
}
return index;
}
public static String getActualPropertyValue(Properties defaultProperties, Properties personalProperties, String propertyString) {
String defaultProperty = defaultProperties.getProperty(propertyString);
String personalProperty = personalProperties.getProperty(propertyString);
return (personalProperty!=null)?personalProperty:defaultProperty;
}
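// Runs the given ANT target against the currently selected demo machine: the build file is parsed into
// a new project and the target is executed on a background thread. Instance-level targets require a
// demo environment to be selected first.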
public static void antTarget(AemDemo aemDemo, String targetName) {
String selectedDemoMachine = (String) aemDemo.getListDemoMachines().getSelectedValue();
if (Arrays.asList(AemDemoConstants.INSTANCE_ACTIONS).contains(targetName) && (selectedDemoMachine==null || selectedDemoMachine.toString().length()==0)) {
JOptionPane.showMessageDialog(null, "Please select a demo environment before running this command");
} else {
// New ANT project
AemDemoProject p = new AemDemoProject(aemDemo);
if (selectedDemoMachine!=null && selectedDemoMachine.length()>0) p.setUserProperty("demo.build", selectedDemoMachine.toString());
// Make sure host name is there
try {
p.setUserProperty("demo.hostname", InetAddress.getLocalHost().getHostName());
} catch (UnknownHostException ex) {
logger.error(ex.getMessage());
}
p.init();
ProjectHelper helper = ProjectHelper.getProjectHelper();
p.addReference("ant.projectHelper", helper);
helper.parse(p, aemDemo.getBuildFile());
// Running the target name as a new Thread
System.out.println("Running ANT target: " + targetName);
Thread t = new Thread(new AemDemoRunnable(aemDemo, p, targetName));
t.start();
}
}
public static void openWebpage(URI uri) {
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
desktop.browse(uri);
} catch (Exception e) {
e.printStackTrace();
}
}
}
public static void openWebpage(URL url) {
try {
openWebpage(url.toURI());
} catch (URISyntaxException e) {
e.printStackTrace();
}
}
public static void openWebpage(String sUrl) {
try {
openWebpage(new URL(sUrl));
} catch (Exception e) {
e.printStackTrace();
}
}
private static final KeyStroke escapeStroke =
KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0);
public static final String dispatchWindowClosingActionMapKey =
"com.spodding.tackline.dispatch:WINDOW_CLOSING";
public static void installEscapeCloseOperation(final JDialog dialog) {
Action dispatchClosing = new AbstractAction() {
private static final long serialVersionUID = 1L;
public void actionPerformed(ActionEvent event) {
dialog.dispatchEvent(new WindowEvent(
dialog, WindowEvent.WINDOW_CLOSING
));
}
};
JRootPane root = dialog.getRootPane();
root.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(
escapeStroke, dispatchWindowClosingActionMapKey
);
root.getActionMap().put( dispatchWindowClosingActionMapKey, dispatchClosing
);
}
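// Formats a byte count for display, e.g. humanReadableByteCount(1234567, true) returns "1.2 MB"
// (SI units) and humanReadableByteCount(1234567, false) returns "1.2 MiB" (binary units).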
public static String humanReadableByteCount(long bytes, boolean si) {
int unit = si ? 1000 : 1024;
if (bytes < unit) return bytes + " B";
int exp = (int) (Math.log(bytes) / Math.log(unit));
String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp-1) + (si ? "" : "i");
return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
}
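// Computes a single MD5 hash over the concatenated contents of the files in a directory; file order is
// made reproducible by the name-based sort in collectInputStreams.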
public static String calcMD5HashForDir(File dirToHash, boolean includeSubFolders, boolean includeHiddenFiles) {
assert (dirToHash.isDirectory());
Vector<FileInputStream> fileStreams = new Vector<FileInputStream>();
logger.debug("Found files for hashing:");
collectInputStreams(dirToHash, fileStreams, includeSubFolders, includeHiddenFiles);
SequenceInputStream seqStream =
new SequenceInputStream(fileStreams.elements());
try {
String md5Hash = DigestUtils.md5Hex(seqStream);
seqStream.close();
return md5Hash;
}
catch (IOException e) {
throw new RuntimeException("Error reading files to hash in "
+ dirToHash.getAbsolutePath(), e);
}
}
public static void collectInputStreams(File dir,
List<FileInputStream> foundStreams,
boolean includeSubFolders,
boolean includeHiddenFiles) {
File[] fileList = dir.listFiles();
Arrays.sort(fileList, // Need in reproducible order
new Comparator<File>() {
public int compare(File f1, File f2) {
return f1.getName().compareTo(f2.getName());
}
});
for (File f : fileList) {
if (!includeHiddenFiles && f.getName().startsWith(".")) continue;
if (f.isDirectory() && !includeSubFolders) continue;
if (f.isDirectory()) {
collectInputStreams(f, foundStreams, includeSubFolders, includeHiddenFiles);
}
else {
try {
logger.debug(f.getAbsolutePath());
foundStreams.add(new FileInputStream(f));
}
catch (FileNotFoundException e) {
throw new AssertionError(e.getMessage()
+ ": file returned by listFiles() should always exist");
}
}
}
}
}
|
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.lwjgl;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.EXTFramebufferObject;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL13;
import org.lwjgl.opengl.GL14;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.utils.GdxRuntimeException;
/** An implementation of the {@link GL20} interface based on LWJGL. Note that LWJGL shaders and OpenGL ES shaders will not be 100%
* compatible. Some glGetXXX methods are not implemented.
*
* @author mzechner */
final class LwjglGL20 implements com.badlogic.gdx.graphics.GL20 {
public void glActiveTexture (int texture) {
GL13.glActiveTexture(texture);
}
public void glAttachShader (int program, int shader) {
GL20.glAttachShader(program, shader);
}
public void glBindAttribLocation (int program, int index, String name) {
GL20.glBindAttribLocation(program, index, name);
}
public void glBindBuffer (int target, int buffer) {
GL15.glBindBuffer(target, buffer);
}
public void glBindFramebuffer (int target, int framebuffer) {
EXTFramebufferObject.glBindFramebufferEXT(target, framebuffer);
}
public void glBindRenderbuffer (int target, int renderbuffer) {
EXTFramebufferObject.glBindRenderbufferEXT(target, renderbuffer);
}
public void glBindTexture (int target, int texture) {
GL11.glBindTexture(target, texture);
}
public void glBlendColor (float red, float green, float blue, float alpha) {
GL14.glBlendColor(red, green, blue, alpha);
}
public void glBlendEquation (int mode) {
GL14.glBlendEquation(mode);
}
public void glBlendEquationSeparate (int modeRGB, int modeAlpha) {
GL20.glBlendEquationSeparate(modeRGB, modeAlpha);
}
public void glBlendFunc (int sfactor, int dfactor) {
GL11.glBlendFunc(sfactor, dfactor);
}
public void glBlendFuncSeparate (int srcRGB, int dstRGB, int srcAlpha, int dstAlpha) {
GL14.glBlendFuncSeparate(srcRGB, dstRGB, srcAlpha, dstAlpha);
}
public void glBufferData (int target, int size, Buffer data, int usage) {
if(data == null)
throw new GdxRuntimeException("Using null for the data not possible, blame LWJGL");
else if (data instanceof ByteBuffer)
GL15.glBufferData(target, (ByteBuffer)data, usage);
else if (data instanceof IntBuffer)
GL15.glBufferData(target, (IntBuffer)data, usage);
else if (data instanceof FloatBuffer)
GL15.glBufferData(target, (FloatBuffer)data, usage);
else if (data instanceof DoubleBuffer)
GL15.glBufferData(target, (DoubleBuffer)data, usage);
else if (data instanceof ShortBuffer) //
GL15.glBufferData(target, (ShortBuffer)data, usage);
}
public void glBufferSubData (int target, int offset, int size, Buffer data) {
if(data == null)
throw new GdxRuntimeException("Using null for the data not possible, blame LWJGL");
else if (data instanceof ByteBuffer)
GL15.glBufferSubData(target, offset, (ByteBuffer)data);
else if (data instanceof IntBuffer)
GL15.glBufferSubData(target, offset, (IntBuffer)data);
else if (data instanceof FloatBuffer)
GL15.glBufferSubData(target, offset, (FloatBuffer)data);
else if (data instanceof DoubleBuffer)
GL15.glBufferSubData(target, offset, (DoubleBuffer)data);
else if (data instanceof ShortBuffer) //
GL15.glBufferSubData(target, offset, (ShortBuffer)data);
}
public int glCheckFramebufferStatus (int target) {
return EXTFramebufferObject.glCheckFramebufferStatusEXT(target);
}
public void glClear (int mask) {
GL11.glClear(mask);
}
public void glClearColor (float red, float green, float blue, float alpha) {
GL11.glClearColor(red, green, blue, alpha);
}
public void glClearDepthf (float depth) {
GL11.glClearDepth(depth);
}
public void glClearStencil (int s) {
GL11.glClearStencil(s);
}
public void glColorMask (boolean red, boolean green, boolean blue, boolean alpha) {
GL11.glColorMask(red, green, blue, alpha);
}
public void glCompileShader (int shader) {
GL20.glCompileShader(shader);
}
public void glCompressedTexImage2D (int target, int level, int internalformat, int width, int height, int border,
int imageSize, Buffer data) {
if (data instanceof ByteBuffer) {
GL13.glCompressedTexImage2D(target, level, internalformat, width, height, border, (ByteBuffer)data);
} else {
throw new GdxRuntimeException("Can't use " + data.getClass().getName()
+ " with this method. Use ByteBuffer instead.");
}
}
public void glCompressedTexSubImage2D (int target, int level, int xoffset, int yoffset, int width, int height, int format,
int imageSize, Buffer data) {
throw new GdxRuntimeException("not implemented");
}
public void glCopyTexImage2D (int target, int level, int internalformat, int x, int y, int width, int height, int border) {
GL11.glCopyTexImage2D(target, level, internalformat, x, y, width, height, border);
}
public void glCopyTexSubImage2D (int target, int level, int xoffset, int yoffset, int x, int y, int width, int height) {
GL11.glCopyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, height);
}
public int glCreateProgram () {
return GL20.glCreateProgram();
}
public int glCreateShader (int type) {
return GL20.glCreateShader(type);
}
public void glCullFace (int mode) {
GL11.glCullFace(mode);
}
public void glDeleteBuffers (int n, IntBuffer buffers) {
GL15.glDeleteBuffers(buffers);
}
public void glDeleteFramebuffers (int n, IntBuffer framebuffers) {
EXTFramebufferObject.glDeleteFramebuffersEXT(framebuffers);
}
public void glDeleteProgram (int program) {
GL20.glDeleteProgram(program);
}
public void glDeleteRenderbuffers (int n, IntBuffer renderbuffers) {
EXTFramebufferObject.glDeleteRenderbuffersEXT(renderbuffers);
}
public void glDeleteShader (int shader) {
GL20.glDeleteShader(shader);
}
public void glDeleteTextures (int n, IntBuffer textures) {
GL11.glDeleteTextures(textures);
}
public void glDepthFunc (int func) {
GL11.glDepthFunc(func);
}
public void glDepthMask (boolean flag) {
GL11.glDepthMask(flag);
}
public void glDepthRangef (float zNear, float zFar) {
GL11.glDepthRange(zNear, zFar);
}
public void glDetachShader (int program, int shader) {
GL20.glDetachShader(program, shader);
}
public void glDisable (int cap) {
GL11.glDisable(cap);
}
public void glDisableVertexAttribArray (int index) {
GL20.glDisableVertexAttribArray(index);
}
public void glDrawArrays (int mode, int first, int count) {
GL11.glDrawArrays(mode, first, count);
}
public void glDrawElements (int mode, int count, int type, Buffer indices) {
if (indices instanceof ShortBuffer && type == GL10.GL_UNSIGNED_SHORT)
GL11.glDrawElements(mode, (ShortBuffer)indices);
else if (indices instanceof ByteBuffer && type == GL10.GL_UNSIGNED_SHORT)
GL11.glDrawElements(mode, ((ByteBuffer)indices).asShortBuffer()); // FIXME yay...
else if (indices instanceof ByteBuffer && type == GL10.GL_UNSIGNED_BYTE)
GL11.glDrawElements(mode, (ByteBuffer)indices);
else
throw new GdxRuntimeException("Can't use " + indices.getClass().getName()
+ " with this method. Use ShortBuffer or ByteBuffer instead. Blame LWJGL");
}
public void glEnable (int cap) {
GL11.glEnable(cap);
}
public void glEnableVertexAttribArray (int index) {
GL20.glEnableVertexAttribArray(index);
}
public void glFinish () {
GL11.glFinish();
}
public void glFlush () {
GL11.glFlush();
}
public void glFramebufferRenderbuffer (int target, int attachment, int renderbuffertarget, int renderbuffer) {
EXTFramebufferObject.glFramebufferRenderbufferEXT(target, attachment, renderbuffertarget, renderbuffer);
}
public void glFramebufferTexture2D (int target, int attachment, int textarget, int texture, int level) {
EXTFramebufferObject.glFramebufferTexture2DEXT(target, attachment, textarget, texture, level);
}
public void glFrontFace (int mode) {
GL11.glFrontFace(mode);
}
public void glGenBuffers (int n, IntBuffer buffers) {
GL15.glGenBuffers(buffers);
}
public void glGenFramebuffers (int n, IntBuffer framebuffers) {
EXTFramebufferObject.glGenFramebuffersEXT(framebuffers);
}
public void glGenRenderbuffers (int n, IntBuffer renderbuffers) {
EXTFramebufferObject.glGenRenderbuffersEXT(renderbuffers);
}
public void glGenTextures (int n, IntBuffer textures) {
GL11.glGenTextures(textures);
}
public void glGenerateMipmap (int target) {
EXTFramebufferObject.glGenerateMipmapEXT(target);
}
public String glGetActiveAttrib (int program, int index, IntBuffer size, Buffer type) {
// FIXME this is less than ideal of course...
IntBuffer typeTmp = BufferUtils.createIntBuffer(2);
String name = GL20.glGetActiveAttrib(program, index, 256, typeTmp);
size.put(typeTmp.get(0));
if (type instanceof IntBuffer) ((IntBuffer)type).put(typeTmp.get(1));
return name;
}
public String glGetActiveUniform (int program, int index, IntBuffer size, Buffer type) {
// FIXME this is less than ideal of course...
IntBuffer typeTmp = BufferUtils.createIntBuffer(2);
String name = GL20.glGetActiveUniform(program, index, 256, typeTmp);
size.put(typeTmp.get(0));
if (type instanceof IntBuffer) ((IntBuffer)type).put(typeTmp.get(1));
return name;
}
public void glGetAttachedShaders (int program, int maxcount, Buffer count, IntBuffer shaders) {
GL20.glGetAttachedShaders(program, (IntBuffer)count, shaders);
}
public int glGetAttribLocation (int program, String name) {
return GL20.glGetAttribLocation(program, name);
}
public void glGetBooleanv (int pname, Buffer params) {
GL11.glGetBoolean(pname, (ByteBuffer)params);
}
public void glGetBufferParameteriv (int target, int pname, IntBuffer params) {
GL15.glGetBufferParameter(target, pname, params);
}
public int glGetError () {
return GL11.glGetError();
}
public void glGetFloatv (int pname, FloatBuffer params) {
GL11.glGetFloat(pname, params);
}
public void glGetFramebufferAttachmentParameteriv (int target, int attachment, int pname, IntBuffer params) {
EXTFramebufferObject.glGetFramebufferAttachmentParameterEXT(target, attachment, pname, params);
}
public void glGetIntegerv (int pname, IntBuffer params) {
GL11.glGetInteger(pname, params);
}
public String glGetProgramInfoLog (int program) {
ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 10);
buffer.order(ByteOrder.nativeOrder());
ByteBuffer tmp = ByteBuffer.allocateDirect(4);
tmp.order(ByteOrder.nativeOrder());
IntBuffer intBuffer = tmp.asIntBuffer();
GL20.glGetProgramInfoLog(program, intBuffer, buffer);
int numBytes = intBuffer.get(0);
byte[] bytes = new byte[numBytes];
buffer.get(bytes);
return new String(bytes);
}
public void glGetProgramiv (int program, int pname, IntBuffer params) {
GL20.glGetProgram(program, pname, params);
}
public void glGetRenderbufferParameteriv (int target, int pname, IntBuffer params) {
EXTFramebufferObject.glGetRenderbufferParameterEXT(target, pname, params);
}
public String glGetShaderInfoLog (int shader) {
ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 10);
buffer.order(ByteOrder.nativeOrder());
ByteBuffer tmp = ByteBuffer.allocateDirect(4);
tmp.order(ByteOrder.nativeOrder());
IntBuffer intBuffer = tmp.asIntBuffer();
GL20.glGetShaderInfoLog(shader, intBuffer, buffer);
int numBytes = intBuffer.get(0);
byte[] bytes = new byte[numBytes];
buffer.get(bytes);
return new String(bytes);
}
public void glGetShaderPrecisionFormat (int shadertype, int precisiontype, IntBuffer range, IntBuffer precision) {
throw new UnsupportedOperationException("unsupported, won't implement");
}
public void glGetShaderiv (int shader, int pname, IntBuffer params) {
GL20.glGetShader(shader, pname, params);
}
public String glGetString (int name) {
return GL11.glGetString(name);
}
public void glGetTexParameterfv (int target, int pname, FloatBuffer params) {
GL11.glGetTexParameter(target, pname, params);
}
public void glGetTexParameteriv (int target, int pname, IntBuffer params) {
GL11.glGetTexParameter(target, pname, params);
}
public int glGetUniformLocation (int program, String name) {
return GL20.glGetUniformLocation(program, name);
}
public void glGetUniformfv (int program, int location, FloatBuffer params) {
GL20.glGetUniform(program, location, params);
}
public void glGetUniformiv (int program, int location, IntBuffer params) {
GL20.glGetUniform(program, location, params);
}
public void glGetVertexAttribPointerv (int index, int pname, Buffer pointer) {
throw new UnsupportedOperationException("unsupported, won't implement");
}
public void glGetVertexAttribfv (int index, int pname, FloatBuffer params) {
GL20.glGetVertexAttrib(index, pname, params);
}
public void glGetVertexAttribiv (int index, int pname, IntBuffer params) {
GL20.glGetVertexAttrib(index, pname, params);
}
public void glHint (int target, int mode) {
GL11.glHint(target, mode);
}
public boolean glIsBuffer (int buffer) {
return GL15.glIsBuffer(buffer);
}
public boolean glIsEnabled (int cap) {
return GL11.glIsEnabled(cap);
}
public boolean glIsFramebuffer (int framebuffer) {
return EXTFramebufferObject.glIsFramebufferEXT(framebuffer);
}
public boolean glIsProgram (int program) {
return GL20.glIsProgram(program);
}
public boolean glIsRenderbuffer (int renderbuffer) {
return EXTFramebufferObject.glIsRenderbufferEXT(renderbuffer);
}
public boolean glIsShader (int shader) {
return GL20.glIsShader(shader);
}
public boolean glIsTexture (int texture) {
return GL11.glIsTexture(texture);
}
public void glLineWidth (float width) {
GL11.glLineWidth(width);
}
public void glLinkProgram (int program) {
GL20.glLinkProgram(program);
}
public void glPixelStorei (int pname, int param) {
GL11.glPixelStorei(pname, param);
}
public void glPolygonOffset (float factor, float units) {
GL11.glPolygonOffset(factor, units);
}
public void glReadPixels (int x, int y, int width, int height, int format, int type, Buffer pixels) {
if (pixels instanceof ByteBuffer)
GL11.glReadPixels(x, y, width, height, format, type, (ByteBuffer)pixels);
else if (pixels instanceof ShortBuffer)
GL11.glReadPixels(x, y, width, height, format, type, (ShortBuffer)pixels);
else if (pixels instanceof IntBuffer)
GL11.glReadPixels(x, y, width, height, format, type, (IntBuffer)pixels);
else if (pixels instanceof FloatBuffer)
GL11.glReadPixels(x, y, width, height, format, type, (FloatBuffer)pixels);
else
throw new GdxRuntimeException("Can't use " + pixels.getClass().getName()
+ " with this method. Use ByteBuffer, ShortBuffer, IntBuffer or FloatBuffer instead. Blame LWJGL");
}
public void glReleaseShaderCompiler () {
// nothing to do here
}
public void glRenderbufferStorage (int target, int internalformat, int width, int height) {
EXTFramebufferObject.glRenderbufferStorageEXT(target, internalformat, width, height);
}
public void glSampleCoverage (float value, boolean invert) {
GL13.glSampleCoverage(value, invert);
}
public void glScissor (int x, int y, int width, int height) {
GL11.glScissor(x, y, width, height);
}
public void glShaderBinary (int n, IntBuffer shaders, int binaryformat, Buffer binary, int length) {
throw new UnsupportedOperationException("unsupported, won't implement");
}
public void glShaderSource (int shader, String string) {
GL20.glShaderSource(shader, string);
}
public void glStencilFunc (int func, int ref, int mask) {
GL11.glStencilFunc(func, ref, mask);
}
public void glStencilFuncSeparate (int face, int func, int ref, int mask) {
GL20.glStencilFuncSeparate(face, func, ref, mask);
}
public void glStencilMask (int mask) {
GL11.glStencilMask(mask);
}
public void glStencilMaskSeparate (int face, int mask) {
GL20.glStencilMaskSeparate(face, mask);
}
public void glStencilOp (int fail, int zfail, int zpass) {
GL11.glStencilOp(fail, zfail, zpass);
}
public void glStencilOpSeparate (int face, int fail, int zfail, int zpass) {
GL20.glStencilOpSeparate(face, fail, zfail, zpass);
}
public void glTexImage2D (int target, int level, int internalformat, int width, int height, int border, int format, int type,
Buffer pixels) {
if (pixels == null)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (ByteBuffer)null);
else if (pixels instanceof ByteBuffer)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (ByteBuffer)pixels);
else if (pixels instanceof ShortBuffer)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (ShortBuffer)pixels);
else if (pixels instanceof IntBuffer)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (IntBuffer)pixels);
else if (pixels instanceof FloatBuffer)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (FloatBuffer)pixels);
else if (pixels instanceof DoubleBuffer)
GL11.glTexImage2D(target, level, internalformat, width, height, border, format, type, (DoubleBuffer)pixels);
else
throw new GdxRuntimeException("Can't use " + pixels.getClass().getName()
+ " with this method. Use ByteBuffer, ShortBuffer, IntBuffer, FloatBuffer or DoubleBuffer instead. Blame LWJGL");
}
public void glTexParameterf (int target, int pname, float param) {
GL11.glTexParameterf(target, pname, param);
}
public void glTexParameterfv (int target, int pname, FloatBuffer params) {
GL11.glTexParameter(target, pname, params);
}
public void glTexParameteri (int target, int pname, int param) {
GL11.glTexParameteri(target, pname, param);
}
public void glTexParameteriv (int target, int pname, IntBuffer params) {
GL11.glTexParameter(target, pname, params);
}
public void glTexSubImage2D (int target, int level, int xoffset, int yoffset, int width, int height, int format, int type,
Buffer pixels) {
if (pixels instanceof ByteBuffer)
GL11.glTexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, (ByteBuffer)pixels);
else if (pixels instanceof ShortBuffer)
GL11.glTexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, (ShortBuffer)pixels);
else if (pixels instanceof IntBuffer)
GL11.glTexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, (IntBuffer)pixels);
else if (pixels instanceof FloatBuffer)
GL11.glTexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, (FloatBuffer)pixels);
else if (pixels instanceof DoubleBuffer)
GL11.glTexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, (DoubleBuffer)pixels);
else
throw new GdxRuntimeException("Can't use " + pixels.getClass().getName()
+ " with this method. Use ByteBuffer, ShortBuffer, IntBuffer, FloatBuffer or DoubleBuffer instead. Blame LWJGL");
}
public void glUniform1f (int location, float x) {
GL20.glUniform1f(location, x);
}
public void glUniform1fv (int location, int count, FloatBuffer v) {
GL20.glUniform1(location, v);
}
public void glUniform1i (int location, int x) {
GL20.glUniform1i(location, x);
}
public void glUniform1iv (int location, int count, IntBuffer v) {
GL20.glUniform1(location, v);
}
public void glUniform2f (int location, float x, float y) {
GL20.glUniform2f(location, x, y);
}
public void glUniform2fv (int location, int count, FloatBuffer v) {
GL20.glUniform2(location, v);
}
public void glUniform2i (int location, int x, int y) {
GL20.glUniform2i(location, x, y);
}
public void glUniform2iv (int location, int count, IntBuffer v) {
GL20.glUniform2(location, v);
}
public void glUniform3f (int location, float x, float y, float z) {
GL20.glUniform3f(location, x, y, z);
}
public void glUniform3fv (int location, int count, FloatBuffer v) {
GL20.glUniform3(location, v);
}
public void glUniform3i (int location, int x, int y, int z) {
GL20.glUniform3i(location, x, y, z);
}
public void glUniform3iv (int location, int count, IntBuffer v) {
GL20.glUniform3(location, v);
}
public void glUniform4f (int location, float x, float y, float z, float w) {
GL20.glUniform4f(location, x, y, z, w);
}
public void glUniform4fv (int location, int count, FloatBuffer v) {
GL20.glUniform4(location, v);
}
public void glUniform4i (int location, int x, int y, int z, int w) {
GL20.glUniform4i(location, x, y, z, w);
}
public void glUniform4iv (int location, int count, IntBuffer v) {
GL20.glUniform4(location, v);
}
public void glUniformMatrix2fv (int location, int count, boolean transpose, FloatBuffer value) {
GL20.glUniformMatrix2(location, transpose, value);
}
public void glUniformMatrix3fv (int location, int count, boolean transpose, FloatBuffer value) {
GL20.glUniformMatrix3(location, transpose, value);
}
public void glUniformMatrix4fv (int location, int count, boolean transpose, FloatBuffer value) {
GL20.glUniformMatrix4(location, transpose, value);
}
public void glUseProgram (int program) {
GL20.glUseProgram(program);
}
public void glValidateProgram (int program) {
GL20.glValidateProgram(program);
}
public void glVertexAttrib1f (int indx, float x) {
GL20.glVertexAttrib1f(indx, x);
}
public void glVertexAttrib1fv (int indx, FloatBuffer values) {
GL20.glVertexAttrib1f(indx, values.get());
}
public void glVertexAttrib2f (int indx, float x, float y) {
GL20.glVertexAttrib2f(indx, x, y);
}
public void glVertexAttrib2fv (int indx, FloatBuffer values) {
GL20.glVertexAttrib2f(indx, values.get(), values.get());
}
public void glVertexAttrib3f (int indx, float x, float y, float z) {
GL20.glVertexAttrib3f(indx, x, y, z);
}
public void glVertexAttrib3fv (int indx, FloatBuffer values) {
GL20.glVertexAttrib3f(indx, values.get(), values.get(), values.get());
}
public void glVertexAttrib4f (int indx, float x, float y, float z, float w) {
GL20.glVertexAttrib4f(indx, x, y, z, w);
}
public void glVertexAttrib4fv (int indx, FloatBuffer values) {
GL20.glVertexAttrib4f(indx, values.get(), values.get(), values.get(), values.get());
}
public void glVertexAttribPointer (int indx, int size, int type, boolean normalized, int stride, Buffer buffer) {
if (buffer instanceof ByteBuffer) {
if (type == GL_BYTE)
GL20.glVertexAttribPointer(indx, size, false, normalized, stride, (ByteBuffer)buffer);
else if (type == GL_UNSIGNED_BYTE)
GL20.glVertexAttribPointer(indx, size, true, normalized, stride, (ByteBuffer)buffer);
else if (type == GL_SHORT)
GL20.glVertexAttribPointer(indx, size, false, normalized, stride, ((ByteBuffer)buffer).asShortBuffer());
else if (type == GL_UNSIGNED_SHORT)
GL20.glVertexAttribPointer(indx, size, true, normalized, stride, ((ByteBuffer)buffer).asShortBuffer());
else if (type == GL_FLOAT)
GL20.glVertexAttribPointer(indx, size, normalized, stride, ((ByteBuffer)buffer).asFloatBuffer());
else
throw new GdxRuntimeException(
"Can't use "
+ buffer.getClass().getName()
+ " with type "
+ type
+ " with this method. Use ByteBuffer and one of GL_BYTE, GL_UNSIGNED_BYTE, GL_SHORT, GL_UNSIGNED_SHORT or GL_FLOAT for type. Blame LWJGL");
} else
throw new GdxRuntimeException("Can't use " + buffer.getClass().getName()
+ " with this method. Use ByteBuffer instead. Blame LWJGL");
}
public void glViewport (int x, int y, int width, int height) {
GL11.glViewport(x, y, width, height);
}
public void glDrawElements (int mode, int count, int type, int indices) {
GL11.glDrawElements(mode, count, type, indices);
}
public void glVertexAttribPointer (int indx, int size, int type, boolean normalized, int stride, int ptr) {
GL20.glVertexAttribPointer(indx, size, type, normalized, stride, ptr);
}
}
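/*
 * Editor's sketch (not part of the original sources): the LWJGL bridge methods
 * above only accept java.nio buffers, and LWJGL expects them to be direct and
 * in native byte order. The helper below shows one common way to build such a
 * buffer for glDrawElements with GL_UNSIGNED_SHORT indices; the class and
 * method names are illustrative assumptions, not part of the wrapper itself.
 */
class DirectIndexBufferSketch {
static java.nio.ShortBuffer shortIndices(short[] indices) {
java.nio.ByteBuffer bb = java.nio.ByteBuffer.allocateDirect(indices.length * 2);
bb.order(java.nio.ByteOrder.nativeOrder()); // LWJGL requires direct buffers in native byte order
java.nio.ShortBuffer sb = bb.asShortBuffer();
sb.put(indices);
sb.flip(); // position = 0, limit = indices.length, ready to be read by GL
return sb;
}
}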
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Aleksei V. Ivaschenko
* @version $Revision: 1.2 $
*/
package org.apache.harmony.x.print;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import javax.print.DocFlavor;
import javax.print.DocPrintJob;
import javax.print.PrintService;
import javax.print.ServiceUIFactory;
import javax.print.StreamPrintService;
import javax.print.StreamPrintServiceFactory;
import javax.print.attribute.Attribute;
import javax.print.attribute.AttributeSet;
import javax.print.attribute.AttributeSetUtilities;
import javax.print.attribute.HashAttributeSet;
import javax.print.attribute.PrintServiceAttribute;
import javax.print.attribute.PrintServiceAttributeSet;
import javax.print.event.PrintServiceAttributeListener;
public class DefaultPrintService implements PrintService {
//= Fields ===============================================================//
private PrintClient client = null;
private EventNotifier notifier = null;
private String serviceName = null;
//= Constructors =========================================================//
public DefaultPrintService(String servicename, PrintClient printclient) {
if (printclient == null || servicename == null) {
throw new NullPointerException("Argument is null");
}
this.client = printclient;
this.serviceName = servicename;
notifier = EventNotifier.getNotifier();
}
//= Basic methods ======================================================//
PrintClient getPrintClient() {
return client;
}
public String getName() {
return serviceName;
}
public boolean equals(Object obj) {
if (obj instanceof DefaultPrintService) {
DefaultPrintService service = (DefaultPrintService) obj;
if (service.getName().equals(serviceName)) {
return true;
}
}
return false;
}
public int hashCode() {
return serviceName.hashCode();
}
public String toString() {
return "Printer : " + serviceName;
}
//= Print service attributes ===========================================//
public PrintServiceAttribute getAttribute(Class category) {
if (!PrintServiceAttribute.class.isAssignableFrom(category)) {
throw new IllegalArgumentException();
}
PrintServiceAttributeSet attributes = getAttributes();
if (attributes.containsKey(category)) {
PrintServiceAttribute attribute = (PrintServiceAttribute) attributes
.get(category);
return attribute;
}
return null;
}
public PrintServiceAttributeSet getAttributes() {
return AttributeSetUtilities.unmodifiableView(client.getAttributes());
}
//= Print request attributes =============================================//
public Class[] getSupportedAttributeCategories() {
return client.getSupportedAttributeCategories();
}
public boolean isAttributeCategorySupported(Class category) {
if (category == null) {
throw new NullPointerException("Argument 'category' is null");
}
if (!(Attribute.class.isAssignableFrom(category))) {
throw new IllegalArgumentException(
"Argument 'category' must implement interface Attribute");
}
Class[] categories = getSupportedAttributeCategories();
for (int i = 0; i < categories.length; i++) {
if (categories[i].equals(category)) {
return true;
}
}
return false;
}
public AttributeSet getUnsupportedAttributes(DocFlavor flavor,
AttributeSet attributes) {
if (attributes == null) {
return null;
}
if (flavor != null && !isDocFlavorSupported(flavor)) {
throw new IllegalArgumentException("Flavor " + flavor.getMimeType()
+ " is not supported by print service");
}
Attribute[] attrs = attributes.toArray();
HashAttributeSet unsupported = new HashAttributeSet();
for (int i = 0; i < attrs.length; i++) {
if (!isAttributeValueSupported(attrs[i], flavor, attributes)) {
unsupported.add(attrs[i]);
}
}
if (unsupported.size() > 0) {
return unsupported;
}
return null;
}
public Object getDefaultAttributeValue(Class category) {
if (category == null) {
throw new NullPointerException("Argument 'category' is null");
}
if (!(Attribute.class.isAssignableFrom(category))) {
throw new IllegalArgumentException(
"Argument 'category' must implement interface Attribute");
}
return client.getDefaultAttributeValue(category);
}
public Object getSupportedAttributeValues(Class category, DocFlavor flavor,
AttributeSet attributes) {
if (category == null) {
throw new NullPointerException("Argument is null");
}
if (!(Attribute.class.isAssignableFrom(category))) {
throw new IllegalArgumentException(
"Argument must implement interface Attribute");
}
if (flavor == null) {
return client.getSupportedAttributeValues(category, flavor,
attributes);
}
DocFlavor clientFlavors[] = client.getSupportedDocFlavors();
if (isDocFlavorSupportedByClient(flavor, clientFlavors)) {
return client.getSupportedAttributeValues(category, flavor,
attributes);
}
/*
* Search for stream print service factories that are able to convert
* the print data to a flavor supported by the PrintClient (both user
* and internal flavors), and then return the attribute values
* supported for that converted flavor.
*/
for (int i = 0; i < clientFlavors.length; i++) {
StreamPrintServiceFactory[] factories = StreamPrintServiceFactory
.lookupStreamPrintServiceFactories(flavor, clientFlavors[i]
.getMimeType());
for (int j = 0; j < factories.length; j++) {
StreamPrintService sps = factories[j]
.getPrintService(new ByteArrayOutputStream());
if (sps != null) {
try {
sps.getOutputStream().close();
} catch (IOException e) {
// just ignore
}
sps.dispose();
//return sps.getSupportedAttributeValues(category,
// flavor, attributes);
return client.getSupportedAttributeValues(category,
clientFlavors[i], attributes);
}
}
}
throw new IllegalArgumentException("DocFlavor '" + flavor
+ "' is not supported by the print service");
}
public boolean isAttributeValueSupported(Attribute attrval,
DocFlavor flavor, AttributeSet attributes) {
if (attrval == null) {
throw new NullPointerException("Argument is null");
}
if (flavor == null) {
return client
.isAttributeValueSupported(attrval, flavor, attributes);
}
DocFlavor clientFlavors[] = client.getSupportedDocFlavors();
if (isDocFlavorSupportedByClient(flavor, clientFlavors)) {
return client
.isAttributeValueSupported(attrval, flavor, attributes);
}
/*
* Search for stream print service factories that are able to convert
* the print data to a flavor supported by the PrintClient (both user
* and internal flavors), and then check whether the attribute value
* is supported for that converted flavor.
*/
for (int i = 0; i < clientFlavors.length; i++) {
StreamPrintServiceFactory[] factories = StreamPrintServiceFactory
.lookupStreamPrintServiceFactories(flavor, clientFlavors[i]
.getMimeType());
for (int j = 0; j < factories.length; j++) {
StreamPrintService sps = factories[j]
.getPrintService(new ByteArrayOutputStream());
if (sps != null) {
try {
sps.getOutputStream().close();
} catch (IOException e) {
// just ignore
}
sps.dispose();
//return sps.isAttributeValueSupported(attrval, flavor, attributes);
return client.isAttributeValueSupported(attrval,
clientFlavors[i], attributes);
}
}
}
throw new IllegalArgumentException("DocFlavor '" + flavor
+ "' is not supported by the print service");
}
//= Listeners ============================================================//
public void addPrintServiceAttributeListener(
PrintServiceAttributeListener listener) {
notifier.addListener(this, listener);
}
public void removePrintServiceAttributeListener(
PrintServiceAttributeListener listener) {
notifier.removeListener(this, listener);
}
//= DocFlavors ===========================================================//
/*
* Returns two categories of DocFlavors:
* 1) DocFlavors supported by PrintClient
* 2) DocFlavors that can be converted by StreamPrintServices to
* PrintClient's DocFlavors
*
* If a DocFlavor is supported both by the PrintClient and by a
* StreamPrintService, the method returns only the PrintClient's one.
*/
public DocFlavor[] getSupportedDocFlavors() {
DocFlavor clientFlavors[] = client.getSupportedDocFlavors();
ArrayList flavors = new ArrayList();
/*
* Put all of the PrintClient's supported flavors (except
* internal flavors) into the list of flavors supported by
* this print service.
*/
for (int i = 0; i < clientFlavors.length; i++) {
if (!isInternalDocFlavor(clientFlavors[i])) {
flavors.add(clientFlavors[i]);
}
}
/*
* Search for stream print service factories that are able to convert
* print data to a flavor supported by the PrintClient (both user and
* internal flavors), then gather all flavors supported by those
* factories and add them to the list of flavors supported by this
* print service.
*/
for (int i = 0; i < clientFlavors.length; i++) {
StreamPrintServiceFactory[] factories = StreamPrintServiceFactory
.lookupStreamPrintServiceFactories(null, clientFlavors[i]
.getMimeType());
for (int j = 0; j < factories.length; j++) {
DocFlavor[] factoryFlavors = factories[j]
.getSupportedDocFlavors();
for (int k = 0; k < factoryFlavors.length; k++) {
if (!flavors.contains(factoryFlavors[k])) {
flavors.add(factoryFlavors[k]);
}
}
}
}
return (DocFlavor[]) flavors.toArray(new DocFlavor[0]);
}
public boolean isDocFlavorSupported(DocFlavor flavor) {
if (flavor == null) {
throw new NullPointerException("DocFlavor flavor is null");
}
DocFlavor[] flavors = getSupportedDocFlavors();
for (int i = 0; i < flavors.length; i++) {
if (flavors[i].equals(flavor)) {
return true;
}
}
return false;
}
/*
* Checks whether the specified flavor is internal or not.
*/
private boolean isInternalDocFlavor(DocFlavor flavor) {
if (flavor.getMimeType().toLowerCase().indexOf("internal") != -1) {
return true;
}
return false;
}
/*
* Checks whether the specified flavor is supported by
* the PrintClient or not.
*/
boolean isDocFlavorSupportedByClient(DocFlavor flavor) {
DocFlavor clientFlavors[] = client.getSupportedDocFlavors();
for (int i = 0; i < clientFlavors.length; i++) {
if (clientFlavors[i].equals(flavor)) {
return true;
}
}
return false;
}
boolean isDocFlavorSupportedByClient(DocFlavor flavor,
DocFlavor[] clientFlavors) {
for (int i = 0; i < clientFlavors.length; i++) {
if (clientFlavors[i].equals(flavor)) {
return true;
}
}
return false;
}
//= Service user interface factory =======================================//
public ServiceUIFactory getServiceUIFactory() {
// This print service does not provide a service user interface factory
return null;
}
//= DocPrintJob ==========================================================//
public DocPrintJob createPrintJob() {
return new DefaultPrintJob(this);
}
}
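/*
 * Editor's sketch (not part of the original sources) of the lookup pattern the
 * comments in this class describe: a requested DocFlavor is considered usable
 * if some StreamPrintServiceFactory can convert it into one of the flavors the
 * underlying PrintClient supports. The class and method names below are
 * illustrative assumptions.
 */
class FlavorConversionSketch {
static boolean convertible(DocFlavor requested, DocFlavor[] clientFlavors) {
for (int i = 0; i < clientFlavors.length; i++) {
StreamPrintServiceFactory[] factories = StreamPrintServiceFactory
.lookupStreamPrintServiceFactories(requested, clientFlavors[i].getMimeType());
if (factories.length > 0) {
// At least one factory can turn 'requested' into a flavor
// that the PrintClient understands.
return true;
}
}
return false;
}
}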
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.common.buffercache;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.replication.IIOReplicationManager;
/**
* Implementation of an IBufferCache that counts the number of pins/unpins,
* latches/unlatches, and file create/delete/open/close called on it. It
* delegates the actual functionality to another IBufferCache set in the c'tor.
* The counters are updated in a thread-safe fashion using AtomicLong.
*/
public class DebugBufferCache implements IBufferCache {
// Actual BufferCache functionality is delegated to this bufferCache.
private final IBufferCache bufferCache;
private AtomicLong pinCount = new AtomicLong();
private AtomicLong unpinCount = new AtomicLong();
private AtomicLong readLatchCount = new AtomicLong();
private AtomicLong readUnlatchCount = new AtomicLong();
private AtomicLong writeLatchCount = new AtomicLong();
private AtomicLong writeUnlatchCount = new AtomicLong();
private AtomicLong createFileCount = new AtomicLong();
private AtomicLong deleteFileCount = new AtomicLong();
private AtomicLong openFileCount = new AtomicLong();
private AtomicLong closeFileCount = new AtomicLong();
public DebugBufferCache(IBufferCache bufferCache) {
this.bufferCache = bufferCache;
resetCounters();
}
@Override
public void createFile(FileReference fileRef) throws HyracksDataException {
bufferCache.createFile(fileRef);
createFileCount.addAndGet(1);
}
@Override
public void openFile(int fileId) throws HyracksDataException {
bufferCache.openFile(fileId);
openFileCount.addAndGet(1);
}
@Override
public void closeFile(int fileId) throws HyracksDataException {
bufferCache.closeFile(fileId);
closeFileCount.addAndGet(1);
}
@Override
public void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
bufferCache.deleteFile(fileId, flushDirtyPages);
deleteFileCount.addAndGet(1);
}
@Override
public ICachedPage tryPin(long dpid) throws HyracksDataException {
return bufferCache.tryPin(dpid);
}
@Override
public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
ICachedPage page = bufferCache.pin(dpid, newPage);
pinCount.addAndGet(1);
return page;
}
@Override
public void unpin(ICachedPage page) throws HyracksDataException {
bufferCache.unpin(page);
unpinCount.addAndGet(1);
}
@Override
public int getPageSize() {
return bufferCache.getPageSize();
}
@Override
public int getNumPages() {
return bufferCache.getNumPages();
}
@Override
public void close() throws HyracksDataException {
bufferCache.close();
}
public void resetCounters() {
pinCount.set(0);
unpinCount.set(0);
readLatchCount.set(0);
readUnlatchCount.set(0);
writeLatchCount.set(0);
writeUnlatchCount.set(0);
createFileCount.set(0);
deleteFileCount.set(0);
openFileCount.set(0);
closeFileCount.set(0);
}
public long getPinCount() {
return pinCount.get();
}
public long getUnpinCount() {
return unpinCount.get();
}
public long getReadLatchCount() {
return readLatchCount.get();
}
public long getReadUnlatchCount() {
return readUnlatchCount.get();
}
public long getWriteLatchCount() {
return writeLatchCount.get();
}
public long getWriteUnlatchCount() {
return writeUnlatchCount.get();
}
public long getCreateFileCount() {
return createFileCount.get();
}
public long getDeleteFileCount() {
return deleteFileCount.get();
}
public long getOpenFileCount() {
return openFileCount.get();
}
public long getCloseFileCount() {
return closeFileCount.get();
}
@Override
public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
bufferCache.flushDirtyPage(page);
}
@Override
public void force(int fileId, boolean metadata) throws HyracksDataException {
bufferCache.force(fileId, metadata);
}
@Override
public int createMemFile() throws HyracksDataException {
return bufferCache.createMemFile();
}
@Override
public void deleteMemFile(int fileId) throws HyracksDataException {
bufferCache.deleteMemFile(fileId);
}
@Override
public ICachedPage pinVirtual(long vpid) throws HyracksDataException {
pinCount.addAndGet(1);
return bufferCache.pinVirtual(vpid);
}
@Override
public ICachedPage unpinVirtual(long vpid, long dpid) throws HyracksDataException {
unpinCount.addAndGet(1);
return bufferCache.unpinVirtual(vpid, dpid);
}
@Override
public int getFileReferenceCount(int fileId) {
return bufferCache.getFileReferenceCount(fileId);
}
@Override
public boolean isReplicationEnabled() {
return false;
}
@Override
public IIOReplicationManager getIIOReplicationManager() {
return null;
}
}
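/*
 * Editor's usage sketch (not part of Hyracks): wrap any IBufferCache in the
 * counting decorator above and read the counters afterwards. The 'underlying'
 * cache and 'dpid' value are assumed to come from surrounding test or
 * application code; the class and method names are illustrative.
 */
class DebugBufferCacheUsageSketch {
static void pinOnce(IBufferCache underlying, long dpid) throws HyracksDataException {
DebugBufferCache debug = new DebugBufferCache(underlying); // counters start at zero
ICachedPage page = debug.pin(dpid, false); // delegates to the wrapped cache and increments pinCount
debug.unpin(page); // delegates and increments unpinCount
assert debug.getPinCount() == 1 && debug.getUnpinCount() == 1;
}
}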
|
|
/* ==========================================================================
* Copyright 2006 Mevenide Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================================================================
*/
package org.jetbrains.idea.maven.execution;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.RunManager;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.impl.EditConfigurationsDialog;
import com.intellij.execution.impl.RunManagerImpl;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.encoding.EncodingProjectManager;
import com.intellij.util.PathUtil;
import com.intellij.util.io.ZipUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM2RtMarker;
import org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM31RtMarker;
import org.jetbrains.idea.maven.artifactResolver.MavenArtifactResolvedM3RtMarker;
import org.jetbrains.idea.maven.artifactResolver.common.MavenModuleMap;
import org.jetbrains.idea.maven.project.MavenGeneralSettings;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.server.MavenServerUtil;
import org.jetbrains.idea.maven.utils.MavenSettings;
import org.jetbrains.idea.maven.utils.MavenUtil;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.io.*;
import java.util.*;
import java.util.zip.ZipOutputStream;
import static org.jetbrains.idea.maven.server.MavenServerManager.verifyMavenSdkRequirements;
/**
* @author Ralf Quebbemann
*/
public class MavenExternalParameters {
private static final Logger LOG = Logger.getInstance(MavenExternalParameters.class);
public static final String MAVEN_LAUNCHER_CLASS = "org.codehaus.classworlds.Launcher";
@NonNls private static final String MAVEN_OPTS = "MAVEN_OPTS";
@Deprecated // Use createJavaParameters(Project,MavenRunnerParameters, MavenGeneralSettings,MavenRunnerSettings,MavenRunConfiguration)
public static JavaParameters createJavaParameters(@Nullable final Project project,
@NotNull final MavenRunnerParameters parameters,
@Nullable MavenGeneralSettings coreSettings,
@Nullable MavenRunnerSettings runnerSettings) throws ExecutionException {
return createJavaParameters(project, parameters, coreSettings, runnerSettings, null);
}
public static JavaParameters createJavaParameters(@Nullable final Project project,
@NotNull final MavenRunnerParameters parameters) throws ExecutionException {
return createJavaParameters(project, parameters, null, null, null);
}
/**
* @param project
* @param parameters
* @param coreSettings
* @param runnerSettings
* @param runConfiguration used to create a fix if the Maven home is not found
* @return
* @throws ExecutionException
*/
public static JavaParameters createJavaParameters(@Nullable final Project project,
@NotNull final MavenRunnerParameters parameters,
@Nullable MavenGeneralSettings coreSettings,
@Nullable MavenRunnerSettings runnerSettings,
@Nullable MavenRunConfiguration runConfiguration) throws ExecutionException {
final JavaParameters params = new JavaParameters();
ApplicationManager.getApplication().assertReadAccessAllowed();
if (coreSettings == null) {
coreSettings = project == null ? new MavenGeneralSettings() : MavenProjectsManager.getInstance(project).getGeneralSettings();
}
if (runnerSettings == null) {
runnerSettings = project == null ? new MavenRunnerSettings() : MavenRunner.getInstance(project).getState();
}
params.setWorkingDirectory(parameters.getWorkingDirFile());
Sdk jdk = getJdk(project, runnerSettings, project != null && MavenRunner.getInstance(project).getState() == runnerSettings);
params.setJdk(jdk);
final String mavenHome = resolveMavenHome(coreSettings, project, runConfiguration);
final String mavenVersion = MavenUtil.getMavenVersion(mavenHome);
String sdkConfigLocation = "Settings | Build, Execution, Deployment | Build Tools | Maven | Runner | JRE";
verifyMavenSdkRequirements(jdk, mavenVersion, sdkConfigLocation);
params.getProgramParametersList().add("-Didea.version=" + MavenUtil.getIdeaVersionToPassToMavenProcess());
if (StringUtil.compareVersionNumbers(mavenVersion, "3.3") >= 0) {
params.getVMParametersList().addProperty("maven.multiModuleProjectDirectory",
MavenServerUtil.findMavenBasedir(parameters.getWorkingDirFile()).getPath());
}
addVMParameters(params.getVMParametersList(), mavenHome, runnerSettings);
File confFile = MavenUtil.getMavenConfFile(new File(mavenHome));
if (!confFile.isFile()) {
throw new ExecutionException("Configuration file is not exists in maven home: " + confFile.getAbsolutePath());
}
if (project != null && parameters.isResolveToWorkspace()) {
try {
String resolverJar = getArtifactResolverJar(mavenVersion);
confFile = patchConfFile(confFile, resolverJar);
File modulesPathsFile = dumpModulesPaths(project);
params.getVMParametersList().addProperty(MavenModuleMap.PATHS_FILE_PROPERTY, modulesPathsFile.getAbsolutePath());
}
catch (IOException e) {
LOG.error(e);
throw new ExecutionException("Failed to run maven configuration", e);
}
}
params.getVMParametersList().addProperty("classworlds.conf", confFile.getPath());
for (String path : getMavenClasspathEntries(mavenHome)) {
params.getClassPath().add(path);
}
params.setEnv(new HashMap<>(runnerSettings.getEnvironmentProperties()));
params.setPassParentEnvs(runnerSettings.isPassParentEnv());
params.setMainClass(MAVEN_LAUNCHER_CLASS);
EncodingManager encodingManager = project == null
? EncodingManager.getInstance()
: EncodingProjectManager.getInstance(project);
params.setCharset(encodingManager.getDefaultCharset());
addMavenParameters(params.getProgramParametersList(), mavenHome, coreSettings, runnerSettings, parameters);
return params;
}
private static File patchConfFile(File conf, String library) throws IOException {
File tmpConf = FileUtil.createTempFile("idea-", "-mvn.conf");
tmpConf.deleteOnExit();
patchConfFile(conf, tmpConf, library);
return tmpConf;
}
private static void patchConfFile(File originalConf, File dest, String library) throws IOException {
Scanner sc = new Scanner(originalConf);
try {
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dest)));
try {
boolean patched = false;
while (sc.hasNextLine()) {
String line = sc.nextLine();
out.append(line);
out.newLine();
if (!patched && "[plexus.core]".equals(line)) {
out.append("load ").append(library);
out.newLine();
patched = true;
}
}
}
finally {
out.close();
}
}
finally {
sc.close();
}
}
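// Illustrative effect of the patch above (the jar path is a placeholder): given an
// original classworlds conf containing a "[plexus.core]" section, the patched copy
// gains exactly one extra "load" line right after that section header, e.g.
//   [plexus.core]
//   load /tmp/idea-artifactResolver.jar
// so the artifact-resolver classes end up on Maven's classworlds classpath.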
private static String getArtifactResolverJar(@Nullable String mavenVersion) throws IOException {
Class marker;
if (mavenVersion != null && mavenVersion.compareTo("3.1.0") >= 0) {
marker = MavenArtifactResolvedM31RtMarker.class;
}
else if (mavenVersion != null && mavenVersion.compareTo("3.0.0") >= 0) {
marker = MavenArtifactResolvedM3RtMarker.class;
}
else {
marker = MavenArtifactResolvedM2RtMarker.class;
}
File classDirOrJar = new File(PathUtil.getJarPathForClass(marker));
if (!classDirOrJar.isDirectory()) {
return classDirOrJar.getAbsolutePath(); // it's a jar in IDEA installation.
}
// it's a classes directory; we are in development mode.
File tempFile = FileUtil.createTempFile("idea-", "-artifactResolver.jar");
tempFile.deleteOnExit();
ZipOutputStream zipOutput = new ZipOutputStream(new FileOutputStream(tempFile));
try {
ZipUtil.addDirToZipRecursively(zipOutput, null, classDirOrJar, "", null, null);
File m2Module = new File(PathUtil.getJarPathForClass(MavenModuleMap.class));
String commonClassesPath = MavenModuleMap.class.getPackage().getName().replace('.', '/');
ZipUtil.addDirToZipRecursively(zipOutput, null, new File(m2Module, commonClassesPath), commonClassesPath, null, null);
}
finally {
zipOutput.close();
}
return tempFile.getAbsolutePath();
}
private static File dumpModulesPaths(@NotNull Project project) throws IOException {
ApplicationManager.getApplication().assertReadAccessAllowed();
Properties res = new Properties();
MavenProjectsManager manager = MavenProjectsManager.getInstance(project);
for (Module module : ModuleManager.getInstance(project).getModules()) {
if (manager.isMavenizedModule(module)) {
MavenProject mavenProject = manager.findProject(module);
if (mavenProject != null && !manager.isIgnored(mavenProject)) {
res.setProperty(mavenProject.getMavenId().getGroupId()
+ ':' + mavenProject.getMavenId().getArtifactId()
+ ":pom"
+ ':' + mavenProject.getMavenId().getVersion(),
mavenProject.getFile().getPath());
res.setProperty(mavenProject.getMavenId().getGroupId()
+ ':' + mavenProject.getMavenId().getArtifactId()
+ ':' + mavenProject.getPackaging()
+ ':' + mavenProject.getMavenId().getVersion(),
mavenProject.getOutputDirectory());
res.setProperty(mavenProject.getMavenId().getGroupId()
+ ':' + mavenProject.getMavenId().getArtifactId()
+ ":test-jar"
+ ':' + mavenProject.getMavenId().getVersion(),
mavenProject.getTestOutputDirectory());
addArtifactFileMapping(res, mavenProject, "sources");
addArtifactFileMapping(res, mavenProject, "test-sources");
addArtifactFileMapping(res, mavenProject, "javadoc");
addArtifactFileMapping(res, mavenProject, "test-javadoc");
}
}
}
File file = new File(PathManager.getSystemPath(), "Maven/idea-projects-state-" + project.getLocationHash() + ".properties");
FileUtil.ensureExists(file.getParentFile());
OutputStream out = new BufferedOutputStream(new FileOutputStream(file));
try {
res.store(out, null);
}
finally {
out.close();
}
return file;
}
private static void addArtifactFileMapping(@NotNull Properties res, @NotNull MavenProject mavenProject, @NotNull String classifier) {
File file = new File(mavenProject.getBuildDirectory(), mavenProject.getFinalName() + '-' + classifier + ".jar");
if (file.exists()) {
res.setProperty(mavenProject.getMavenId().getGroupId()
+ ':' + mavenProject.getMavenId().getArtifactId()
+ ':' + classifier
+ ':' + mavenProject.getMavenId().getVersion(),
file.getPath());
}
}
@NotNull
private static Sdk getJdk(@Nullable Project project, MavenRunnerSettings runnerSettings, boolean isGlobalRunnerSettings)
throws ExecutionException {
String name = runnerSettings.getJreName();
if (name.equals(MavenRunnerSettings.USE_INTERNAL_JAVA)) {
return JavaAwareProjectJdkTableImpl.getInstanceEx().getInternalJdk();
}
if (name.equals(MavenRunnerSettings.USE_PROJECT_JDK)) {
if (project != null) {
Sdk res = ProjectRootManager.getInstance(project).getProjectSdk();
if (res != null) {
return res;
}
Module[] modules = ModuleManager.getInstance(project).getModules();
for (Module module : modules) {
Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
if (sdk != null && sdk.getSdkType() instanceof JavaSdkType) {
return sdk;
}
}
}
if (project == null) {
Sdk recent = ProjectJdkTable.getInstance().findMostRecentSdkOfType(JavaSdk.getInstance());
if (recent != null) return recent;
return JavaAwareProjectJdkTableImpl.getInstanceEx().getInternalJdk();
}
throw new ProjectJdkSettingsOpenerExecutionException("Project JDK is not specified. <a href=''>Configure</a>", project);
}
if (name.equals(MavenRunnerSettings.USE_JAVA_HOME)) {
final String javaHome = System.getenv("JAVA_HOME");
if (StringUtil.isEmptyOrSpaces(javaHome)) {
throw new ExecutionException(RunnerBundle.message("maven.java.home.undefined"));
}
final Sdk jdk = JavaSdk.getInstance().createJdk("", javaHome);
if (jdk == null) {
throw new ExecutionException(RunnerBundle.message("maven.java.home.invalid", javaHome));
}
return jdk;
}
for (Sdk projectJdk : ProjectJdkTable.getInstance().getAllJdks()) {
if (projectJdk.getName().equals(name)) {
return projectJdk;
}
}
if (isGlobalRunnerSettings) {
throw new ExecutionException(RunnerBundle.message("maven.java.not.found.default.config", name));
}
else {
throw new ExecutionException(RunnerBundle.message("maven.java.not.found", name));
}
}
public static void addVMParameters(ParametersList parametersList, String mavenHome, MavenRunnerSettings runnerSettings) {
parametersList.addParametersString(System.getenv(MAVEN_OPTS));
parametersList.addParametersString(runnerSettings.getVmOptions());
parametersList.addProperty("maven.home", mavenHome);
}
private static void addMavenParameters(ParametersList parametersList,
String mavenHome,
MavenGeneralSettings coreSettings,
MavenRunnerSettings runnerSettings,
MavenRunnerParameters parameters) {
encodeCoreAndRunnerSettings(coreSettings, mavenHome, parametersList);
if (runnerSettings.isSkipTests()) {
parametersList.addProperty("skipTests", "true");
}
for (Map.Entry<String, String> entry : runnerSettings.getMavenProperties().entrySet()) {
if (entry.getKey().length() > 0) {
parametersList.addProperty(entry.getKey(), entry.getValue());
}
}
for (String goal : parameters.getGoals()) {
parametersList.add(goal);
}
addOption(parametersList, "P", encodeProfiles(parameters.getProfilesMap()));
}
private static void addOption(ParametersList cmdList, @NonNls String key, @NonNls String value) {
if (!StringUtil.isEmptyOrSpaces(value)) {
cmdList.add("-" + key);
cmdList.add(value);
}
}
@NotNull
public static String resolveMavenHome(@NotNull MavenGeneralSettings coreSettings) throws ExecutionException {
return resolveMavenHome(coreSettings, null, null);
}
/**
* @param coreSettings
* @param project used to create a fix if the Maven home is not found
* @param runConfiguration used to create a fix if the Maven home is not found
* @return
* @throws ExecutionException
*/
@NotNull
public static String resolveMavenHome(@NotNull MavenGeneralSettings coreSettings,
@Nullable Project project,
@Nullable MavenRunConfiguration runConfiguration) throws ExecutionException {
final File file = MavenUtil.resolveMavenHomeDirectory(coreSettings.getMavenHome());
if (file == null) {
throw createExecutionException(RunnerBundle.message("external.maven.home.no.default"),
RunnerBundle.message("external.maven.home.no.default.with.fix"),
coreSettings, project, runConfiguration);
}
if (!file.exists()) {
throw createExecutionException(RunnerBundle.message("external.maven.home.does.not.exist", file.getPath()),
RunnerBundle.message("external.maven.home.does.not.exist.with.fix", file.getPath()),
coreSettings, project, runConfiguration);
}
if (!MavenUtil.isValidMavenHome(file)) {
throw createExecutionException(RunnerBundle.message("external.maven.home.invalid", file.getPath()),
RunnerBundle.message("external.maven.home.invalid.with.fix", file.getPath()),
coreSettings, project, runConfiguration);
}
try {
return file.getCanonicalPath();
}
catch (IOException e) {
throw new ExecutionException(e.getMessage(), e);
}
}
private static ExecutionException createExecutionException(String text,
String textWithFix,
@NotNull MavenGeneralSettings coreSettings,
@Nullable Project project,
@Nullable MavenRunConfiguration runConfiguration) {
Project notNullProject = project;
if (notNullProject == null) {
if (runConfiguration == null) return new ExecutionException(text);
notNullProject = runConfiguration.getProject();
if (notNullProject == null) return new ExecutionException(text);
}
if (coreSettings == MavenProjectsManager.getInstance(notNullProject).getGeneralSettings()) {
return new ProjectSettingsOpenerExecutionException(textWithFix, notNullProject);
}
if (runConfiguration != null) {
Project runCfgProject = runConfiguration.getProject();
if (runCfgProject != null) {
if (((RunManagerImpl)RunManager.getInstance(runCfgProject)).getSettings(runConfiguration) != null) {
return new RunConfigurationOpenerExecutionException(textWithFix, runConfiguration);
}
}
}
return new ExecutionException(text);
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static List<String> getMavenClasspathEntries(final String mavenHome) {
File mavenHomeBootAsFile = new File(new File(mavenHome, "core"), "boot");
// if the dir "core/boot" does not exist we are using a Maven version > 2.0.5
// in this case the classpath must be constructed from the dir "boot"
if (!mavenHomeBootAsFile.exists()) {
mavenHomeBootAsFile = new File(mavenHome, "boot");
}
List<String> classpathEntries = new ArrayList<>();
File[] files = mavenHomeBootAsFile.listFiles();
if (files != null) {
for (File file : files) {
if (file.getName().contains("classworlds")) {
classpathEntries.add(file.getAbsolutePath());
}
}
}
return classpathEntries;
}
private static void encodeCoreAndRunnerSettings(MavenGeneralSettings coreSettings, String mavenHome,
ParametersList cmdList) {
if (coreSettings.isWorkOffline()) {
cmdList.add("--offline");
}
boolean atLeastMaven3 = MavenUtil.isMaven3(mavenHome);
if (!atLeastMaven3) {
addIfNotEmpty(cmdList, coreSettings.getPluginUpdatePolicy().getCommandLineOption());
if (!coreSettings.isUsePluginRegistry()) {
cmdList.add("--no-plugin-registry");
}
}
if (coreSettings.getOutputLevel() == MavenExecutionOptions.LoggingLevel.DEBUG) {
cmdList.add("--debug");
}
if (coreSettings.isNonRecursive()) {
cmdList.add("--non-recursive");
}
if (coreSettings.isPrintErrorStackTraces()) {
cmdList.add("--errors");
}
if (coreSettings.isAlwaysUpdateSnapshots()) {
cmdList.add("--update-snapshots");
}
if (StringUtil.isNotEmpty(coreSettings.getThreads())) {
cmdList.add("-T", coreSettings.getThreads());
}
addIfNotEmpty(cmdList, coreSettings.getFailureBehavior().getCommandLineOption());
addIfNotEmpty(cmdList, coreSettings.getChecksumPolicy().getCommandLineOption());
addOption(cmdList, "s", coreSettings.getUserSettingsFile());
if (!StringUtil.isEmptyOrSpaces(coreSettings.getLocalRepository())) {
cmdList.addProperty("maven.repo.local", coreSettings.getLocalRepository());
}
}
private static void addIfNotEmpty(ParametersList parametersList, @Nullable String value) {
if (!StringUtil.isEmptyOrSpaces(value)) {
parametersList.add(value);
}
}
private static String encodeProfiles(Map<String, Boolean> profiles) {
StringBuilder stringBuilder = new StringBuilder();
for (Map.Entry<String, Boolean> entry : profiles.entrySet()) {
if (stringBuilder.length() != 0) {
stringBuilder.append(",");
}
if (!entry.getValue()) {
stringBuilder.append("!");
}
stringBuilder.append(entry.getKey());
}
return stringBuilder.toString();
}
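// Illustrative example (editor's note): a profiles map such as {"ci" -> true, "it" -> false}
// is encoded as "ci,!it", which addMavenParameters passes to Maven as the two
// arguments "-P" and "ci,!it".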
private static class ProjectSettingsOpenerExecutionException extends WithHyperlinkExecutionException {
private final Project myProject;
public ProjectSettingsOpenerExecutionException(final String s, Project project) {
super(s);
myProject = project;
}
@Override
protected void hyperlinkClicked() {
ShowSettingsUtil.getInstance().showSettingsDialog(myProject, MavenSettings.DISPLAY_NAME);
}
}
private static class ProjectJdkSettingsOpenerExecutionException extends WithHyperlinkExecutionException {
private final Project myProject;
public ProjectJdkSettingsOpenerExecutionException(final String s, Project project) {
super(s);
myProject = project;
}
@Override
protected void hyperlinkClicked() {
ProjectSettingsService.getInstance(myProject).openProjectSettings();
}
}
private static class RunConfigurationOpenerExecutionException extends WithHyperlinkExecutionException {
private final MavenRunConfiguration myRunConfiguration;
public RunConfigurationOpenerExecutionException(final String s, MavenRunConfiguration runConfiguration) {
super(s);
myRunConfiguration = runConfiguration;
}
@Override
protected void hyperlinkClicked() {
Project project = myRunConfiguration.getProject();
EditConfigurationsDialog dialog = new EditConfigurationsDialog(project);
dialog.show();
}
}
private static abstract class WithHyperlinkExecutionException extends ExecutionException
implements HyperlinkListener, NotificationListener {
public WithHyperlinkExecutionException(String s) {
super(s);
}
protected abstract void hyperlinkClicked();
@Override
public final void hyperlinkUpdate(HyperlinkEvent e) {
if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
hyperlinkClicked();
}
}
@Override
public final void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
hyperlinkUpdate(event);
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.history.events;
import static org.junit.Assert.fail;
import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.tez.common.ReflectionUtils;
import org.apache.tez.common.counters.TezCounters;
import org.apache.tez.dag.api.EdgeManagerPluginDescriptor;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.api.VertexLocationHint;
import org.apache.tez.dag.api.TaskLocationHint;
import org.apache.tez.dag.api.oldrecords.TaskAttemptState;
import org.apache.tez.dag.api.oldrecords.TaskState;
import org.apache.tez.dag.api.records.DAGProtos.DAGPlan;
import org.apache.tez.dag.app.dag.DAGState;
import org.apache.tez.dag.app.dag.VertexState;
import org.apache.tez.dag.app.dag.impl.VertexStats;
import org.apache.tez.dag.history.HistoryEvent;
import org.apache.tez.dag.history.HistoryEventType;
import org.apache.tez.dag.history.SummaryEvent;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezTaskID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.dag.recovery.records.RecoveryProtos.SummaryEventProto;
import org.apache.tez.runtime.api.InputSpecUpdate;
import org.apache.tez.runtime.api.events.DataMovementEvent;
import org.apache.tez.runtime.api.impl.EventMetaData;
import org.apache.tez.runtime.api.impl.EventMetaData.EventProducerConsumerType;
import org.apache.tez.runtime.api.impl.TezEvent;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Lists;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class TestHistoryEventsProtoConversion {
private static final Log LOG = LogFactory.getLog(
TestHistoryEventsProtoConversion.class);
private HistoryEvent testProtoConversion(HistoryEvent event) throws IOException {
ByteArrayOutputStream os = new ByteArrayOutputStream();
HistoryEvent deserializedEvent = null;
event.toProtoStream(os);
os.flush();
os.close();
deserializedEvent = ReflectionUtils.createClazzInstance(
event.getClass().getName());
LOG.info("Serialized event to byte array"
+ ", eventType=" + event.getEventType()
+ ", bufLen=" + os.toByteArray().length);
deserializedEvent.fromProtoStream(
new ByteArrayInputStream(os.toByteArray()));
return deserializedEvent;
}
private HistoryEvent testSummaryProtoConversion(HistoryEvent historyEvent)
throws IOException {
SummaryEvent event = (SummaryEvent) historyEvent;
ByteArrayOutputStream os = new ByteArrayOutputStream();
HistoryEvent deserializedEvent = null;
event.toSummaryProtoStream(os);
os.flush();
os.close();
LOG.info("Serialized event to byte array"
+ ", eventType=" + historyEvent.getEventType()
+ ", bufLen=" + os.toByteArray().length);
SummaryEventProto summaryEventProto =
SummaryEventProto.parseDelimitedFrom(
new ByteArrayInputStream(os.toByteArray()));
deserializedEvent = ReflectionUtils.createClazzInstance(
event.getClass().getName());
((SummaryEvent)deserializedEvent).fromSummaryProtoStream(summaryEventProto);
return deserializedEvent;
}
private void logEvents(HistoryEvent event,
HistoryEvent deserializedEvent) {
LOG.info("Initial Event toString: " + event.toString());
LOG.info("Deserialized Event toString: " + deserializedEvent.toString());
}
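
  /**
   * AppLaunchedEvent does not support proto conversion, so the round trip is
   * expected to fail with UnsupportedOperationException.
   */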
private void testAppLaunchedEvent() throws Exception {
AppLaunchedEvent event = new AppLaunchedEvent(ApplicationId.newInstance(0, 1),
100, 100, null, new Configuration(false));
try {
testProtoConversion(event);
fail("Expected to fail on conversion");
} catch (UnsupportedOperationException e) {
// Expected
}
LOG.info("Initial Event toString: " + event.toString());
}
private void testAMLaunchedEvent() throws Exception {
AMLaunchedEvent event = new AMLaunchedEvent(
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1),
100, 100, null);
AMLaunchedEvent deserializedEvent = (AMLaunchedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getApplicationAttemptId(),
deserializedEvent.getApplicationAttemptId());
Assert.assertEquals(event.getAppSubmitTime(),
deserializedEvent.getAppSubmitTime());
Assert.assertEquals(event.getLaunchTime(),
deserializedEvent.getLaunchTime());
logEvents(event, deserializedEvent);
}
private void testAMStartedEvent() throws Exception {
AMStartedEvent event = new AMStartedEvent(
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), 100, "");
AMStartedEvent deserializedEvent = (AMStartedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getApplicationAttemptId(),
deserializedEvent.getApplicationAttemptId());
Assert.assertEquals(event.getStartTime(),
deserializedEvent.getStartTime());
logEvents(event, deserializedEvent);
}
private void testDAGSubmittedEvent() throws Exception {
DAGSubmittedEvent event = new DAGSubmittedEvent(TezDAGID.getInstance(
ApplicationId.newInstance(0, 1), 1), 1001l,
DAGPlan.newBuilder().setName("foo").build(),
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), null, "");
DAGSubmittedEvent deserializedEvent = (DAGSubmittedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getApplicationAttemptId(),
deserializedEvent.getApplicationAttemptId());
Assert.assertEquals(event.getDagID(),
deserializedEvent.getDagID());
Assert.assertEquals(event.getDAGName(),
deserializedEvent.getDAGName());
Assert.assertEquals(event.getSubmitTime(),
deserializedEvent.getSubmitTime());
Assert.assertEquals(event.getDAGPlan(),
deserializedEvent.getDAGPlan());
logEvents(event, deserializedEvent);
}
private void testDAGInitializedEvent() throws Exception {
DAGInitializedEvent event = new DAGInitializedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 100334l,
"user", "dagName", null);
DAGInitializedEvent deserializedEvent = (DAGInitializedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getDagID(),
deserializedEvent.getDagID());
Assert.assertEquals(event.getInitTime(), deserializedEvent.getInitTime());
logEvents(event, deserializedEvent);
}
private void testDAGStartedEvent() throws Exception {
DAGStartedEvent event = new DAGStartedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 100334l,
"user", "dagName");
DAGStartedEvent deserializedEvent = (DAGStartedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getDagID(),
deserializedEvent.getDagID());
Assert.assertEquals(event.getStartTime(), deserializedEvent.getStartTime());
logEvents(event, deserializedEvent);
}
private void testDAGFinishedEvent() throws Exception {
{
DAGFinishedEvent event = new DAGFinishedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 1000l, 20000l,
DAGState.FAILED, null, null, "user", "dagName", null);
DAGFinishedEvent deserializedEvent = (DAGFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(
event.getDagID(),
deserializedEvent.getDagID());
Assert.assertEquals(event.getState(), deserializedEvent.getState());
Assert.assertNotEquals(event.getStartTime(), deserializedEvent.getStartTime());
Assert.assertEquals(event.getFinishTime(), deserializedEvent.getFinishTime());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
Assert.assertEquals(event.getTezCounters(), deserializedEvent.getTezCounters());
logEvents(event, deserializedEvent);
}
{
TezCounters tezCounters = new TezCounters();
tezCounters.addGroup("foo", "bar");
tezCounters.getGroup("foo").addCounter("c1", "c1", 100);
tezCounters.getGroup("foo").findCounter("c1").increment(1);
DAGFinishedEvent event = new DAGFinishedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 1000l, 20000l,
DAGState.FAILED, "bad diagnostics", tezCounters,
"user", "dagName", null);
DAGFinishedEvent deserializedEvent = (DAGFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(
event.getDagID(),
deserializedEvent.getDagID());
Assert.assertEquals(event.getState(), deserializedEvent.getState());
Assert.assertNotEquals(event.getStartTime(), deserializedEvent.getStartTime());
Assert.assertEquals(event.getFinishTime(), deserializedEvent.getFinishTime());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
Assert.assertEquals(event.getTezCounters(), deserializedEvent.getTezCounters());
Assert.assertEquals(101,
deserializedEvent.getTezCounters().getGroup("foo").findCounter("c1").getValue());
logEvents(event, deserializedEvent);
}
}
private void testVertexInitializedEvent() throws Exception {
VertexInitializedEvent event = new VertexInitializedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
"vertex1", 1000l, 15000l, 100, "procName", null);
VertexInitializedEvent deserializedEvent = (VertexInitializedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getInitRequestedTime(),
deserializedEvent.getInitRequestedTime());
Assert.assertEquals(event.getInitedTime(),
deserializedEvent.getInitedTime());
Assert.assertEquals(event.getNumTasks(),
deserializedEvent.getNumTasks());
Assert.assertEquals(event.getAdditionalInputs(),
deserializedEvent.getAdditionalInputs());
Assert.assertNull(deserializedEvent.getProcessorName());
logEvents(event, deserializedEvent);
}
private void testVertexStartedEvent() throws Exception {
VertexStartedEvent event = new VertexStartedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
145553l, 12334455l);
VertexStartedEvent deserializedEvent = (VertexStartedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getStartRequestedTime(),
deserializedEvent.getStartRequestedTime());
Assert.assertEquals(event.getStartTime(),
deserializedEvent.getStartTime());
logEvents(event, deserializedEvent);
}
private void testVertexParallelismUpdatedEvent() throws Exception {
{
InputSpecUpdate rootInputSpecUpdateBulk = InputSpecUpdate
.createAllTaskInputSpecUpdate(2);
InputSpecUpdate rootInputSpecUpdatePerTask = InputSpecUpdate
.createPerTaskInputSpecUpdate(Lists.newArrayList(1, 2, 3));
Map<String, InputSpecUpdate> rootInputSpecUpdates = new HashMap<String, InputSpecUpdate>();
rootInputSpecUpdates.put("input1", rootInputSpecUpdateBulk);
rootInputSpecUpdates.put("input2", rootInputSpecUpdatePerTask);
VertexParallelismUpdatedEvent event =
new VertexParallelismUpdatedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
100, null, null, rootInputSpecUpdates, 1);
VertexParallelismUpdatedEvent deserializedEvent = (VertexParallelismUpdatedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getNumTasks(), deserializedEvent.getNumTasks());
Assert.assertEquals(event.getSourceEdgeManagers(),
deserializedEvent.getSourceEdgeManagers());
Assert.assertEquals(event.getVertexLocationHint(),
deserializedEvent.getVertexLocationHint());
Assert.assertEquals(event.getRootInputSpecUpdates().size(), deserializedEvent
.getRootInputSpecUpdates().size());
InputSpecUpdate deserializedBulk = deserializedEvent.getRootInputSpecUpdates().get("input1");
InputSpecUpdate deserializedPerTask = deserializedEvent.getRootInputSpecUpdates().get("input2");
Assert.assertEquals(rootInputSpecUpdateBulk.isForAllWorkUnits(),
deserializedBulk.isForAllWorkUnits());
Assert.assertEquals(rootInputSpecUpdateBulk.getAllNumPhysicalInputs(),
deserializedBulk.getAllNumPhysicalInputs());
Assert.assertEquals(rootInputSpecUpdatePerTask.isForAllWorkUnits(),
deserializedPerTask.isForAllWorkUnits());
Assert.assertEquals(rootInputSpecUpdatePerTask.getAllNumPhysicalInputs(),
deserializedPerTask.getAllNumPhysicalInputs());
logEvents(event, deserializedEvent);
}
{
Map<String,EdgeManagerPluginDescriptor> sourceEdgeManagers
= new LinkedHashMap<String, EdgeManagerPluginDescriptor>();
sourceEdgeManagers.put("foo", EdgeManagerPluginDescriptor.create("bar"));
sourceEdgeManagers.put("foo1", EdgeManagerPluginDescriptor.create("bar1")
.setUserPayload(
UserPayload.create(ByteBuffer.wrap(new String("payload").getBytes()), 100)));
VertexParallelismUpdatedEvent event =
new VertexParallelismUpdatedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
100, VertexLocationHint.create(Arrays.asList(TaskLocationHint.createTaskLocationHint(
new HashSet<String>(Arrays.asList("h1")),
new HashSet<String>(Arrays.asList("r1"))))),
sourceEdgeManagers, null, 1);
VertexParallelismUpdatedEvent deserializedEvent = (VertexParallelismUpdatedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getNumTasks(), deserializedEvent.getNumTasks());
Assert.assertEquals(event.getSourceEdgeManagers().size(),
deserializedEvent.getSourceEdgeManagers().size());
Assert.assertEquals(event.getSourceEdgeManagers().get("foo").getClassName(),
deserializedEvent.getSourceEdgeManagers().get("foo").getClassName());
Assert.assertNull(deserializedEvent.getSourceEdgeManagers().get("foo").getUserPayload());
Assert.assertEquals(event.getSourceEdgeManagers().get("foo1").getClassName(),
deserializedEvent.getSourceEdgeManagers().get("foo1").getClassName());
Assert.assertEquals(event.getSourceEdgeManagers().get("foo1").getUserPayload().getVersion(),
deserializedEvent.getSourceEdgeManagers().get("foo1").getUserPayload().getVersion());
Assert.assertArrayEquals(
event.getSourceEdgeManagers().get("foo1").getUserPayload().deepCopyAsArray(),
deserializedEvent.getSourceEdgeManagers().get("foo1").getUserPayload().deepCopyAsArray());
Assert.assertEquals(event.getVertexLocationHint(),
deserializedEvent.getVertexLocationHint());
logEvents(event, deserializedEvent);
}
}
private void testVertexFinishedEvent() throws Exception {
{
VertexFinishedEvent event =
new VertexFinishedEvent(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
"vertex1", 1, 1000l, 15000l, 16000l, 20000l, 1344400l, VertexState.ERROR,
null, null, null, null);
VertexFinishedEvent deserializedEvent = (VertexFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getState(), deserializedEvent.getState());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
logEvents(event, deserializedEvent);
}
{
VertexFinishedEvent event =
new VertexFinishedEvent(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111),
"vertex1", 1, 1000l, 15000l, 16000l, 20000l, 1344400l, VertexState.ERROR,
"diagnose", new TezCounters(), new VertexStats(), null);
VertexFinishedEvent deserializedEvent = (VertexFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getState(), deserializedEvent.getState());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
logEvents(event, deserializedEvent);
}
}
private void testTaskStartedEvent() throws Exception {
TaskStartedEvent event = new TaskStartedEvent(
TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1),
"vertex1", 1000l, 100000l);
TaskStartedEvent deserializedEvent = (TaskStartedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskID(), deserializedEvent.getTaskID());
Assert.assertEquals(event.getScheduledTime(),
deserializedEvent.getScheduledTime());
Assert.assertEquals(event.getStartTime(),
deserializedEvent.getStartTime());
logEvents(event, deserializedEvent);
}
private void testTaskFinishedEvent() throws Exception {
{
TaskFinishedEvent event = new TaskFinishedEvent(
TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1),
"vertex1", 11000l, 1000000l, null, TaskState.FAILED, null, null);
TaskFinishedEvent deserializedEvent = (TaskFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskID(), deserializedEvent.getTaskID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getState(),
deserializedEvent.getState());
Assert.assertEquals(event.getSuccessfulAttemptID(),
deserializedEvent.getSuccessfulAttemptID());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
logEvents(event, deserializedEvent);
}
{
TaskFinishedEvent event = new TaskFinishedEvent(
TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1),
"vertex1", 11000l, 1000000l,
TezTaskAttemptID.getInstance(TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1), 1),
TaskState.FAILED, "task_diagnostics", new TezCounters());
TaskFinishedEvent deserializedEvent = (TaskFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskID(), deserializedEvent.getTaskID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getState(),
deserializedEvent.getState());
Assert.assertEquals(event.getSuccessfulAttemptID(),
deserializedEvent.getSuccessfulAttemptID());
Assert.assertEquals(event.getDiagnostics(), deserializedEvent.getDiagnostics());
logEvents(event, deserializedEvent);
}
}
private void testTaskAttemptStartedEvent() throws Exception {
TaskAttemptStartedEvent event = new TaskAttemptStartedEvent(
TezTaskAttemptID.getInstance(TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1), 1),
"vertex1", 10009l, ContainerId.newInstance(
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), 1001), NodeId.newInstance(
"host1", 19999), "inProgress", "Completed", "nodeHttpAddress");
TaskAttemptStartedEvent deserializedEvent = (TaskAttemptStartedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskAttemptID(),
deserializedEvent.getTaskAttemptID());
Assert.assertEquals(event.getContainerId(),
deserializedEvent.getContainerId());
Assert.assertEquals(event.getNodeId(),
deserializedEvent.getNodeId());
Assert.assertEquals(event.getStartTime(),
deserializedEvent.getStartTime());
logEvents(event, deserializedEvent);
}
private void testTaskAttemptFinishedEvent() throws Exception {
{
TaskAttemptFinishedEvent event = new TaskAttemptFinishedEvent(
TezTaskAttemptID.getInstance(TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1), 1),
"vertex1", 10001l, 1000434444l, TaskAttemptState.FAILED,
null, null);
TaskAttemptFinishedEvent deserializedEvent = (TaskAttemptFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskAttemptID(),
deserializedEvent.getTaskAttemptID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getDiagnostics(),
deserializedEvent.getDiagnostics());
Assert.assertEquals(event.getState(),
deserializedEvent.getState());
Assert.assertEquals(event.getCounters(),
deserializedEvent.getCounters());
logEvents(event, deserializedEvent);
}
{
TaskAttemptFinishedEvent event = new TaskAttemptFinishedEvent(
TezTaskAttemptID.getInstance(TezTaskID.getInstance(TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 111), 1), 1),
"vertex1", 10001l, 1000434444l, TaskAttemptState.FAILED,
"diagnose", new TezCounters());
TaskAttemptFinishedEvent deserializedEvent = (TaskAttemptFinishedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getTaskAttemptID(),
deserializedEvent.getTaskAttemptID());
Assert.assertEquals(event.getFinishTime(),
deserializedEvent.getFinishTime());
Assert.assertEquals(event.getDiagnostics(),
deserializedEvent.getDiagnostics());
Assert.assertEquals(event.getState(),
deserializedEvent.getState());
Assert.assertEquals(event.getCounters(),
deserializedEvent.getCounters());
logEvents(event, deserializedEvent);
}
}
private void testContainerLaunchedEvent() throws Exception {
ContainerLaunchedEvent event = new ContainerLaunchedEvent(
ContainerId.newInstance(ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), 1001), 100034566,
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1));
ContainerLaunchedEvent deserializedEvent = (ContainerLaunchedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getContainerId(),
deserializedEvent.getContainerId());
Assert.assertEquals(event.getLaunchTime(),
deserializedEvent.getLaunchTime());
Assert.assertEquals(event.getApplicationAttemptId(),
deserializedEvent.getApplicationAttemptId());
logEvents(event, deserializedEvent);
}
private void testContainerStoppedEvent() throws Exception {
ContainerStoppedEvent event = new ContainerStoppedEvent(
ContainerId.newInstance(ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), 1001), 100034566,
ContainerExitStatus.SUCCESS, ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1));
ContainerStoppedEvent deserializedEvent = (ContainerStoppedEvent)
testProtoConversion(event);
Assert.assertEquals(event.getContainerId(),
deserializedEvent.getContainerId());
Assert.assertEquals(event.getStoppedTime(),
deserializedEvent.getStoppedTime());
Assert.assertEquals(event.getApplicationAttemptId(),
deserializedEvent.getApplicationAttemptId());
logEvents(event, deserializedEvent);
}
private void testVertexDataMovementEventsGeneratedEvent() throws Exception {
VertexRecoverableEventsGeneratedEvent event;
try {
event = new VertexRecoverableEventsGeneratedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 1), null);
fail("Invalid creation should have errored out");
} catch (RuntimeException e) {
// Expected
}
List<TezEvent> events =
Arrays.asList(new TezEvent(DataMovementEvent.create(1, null),
new EventMetaData(EventProducerConsumerType.SYSTEM, "foo", "bar", null)));
event = new VertexRecoverableEventsGeneratedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 1), events);
VertexRecoverableEventsGeneratedEvent deserializedEvent =
(VertexRecoverableEventsGeneratedEvent) testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
Assert.assertEquals(1,
deserializedEvent.getTezEvents().size());
Assert.assertEquals(event.getTezEvents().get(0).getEventType(),
deserializedEvent.getTezEvents().get(0).getEventType());
logEvents(event, deserializedEvent);
}
private void testDAGCommitStartedEvent() throws Exception {
DAGCommitStartedEvent event = new DAGCommitStartedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 100l);
DAGCommitStartedEvent deserializedEvent =
(DAGCommitStartedEvent) testProtoConversion(event);
Assert.assertEquals(event.getDagID(), deserializedEvent.getDagID());
logEvents(event, deserializedEvent);
}
private void testVertexCommitStartedEvent() throws Exception {
VertexCommitStartedEvent event = new VertexCommitStartedEvent(
TezVertexID.getInstance(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1), 1), 100l);
VertexCommitStartedEvent deserializedEvent =
(VertexCommitStartedEvent) testProtoConversion(event);
Assert.assertEquals(event.getVertexID(), deserializedEvent.getVertexID());
logEvents(event, deserializedEvent);
}
private void testVertexGroupCommitStartedEvent() throws Exception {
VertexGroupCommitStartedEvent event = new VertexGroupCommitStartedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1),
"fooGroup", 1000344l);
{
VertexGroupCommitStartedEvent deserializedEvent =
(VertexGroupCommitStartedEvent) testProtoConversion(event);
Assert.assertEquals(event.getDagID(), deserializedEvent.getDagID());
Assert.assertEquals(event.getVertexGroupName(),
deserializedEvent.getVertexGroupName());
logEvents(event, deserializedEvent);
}
{
VertexGroupCommitStartedEvent deserializedEvent =
(VertexGroupCommitStartedEvent) testSummaryProtoConversion(event);
Assert.assertEquals(event.getVertexGroupName(),
deserializedEvent.getVertexGroupName());
logEvents(event, deserializedEvent);
}
}
private void testVertexGroupCommitFinishedEvent() throws Exception {
VertexGroupCommitFinishedEvent event = new VertexGroupCommitFinishedEvent(
TezDAGID.getInstance(ApplicationId.newInstance(0, 1), 1),
"fooGroup", 1000344l);
{
VertexGroupCommitFinishedEvent deserializedEvent =
(VertexGroupCommitFinishedEvent) testProtoConversion(event);
Assert.assertEquals(event.getDagID(), deserializedEvent.getDagID());
Assert.assertEquals(event.getVertexGroupName(),
deserializedEvent.getVertexGroupName());
logEvents(event, deserializedEvent);
}
{
VertexGroupCommitFinishedEvent deserializedEvent =
(VertexGroupCommitFinishedEvent) testSummaryProtoConversion(event);
Assert.assertEquals(event.getVertexGroupName(),
deserializedEvent.getVertexGroupName());
logEvents(event, deserializedEvent);
}
}
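
  /**
   * Walks every HistoryEventType and invokes the matching round-trip test;
   * an unhandled type falls through to the default case and fails the test,
   * so new event types must be covered here.
   */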
@Test
public void testDefaultProtoConversion() throws Exception {
for (HistoryEventType eventType : HistoryEventType.values()) {
switch (eventType) {
case APP_LAUNCHED:
testAppLaunchedEvent();
break;
case AM_LAUNCHED:
testAMLaunchedEvent();
break;
case AM_STARTED:
testAMStartedEvent();
break;
case DAG_SUBMITTED:
testDAGSubmittedEvent();
break;
case DAG_INITIALIZED:
testDAGInitializedEvent();
break;
case DAG_STARTED:
testDAGStartedEvent();
break;
case DAG_FINISHED:
testDAGFinishedEvent();
break;
case VERTEX_INITIALIZED:
testVertexInitializedEvent();
break;
case VERTEX_STARTED:
testVertexStartedEvent();
break;
case VERTEX_PARALLELISM_UPDATED:
testVertexParallelismUpdatedEvent();
break;
case VERTEX_FINISHED:
testVertexFinishedEvent();
break;
case TASK_STARTED:
testTaskStartedEvent();
break;
case TASK_FINISHED:
testTaskFinishedEvent();
break;
case TASK_ATTEMPT_STARTED:
testTaskAttemptStartedEvent();
break;
case TASK_ATTEMPT_FINISHED:
testTaskAttemptFinishedEvent();
break;
case CONTAINER_LAUNCHED:
testContainerLaunchedEvent();
break;
case CONTAINER_STOPPED:
testContainerStoppedEvent();
break;
case VERTEX_DATA_MOVEMENT_EVENTS_GENERATED:
testVertexDataMovementEventsGeneratedEvent();
break;
case DAG_COMMIT_STARTED:
testDAGCommitStartedEvent();
break;
case VERTEX_COMMIT_STARTED:
testVertexCommitStartedEvent();
break;
case VERTEX_GROUP_COMMIT_STARTED:
testVertexGroupCommitStartedEvent();
break;
case VERTEX_GROUP_COMMIT_FINISHED:
testVertexGroupCommitFinishedEvent();
break;
default:
throw new Exception("Unhandled Event type in Unit tests: " + eventType);
}
}
}
}
|
|
/*
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
*/
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
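
/**
 * Charset decoder for JIS X 0212 (Solaris variant). It delegates the actual
 * byte-pair lookup to DoubleByteDecoder, passing the index1/index2 tables and
 * the valid byte range 0x21-0x7E; the innerIndex* strings below hold the
 * Unicode mapping data, with unmapped positions filled by U+FFFD.
 */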
public class JIS_X_0212_Solaris_Decoder extends DoubleByteDecoder
{
public JIS_X_0212_Solaris_Decoder(Charset cs) {
super(cs,
index1,
index2,
0x21,
0x7E);
}
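
    // Unicode mapping data: each innerIndex* string is a block of the
    // JIS X 0212 to Unicode table, one char per code point, with U+FFFD
    // marking positions that have no assigned character.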
private final static String innerIndex0=
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\u02D8\u02C7\u00B8\u02D9"+
"\u02DD\u00AF\u02DB\u02DA\uFF5E\u0384\u0385\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\u00A1"+
"\u00A6\u00BF\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\u00BA\u00AA\u00A9\u00AE\u2122\u00A4\u2116\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\u0386\u0388\u0389\u038A"+
"\u03AA\uFFFD\u038C\uFFFD\u038E\u03AB\uFFFD\u038F"+
"\uFFFD\uFFFD\uFFFD\uFFFD\u03AC\u03AD\u03AE\u03AF"+
"\u03CA\u0390\u03CC\u03C2\u03CD\u03CB\u03B0\u03CE"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\u0402\u0403\u0404\u0405\u0406"+
"\u0407\u0408\u0409\u040A\u040B\u040C\u040E\u040F"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\u0452\u0453\u0454\u0455\u0456"+
"\u0457\u0458\u0459\u045A\u045B\u045C\u045E\u045F"+
"\u00C6\u0110\uFFFD\u0126\uFFFD\u0132\uFFFD\u0141"+
"\u013F\uFFFD\u014A\u00D8\u0152\uFFFD\u0166\u00DE"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\u00E6\u0111\u00F0\u0127\u0131\u0133\u0138\u0142"+
"\u0140\u0149\u014B\u00F8\u0153\u00DF\u0167\u00FE"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\u00C1\u00C0"+
"\u00C4\u00C2\u0102\u01CD\u0100\u0104\u00C5\u00C3"+
"\u0106\u0108\u010C\u00C7\u010A\u010E\u00C9\u00C8"+
"\u00CB\u00CA\u011A\u0116\u0112\u0118\uFFFD\u011C"+
"\u011E\u0122\u0120\u0124\u00CD\u00CC\u00CF\u00CE"+
"\u01CF\u0130\u012A\u012E\u0128\u0134\u0136\u0139"+
"\u013D\u013B\u0143\u0147\u0145\u00D1\u00D3\u00D2"+
"\u00D6\u00D4\u01D1\u0150\u014C\u00D5\u0154\u0158"+
"\u0156\u015A\u015C\u0160\u015E\u0164\u0162\u00DA"+
"\u00D9\u00DC\u00DB\u016C\u01D3\u0170\u016A\u0172"+
"\u016E\u0168\u01D7\u01DB\u01D9\u01D5\u0174\u00DD"+
"\u0178\u0176\u0179\u017D\u017B\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\u00E1\u00E0\u00E4\u00E2"+
"\u0103\u01CE\u0101\u0105\u00E5\u00E3\u0107\u0109"+
"\u010D\u00E7\u010B\u010F\u00E9\u00E8\u00EB\u00EA"+
"\u011B\u0117\u0113\u0119\u01F5\u011D\u011F\uFFFD"+
"\u0121\u0125\u00ED\u00EC\u00EF\u00EE\u01D0\uFFFD"+
"\u012B\u012F\u0129\u0135\u0137\u013A\u013E\u013C"+
"\u0144\u0148\u0146\u00F1\u00F3\u00F2\u00F6\u00F4"+
"\u01D2\u0151\u014D\u00F5\u0155\u0159\u0157\u015B"+
"\u015D\u0161\u015F\u0165\u0163\u00FA\u00F9\u00FC"+
"\u00FB\u016D\u01D4\u0171\u016B\u0173\u016F\u0169"+
"\u01D8\u01DC\u01DA\u01D6\u0175\u00FD\u00FF\u0177"+
"\u017A\u017E\u017C\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\u4E02\u4E04\u4E05\u4E0C\u4E12\u4E1F"+
"\u4E23\u4E24\u4E28\u4E2B\u4E2E\u4E2F\u4E30\u4E35"+
"\u4E40\u4E41\u4E44\u4E47\u4E51\u4E5A\u4E5C\u4E63"+
"\u4E68\u4E69\u4E74\u4E75\u4E79\u4E7F\u4E8D\u4E96"+
"\u4E97\u4E9D\u4EAF\u4EB9\u4EC3\u4ED0\u4EDA\u4EDB"+
"\u4EE0\u4EE1\u4EE2\u4EE8\u4EEF\u4EF1\u4EF3\u4EF5"+
"\u4EFD\u4EFE\u4EFF\u4F00\u4F02\u4F03\u4F08\u4F0B"+
"\u4F0C\u4F12\u4F15\u4F16\u4F17\u4F19\u4F2E\u4F31"+
"\u4F60\u4F33\u4F35\u4F37\u4F39\u4F3B\u4F3E\u4F40"+
"\u4F42\u4F48\u4F49\u4F4B\u4F4C\u4F52\u4F54\u4F56"+
"\u4F58\u4F5F\u4F63\u4F6A\u4F6C\u4F6E\u4F71\u4F77"+
"\u4F78\u4F79\u4F7A\u4F7D\u4F7E\u4F81\u4F82\u4F84"+
"\u4F85\u4F89\u4F8A\u4F8C\u4F8E\u4F90\u4F92\u4F93"+
"\u4F94\u4F97\u4F99\u4F9A\u4F9E\u4F9F\u4FB2\u4FB7"+
"\u4FB9\u4FBB\u4FBC\u4FBD\u4FBE\u4FC0\u4FC1\u4FC5"+
"\u4FC6\u4FC8\u4FC9\u4FCB\u4FCC\u4FCD\u4FCF\u4FD2"+
"\u4FDC\u4FE0\u4FE2\u4FF0\u4FF2\u4FFC\u4FFD\u4FFF"+
"\u5000\u5001\u5004\u5007\u500A\u500C\u500E\u5010"+
"\u5013\u5017\u5018\u501B\u501C\u501D\u501E\u5022"+
"\u5027\u502E\u5030\u5032\u5033\u5035\u5040\u5041"+
"\u5042\u5045\u5046\u504A\u504C\u504E\u5051\u5052"+
"\u5053\u5057\u5059\u505F\u5060\u5062\u5063\u5066"+
"\u5067\u506A\u506D\u5070\u5071\u503B\u5081\u5083"+
"\u5084\u5086\u508A\u508E\u508F\u5090\u5092\u5093"+
"\u5094\u5096\u509B\u509C\u509E\u509F\u50A0\u50A1"+
"\u50A2\u50AA\u50AF\u50B0\u50B9\u50BA\u50BD\u50C0"+
"\u50C3\u50C4\u50C7\u50CC\u50CE\u50D0\u50D3\u50D4"+
"\u50D8\u50DC\u50DD\u50DF\u50E2\u50E4\u50E6\u50E8"+
"\u50E9\u50EF\u50F1\u50F6\u50FA\u50FE\u5103\u5106"+
"\u5107\u5108\u510B\u510C\u510D\u510E\u50F2\u5110"+
"\u5117\u5119\u511B\u511C\u511D\u511E\u5123\u5127"+
"\u5128\u512C\u512D\u512F\u5131\u5133\u5134\u5135"+
"\u5138\u5139\u5142\u514A\u514F\u5153\u5155\u5157"+
"\u5158\u515F\u5164\u5166\u517E\u5183\u5184\u518B"+
"\u518E\u5198\u519D\u51A1\u51A3\u51AD\u51B8\u51BA"+
"\u51BC\u51BE\u51BF\u51C2\u51C8\u51CF\u51D1\u51D2"+
"\u51D3\u51D5\u51D8\u51DE\u51E2\u51E5\u51EE\u51F2"+
"\u51F3\u51F4\u51F7\u5201\u5202\u5205\u5212\u5213"+
"\u5215\u5216\u5218\u5222\u5228\u5231\u5232\u5235"+
"\u523C\u5245\u5249\u5255\u5257\u5258\u525A\u525C"+
"\u525F\u5260\u5261\u5266\u526E\u5277\u5278\u5279"+
"\u5280\u5282\u5285\u528A\u528C\u5293\u5295\u5296"+
"\u5297\u5298\u529A\u529C\u52A4\u52A5\u52A6\u52A7"+
"\u52AF\u52B0\u52B6\u52B7\u52B8\u52BA\u52BB\u52BD"+
"\u52C0\u52C4\u52C6\u52C8\u52CC\u52CF\u52D1\u52D4"+
"\u52D6\u52DB\u52DC\u52E1\u52E5\u52E8\u52E9\u52EA"+
"\u52EC\u52F0\u52F1\u52F4\u52F6\u52F7\u5300\u5303"+
"\u530A\u530B\u530C\u5311\u5313\u5318\u531B\u531C"+
"\u531E\u531F\u5325\u5327\u5328\u5329\u532B\u532C"+
"\u532D\u5330\u5332\u5335\u533C\u533D\u533E\u5342"+
"\u534C\u534B\u5359\u535B\u5361\u5363\u5365\u536C"+
"\u536D\u5372\u5379\u537E\u5383\u5387\u5388\u538E"+
"\u5393\u5394\u5399\u539D\u53A1\u53A4\u53AA\u53AB"+
"\u53AF\u53B2\u53B4\u53B5\u53B7\u53B8\u53BA\u53BD"+
"\u53C0\u53C5\u53CF\u53D2\u53D3\u53D5\u53DA\u53DD"+
"\u53DE\u53E0\u53E6\u53E7\u53F5\u5402\u5413\u541A"+
"\u5421\u5427\u5428\u542A\u542F\u5431\u5434\u5435"+
"\u5443\u5444\u5447\u544D\u544F\u545E\u5462\u5464"+
"\u5466\u5467\u5469\u546B\u546D\u546E\u5474\u547F"+
"\u5481\u5483\u5485\u5488\u5489\u548D\u5491\u5495"+
"\u5496\u549C\u549F\u54A1\u54A6\u54A7\u54A9\u54AA"+
"\u54AD\u54AE\u54B1\u54B7\u54B9\u54BA\u54BB\u54BF"+
"\u54C6\u54CA\u54CD\u54CE\u54E0\u54EA\u54EC\u54EF"+
"\u54F6\u54FC\u54FE\u54FF\u5500\u5501\u5505\u5508"+
"\u5509\u550C\u550D\u550E\u5515\u552A\u552B\u5532"+
"\u5535\u5536\u553B\u553C\u553D\u5541\u5547\u5549"+
"\u554A\u554D\u5550\u5551\u5558\u555A\u555B\u555E"+
"\u5560\u5561\u5564\u5566\u557F\u5581\u5582\u5586"+
"\u5588\u558E\u558F\u5591\u5592\u5593\u5594\u5597"+
"\u55A3\u55A4\u55AD\u55B2\u55BF\u55C1\u55C3\u55C6"+
"\u55C9\u55CB\u55CC\u55CE\u55D1\u55D2\u55D3\u55D7"+
"\u55D8\u55DB\u55DE\u55E2\u55E9\u55F6\u55FF\u5605"+
"\u5608\u560A\u560D\u560E\u560F\u5610\u5611\u5612"+
"\u5619\u562C\u5630\u5633\u5635\u5637\u5639\u563B"+
"\u563C\u563D\u563F\u5640\u5641\u5643\u5644\u5646"+
"\u5649\u564B\u564D\u564F\u5654\u565E\u5660\u5661"+
"\u5662\u5663\u5666\u5669\u566D\u566F\u5671\u5672"+
"\u5675\u5684\u5685\u5688\u568B\u568C\u5695\u5699"+
"\u569A\u569D\u569E\u569F\u56A6\u56A7\u56A8\u56A9"+
"\u56AB\u56AC\u56AD\u56B1\u56B3\u56B7\u56BE\u56C5"+
"\u56C9\u56CA\u56CB\u56CF\u56D0\u56CC\u56CD\u56D9"+
"\u56DC\u56DD\u56DF\u56E1\u56E4\u56E5\u56E6\u56E7"+
"\u56E8\u56F1\u56EB\u56ED\u56F6\u56F7\u5701\u5702"+
"\u5707\u570A\u570C\u5711\u5715\u571A\u571B\u571D"+
"\u5720\u5722\u5723\u5724\u5725\u5729\u572A\u572C"+
"\u572E\u572F\u5733\u5734\u573D\u573E\u573F\u5745"+
"\u5746\u574C\u574D\u5752\u5762\u5765\u5767\u5768"+
"\u576B\u576D\u576E\u576F\u5770\u5771\u5773\u5774"+
"\u5775\u5777\u5779\u577A\u577B\u577C\u577E\u5781"+
"\u5783\u578C\u5794\u5797\u5799\u579A\u579C\u579D"+
"\u579E\u579F\u57A1\u5795\u57A7\u57A8\u57A9\u57AC"+
"\u57B8\u57BD\u57C7\u57C8\u57CC\u57CF\u57D5\u57DD"+
"\u57DE\u57E4\u57E6\u57E7\u57E9\u57ED\u57F0\u57F5"+
"\u57F6\u57F8\u57FD\u57FE\u57FF\u5803\u5804\u5808"+
"\u5809\u57E1\u580C\u580D\u581B\u581E\u581F\u5820"+
"\u5826\u5827\u582D\u5832\u5839\u583F\u5849\u584C"+
"\u584D\u584F\u5850\u5855\u585F\u5861\u5864\u5867"+
"\u5868\u5878\u587C\u587F\u5880\u5881\u5887\u5888"+
"\u5889\u588A\u588C\u588D\u588F\u5890\u5894\u5896"+
"\u589D\u58A0\u58A1\u58A2\u58A6\u58A9\u58B1\u58B2"+
"\u58C4\u58BC\u58C2\u58C8\u58CD\u58CE\u58D0\u58D2"+
"\u58D4\u58D6\u58DA\u58DD\u58E1\u58E2\u58E9\u58F3"+
"\u5905\u5906\u590B\u590C\u5912\u5913\u5914\u8641"+
"\u591D\u5921\u5923\u5924\u5928\u592F\u5930\u5933"+
"\u5935\u5936\u593F\u5943\u5946\u5952\u5953\u5959"+
"\u595B\u595D\u595E\u595F\u5961\u5963\u596B\u596D";
private final static String innerIndex1=
"\u596F\u5972\u5975\u5976\u5979\u597B\u597C\u598B"+
"\u598C\u598E\u5992\u5995\u5997\u599F\u59A4\u59A7"+
"\u59AD\u59AE\u59AF\u59B0\u59B3\u59B7\u59BA\u59BC"+
"\u59C1\u59C3\u59C4\u59C8\u59CA\u59CD\u59D2\u59DD"+
"\u59DE\u59DF\u59E3\u59E4\u59E7\u59EE\u59EF\u59F1"+
"\u59F2\u59F4\u59F7\u5A00\u5A04\u5A0C\u5A0D\u5A0E"+
"\u5A12\u5A13\u5A1E\u5A23\u5A24\u5A27\u5A28\u5A2A"+
"\u5A2D\u5A30\u5A44\u5A45\u5A47\u5A48\u5A4C\u5A50"+
"\u5A55\u5A5E\u5A63\u5A65\u5A67\u5A6D\u5A77\u5A7A"+
"\u5A7B\u5A7E\u5A8B\u5A90\u5A93\u5A96\u5A99\u5A9C"+
"\u5A9E\u5A9F\u5AA0\u5AA2\u5AA7\u5AAC\u5AB1\u5AB2"+
"\u5AB3\u5AB5\u5AB8\u5ABA\u5ABB\u5ABF\u5AC4\u5AC6"+
"\u5AC8\u5ACF\u5ADA\u5ADC\u5AE0\u5AE5\u5AEA\u5AEE"+
"\u5AF5\u5AF6\u5AFD\u5B00\u5B01\u5B08\u5B17\u5B34"+
"\u5B19\u5B1B\u5B1D\u5B21\u5B25\u5B2D\u5B38\u5B41"+
"\u5B4B\u5B4C\u5B52\u5B56\u5B5E\u5B68\u5B6E\u5B6F"+
"\u5B7C\u5B7D\u5B7E\u5B7F\u5B81\u5B84\u5B86\u5B8A"+
"\u5B8E\u5B90\u5B91\u5B93\u5B94\u5B96\u5BA8\u5BA9"+
"\u5BAC\u5BAD\u5BAF\u5BB1\u5BB2\u5BB7\u5BBA\u5BBC"+
"\u5BC0\u5BC1\u5BCD\u5BCF\u5BD6\u5BD7\u5BD8\u5BD9"+
"\u5BDA\u5BE0\u5BEF\u5BF1\u5BF4\u5BFD\u5C0C\u5C17"+
"\u5C1E\u5C1F\u5C23\u5C26\u5C29\u5C2B\u5C2C\u5C2E"+
"\u5C30\u5C32\u5C35\u5C36\u5C59\u5C5A\u5C5C\u5C62"+
"\u5C63\u5C67\u5C68\u5C69\u5C6D\u5C70\u5C74\u5C75"+
"\u5C7A\u5C7B\u5C7C\u5C7D\u5C87\u5C88\u5C8A\u5C8F"+
"\u5C92\u5C9D\u5C9F\u5CA0\u5CA2\u5CA3\u5CA6\u5CAA"+
"\u5CB2\u5CB4\u5CB5\u5CBA\u5CC9\u5CCB\u5CD2\u5CDD"+
"\u5CD7\u5CEE\u5CF1\u5CF2\u5CF4\u5D01\u5D06\u5D0D"+
"\u5D12\u5D2B\u5D23\u5D24\u5D26\u5D27\u5D31\u5D34"+
"\u5D39\u5D3D\u5D3F\u5D42\u5D43\u5D46\u5D48\u5D55"+
"\u5D51\u5D59\u5D4A\u5D5F\u5D60\u5D61\u5D62\u5D64"+
"\u5D6A\u5D6D\u5D70\u5D79\u5D7A\u5D7E\u5D7F\u5D81"+
"\u5D83\u5D88\u5D8A\u5D92\u5D93\u5D94\u5D95\u5D99"+
"\u5D9B\u5D9F\u5DA0\u5DA7\u5DAB\u5DB0\u5DB4\u5DB8"+
"\u5DB9\u5DC3\u5DC7\u5DCB\u5DD0\u5DCE\u5DD8\u5DD9"+
"\u5DE0\u5DE4\u5DE9\u5DF8\u5DF9\u5E00\u5E07\u5E0D"+
"\u5E12\u5E14\u5E15\u5E18\u5E1F\u5E20\u5E2E\u5E28"+
"\u5E32\u5E35\u5E3E\u5E4B\u5E50\u5E49\u5E51\u5E56"+
"\u5E58\u5E5B\u5E5C\u5E5E\u5E68\u5E6A\u5E6B\u5E6C"+
"\u5E6D\u5E6E\u5E70\u5E80\u5E8B\u5E8E\u5EA2\u5EA4"+
"\u5EA5\u5EA8\u5EAA\u5EAC\u5EB1\u5EB3\u5EBD\u5EBE"+
"\u5EBF\u5EC6\u5ECC\u5ECB\u5ECE\u5ED1\u5ED2\u5ED4"+
"\u5ED5\u5EDC\u5EDE\u5EE5\u5EEB\u5F02\u5F06\u5F07"+
"\u5F08\u5F0E\u5F19\u5F1C\u5F1D\u5F21\u5F22\u5F23"+
"\u5F24\u5F28\u5F2B\u5F2C\u5F2E\u5F30\u5F34\u5F36"+
"\u5F3B\u5F3D\u5F3F\u5F40\u5F44\u5F45\u5F47\u5F4D"+
"\u5F50\u5F54\u5F58\u5F5B\u5F60\u5F63\u5F64\u5F67"+
"\u5F6F\u5F72\u5F74\u5F75\u5F78\u5F7A\u5F7D\u5F7E"+
"\u5F89\u5F8D\u5F8F\u5F96\u5F9C\u5F9D\u5FA2\u5FA7"+
"\u5FAB\u5FA4\u5FAC\u5FAF\u5FB0\u5FB1\u5FB8\u5FC4"+
"\u5FC7\u5FC8\u5FC9\u5FCB\u5FD0\u5FD1\u5FD2\u5FD3"+
"\u5FD4\u5FDE\u5FE1\u5FE2\u5FE8\u5FE9\u5FEA\u5FEC"+
"\u5FED\u5FEE\u5FEF\u5FF2\u5FF3\u5FF6\u5FFA\u5FFC"+
"\u6007\u600A\u600D\u6013\u6014\u6017\u6018\u601A"+
"\u601F\u6024\u602D\u6033\u6035\u6040\u6047\u6048"+
"\u6049\u604C\u6051\u6054\u6056\u6057\u605D\u6061"+
"\u6067\u6071\u607E\u607F\u6082\u6086\u6088\u608A"+
"\u608E\u6091\u6093\u6095\u6098\u609D\u609E\u60A2"+
"\u60A4\u60A5\u60A8\u60B0\u60B1\u60B7\u60BB\u60BE"+
"\u60C2\u60C4\u60C8\u60C9\u60CA\u60CB\u60CE\u60CF"+
"\u60D4\u60D5\u60D9\u60DB\u60DD\u60DE\u60E2\u60E5"+
"\u60F2\u60F5\u60F8\u60FC\u60FD\u6102\u6107\u610A"+
"\u610C\u6110\u6111\u6112\u6113\u6114\u6116\u6117"+
"\u6119\u611C\u611E\u6122\u612A\u612B\u6130\u6131"+
"\u6135\u6136\u6137\u6139\u6141\u6145\u6146\u6149"+
"\u615E\u6160\u616C\u6172\u6178\u617B\u617C\u617F"+
"\u6180\u6181\u6183\u6184\u618B\u618D\u6192\u6193"+
"\u6197\u6198\u619C\u619D\u619F\u61A0\u61A5\u61A8"+
"\u61AA\u61AD\u61B8\u61B9\u61BC\u61C0\u61C1\u61C2"+
"\u61CE\u61CF\u61D5\u61DC\u61DD\u61DE\u61DF\u61E1"+
"\u61E2\u61E7\u61E9\u61E5\u61EC\u61ED\u61EF\u6201"+
"\u6203\u6204\u6207\u6213\u6215\u621C\u6220\u6222"+
"\u6223\u6227\u6229\u622B\u6239\u623D\u6242\u6243"+
"\u6244\u6246\u624C\u6250\u6251\u6252\u6254\u6256"+
"\u625A\u625C\u6264\u626D\u626F\u6273\u627A\u627D"+
"\u628D\u628E\u628F\u6290\u62A6\u62A8\u62B3\u62B6"+
"\u62B7\u62BA\u62BE\u62BF\u62C4\u62CE\u62D5\u62D6"+
"\u62DA\u62EA\u62F2\u62F4\u62FC\u62FD\u6303\u6304"+
"\u630A\u630B\u630D\u6310\u6313\u6316\u6318\u6329"+
"\u632A\u632D\u6335\u6336\u6339\u633C\u6341\u6342"+
"\u6343\u6344\u6346\u634A\u634B\u634E\u6352\u6353"+
"\u6354\u6358\u635B\u6365\u6366\u636C\u636D\u6371"+
"\u6374\u6375\u6378\u637C\u637D\u637F\u6382\u6384"+
"\u6387\u638A\u6390\u6394\u6395\u6399\u639A\u639E"+
"\u63A4\u63A6\u63AD\u63AE\u63AF\u63BD\u63C1\u63C5"+
"\u63C8\u63CE\u63D1\u63D3\u63D4\u63D5\u63DC\u63E0"+
"\u63E5\u63EA\u63EC\u63F2\u63F3\u63F5\u63F8\u63F9"+
"\u6409\u640A\u6410\u6412\u6414\u6418\u641E\u6420"+
"\u6422\u6424\u6425\u6429\u642A\u642F\u6430\u6435"+
"\u643D\u643F\u644B\u644F\u6451\u6452\u6453\u6454"+
"\u645A\u645B\u645C\u645D\u645F\u6460\u6461\u6463"+
"\u646D\u6473\u6474\u647B\u647D\u6485\u6487\u648F"+
"\u6490\u6491\u6498\u6499\u649B\u649D\u649F\u64A1"+
"\u64A3\u64A6\u64A8\u64AC\u64B3\u64BD\u64BE\u64BF"+
"\u64C4\u64C9\u64CA\u64CB\u64CC\u64CE\u64D0\u64D1"+
"\u64D5\u64D7\u64E4\u64E5\u64E9\u64EA\u64ED\u64F0"+
"\u64F5\u64F7\u64FB\u64FF\u6501\u6504\u6508\u6509"+
"\u650A\u650F\u6513\u6514\u6516\u6519\u651B\u651E"+
"\u651F\u6522\u6526\u6529\u652E\u6531\u653A\u653C"+
"\u653D\u6543\u6547\u6549\u6550\u6552\u6554\u655F"+
"\u6560\u6567\u656B\u657A\u657D\u6581\u6585\u658A"+
"\u6592\u6595\u6598\u659D\u65A0\u65A3\u65A6\u65AE"+
"\u65B2\u65B3\u65B4\u65BF\u65C2\u65C8\u65C9\u65CE"+
"\u65D0\u65D4\u65D6\u65D8\u65DF\u65F0\u65F2\u65F4"+
"\u65F5\u65F9\u65FE\u65FF\u6600\u6604\u6608\u6609"+
"\u660D\u6611\u6612\u6615\u6616\u661D\u661E\u6621"+
"\u6622\u6623\u6624\u6626\u6629\u662A\u662B\u662C"+
"\u662E\u6630\u6631\u6633\u6639\u6637\u6640\u6645"+
"\u6646\u664A\u664C\u6651\u664E\u6657\u6658\u6659"+
"\u665B\u665C\u6660\u6661\u66FB\u666A\u666B\u666C"+
"\u667E\u6673\u6675\u667F\u6677\u6678\u6679\u667B"+
"\u6680\u667C\u668B\u668C\u668D\u6690\u6692\u6699"+
"\u669A\u669B\u669C\u669F\u66A0\u66A4\u66AD\u66B1"+
"\u66B2\u66B5\u66BB\u66BF\u66C0\u66C2\u66C3\u66C8"+
"\u66CC\u66CE\u66CF\u66D4\u66DB\u66DF\u66E8\u66EB"+
"\u66EC\u66EE\u66FA\u6705\u6707\u670E\u6713\u6719"+
"\u671C\u6720\u6722\u6733\u673E\u6745\u6747\u6748"+
"\u674C\u6754\u6755\u675D\u6766\u676C\u676E\u6774"+
"\u6776\u677B\u6781\u6784\u678E\u678F\u6791\u6793"+
"\u6796\u6798\u6799\u679B\u67B0\u67B1\u67B2\u67B5"+
"\u67BB\u67BC\u67BD\u67F9\u67C0\u67C2\u67C3\u67C5"+
"\u67C8\u67C9\u67D2\u67D7\u67D9\u67DC\u67E1\u67E6"+
"\u67F0\u67F2\u67F6\u67F7\u6852\u6814\u6819\u681D"+
"\u681F\u6828\u6827\u682C\u682D\u682F\u6830\u6831"+
"\u6833\u683B\u683F\u6844\u6845\u684A\u684C\u6855"+
"\u6857\u6858\u685B\u686B\u686E\u686F\u6870\u6871"+
"\u6872\u6875\u6879\u687A\u687B\u687C\u6882\u6884"+
"\u6886\u6888\u6896\u6898\u689A\u689C\u68A1\u68A3"+
"\u68A5\u68A9\u68AA\u68AE\u68B2\u68BB\u68C5\u68C8"+
"\u68CC\u68CF\u68D0\u68D1\u68D3\u68D6\u68D9\u68DC"+
"\u68DD\u68E5\u68E8\u68EA\u68EB\u68EC\u68ED\u68F0"+
"\u68F1\u68F5\u68F6\u68FB\u68FC\u68FD\u6906\u6909"+
"\u690A\u6910\u6911\u6913\u6916\u6917\u6931\u6933"+
"\u6935\u6938\u693B\u6942\u6945\u6949\u694E\u6957"+
"\u695B\u6963\u6964\u6965\u6966\u6968\u6969\u696C"+
"\u6970\u6971\u6972\u697A\u697B\u697F\u6980\u698D"+
"\u6992\u6996\u6998\u69A1\u69A5\u69A6\u69A8\u69AB"+
"\u69AD\u69AF\u69B7\u69B8\u69BA\u69BC\u69C5\u69C8"+
"\u69D1\u69D6\u69D7\u69E2\u69E5\u69EE\u69EF\u69F1"+
"\u69F3\u69F5\u69FE\u6A00\u6A01\u6A03\u6A0F\u6A11"+
"\u6A15\u6A1A\u6A1D\u6A20\u6A24\u6A28\u6A30\u6A32"+
"\u6A34\u6A37\u6A3B\u6A3E\u6A3F\u6A45\u6A46\u6A49"+
"\u6A4A\u6A4E\u6A50\u6A51\u6A52\u6A55\u6A56\u6A5B"+
"\u6A64\u6A67\u6A6A\u6A71\u6A73\u6A7E\u6A81\u6A83"+
"\u6A86\u6A87\u6A89\u6A8B\u6A91\u6A9B\u6A9D\u6A9E"+
"\u6A9F\u6AA5\u6AAB\u6AAF\u6AB0\u6AB1\u6AB4\u6ABD"+
"\u6ABE\u6ABF\u6AC6\u6AC9\u6AC8\u6ACC\u6AD0\u6AD4"+
"\u6AD5\u6AD6\u6ADC\u6ADD\u6AE4\u6AE7\u6AEC\u6AF0"+
"\u6AF1\u6AF2\u6AFC\u6AFD\u6B02\u6B03\u6B06\u6B07"+
"\u6B09\u6B0F\u6B10\u6B11\u6B17\u6B1B\u6B1E\u6B24"+
"\u6B28\u6B2B\u6B2C\u6B2F\u6B35\u6B36\u6B3B\u6B3F"+
"\u6B46\u6B4A\u6B4D\u6B52\u6B56\u6B58\u6B5D\u6B60"+
"\u6B67\u6B6B\u6B6E\u6B70\u6B75\u6B7D\u6B7E\u6B82"+
"\u6B85\u6B97\u6B9B\u6B9F\u6BA0\u6BA2\u6BA3\u6BA8"+
"\u6BA9\u6BAC\u6BAD\u6BAE\u6BB0\u6BB8\u6BB9\u6BBD"+
"\u6BBE\u6BC3\u6BC4\u6BC9\u6BCC\u6BD6\u6BDA\u6BE1"+
"\u6BE3\u6BE6\u6BE7\u6BEE\u6BF1\u6BF7\u6BF9\u6BFF"+
"\u6C02\u6C04\u6C05\u6C09\u6C0D\u6C0E\u6C10\u6C12"+
"\u6C19\u6C1F\u6C26\u6C27\u6C28\u6C2C\u6C2E\u6C33"+
"\u6C35\u6C36\u6C3A\u6C3B\u6C3F\u6C4A\u6C4B\u6C4D"+
"\u6C4F\u6C52\u6C54\u6C59\u6C5B\u6C5C\u6C6B\u6C6D"+
"\u6C6F\u6C74\u6C76\u6C78\u6C79\u6C7B\u6C85\u6C86"+
"\u6C87\u6C89\u6C94\u6C95\u6C97\u6C98\u6C9C\u6C9F"+
"\u6CB0\u6CB2\u6CB4\u6CC2\u6CC6\u6CCD\u6CCF\u6CD0"+
"\u6CD1\u6CD2\u6CD4\u6CD6\u6CDA\u6CDC\u6CE0\u6CE7"+
"\u6CE9\u6CEB\u6CEC\u6CEE\u6CF2\u6CF4\u6D04\u6D07"+
"\u6D0A\u6D0E\u6D0F\u6D11\u6D13\u6D1A\u6D26\u6D27"+
"\u6D28\u6C67\u6D2E\u6D2F\u6D31\u6D39\u6D3C\u6D3F"+
"\u6D57\u6D5E\u6D5F\u6D61\u6D65\u6D67\u6D6F\u6D70"+
"\u6D7C\u6D82\u6D87\u6D91\u6D92\u6D94\u6D96\u6D97"+
"\u6D98\u6DAA\u6DAC\u6DB4\u6DB7\u6DB9\u6DBD\u6DBF"+
"\u6DC4\u6DC8\u6DCA\u6DCE\u6DCF\u6DD6\u6DDB\u6DDD"+
"\u6DDF\u6DE0\u6DE2\u6DE5\u6DE9\u6DEF\u6DF0\u6DF4"+
"\u6DF6\u6DFC\u6E00\u6E04\u6E1E\u6E22\u6E27\u6E32"+
"\u6E36\u6E39\u6E3B\u6E3C\u6E44\u6E45\u6E48\u6E49"+
"\u6E4B\u6E4F\u6E51\u6E52\u6E53\u6E54\u6E57\u6E5C"+
"\u6E5D\u6E5E\u6E62\u6E63\u6E68\u6E73\u6E7B\u6E7D"+
"\u6E8D\u6E93\u6E99\u6EA0\u6EA7\u6EAD\u6EAE\u6EB1"+
"\u6EB3\u6EBB\u6EBF\u6EC0\u6EC1\u6EC3\u6EC7\u6EC8"+
"\u6ECA\u6ECD\u6ECE\u6ECF\u6EEB\u6EED\u6EEE\u6EF9"+
"\u6EFB\u6EFD\u6F04\u6F08\u6F0A\u6F0C\u6F0D\u6F16"+
"\u6F18\u6F1A\u6F1B\u6F26\u6F29\u6F2A\u6F2F\u6F30"+
"\u6F33\u6F36\u6F3B\u6F3C\u6F2D\u6F4F\u6F51\u6F52"+
"\u6F53\u6F57\u6F59\u6F5A\u6F5D\u6F5E\u6F61\u6F62"+
"\u6F68\u6F6C\u6F7D\u6F7E\u6F83\u6F87\u6F88\u6F8B"+
"\u6F8C\u6F8D\u6F90\u6F92\u6F93\u6F94\u6F96\u6F9A"+
"\u6F9F\u6FA0\u6FA5\u6FA6\u6FA7\u6FA8\u6FAE\u6FAF"+
"\u6FB0\u6FB5\u6FB6\u6FBC\u6FC5\u6FC7\u6FC8\u6FCA";
private final static String innerIndex2=
"\u6FDA\u6FDE\u6FE8\u6FE9\u6FF0\u6FF5\u6FF9\u6FFC"+
"\u6FFD\u7000\u7005\u7006\u7007\u700D\u7017\u7020"+
"\u7023\u702F\u7034\u7037\u7039\u703C\u7043\u7044"+
"\u7048\u7049\u704A\u704B\u7054\u7055\u705D\u705E"+
"\u704E\u7064\u7065\u706C\u706E\u7075\u7076\u707E"+
"\u7081\u7085\u7086\u7094\u7095\u7096\u7097\u7098"+
"\u709B\u70A4\u70AB\u70B0\u70B1\u70B4\u70B7\u70CA"+
"\u70D1\u70D3\u70D4\u70D5\u70D6\u70D8\u70DC\u70E4"+
"\u70FA\u7103\u7104\u7105\u7106\u7107\u710B\u710C"+
"\u710F\u711E\u7120\u712B\u712D\u712F\u7130\u7131"+
"\u7138\u7141\u7145\u7146\u7147\u714A\u714B\u7150"+
"\u7152\u7157\u715A\u715C\u715E\u7160\u7168\u7179"+
"\u7180\u7185\u7187\u718C\u7192\u719A\u719B\u71A0"+
"\u71A2\u71AF\u71B0\u71B2\u71B3\u71BA\u71BF\u71C0"+
"\u71C1\u71C4\u71CB\u71CC\u71D3\u71D6\u71D9\u71DA"+
"\u71DC\u71F8\u71FE\u7200\u7207\u7208\u7209\u7213"+
"\u7217\u721A\u721D\u721F\u7224\u722B\u722F\u7234"+
"\u7238\u7239\u7241\u7242\u7243\u7245\u724E\u724F"+
"\u7250\u7253\u7255\u7256\u725A\u725C\u725E\u7260"+
"\u7263\u7268\u726B\u726E\u726F\u7271\u7277\u7278"+
"\u727B\u727C\u727F\u7284\u7289\u728D\u728E\u7293"+
"\u729B\u72A8\u72AD\u72AE\u72B1\u72B4\u72BE\u72C1"+
"\u72C7\u72C9\u72CC\u72D5\u72D6\u72D8\u72DF\u72E5"+
"\u72F3\u72F4\u72FA\u72FB\u72FE\u7302\u7304\u7305"+
"\u7307\u730B\u730D\u7312\u7313\u7318\u7319\u731E"+
"\u7322\u7324\u7327\u7328\u732C\u7331\u7332\u7335"+
"\u733A\u733B\u733D\u7343\u734D\u7350\u7352\u7356"+
"\u7358\u735D\u735E\u735F\u7360\u7366\u7367\u7369"+
"\u736B\u736C\u736E\u736F\u7371\u7377\u7379\u737C"+
"\u7380\u7381\u7383\u7385\u7386\u738E\u7390\u7393"+
"\u7395\u7397\u7398\u739C\u739E\u739F\u73A0\u73A2"+
"\u73A5\u73A6\u73AA\u73AB\u73AD\u73B5\u73B7\u73B9"+
"\u73BC\u73BD\u73BF\u73C5\u73C6\u73C9\u73CB\u73CC"+
"\u73CF\u73D2\u73D3\u73D6\u73D9\u73DD\u73E1\u73E3"+
"\u73E6\u73E7\u73E9\u73F4\u73F5\u73F7\u73F9\u73FA"+
"\u73FB\u73FD\u73FF\u7400\u7401\u7404\u7407\u740A"+
"\u7411\u741A\u741B\u7424\u7426\u7428\u7429\u742A"+
"\u742B\u742C\u742D\u742E\u742F\u7430\u7431\u7439"+
"\u7440\u7443\u7444\u7446\u7447\u744B\u744D\u7451"+
"\u7452\u7457\u745D\u7462\u7466\u7467\u7468\u746B"+
"\u746D\u746E\u7471\u7472\u7480\u7481\u7485\u7486"+
"\u7487\u7489\u748F\u7490\u7491\u7492\u7498\u7499"+
"\u749A\u749C\u749F\u74A0\u74A1\u74A3\u74A6\u74A8"+
"\u74A9\u74AA\u74AB\u74AE\u74AF\u74B1\u74B2\u74B5"+
"\u74B9\u74BB\u74BF\u74C8\u74C9\u74CC\u74D0\u74D3"+
"\u74D8\u74DA\u74DB\u74DE\u74DF\u74E4\u74E8\u74EA"+
"\u74EB\u74EF\u74F4\u74FA\u74FB\u74FC\u74FF\u7506"+
"\u7512\u7516\u7517\u7520\u7521\u7524\u7527\u7529"+
"\u752A\u752F\u7536\u7539\u753D\u753E\u753F\u7540"+
"\u7543\u7547\u7548\u754E\u7550\u7552\u7557\u755E"+
"\u755F\u7561\u756F\u7571\u7579\u757A\u757B\u757C"+
"\u757D\u757E\u7581\u7585\u7590\u7592\u7593\u7595"+
"\u7599\u759C\u75A2\u75A4\u75B4\u75BA\u75BF\u75C0"+
"\u75C1\u75C4\u75C6\u75CC\u75CE\u75CF\u75D7\u75DC"+
"\u75DF\u75E0\u75E1\u75E4\u75E7\u75EC\u75EE\u75EF"+
"\u75F1\u75F9\u7600\u7602\u7603\u7604\u7607\u7608"+
"\u760A\u760C\u760F\u7612\u7613\u7615\u7616\u7619"+
"\u761B\u761C\u761D\u761E\u7623\u7625\u7626\u7629"+
"\u762D\u7632\u7633\u7635\u7638\u7639\u763A\u763C"+
"\u764A\u7640\u7641\u7643\u7644\u7645\u7649\u764B"+
"\u7655\u7659\u765F\u7664\u7665\u766D\u766E\u766F"+
"\u7671\u7674\u7681\u7685\u768C\u768D\u7695\u769B"+
"\u769C\u769D\u769F\u76A0\u76A2\u76A3\u76A4\u76A5"+
"\u76A6\u76A7\u76A8\u76AA\u76AD\u76BD\u76C1\u76C5"+
"\u76C9\u76CB\u76CC\u76CE\u76D4\u76D9\u76E0\u76E6"+
"\u76E8\u76EC\u76F0\u76F1\u76F6\u76F9\u76FC\u7700"+
"\u7706\u770A\u770E\u7712\u7714\u7715\u7717\u7719"+
"\u771A\u771C\u7722\u7728\u772D\u772E\u772F\u7734"+
"\u7735\u7736\u7739\u773D\u773E\u7742\u7745\u7746"+
"\u774A\u774D\u774E\u774F\u7752\u7756\u7757\u775C"+
"\u775E\u775F\u7760\u7762\u7764\u7767\u776A\u776C"+
"\u7770\u7772\u7773\u7774\u777A\u777D\u7780\u7784"+
"\u778C\u778D\u7794\u7795\u7796\u779A\u779F\u77A2"+
"\u77A7\u77AA\u77AE\u77AF\u77B1\u77B5\u77BE\u77C3"+
"\u77C9\u77D1\u77D2\u77D5\u77D9\u77DE\u77DF\u77E0"+
"\u77E4\u77E6\u77EA\u77EC\u77F0\u77F1\u77F4\u77F8"+
"\u77FB\u7805\u7806\u7809\u780D\u780E\u7811\u781D"+
"\u7821\u7822\u7823\u782D\u782E\u7830\u7835\u7837"+
"\u7843\u7844\u7847\u7848\u784C\u784E\u7852\u785C"+
"\u785E\u7860\u7861\u7863\u7864\u7868\u786A\u786E"+
"\u787A\u787E\u788A\u788F\u7894\u7898\u78A1\u789D"+
"\u789E\u789F\u78A4\u78A8\u78AC\u78AD\u78B0\u78B1"+
"\u78B2\u78B3\u78BB\u78BD\u78BF\u78C7\u78C8\u78C9"+
"\u78CC\u78CE\u78D2\u78D3\u78D5\u78D6\u78E4\u78DB"+
"\u78DF\u78E0\u78E1\u78E6\u78EA\u78F2\u78F3\u7900"+
"\u78F6\u78F7\u78FA\u78FB\u78FF\u7906\u790C\u7910"+
"\u791A\u791C\u791E\u791F\u7920\u7925\u7927\u7929"+
"\u792D\u7931\u7934\u7935\u793B\u793D\u793F\u7944"+
"\u7945\u7946\u794A\u794B\u794F\u7951\u7954\u7958"+
"\u795B\u795C\u7967\u7969\u796B\u7972\u7979\u797B"+
"\u797C\u797E\u798B\u798C\u7991\u7993\u7994\u7995"+
"\u7996\u7998\u799B\u799C\u79A1\u79A8\u79A9\u79AB"+
"\u79AF\u79B1\u79B4\u79B8\u79BB\u79C2\u79C4\u79C7"+
"\u79C8\u79CA\u79CF\u79D4\u79D6\u79DA\u79DD\u79DE"+
"\u79E0\u79E2\u79E5\u79EA\u79EB\u79ED\u79F1\u79F8"+
"\u79FC\u7A02\u7A03\u7A07\u7A09\u7A0A\u7A0C\u7A11"+
"\u7A15\u7A1B\u7A1E\u7A21\u7A27\u7A2B\u7A2D\u7A2F"+
"\u7A30\u7A34\u7A35\u7A38\u7A39\u7A3A\u7A44\u7A45"+
"\u7A47\u7A48\u7A4C\u7A55\u7A56\u7A59\u7A5C\u7A5D"+
"\u7A5F\u7A60\u7A65\u7A67\u7A6A\u7A6D\u7A75\u7A78"+
"\u7A7E\u7A80\u7A82\u7A85\u7A86\u7A8A\u7A8B\u7A90"+
"\u7A91\u7A94\u7A9E\u7AA0\u7AA3\u7AAC\u7AB3\u7AB5"+
"\u7AB9\u7ABB\u7ABC\u7AC6\u7AC9\u7ACC\u7ACE\u7AD1"+
"\u7ADB\u7AE8\u7AE9\u7AEB\u7AEC\u7AF1\u7AF4\u7AFB"+
"\u7AFD\u7AFE\u7B07\u7B14\u7B1F\u7B23\u7B27\u7B29"+
"\u7B2A\u7B2B\u7B2D\u7B2E\u7B2F\u7B30\u7B31\u7B34"+
"\u7B3D\u7B3F\u7B40\u7B41\u7B47\u7B4E\u7B55\u7B60"+
"\u7B64\u7B66\u7B69\u7B6A\u7B6D\u7B6F\u7B72\u7B73"+
"\u7B77\u7B84\u7B89\u7B8E\u7B90\u7B91\u7B96\u7B9B"+
"\u7B9E\u7BA0\u7BA5\u7BAC\u7BAF\u7BB0\u7BB2\u7BB5"+
"\u7BB6\u7BBA\u7BBB\u7BBC\u7BBD\u7BC2\u7BC5\u7BC8"+
"\u7BCA\u7BD4\u7BD6\u7BD7\u7BD9\u7BDA\u7BDB\u7BE8"+
"\u7BEA\u7BF2\u7BF4\u7BF5\u7BF8\u7BF9\u7BFA\u7BFC"+
"\u7BFE\u7C01\u7C02\u7C03\u7C04\u7C06\u7C09\u7C0B"+
"\u7C0C\u7C0E\u7C0F\u7C19\u7C1B\u7C20\u7C25\u7C26"+
"\u7C28\u7C2C\u7C31\u7C33\u7C34\u7C36\u7C39\u7C3A"+
"\u7C46\u7C4A\u7C55\u7C51\u7C52\u7C53\u7C59\u7C5A"+
"\u7C5B\u7C5C\u7C5D\u7C5E\u7C61\u7C63\u7C67\u7C69"+
"\u7C6D\u7C6E\u7C70\u7C72\u7C79\u7C7C\u7C7D\u7C86"+
"\u7C87\u7C8F\u7C94\u7C9E\u7CA0\u7CA6\u7CB0\u7CB6"+
"\u7CB7\u7CBA\u7CBB\u7CBC\u7CBF\u7CC4\u7CC7\u7CC8"+
"\u7CC9\u7CCD\u7CCF\u7CD3\u7CD4\u7CD5\u7CD7\u7CD9"+
"\u7CDA\u7CDD\u7CE6\u7CE9\u7CEB\u7CF5\u7D03\u7D07"+
"\u7D08\u7D09\u7D0F\u7D11\u7D12\u7D13\u7D16\u7D1D"+
"\u7D1E\u7D23\u7D26\u7D2A\u7D2D\u7D31\u7D3C\u7D3D"+
"\u7D3E\u7D40\u7D41\u7D47\u7D48\u7D4D\u7D51\u7D53"+
"\u7D57\u7D59\u7D5A\u7D5C\u7D5D\u7D65\u7D67\u7D6A"+
"\u7D70\u7D78\u7D7A\u7D7B\u7D7F\u7D81\u7D82\u7D83"+
"\u7D85\u7D86\u7D88\u7D8B\u7D8C\u7D8D\u7D91\u7D96"+
"\u7D97\u7D9D\u7D9E\u7DA6\u7DA7\u7DAA\u7DB3\u7DB6"+
"\u7DB7\u7DB9\u7DC2\u7DC3\u7DC4\u7DC5\u7DC6\u7DCC"+
"\u7DCD\u7DCE\u7DD7\u7DD9\u7E00\u7DE2\u7DE5\u7DE6"+
"\u7DEA\u7DEB\u7DED\u7DF1\u7DF5\u7DF6\u7DF9\u7DFA"+
"\u7E08\u7E10\u7E11\u7E15\u7E17\u7E1C\u7E1D\u7E20"+
"\u7E27\u7E28\u7E2C\u7E2D\u7E2F\u7E33\u7E36\u7E3F"+
"\u7E44\u7E45\u7E47\u7E4E\u7E50\u7E52\u7E58\u7E5F"+
"\u7E61\u7E62\u7E65\u7E6B\u7E6E\u7E6F\u7E73\u7E78"+
"\u7E7E\u7E81\u7E86\u7E87\u7E8A\u7E8D\u7E91\u7E95"+
"\u7E98\u7E9A\u7E9D\u7E9E\u7F3C\u7F3B\u7F3D\u7F3E"+
"\u7F3F\u7F43\u7F44\u7F47\u7F4F\u7F52\u7F53\u7F5B"+
"\u7F5C\u7F5D\u7F61\u7F63\u7F64\u7F65\u7F66\u7F6D"+
"\u7F71\u7F7D\u7F7E\u7F7F\u7F80\u7F8B\u7F8D\u7F8F"+
"\u7F90\u7F91\u7F96\u7F97\u7F9C\u7FA1\u7FA2\u7FA6"+
"\u7FAA\u7FAD\u7FB4\u7FBC\u7FBF\u7FC0\u7FC3\u7FC8"+
"\u7FCE\u7FCF\u7FDB\u7FDF\u7FE3\u7FE5\u7FE8\u7FEC"+
"\u7FEE\u7FEF\u7FF2\u7FFA\u7FFD\u7FFE\u7FFF\u8007"+
"\u8008\u800A\u800D\u800E\u800F\u8011\u8013\u8014"+
"\u8016\u801D\u801E\u801F\u8020\u8024\u8026\u802C"+
"\u802E\u8030\u8034\u8035\u8037\u8039\u803A\u803C"+
"\u803E\u8040\u8044\u8060\u8064\u8066\u806D\u8071"+
"\u8075\u8081\u8088\u808E\u809C\u809E\u80A6\u80A7"+
"\u80AB\u80B8\u80B9\u80C8\u80CD\u80CF\u80D2\u80D4"+
"\u80D5\u80D7\u80D8\u80E0\u80ED\u80EE\u80F0\u80F2"+
"\u80F3\u80F6\u80F9\u80FA\u80FE\u8103\u810B\u8116"+
"\u8117\u8118\u811C\u811E\u8120\u8124\u8127\u812C"+
"\u8130\u8135\u813A\u813C\u8145\u8147\u814A\u814C"+
"\u8152\u8157\u8160\u8161\u8167\u8168\u8169\u816D"+
"\u816F\u8177\u8181\u8190\u8184\u8185\u8186\u818B"+
"\u818E\u8196\u8198\u819B\u819E\u81A2\u81AE\u81B2"+
"\u81B4\u81BB\u81CB\u81C3\u81C5\u81CA\u81CE\u81CF"+
"\u81D5\u81D7\u81DB\u81DD\u81DE\u81E1\u81E4\u81EB"+
"\u81EC\u81F0\u81F1\u81F2\u81F5\u81F6\u81F8\u81F9"+
"\u81FD\u81FF\u8200\u8203\u820F\u8213\u8214\u8219"+
"\u821A\u821D\u8221\u8222\u8228\u8232\u8234\u823A"+
"\u8243\u8244\u8245\u8246\u824B\u824E\u824F\u8251"+
"\u8256\u825C\u8260\u8263\u8267\u826D\u8274\u827B"+
"\u827D\u827F\u8280\u8281\u8283\u8284\u8287\u8289"+
"\u828A\u828E\u8291\u8294\u8296\u8298\u829A\u829B"+
"\u82A0\u82A1\u82A3\u82A4\u82A7\u82A8\u82A9\u82AA"+
"\u82AE\u82B0\u82B2\u82B4\u82B7\u82BA\u82BC\u82BE"+
"\u82BF\u82C6\u82D0\u82D5\u82DA\u82E0\u82E2\u82E4"+
"\u82E8\u82EA\u82ED\u82EF\u82F6\u82F7\u82FD\u82FE"+
"\u8300\u8301\u8307\u8308\u830A\u830B\u8354\u831B"+
"\u831D\u831E\u831F\u8321\u8322\u832C\u832D\u832E"+
"\u8330\u8333\u8337\u833A\u833C\u833D\u8342\u8343"+
"\u8344\u8347\u834D\u834E\u8351\u8355\u8356\u8357"+
"\u8370\u8378\u837D\u837F\u8380\u8382\u8384\u8386"+
"\u838D\u8392\u8394\u8395\u8398\u8399\u839B\u839C"+
"\u839D\u83A6\u83A7\u83A9\u83AC\u83BE\u83BF\u83C0"+
"\u83C7\u83C9\u83CF\u83D0\u83D1\u83D4\u83DD\u8353"+
"\u83E8\u83EA\u83F6\u83F8\u83F9\u83FC\u8401\u8406"+
"\u840A\u840F\u8411\u8415\u8419\u83AD\u842F\u8439"+
"\u8445\u8447\u8448\u844A\u844D\u844F\u8451\u8452"+
"\u8456\u8458\u8459\u845A\u845C\u8460\u8464\u8465"+
"\u8467\u846A\u8470\u8473\u8474\u8476\u8478\u847C"+
"\u847D\u8481\u8485\u8492\u8493\u8495\u849E\u84A6"+
"\u84A8\u84A9\u84AA\u84AF\u84B1\u84B4\u84BA\u84BD"+
"\u84BE\u84C0\u84C2\u84C7\u84C8\u84CC\u84CF\u84D3";
private final static String innerIndex3=
"\u84DC\u84E7\u84EA\u84EF\u84F0\u84F1\u84F2\u84F7"+
"\u8532\u84FA\u84FB\u84FD\u8502\u8503\u8507\u850C"+
"\u850E\u8510\u851C\u851E\u8522\u8523\u8524\u8525"+
"\u8527\u852A\u852B\u852F\u8533\u8534\u8536\u853F"+
"\u8546\u854F\u8550\u8551\u8552\u8553\u8556\u8559"+
"\u855C\u855D\u855E\u855F\u8560\u8561\u8562\u8564"+
"\u856B\u856F\u8579\u857A\u857B\u857D\u857F\u8581"+
"\u8585\u8586\u8589\u858B\u858C\u858F\u8593\u8598"+
"\u859D\u859F\u85A0\u85A2\u85A5\u85A7\u85B4\u85B6"+
"\u85B7\u85B8\u85BC\u85BD\u85BE\u85BF\u85C2\u85C7"+
"\u85CA\u85CB\u85CE\u85AD\u85D8\u85DA\u85DF\u85E0"+
"\u85E6\u85E8\u85ED\u85F3\u85F6\u85FC\u85FF\u8600"+
"\u8604\u8605\u860D\u860E\u8610\u8611\u8612\u8618"+
"\u8619\u861B\u861E\u8621\u8627\u8629\u8636\u8638"+
"\u863A\u863C\u863D\u8640\u8642\u8646\u8652\u8653"+
"\u8656\u8657\u8658\u8659\u865D\u8660\u8661\u8662"+
"\u8663\u8664\u8669\u866C\u866F\u8675\u8676\u8677"+
"\u867A\u868D\u8691\u8696\u8698\u869A\u869C\u86A1"+
"\u86A6\u86A7\u86A8\u86AD\u86B1\u86B3\u86B4\u86B5"+
"\u86B7\u86B8\u86B9\u86BF\u86C0\u86C1\u86C3\u86C5"+
"\u86D1\u86D2\u86D5\u86D7\u86DA\u86DC\u86E0\u86E3"+
"\u86E5\u86E7\u8688\u86FA\u86FC\u86FD\u8704\u8705"+
"\u8707\u870B\u870E\u870F\u8710\u8713\u8714\u8719"+
"\u871E\u871F\u8721\u8723\u8728\u872E\u872F\u8731"+
"\u8732\u8739\u873A\u873C\u873D\u873E\u8740\u8743"+
"\u8745\u874D\u8758\u875D\u8761\u8764\u8765\u876F"+
"\u8771\u8772\u877B\u8783\u8784\u8785\u8786\u8787"+
"\u8788\u8789\u878B\u878C\u8790\u8793\u8795\u8797"+
"\u8798\u8799\u879E\u87A0\u87A3\u87A7\u87AC\u87AD"+
"\u87AE\u87B1\u87B5\u87BE\u87BF\u87C1\u87C8\u87C9"+
"\u87CA\u87CE\u87D5\u87D6\u87D9\u87DA\u87DC\u87DF"+
"\u87E2\u87E3\u87E4\u87EA\u87EB\u87ED\u87F1\u87F3"+
"\u87F8\u87FA\u87FF\u8801\u8803\u8806\u8809\u880A"+
"\u880B\u8810\u8819\u8812\u8813\u8814\u8818\u881A"+
"\u881B\u881C\u881E\u881F\u8828\u882D\u882E\u8830"+
"\u8832\u8835\u883A\u883C\u8841\u8843\u8845\u8848"+
"\u8849\u884A\u884B\u884E\u8851\u8855\u8856\u8858"+
"\u885A\u885C\u885F\u8860\u8864\u8869\u8871\u8879"+
"\u887B\u8880\u8898\u889A\u889B\u889C\u889F\u88A0"+
"\u88A8\u88AA\u88BA\u88BD\u88BE\u88C0\u88CA\u88CB"+
"\u88CC\u88CD\u88CE\u88D1\u88D2\u88D3\u88DB\u88DE"+
"\u88E7\u88EF\u88F0\u88F1\u88F5\u88F7\u8901\u8906"+
"\u890D\u890E\u890F\u8915\u8916\u8918\u8919\u891A"+
"\u891C\u8920\u8926\u8927\u8928\u8930\u8931\u8932"+
"\u8935\u8939\u893A\u893E\u8940\u8942\u8945\u8946"+
"\u8949\u894F\u8952\u8957\u895A\u895B\u895C\u8961"+
"\u8962\u8963\u896B\u896E\u8970\u8973\u8975\u897A"+
"\u897B\u897C\u897D\u8989\u898D\u8990\u8994\u8995"+
"\u899B\u899C\u899F\u89A0\u89A5\u89B0\u89B4\u89B5"+
"\u89B6\u89B7\u89BC\u89D4\u89D5\u89D6\u89D7\u89D8"+
"\u89E5\u89E9\u89EB\u89ED\u89F1\u89F3\u89F6\u89F9"+
"\u89FD\u89FF\u8A04\u8A05\u8A07\u8A0F\u8A11\u8A12"+
"\u8A14\u8A15\u8A1E\u8A20\u8A22\u8A24\u8A26\u8A2B"+
"\u8A2C\u8A2F\u8A35\u8A37\u8A3D\u8A3E\u8A40\u8A43"+
"\u8A45\u8A47\u8A49\u8A4D\u8A4E\u8A53\u8A56\u8A57"+
"\u8A58\u8A5C\u8A5D\u8A61\u8A65\u8A67\u8A75\u8A76"+
"\u8A77\u8A79\u8A7A\u8A7B\u8A7E\u8A7F\u8A80\u8A83"+
"\u8A86\u8A8B\u8A8F\u8A90\u8A92\u8A96\u8A97\u8A99"+
"\u8A9F\u8AA7\u8AA9\u8AAE\u8AAF\u8AB3\u8AB6\u8AB7"+
"\u8ABB\u8ABE\u8AC3\u8AC6\u8AC8\u8AC9\u8ACA\u8AD1"+
"\u8AD3\u8AD4\u8AD5\u8AD7\u8ADD\u8ADF\u8AEC\u8AF0"+
"\u8AF4\u8AF5\u8AF6\u8AFC\u8AFF\u8B05\u8B06\u8B0B"+
"\u8B11\u8B1C\u8B1E\u8B1F\u8B0A\u8B2D\u8B30\u8B37"+
"\u8B3C\u8B42\u8B43\u8B44\u8B45\u8B46\u8B48\u8B52"+
"\u8B53\u8B54\u8B59\u8B4D\u8B5E\u8B63\u8B6D\u8B76"+
"\u8B78\u8B79\u8B7C\u8B7E\u8B81\u8B84\u8B85\u8B8B"+
"\u8B8D\u8B8F\u8B94\u8B95\u8B9C\u8B9E\u8B9F\u8C38"+
"\u8C39\u8C3D\u8C3E\u8C45\u8C47\u8C49\u8C4B\u8C4F"+
"\u8C51\u8C53\u8C54\u8C57\u8C58\u8C5B\u8C5D\u8C59"+
"\u8C63\u8C64\u8C66\u8C68\u8C69\u8C6D\u8C73\u8C75"+
"\u8C76\u8C7B\u8C7E\u8C86\u8C87\u8C8B\u8C90\u8C92"+
"\u8C93\u8C99\u8C9B\u8C9C\u8CA4\u8CB9\u8CBA\u8CC5"+
"\u8CC6\u8CC9\u8CCB\u8CCF\u8CD6\u8CD5\u8CD9\u8CDD"+
"\u8CE1\u8CE8\u8CEC\u8CEF\u8CF0\u8CF2\u8CF5\u8CF7"+
"\u8CF8\u8CFE\u8CFF\u8D01\u8D03\u8D09\u8D12\u8D17"+
"\u8D1B\u8D65\u8D69\u8D6C\u8D6E\u8D7F\u8D82\u8D84"+
"\u8D88\u8D8D\u8D90\u8D91\u8D95\u8D9E\u8D9F\u8DA0"+
"\u8DA6\u8DAB\u8DAC\u8DAF\u8DB2\u8DB5\u8DB7\u8DB9"+
"\u8DBB\u8DC0\u8DC5\u8DC6\u8DC7\u8DC8\u8DCA\u8DCE"+
"\u8DD1\u8DD4\u8DD5\u8DD7\u8DD9\u8DE4\u8DE5\u8DE7"+
"\u8DEC\u8DF0\u8DBC\u8DF1\u8DF2\u8DF4\u8DFD\u8E01"+
"\u8E04\u8E05\u8E06\u8E0B\u8E11\u8E14\u8E16\u8E20"+
"\u8E21\u8E22\u8E23\u8E26\u8E27\u8E31\u8E33\u8E36"+
"\u8E37\u8E38\u8E39\u8E3D\u8E40\u8E41\u8E4B\u8E4D"+
"\u8E4E\u8E4F\u8E54\u8E5B\u8E5C\u8E5D\u8E5E\u8E61"+
"\u8E62\u8E69\u8E6C\u8E6D\u8E6F\u8E70\u8E71\u8E79"+
"\u8E7A\u8E7B\u8E82\u8E83\u8E89\u8E90\u8E92\u8E95"+
"\u8E9A\u8E9B\u8E9D\u8E9E\u8EA2\u8EA7\u8EA9\u8EAD"+
"\u8EAE\u8EB3\u8EB5\u8EBA\u8EBB\u8EC0\u8EC1\u8EC3"+
"\u8EC4\u8EC7\u8ECF\u8ED1\u8ED4\u8EDC\u8EE8\u8EEE"+
"\u8EF0\u8EF1\u8EF7\u8EF9\u8EFA\u8EED\u8F00\u8F02"+
"\u8F07\u8F08\u8F0F\u8F10\u8F16\u8F17\u8F18\u8F1E"+
"\u8F20\u8F21\u8F23\u8F25\u8F27\u8F28\u8F2C\u8F2D"+
"\u8F2E\u8F34\u8F35\u8F36\u8F37\u8F3A\u8F40\u8F41"+
"\u8F43\u8F47\u8F4F\u8F51\u8F52\u8F53\u8F54\u8F55"+
"\u8F58\u8F5D\u8F5E\u8F65\u8F9D\u8FA0\u8FA1\u8FA4"+
"\u8FA5\u8FA6\u8FB5\u8FB6\u8FB8\u8FBE\u8FC0\u8FC1"+
"\u8FC6\u8FCA\u8FCB\u8FCD\u8FD0\u8FD2\u8FD3\u8FD5"+
"\u8FE0\u8FE3\u8FE4\u8FE8\u8FEE\u8FF1\u8FF5\u8FF6"+
"\u8FFB\u8FFE\u9002\u9004\u9008\u900C\u9018\u901B"+
"\u9028\u9029\u902F\u902A\u902C\u902D\u9033\u9034"+
"\u9037\u903F\u9043\u9044\u904C\u905B\u905D\u9062"+
"\u9066\u9067\u906C\u9070\u9074\u9079\u9085\u9088"+
"\u908B\u908C\u908E\u9090\u9095\u9097\u9098\u9099"+
"\u909B\u90A0\u90A1\u90A2\u90A5\u90B0\u90B2\u90B3"+
"\u90B4\u90B6\u90BD\u90CC\u90BE\u90C3\u90C4\u90C5"+
"\u90C7\u90C8\u90D5\u90D7\u90D8\u90D9\u90DC\u90DD"+
"\u90DF\u90E5\u90D2\u90F6\u90EB\u90EF\u90F0\u90F4"+
"\u90FE\u90FF\u9100\u9104\u9105\u9106\u9108\u910D"+
"\u9110\u9114\u9116\u9117\u9118\u911A\u911C\u911E"+
"\u9120\u9125\u9122\u9123\u9127\u9129\u912E\u912F"+
"\u9131\u9134\u9136\u9137\u9139\u913A\u913C\u913D"+
"\u9143\u9147\u9148\u914F\u9153\u9157\u9159\u915A"+
"\u915B\u9161\u9164\u9167\u916D\u9174\u9179\u917A"+
"\u917B\u9181\u9183\u9185\u9186\u918A\u918E\u9191"+
"\u9193\u9194\u9195\u9198\u919E\u91A1\u91A6\u91A8"+
"\u91AC\u91AD\u91AE\u91B0\u91B1\u91B2\u91B3\u91B6"+
"\u91BB\u91BC\u91BD\u91BF\u91C2\u91C3\u91C5\u91D3"+
"\u91D4\u91D7\u91D9\u91DA\u91DE\u91E4\u91E5\u91E9"+
"\u91EA\u91EC\u91ED\u91EE\u91EF\u91F0\u91F1\u91F7"+
"\u91F9\u91FB\u91FD\u9200\u9201\u9204\u9205\u9206"+
"\u9207\u9209\u920A\u920C\u9210\u9212\u9213\u9216"+
"\u9218\u921C\u921D\u9223\u9224\u9225\u9226\u9228"+
"\u922E\u922F\u9230\u9233\u9235\u9236\u9238\u9239"+
"\u923A\u923C\u923E\u9240\u9242\u9243\u9246\u9247"+
"\u924A\u924D\u924E\u924F\u9251\u9258\u9259\u925C"+
"\u925D\u9260\u9261\u9265\u9267\u9268\u9269\u926E"+
"\u926F\u9270\u9275\u9276\u9277\u9278\u9279\u927B"+
"\u927C\u927D\u927F\u9288\u9289\u928A\u928D\u928E"+
"\u9292\u9297\u9299\u929F\u92A0\u92A4\u92A5\u92A7"+
"\u92A8\u92AB\u92AF\u92B2\u92B6\u92B8\u92BA\u92BB"+
"\u92BC\u92BD\u92BF\u92C0\u92C1\u92C2\u92C3\u92C5"+
"\u92C6\u92C7\u92C8\u92CB\u92CC\u92CD\u92CE\u92D0"+
"\u92D3\u92D5\u92D7\u92D8\u92D9\u92DC\u92DD\u92DF"+
"\u92E0\u92E1\u92E3\u92E5\u92E7\u92E8\u92EC\u92EE"+
"\u92F0\u92F9\u92FB\u92FF\u9300\u9302\u9308\u930D"+
"\u9311\u9314\u9315\u931C\u931D\u931E\u931F\u9321"+
"\u9324\u9325\u9327\u9329\u932A\u9333\u9334\u9336"+
"\u9337\u9347\u9348\u9349\u9350\u9351\u9352\u9355"+
"\u9357\u9358\u935A\u935E\u9364\u9365\u9367\u9369"+
"\u936A\u936D\u936F\u9370\u9371\u9373\u9374\u9376"+
"\u937A\u937D\u937F\u9380\u9381\u9382\u9388\u938A"+
"\u938B\u938D\u938F\u9392\u9395\u9398\u939B\u939E"+
"\u93A1\u93A3\u93A4\u93A6\u93A8\u93AB\u93B4\u93B5"+
"\u93B6\u93BA\u93A9\u93C1\u93C4\u93C5\u93C6\u93C7"+
"\u93C9\u93CA\u93CB\u93CC\u93CD\u93D3\u93D9\u93DC"+
"\u93DE\u93DF\u93E2\u93E6\u93E7\u93F9\u93F7\u93F8"+
"\u93FA\u93FB\u93FD\u9401\u9402\u9404\u9408\u9409"+
"\u940D\u940E\u940F\u9415\u9416\u9417\u941F\u942E"+
"\u942F\u9431\u9432\u9433\u9434\u943B\u943F\u943D"+
"\u9443\u9445\u9448\u944A\u944C\u9455\u9459\u945C"+
"\u945F\u9461\u9463\u9468\u946B\u946D\u946E\u946F"+
"\u9471\u9472\u9484\u9483\u9578\u9579\u957E\u9584"+
"\u9588\u958C\u958D\u958E\u959D\u959E\u959F\u95A1"+
"\u95A6\u95A9\u95AB\u95AC\u95B4\u95B6\u95BA\u95BD"+
"\u95BF\u95C6\u95C8\u95C9\u95CB\u95D0\u95D1\u95D2"+
"\u95D3\u95D9\u95DA\u95DD\u95DE\u95DF\u95E0\u95E4"+
"\u95E6\u961D\u961E\u9622\u9624\u9625\u9626\u962C"+
"\u9631\u9633\u9637\u9638\u9639\u963A\u963C\u963D"+
"\u9641\u9652\u9654\u9656\u9657\u9658\u9661\u966E"+
"\u9674\u967B\u967C\u967E\u967F\u9681\u9682\u9683"+
"\u9684\u9689\u9691\u9696\u969A\u969D\u969F\u96A4"+
"\u96A5\u96A6\u96A9\u96AE\u96AF\u96B3\u96BA\u96CA"+
"\u96D2\u5DB2\u96D8\u96DA\u96DD\u96DE\u96DF\u96E9"+
"\u96EF\u96F1\u96FA\u9702\u9703\u9705\u9709\u971A"+
"\u971B\u971D\u9721\u9722\u9723\u9728\u9731\u9733"+
"\u9741\u9743\u974A\u974E\u974F\u9755\u9757\u9758"+
"\u975A\u975B\u9763\u9767\u976A\u976E\u9773\u9776"+
"\u9777\u9778\u977B\u977D\u977F\u9780\u9789\u9795"+
"\u9796\u9797\u9799\u979A\u979E\u979F\u97A2\u97AC"+
"\u97AE\u97B1\u97B2\u97B5\u97B6\u97B8\u97B9\u97BA"+
"\u97BC\u97BE\u97BF\u97C1\u97C4\u97C5\u97C7\u97C9"+
"\u97CA\u97CC\u97CD\u97CE\u97D0\u97D1\u97D4\u97D7"+
"\u97D8\u97D9\u97DD\u97DE\u97E0\u97DB\u97E1\u97E4"+
"\u97EF\u97F1\u97F4\u97F7\u97F8\u97FA\u9807\u980A"+
"\u9819\u980D\u980E\u9814\u9816\u981C\u981E\u9820"+
"\u9823\u9826\u982B\u982E\u982F\u9830\u9832\u9833"+
"\u9835\u9825\u983E\u9844\u9847\u984A\u9851\u9852"+
"\u9853\u9856\u9857\u9859\u985A\u9862\u9863\u9865"+
"\u9866\u986A\u986C\u98AB\u98AD\u98AE\u98B0\u98B4"+
"\u98B7\u98B8\u98BA\u98BB\u98BF\u98C2\u98C5\u98C8"+
"\u98CC\u98E1\u98E3\u98E5\u98E6\u98E7\u98EA\u98F3"+
"\u98F6\u9902\u9907\u9908\u9911\u9915\u9916\u9917"+
"\u991A\u991B\u991C\u991F\u9922\u9926\u9927\u992B"+
"\u9931\u9932\u9933\u9934\u9935\u9939\u993A\u993B"+
"\u993C\u9940\u9941\u9946\u9947\u9948\u994D\u994E"+
"\u9954\u9958\u9959\u995B\u995C\u995E\u995F\u9960"+
"\u999B\u999D\u999F\u99A6\u99B0\u99B1\u99B2\u99B5";
private final static String innerIndex4=
"\u99B9\u99BA\u99BD\u99BF\u99C3\u99C9\u99D3\u99D4"+
"\u99D9\u99DA\u99DC\u99DE\u99E7\u99EA\u99EB\u99EC"+
"\u99F0\u99F4\u99F5\u99F9\u99FD\u99FE\u9A02\u9A03"+
"\u9A04\u9A0B\u9A0C\u9A10\u9A11\u9A16\u9A1E\u9A20"+
"\u9A22\u9A23\u9A24\u9A27\u9A2D\u9A2E\u9A33\u9A35"+
"\u9A36\u9A38\u9A47\u9A41\u9A44\u9A4A\u9A4B\u9A4C"+
"\u9A4E\u9A51\u9A54\u9A56\u9A5D\u9AAA\u9AAC\u9AAE"+
"\u9AAF\u9AB2\u9AB4\u9AB5\u9AB6\u9AB9\u9ABB\u9ABE"+
"\u9ABF\u9AC1\u9AC3\u9AC6\u9AC8\u9ACE\u9AD0\u9AD2"+
"\u9AD5\u9AD6\u9AD7\u9ADB\u9ADC\u9AE0\u9AE4\u9AE5"+
"\u9AE7\u9AE9\u9AEC\u9AF2\u9AF3\u9AF5\u9AF9\u9AFA"+
"\u9AFD\u9AFF\u9B00\u9B01\u9B02\u9B03\u9B04\u9B05"+
"\u9B08\u9B09\u9B0B\u9B0C\u9B0D\u9B0E\u9B10\u9B12"+
"\u9B16\u9B19\u9B1B\u9B1C\u9B20\u9B26\u9B2B\u9B2D"+
"\u9B33\u9B34\u9B35\u9B37\u9B39\u9B3A\u9B3D\u9B48"+
"\u9B4B\u9B4C\u9B55\u9B56\u9B57\u9B5B\u9B5E\u9B61"+
"\u9B63\u9B65\u9B66\u9B68\u9B6A\u9B6B\u9B6C\u9B6D"+
"\u9B6E\u9B73\u9B75\u9B77\u9B78\u9B79\u9B7F\u9B80"+
"\u9B84\u9B85\u9B86\u9B87\u9B89\u9B8A\u9B8B\u9B8D"+
"\u9B8F\u9B90\u9B94\u9B9A\u9B9D\u9B9E\u9BA6\u9BA7"+
"\u9BA9\u9BAC\u9BB0\u9BB1\u9BB2\u9BB7\u9BB8\u9BBB"+
"\u9BBC\u9BBE\u9BBF\u9BC1\u9BC7\u9BC8\u9BCE\u9BD0"+
"\u9BD7\u9BD8\u9BDD\u9BDF\u9BE5\u9BE7\u9BEA\u9BEB"+
"\u9BEF\u9BF3\u9BF7\u9BF8\u9BF9\u9BFA\u9BFD\u9BFF"+
"\u9C00\u9C02\u9C0B\u9C0F\u9C11\u9C16\u9C18\u9C19"+
"\u9C1A\u9C1C\u9C1E\u9C22\u9C23\u9C26\u9C27\u9C28"+
"\u9C29\u9C2A\u9C31\u9C35\u9C36\u9C37\u9C3D\u9C41"+
"\u9C43\u9C44\u9C45\u9C49\u9C4A\u9C4E\u9C4F\u9C50"+
"\u9C53\u9C54\u9C56\u9C58\u9C5B\u9C5D\u9C5E\u9C5F"+
"\u9C63\u9C69\u9C6A\u9C5C\u9C6B\u9C68\u9C6E\u9C70"+
"\u9C72\u9C75\u9C77\u9C7B\u9CE6\u9CF2\u9CF7\u9CF9"+
"\u9D0B\u9D02\u9D11\u9D17\u9D18\u9D1C\u9D1D\u9D1E"+
"\u9D2F\u9D30\u9D32\u9D33\u9D34\u9D3A\u9D3C\u9D45"+
"\u9D3D\u9D42\u9D43\u9D47\u9D4A\u9D53\u9D54\u9D5F"+
"\u9D63\u9D62\u9D65\u9D69\u9D6A\u9D6B\u9D70\u9D76"+
"\u9D77\u9D7B\u9D7C\u9D7E\u9D83\u9D84\u9D86\u9D8A"+
"\u9D8D\u9D8E\u9D92\u9D93\u9D95\u9D96\u9D97\u9D98"+
"\u9DA1\u9DAA\u9DAC\u9DAE\u9DB1\u9DB5\u9DB9\u9DBC"+
"\u9DBF\u9DC3\u9DC7\u9DC9\u9DCA\u9DD4\u9DD5\u9DD6"+
"\u9DD7\u9DDA\u9DDE\u9DDF\u9DE0\u9DE5\u9DE7\u9DE9"+
"\u9DEB\u9DEE\u9DF0\u9DF3\u9DF4\u9DFE\u9E0A\u9E02"+
"\u9E07\u9E0E\u9E10\u9E11\u9E12\u9E15\u9E16\u9E19"+
"\u9E1C\u9E1D\u9E7A\u9E7B\u9E7C\u9E80\u9E82\u9E83"+
"\u9E84\u9E85\u9E87\u9E8E\u9E8F\u9E96\u9E98\u9E9B"+
"\u9E9E\u9EA4\u9EA8\u9EAC\u9EAE\u9EAF\u9EB0\u9EB3"+
"\u9EB4\u9EB5\u9EC6\u9EC8\u9ECB\u9ED5\u9EDF\u9EE4"+
"\u9EE7\u9EEC\u9EED\u9EEE\u9EF0\u9EF1\u9EF2\u9EF5"+
"\u9EF8\u9EFF\u9F02\u9F03\u9F09\u9F0F\u9F10\u9F11"+
"\u9F12\u9F14\u9F16\u9F17\u9F19\u9F1A\u9F1B\u9F1F"+
"\u9F22\u9F26\u9F2A\u9F2B\u9F2F\u9F31\u9F32\u9F34"+
"\u9F37\u9F39\u9F3A\u9F3C\u9F3D\u9F3F\u9F41\u9F43"+
"\u9F44\u9F45\u9F46\u9F47\u9F53\u9F55\u9F56\u9F57"+
"\u9F58\u9F5A\u9F5D\u9F5E\u9F68\u9F69\u9F6D\u9F6E"+
"\u9F6F\u9F70\u9F71\u9F73\u9F75\u9F7A\u9F7D\u9F8F"+
"\u9F90\u9F91\u9F92\u9F94\u9F96\u9F97\u9F9E\u9FA1"+
"\u9FA2\u9FA3\u9FA5\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD\uFFFD"+
"\u2170\u2171\u2172\u2173\u2174\u2175\u2176\u2177"+
"\u2178\u2179\u2160\u2161\u2162\u2163\u2164\u2165"+
"\u2166\u2167\u2168\u2169\uFF07\uFF02\u3231\u2116"+
"\u2121\u70BB\u4EFC\u50F4\u51EC\u5307\u5324\uFA0E"+
"\u548A\u5759\uFA0F\uFA10\u589E\u5BEC\u5CF5\u5D53"+
"\uFA11\u5FB7\u6085\u6120\u654E\u663B\u6665\uFA12"+
"\uF929\u6801\uFA13\uFA14\u6A6B\u6AE2\u6DF8\u6DF2"+
"\u7028\uFA15\uFA16\u7501\u7682\u769E\uFA17\u7930"+
"\uFA18\uFA19\uFA1A\uFA1B\u7AE7\uFA1C\uFA1D\u7DA0"+
"\u7DD6\uFA1E\u8362\uFA1F\u85B0\uFA20\uFA21\u8807"+
"\uFA22\u8B7F\u8CF4\u8D76\uFA23\uFA24\uFA25\u90DE"+
"\uFA26\u9115\uFA27\uFA28\u9592\uF9DC\uFA29\u973B"+
"\u974D\u9751\uFA2A\uFA2B\uFA2C\u999E\u9AD9\u9B72"+
"\uFA2D\u9ED1";
private final static short index1[] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 2, 3, 0, 4, 5, 6, 0, 0, 0, 0,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 0, 0,
0, 0, 0, 69, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
private final static String index2[] = {
innerIndex0,
innerIndex1,
innerIndex2,
innerIndex3,
innerIndex4
};
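// Single-byte input has no standalone mapping in this double-byte table, so it is
// mapped to REPLACE_CHAR (presumably the Unicode replacement character defined by the
// enclosing coder hierarchy).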
protected char convSingleByte(int b) {
return REPLACE_CHAR;
}
}
|
|
/**
* The Table class. Contains various information about a table, including the Columns that in turn contain the cells.
*
* @author gurnoor
*/
package nl.esciencecenter.qtm;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import nl.esciencecenter.qtm.solr.tagger.utils.TagResponse;
import nl.esciencecenter.qtm.utils.Configs;
import nl.esciencecenter.qtm.solr.tagger.recognize.Evaluate;
public class Table {
private int tab_num;
private String xml;
private String documentFileName;
public String pragmaticClass;
public boolean isEmptyOnlyHeaders = true;
private boolean isTraitTable = false;
private String[] tableHeadersColumns;
private Columns[] tableCol;
public enum StructureType {
LIST, MATRIX, SUBHEADER, MULTI, NULL
};
private String sectionOfTable;
/** The num_of_rows. */
private int num_of_rows;
public int tableInTable;
/** The num_of_columns. */
private int num_of_columns;
/** The table_label. */
private String table_label;
/** The table_caption. */
private String table_caption;
/** The table_footer. */
private String table_footer;
private boolean hasHeader = true;
private boolean hasBody = true;
private boolean isNoXMLTable = false;
private boolean isRowSpanning = false;
private boolean isColSpanning = false;
private int structureClass = 0; // 0 - no class,1- simplest, 2 - simple, 3 -
// medium, 4 - complex
private StructureType tableStructureType;
// Constructors
/**
* Instantiates a new table.
*
* @param label
* the title
*/
public Table(String label) {
table_label = label;
hasHeader = true;
hasBody = true;
isNoXMLTable = false;
isRowSpanning = false;
isColSpanning = false;
}
/**
* Instantiates a new table.
*
* @param label
* the title
* @param Caption
* the caption
* @param Footer
* the footer
*/
public Table(String label, String Caption, String Footer) {
table_label = label;
table_caption = Caption;
table_footer = Footer;
// stat = new TableStats();
}
/**
* Instantiates a new table.
*
* @param label
* the title
* @param Caption
* the caption
* @param Footer
* the footer
* @param Columns
* the columns
* @param Rows
* the rows
*/
public Table(String label, String Caption, String Footer, int Columns,
int Rows) {
table_label = label;
table_caption = Caption;
table_footer = Footer;
num_of_rows = Rows;
num_of_columns = Columns;
// stat = new TableStats();
}
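/**
* Heuristic check for whether this table describes QTLs/traits: returns true if the
* caption or footer contains "QTL" or "trait" (case-insensitive), or if any column not
* already typed as "QTL value" has a header containing "QTL", "trait" or "phenotype".
*/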
public boolean isaTraitTable() {
String word1 = "QTL";
String word2 = "trait";
// String word3="Quantitavie Trait loci";
if (this.table_caption.toLowerCase().indexOf(word1.toLowerCase()) != -1
|| this.table_caption.toLowerCase()
.indexOf(word2.toLowerCase()) != -1)
return true;
if (this.table_footer.toLowerCase().indexOf(word1.toLowerCase()) != -1
|| this.table_footer.toLowerCase()
.indexOf(word2.toLowerCase()) != -1)
return true;
Columns tc[] = this.getTableCol();
String word3 = "phenotype";
for (Columns col : tc) {
if (col.getColumns_type().indexOf("QTL value") == -1) {
if (col.getHeader().toLowerCase()
.indexOf(word1.toLowerCase()) != -1
|| col.getHeader().toLowerCase()
.indexOf(word2.toLowerCase()) != -1
|| col.getHeader().toLowerCase()
.indexOf(word3.toLowerCase()) != -1) {
return true;
}
}
}
return false;
}
// Getters and setters
public int getTabnum() {
return tab_num;
}
public void setTableid(int tab_num) {
this.tab_num = tab_num;
}
public String[] getTableHeadersColumns() {
return tableHeadersColumns;
}
public void setTableHeadersColumns(String[] tableHeadersCols) {
tableHeadersColumns = tableHeadersCols;
}
public boolean getisTraitTable() {
return isTraitTable;
}
public void setisTraitTable(boolean isTraitTable) {
this.isTraitTable = isTraitTable;
}
public Columns[] getTableCol() {
return tableCol;
}
public void setTableCol(Columns[] tCol) {
tableCol = tCol;
}
/**
* Gets the num_of_rows.
*
* @return the num_of_rows
*/
public int getNum_of_rows() {
return num_of_rows;
}
/**
* Sets the num_of_rows.
*
* @param num_of_rows
* the new num_of_rows
*/
public void setNum_of_rows(int num_of_rows) {
this.num_of_rows = num_of_rows;
}
/**
* Gets the num_of_columns.
*
* @return the num_of_columns
*/
public int getNum_of_columns() {
return num_of_columns;
}
/**
* Sets the num_of_columns.
*
* @param num_of_columns
* the new num_of_columns
*/
public void setNum_of_columns(int num_of_columns) {
this.num_of_columns = num_of_columns;
}
/**
* Gets the table_label.
*
* @return the table_label
*/
public String getTable_label() {
return table_label;
}
/**
* Sets the table_label.
*
* @param table_label
* the new table_label
*/
public void setTable_label(String table_label) {
this.table_label = table_label;
}
/**
* Gets the table_caption.
*
* @return the table_caption
*/
public String getTable_caption() {
return table_caption;
}
/**
* Sets the table_caption.
*
* @param table_caption
* the new table_caption
*/
public void setTable_caption(String table_caption) {
this.table_caption = table_caption;
}
/**
* Gets the table_footer.
*
* @return the table_footer
*/
public String getTable_footer() {
return table_footer;
}
/**
* Sets the table_footer.
*
* @param table_footer
* the new table_footer
*/
public void setTable_footer(String table_footer) {
this.table_footer = table_footer;
}
/**
* Gets the table_cells.
*
* @return the table_cells
*/
// public C[][] getTable_cells() {
// return cells;
// }
// public Hc[][] getTable_Headercells() {
// return header_cells;
// }
// public List<C[]> getTable_cellList() {
// return LOC;
// }
/**
* Sets the table_cells.
*
* @param cells
* the new table_cells
*/
// public void setTableHeadercells(Hc[][] cells) {
// this.header_cells = cells;
// }
// public void setTable_cells(C[][] cells) {
// this.cells = cells;
// }
// public void setTable_cellList(List<C[]> L) {
// this.LOC=L;
// }
public boolean isHasHeader() {
return hasHeader;
}
public void setHasHeader(boolean hasHeader) {
this.hasHeader = hasHeader;
}
public boolean isHasBody() {
return hasBody;
}
public void setHasBody(boolean hasBody) {
this.hasBody = hasBody;
}
public boolean isNoXMLTable() {
return isNoXMLTable;
}
public void setNoXMLTable(boolean isNoXMLTable) {
this.isNoXMLTable = isNoXMLTable;
}
public boolean isRowSpanning() {
return isRowSpanning;
}
public void setRowSpanning(boolean isRowSpanning) {
this.isRowSpanning = isRowSpanning;
}
public boolean isColSpanning() {
return isColSpanning;
}
public void setColSpanning(boolean isColSpanning) {
this.isColSpanning = isColSpanning;
}
public String getXml() {
return xml;
}
public void setXml(String xml) {
this.xml = xml;
}
public String getDocumentFileName() {
return documentFileName;
}
public void setDocumentFileName(String documentFileName) {
this.documentFileName = documentFileName;
}
public int getStructureClass() {
return structureClass;
}
public void setStructureClass(int sClass) {
structureClass = sClass;
}
/**
* @return the tableStructureType
*/
public StructureType getTableStructureType() {
return tableStructureType;
}
/**
* @param tableStructureType
* the tableStructureType to set
*/
public void setTableStructureType(StructureType tStructureType) {
tableStructureType = tStructureType;
}
/**
* @return the sectionOfTable
*/
public String getSectionOfTable() {
return sectionOfTable;
}
/**
* @param sectionOfTable
* the sectionOfTable to set
*/
public void setSectionOfTable(String secOfTable) {
sectionOfTable = secOfTable;
}
public void printTable2() throws Exception {
StringBuffer sb = new StringBuffer();
for (Columns c : this.getTableCol()) {
sb.append(
c.getHeader() + "(" + c.getColumns_type() + ")" + "\t\t");
}
Main.logger.debug(sb.toString());
int i = 0;
while (i < this.num_of_rows) {
sb = new StringBuffer();
for (Columns c : this.getTableCol()) {
try {
sb.append(c.getcelz()[i].getcell_value() + "("
+ c.getcelz()[i].getCell_type() + ")" + "\t\t");
} catch (Exception e) {
sb.append("Null(Null" + "\t\t");
}
}
Main.logger.debug(sb.toString());
i++;
}
}
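/**
* Classifies each column by its cell content: a column whose non-empty cells are at
* least 60% numeric becomes "QTL value", otherwise "QTL property"; a property column
* whose cells or header mention "qtl", "trait" or "phenotype" is promoted to
* "QTL descriptor". If several descriptor columns are found, only the one whose cells
* match the most trait-descriptor annotations (via Evaluate.processString) keeps the
* "QTL descriptor" type; the others revert to "QTL property".
*/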
public Table tableClassification() {
// C[][] cells=this.getTable_cells();
Columns[] tc = this.getTableCol();
// int rows = cells.length;
int cols = this.num_of_columns;
HashMap<String, Integer> ColTypes = new HashMap<String, Integer>();
for (int l = 0; l < tc.length; l++) {
ColTypes.clear();
ColTypes.put("Partially Numeric", 0);
ColTypes.put("Numeric", 0);
ColTypes.put("Text", 0);
ColTypes.put("Empty", 0);
try {
for (int k = 0; k < tc[l].getcelz().length; k++) {
// Compare cell types with equals(); == on Strings only checks reference identity.
String cellType = tc[l].getcelz()[k].getCell_type();
if ("Numeric".equals(cellType)) {
ColTypes.put("Numeric", ColTypes.get("Numeric") + 1);
} else if ("Partially Numeric".equals(cellType)) {
ColTypes.put("Partially Numeric",
ColTypes.get("Partially Numeric") + 1);
} else if ("Text".equals(cellType)) {
ColTypes.put("Text", ColTypes.get("Text") + 1);
} else if ("Empty".equals(cellType)) {
ColTypes.put("Empty", ColTypes.get("Empty") + 1);
}
}
} catch (Exception e) {
Main.logger.debug(e);
}
String word1 = "qtl";
String word2 = "trait";
String word3 = "phenotype";
float totalNumeric = (float) ColTypes.get("Numeric")
/ (float) (tc[l].getcelz().length - ColTypes.get("Empty"));
float totalPartiallyNumeric = (float) ColTypes
.get("Partially Numeric")
/ (float) (tc[l].getcelz().length - ColTypes.get("Empty"));
float totalText = (float) ColTypes.get("Text")
/ (float) (tc[l].getcelz().length - ColTypes.get("Empty"));
if (totalNumeric >= 0.60)
tc[l].setColumns_type("QTL value");
else
tc[l].setColumns_type("QTL property");
int countwords = 0;
try {
if (tc[l].getColumns_type().equals("QTL property")) {
for (int k = 0; k < tc[l].getcelz().length; k++) {
if (tc[l].getcelz()[k].getcell_value().toLowerCase()
.indexOf(word1) != -1
|| tc[l].getcelz()[k].getcell_value()
.toLowerCase().indexOf(word2) != -1
|| tc[l].getcelz()[k].getcell_value()
.toLowerCase().indexOf(word3) != -1) {
countwords++;
}
}
if (tc[l].getHeader().toLowerCase().indexOf(word1) != -1
|| tc[l].getHeader().toLowerCase()
.indexOf(word2) != -1
|| tc[l].getHeader().toLowerCase()
.indexOf(word3) != -1)
countwords++;
}
} catch (Exception e) {
Main.logger.warn("Failed to classify column #" + l + " header.");
Main.logger.debug(e);
}
if (countwords > 0)
tc[l].setColumns_type("QTL descriptor");
if (tc[l].getColumns_type() == null) {
tc[l].setColumns_type("NotIdentified");
}
}
// filter out 1 QTL descriptor based on annotations
int num_QTLdescriptors = 0;
List<Integer> QTLdescriptorPosition = new ArrayList<Integer>();
for (int l = 0; l < tc.length; l++) {
if (tc[l].getColumns_type().equals("QTL descriptor")) {
num_QTLdescriptors++;
QTLdescriptorPosition.add(l);
}
}
if (num_QTLdescriptors > 1) {
Iterator<Integer> myListIterator = QTLdescriptorPosition.iterator();
int bestmatch = QTLdescriptorPosition.get(0);
int numofbestmatchAnnotations = 0;
try {
while (myListIterator.hasNext()) {
int numofannotatedTerms = 0;
Integer j = myListIterator.next();
tc[j].setColumns_type("QTL property");
for (int k = 0; k < tc[j].getcelz().length; k++) {
TagResponse QTLannotation = Evaluate.processString(tc[j]
.getcelz()[k]
.getcell_value()
.toLowerCase(),
Configs.getPropertyQTM("coreTraitDescriptors"),
Configs.getPropertyQTM("match"),
Configs.getPropertyQTM("type"));
if (QTLannotation.getItems().size() != 0) {
numofannotatedTerms++;
}
}
if (numofannotatedTerms > numofbestmatchAnnotations) {
numofbestmatchAnnotations = numofannotatedTerms;
bestmatch = j;
}
}
tc[bestmatch].setColumns_type("QTL descriptor");
} catch (Exception e) {
Main.logger.debug(e);
}
}
return this;
}
}
|
|
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.gateway.handlers.detecting.protocol.stomp;
import io.fabric8.gateway.handlers.detecting.protocol.Ascii;
import org.vertx.java.core.buffer.Buffer;
import java.io.UnsupportedEncodingException;
import java.util.*;
import static io.fabric8.gateway.handlers.detecting.protocol.Ascii.ascii;
import static io.fabric8.gateway.handlers.detecting.protocol.BufferSupport.startsWith;
import static io.fabric8.gateway.handlers.detecting.protocol.stomp.Constants.*;
/**
* A STOMP protocol frame.
*
*/
public class StompFrame {
public static final Buffer NO_DATA = new Buffer(new byte[]{});
static public class HeaderEntry {
public final Ascii key;
public final Ascii value;
public HeaderEntry(Ascii key, Ascii value) {
this.key = key;
this.value = value;
}
public Ascii getKey() {
return key;
}
public Ascii getValue() {
return value;
}
@Override
public String toString() {
return "" + key +
"=" + value;
}
}
private Ascii action;
private ArrayList<HeaderEntry> headerList;
private HashMap<Ascii, Ascii> headerMap = new HashMap<Ascii, Ascii>(16);
private Buffer content = NO_DATA;
public StompFrame() {
}
public StompFrame(Ascii action) {
this.action = action;
}
public StompFrame clone() {
StompFrame rc = new StompFrame(action);
if( headerList!=null ) {
rc.headerList = new ArrayList<HeaderEntry>(headerList);
rc.headerMap = null;
} else {
rc.headerMap = new HashMap<Ascii,Ascii>(headerMap);
rc.headerList = null;
}
rc.content = content;
return rc;
}
public Ascii action() {
return action;
}
public StompFrame action(Ascii action) {
assert action != null;
this.action = action;
return this;
}
public Buffer content() {
return this.content;
}
public StompFrame content(Buffer content) {
assert content != null;
this.content = content;
return this;
}
public String contentAsString() {
return content.getString(0, content.length(), "UTF-8");
}
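// Headers are kept in exactly one of two representations -- an ordered headerList or a
// headerMap -- and the accessors below convert lazily between them. When converting
// list -> map, a duplicate key keeps the first value seen while iterating the list,
// unless the key is in reversedHeaderHandling, in which case the last value seen wins.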
public Map<Ascii, Ascii> headerMap() {
return headerMap(Collections.<Ascii>emptySet());
}
public Map<Ascii, Ascii> headerMap(Set<Ascii> reversedHeaderHandling) {
if( headerMap==null ) {
headerMap = new HashMap<Ascii, Ascii>();
for (HeaderEntry entry : headerList) {
final Ascii key = entry.getKey();
Ascii old = headerMap.put(key, entry.getValue());
if( old !=null && !reversedHeaderHandling.contains(key) ) {
headerMap.put(key, old);
}
}
headerList = null;
}
return headerMap;
}
public List<HeaderEntry> headerList() {
if( headerList==null ) {
// The list must be created before it is populated from the map representation.
headerList = new ArrayList<HeaderEntry>(headerMap.size());
for (Map.Entry<Ascii,Ascii> entry : headerMap.entrySet()) {
headerList.add(new HeaderEntry(entry.getKey(), entry.getValue()));
}
headerMap = null;
}
return headerList;
}
public void addHeader(Ascii key, Ascii value) {
if( headerList!=null ) {
headerList.add(0, new HeaderEntry(key, value));
} else {
headerMap.put(key, value);
}
}
String getHeaderAsString(Ascii key) {
Ascii header = getHeader(key);
if( header !=null ) {
return decodeHeader(header.toBuffer());
}
return null;
}
public Ascii getHeader(Ascii key) {
if( headerList!=null ) {
for (HeaderEntry entry : headerList) {
if( entry.getKey().equals(key) ) {
return entry.getValue();
}
}
return null;
} else {
return headerMap.get(key);
}
}
public void clearHeaders() {
if( headerList!=null) {
headerList.clear();
} else {
headerMap.clear();
}
}
public void setHeaders(ArrayList<HeaderEntry> values) {
headerList = values;
headerMap = null;
}
/*
public Buffer toBuffer() {
return toBuffer(true);
}
*/
/*
public Buffer toBuffer(boolean includeBody) {
try {
DataByteArrayOutputStream out = new DataByteArrayOutputStream();
write(out, includeBody);
return out.toBuffer();
} catch (IOException e) {
throw new RuntimeException(e); // not expected to occur.
}
}
*/
/*
private void write(DataOutput out, Buffer buffer) throws IOException {
out.write(buffer.data, buffer.offset, buffer.length);
}
public void write(DataOutput out) throws IOException {
write(out, true);
}
*/
public void addContentLengthHeader() {
addHeader(CONTENT_LENGTH, new Ascii(Integer.toString(content.length())));
}
/*
public int size() {
int rc = action.length() + 1;
if( headerList!=null ) {
for (HeaderEntry entry : headerList) {
rc += entry.getKey().length() + entry.getValue().length() + 2;
}
} else {
for (Map.Entry<Ascii,Ascii> entry : headerMap.entrySet()) {
rc += entry.getKey().length() + entry.getValue().length() + 2;
}
}
rc += content.length() + 3;
return rc;
}
*/
/*
public void write(DataOutput out, boolean includeBody) throws IOException {
write(out, action);
out.writeByte(NEWLINE_BYTE);
if( headerList!=null ) {
for (HeaderEntry entry : headerList) {
write(out, entry.getKey());
out.writeByte(COLON_BYTE);
write(out, entry.getValue());
out.writeByte(NEWLINE_BYTE);
}
} else {
for (Map.Entry<Ascii,Ascii> entry : headerMap.entrySet()) {
write(out, entry.getKey());
out.writeByte(COLON_BYTE);
write(out, entry.getValue());
out.writeByte(NEWLINE_BYTE);
}
}
//denotes end of headers with a new line
out.writeByte(NEWLINE_BYTE);
if (includeBody) {
write(out, content);
out.writeByte(NULL_BYTE);
out.writeByte(NEWLINE_BYTE);
}
}
*/
/*
public String toString() {
return toBuffer(false).ascii().toString();
}
public String errorMessage() {
Ascii value = getHeader(MESSAGE_HEADER);
if (value != null) {
return decodeHeader(value);
} else {
return contentAsString();
}
}
*/
public static String decodeHeader(Buffer value) {
if (value == null)
return null;
Buffer rc = new Buffer(value.length());
int pos = 0;
int max = value.length();
while (pos < max) {
if (startsWith(value, pos, ESCAPE_ESCAPE_SEQ.toBuffer())) {
rc.appendByte(ESCAPE_BYTE);
pos += 2;
} else if (startsWith(value, pos, COLON_ESCAPE_SEQ.toBuffer())) {
rc.appendByte(COLON_BYTE);
pos += 2;
} else if (startsWith(value, pos, NEWLINE_ESCAPE_SEQ.toBuffer())) {
rc.appendByte(NEWLINE_BYTE);
pos += 2;
} else {
rc.appendByte(value.getByte(pos));
pos += 1;
}
}
return rc.toString();
}
public static Ascii encodeHeader(String value) {
if (value == null)
return null;
try {
byte[] data = value.getBytes("UTF-8");
Buffer rc = new Buffer(data.length);
for (byte d : data) {
switch (d) {
case ESCAPE_BYTE:
rc.appendBuffer(ESCAPE_ESCAPE_SEQ.toBuffer());
break;
case COLON_BYTE:
rc.appendBuffer(COLON_ESCAPE_SEQ.toBuffer());
break;
case NEWLINE_BYTE:
// Newlines must use the newline escape sequence, mirroring decodeHeader() above.
rc.appendBuffer(NEWLINE_ESCAPE_SEQ.toBuffer());
break;
default:
rc.appendByte(d);
}
}
return ascii(rc);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e); // not expected.
}
}
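// Illustrative sketch (not part of the original class; the literal is hypothetical):
// round-tripping a header value through the escape helpers above.
//
//   Ascii encoded = StompFrame.encodeHeader("foo:bar\n");          // ':' and '\n' are escaped
//   String decoded = StompFrame.decodeHeader(encoded.toBuffer());  // back to "foo:bar\n"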
public static Map<Ascii, Ascii> encodeHeaders(Map<String, String> headers) {
if(headers==null)
return null;
HashMap<Ascii, Ascii> rc = new HashMap<Ascii, Ascii>(headers.size());
for (Map.Entry<String, String> entry : headers.entrySet()) {
rc.put(StompFrame.encodeHeader(entry.getKey()), StompFrame.encodeHeader(entry.getValue()));
}
return rc;
}
}
|
|
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.tools.build.bundletool.device;
import static com.android.bundle.Targeting.TextureCompressionFormat.TextureCompressionFormatAlias.ASTC;
import static com.android.bundle.Targeting.TextureCompressionFormat.TextureCompressionFormatAlias.ETC2;
import static com.android.tools.build.bundletool.model.GetSizeRequest.Dimension.SDK;
import static com.android.tools.build.bundletool.model.GetSizeRequest.Dimension.TEXTURE_COMPRESSION_FORMAT;
import static com.android.tools.build.bundletool.testing.ApksArchiveHelpers.createApkDescription;
import static com.android.tools.build.bundletool.testing.ApksArchiveHelpers.createAssetSliceSet;
import static com.android.tools.build.bundletool.testing.ApksArchiveHelpers.createMasterApkDescription;
import static com.android.tools.build.bundletool.testing.DeviceFactory.sdkVersion;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apkSdkTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apkTextureTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.mergeApkTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.sdkVersionFrom;
import static com.android.tools.build.bundletool.testing.TargetingUtils.variantSdkTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.variantTextureTargeting;
import static com.google.common.truth.Truth.assertThat;
import com.android.bundle.Commands.AssetSliceSet;
import com.android.bundle.Commands.DeliveryType;
import com.android.bundle.Targeting.ApkTargeting;
import com.android.bundle.Targeting.VariantTargeting;
import com.android.tools.build.bundletool.commands.GetSizeCommand;
import com.android.tools.build.bundletool.commands.GetSizeCommand.GetSizeSubcommand;
import com.android.tools.build.bundletool.model.ConfigurationSizes;
import com.android.tools.build.bundletool.model.SizeConfiguration;
import com.android.tools.build.bundletool.model.ZipPath;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.nio.file.Paths;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class AssetModuleSizeAggregatorTest {
private static final long ASSET_1_MASTER_SIZE = 1 << 0;
private static final long ASSET_1_ETC2_SIZE = 1 << 1;
private static final long ASSET_1_ASTC_SIZE = 1 << 2;
private static final long ASSET_2_MASTER_SIZE = 1 << 3;
private static final long ASSET_2_ETC2_SIZE = 1 << 4;
private static final long ASSET_2_ASTC_SIZE = 1 << 5;
private static final AssetSliceSet ASSET_MODULE_1 =
createAssetSliceSet(
"asset1",
DeliveryType.INSTALL_TIME,
createMasterApkDescription(
apkSdkTargeting(sdkVersionFrom(21)), ZipPath.create("asset1-master.apk")),
createApkDescription(
mergeApkTargeting(
apkTextureTargeting(ETC2, ImmutableSet.of(ASTC)),
apkSdkTargeting(sdkVersionFrom(21))),
ZipPath.create("asset1-tcf_etc2.apk"),
/* isMasterSplit= */ false),
createApkDescription(
mergeApkTargeting(
apkTextureTargeting(ASTC, ImmutableSet.of(ETC2)),
apkSdkTargeting(sdkVersionFrom(21))),
ZipPath.create("asset1-tcf_astc.apk"),
/* isMasterSplit= */ false));
private static final AssetSliceSet ASSET_MODULE_2 =
createAssetSliceSet(
"asset2",
DeliveryType.INSTALL_TIME,
createMasterApkDescription(
apkSdkTargeting(sdkVersionFrom(21)), ZipPath.create("asset2-master.apk")),
createApkDescription(
mergeApkTargeting(
apkTextureTargeting(ETC2, ImmutableSet.of(ASTC)),
apkSdkTargeting(sdkVersionFrom(21))),
ZipPath.create("asset2-tcf_etc2.apk"),
/* isMasterSplit= */ false),
createApkDescription(
mergeApkTargeting(
apkTextureTargeting(ASTC, ImmutableSet.of(ETC2)),
apkSdkTargeting(sdkVersionFrom(21))),
ZipPath.create("asset2-tcf_astc.apk"),
/* isMasterSplit= */ false));
private static final ImmutableMap<String, Long> SIZE_BY_APK_PATHS =
ImmutableMap.<String, Long>builder()
.put("asset1-master.apk", ASSET_1_MASTER_SIZE)
.put("asset1-tcf_etc2.apk", ASSET_1_ETC2_SIZE)
.put("asset1-tcf_astc.apk", ASSET_1_ASTC_SIZE)
.put("asset2-master.apk", ASSET_2_MASTER_SIZE)
.put("asset2-tcf_etc2.apk", ASSET_2_ETC2_SIZE)
.put("asset2-tcf_astc.apk", ASSET_2_ASTC_SIZE)
.build();
private final GetSizeCommand.Builder getSizeCommand =
GetSizeCommand.builder()
.setApksArchivePath(Paths.get("dummy.apks"))
.setGetSizeSubCommand(GetSizeSubcommand.TOTAL);
@Test
public void getSize_noAssetModules() throws Exception {
ConfigurationSizes configurationSizes =
new AssetModuleSizeAggregator(
ImmutableList.of(),
VariantTargeting.getDefaultInstance(),
ImmutableMap.of(),
getSizeCommand.build())
.getSize();
assertThat(configurationSizes.getMinSizeConfigurationMap())
.containsExactly(SizeConfiguration.getDefaultInstance(), 0L);
assertThat(configurationSizes.getMaxSizeConfigurationMap())
.containsExactly(SizeConfiguration.getDefaultInstance(), 0L);
}
@Test
public void getSize_singleAssetModule_noTargeting() throws Exception {
ImmutableList<AssetSliceSet> assetModules =
ImmutableList.of(
createAssetSliceSet(
"asset1",
DeliveryType.INSTALL_TIME,
createMasterApkDescription(
ApkTargeting.getDefaultInstance(), ZipPath.create("asset1-master.apk"))));
VariantTargeting variantTargeting = VariantTargeting.getDefaultInstance();
ImmutableMap<String, Long> sizeByApkPaths = ImmutableMap.of("asset1-master.apk", 10L);
ConfigurationSizes configurationSizes =
new AssetModuleSizeAggregator(
assetModules, variantTargeting, sizeByApkPaths, getSizeCommand.build())
.getSize();
assertThat(configurationSizes.getMinSizeConfigurationMap())
.containsExactly(SizeConfiguration.getDefaultInstance(), 10L);
assertThat(configurationSizes.getMaxSizeConfigurationMap())
.containsExactly(SizeConfiguration.getDefaultInstance(), 10L);
}
@Test
public void getSize_multipleAssetModules_withTargeting() throws Exception {
ImmutableList<AssetSliceSet> assetModules = ImmutableList.of(ASSET_MODULE_1, ASSET_MODULE_2);
VariantTargeting variantTargeting = variantSdkTargeting(21);
ConfigurationSizes configurationSizes =
new AssetModuleSizeAggregator(
assetModules,
variantTargeting,
SIZE_BY_APK_PATHS,
getSizeCommand.setDimensions(ImmutableSet.of(TEXTURE_COMPRESSION_FORMAT)).build())
.getSize();
assertThat(configurationSizes.getMinSizeConfigurationMap())
.containsExactly(
SizeConfiguration.builder().setTextureCompressionFormat("etc2").build(),
ASSET_1_MASTER_SIZE + ASSET_1_ETC2_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ETC2_SIZE,
SizeConfiguration.builder().setTextureCompressionFormat("astc").build(),
ASSET_1_MASTER_SIZE + ASSET_1_ASTC_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ASTC_SIZE);
assertThat(configurationSizes.getMaxSizeConfigurationMap())
.containsExactly(
SizeConfiguration.builder().setTextureCompressionFormat("etc2").build(),
ASSET_1_MASTER_SIZE + ASSET_1_ETC2_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ETC2_SIZE,
SizeConfiguration.builder().setTextureCompressionFormat("astc").build(),
ASSET_1_MASTER_SIZE + ASSET_1_ASTC_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ASTC_SIZE);
}
@Test
public void getSize_multipleAssetModules_withDeviceSpecAndVariantTargeting() throws Exception {
ImmutableList<AssetSliceSet> assetModules = ImmutableList.of(ASSET_MODULE_1, ASSET_MODULE_2);
VariantTargeting variantTargeting = variantTextureTargeting(ETC2);
ConfigurationSizes configurationSizes =
new AssetModuleSizeAggregator(
assetModules,
variantTargeting,
SIZE_BY_APK_PATHS,
getSizeCommand
.setDimensions(ImmutableSet.of(TEXTURE_COMPRESSION_FORMAT, SDK))
.setDeviceSpec(sdkVersion(21))
.build())
.getSize();
assertThat(configurationSizes.getMinSizeConfigurationMap())
.containsExactly(
SizeConfiguration.builder()
.setTextureCompressionFormat("etc2")
.setSdkVersion("21")
.build(),
ASSET_1_MASTER_SIZE + ASSET_1_ETC2_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ETC2_SIZE);
assertThat(configurationSizes.getMaxSizeConfigurationMap())
.containsExactly(
SizeConfiguration.builder()
.setTextureCompressionFormat("etc2")
.setSdkVersion("21")
.build(),
ASSET_1_MASTER_SIZE + ASSET_1_ETC2_SIZE + ASSET_2_MASTER_SIZE + ASSET_2_ETC2_SIZE);
}
}
|
|
/*
* Copyright (c) 2015, Abhishek Dabholkar
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package xyz.abhid.newsletterh.util;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Environment;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import android.view.View;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.util.Locale;
@SuppressWarnings("unused")
public class AndroidUtils {
private static final int DEFAULT_BUFFER_SIZE = 8192;
public static boolean isMarshmallowOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M;
}
public static boolean isLollipopMR1OrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1;
}
public static boolean isLollipopOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
}
public static boolean isKitKatWatchOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH;
}
public static boolean isKitKatOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
}
public static boolean isJellyBeanMR2OrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
}
public static boolean isJellyBeanMR1OrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
}
public static boolean isJellyBeanOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN;
}
public static boolean isICSMR1OrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1;
}
public static boolean isICSOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH;
}
public static boolean isHoneycombOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
}
public static boolean isGingerbreadOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
}
public static boolean isGoogleTV(Context context) {
PackageManager packageManager = context.getPackageManager();
return packageManager != null && packageManager.hasSystemFeature("com.google.android.tv");
}
/**
* Checks if {@link Environment}.MEDIA_MOUNTED is returned by {@code getExternalStorageState()}
* and therefore external storage is read- and writeable.
*/
public static boolean isExtStorageAvailable() {
return Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState());
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static boolean isRtlLayout() {
if (AndroidUtils.isJellyBeanMR1OrHigher()) {
int direction = TextUtils.getLayoutDirectionFromLocale(Locale.getDefault());
return direction == View.LAYOUT_DIRECTION_RTL;
}
return false;
}
@Nullable
private static NetworkInfo getActiveNetworkInfo(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
if (connectivityManager == null) {
return null;
}
return connectivityManager.getActiveNetworkInfo();
}
/**
* Whether there is an active network connection.
*/
public static boolean isNetworkConnected(Context context) {
NetworkInfo activeNetworkInfo = getActiveNetworkInfo(context);
return activeNetworkInfo != null && activeNetworkInfo.isConnected();
}
/**
* Whether there is an active network connection and it is via WiFi.
* <p/>
* <p>If you want to check whether to transmit large amounts of data, you may want to use {@link
* #isUnmeteredNetworkConnected(Context)}.
*/
public static boolean isWifiConnected(Context context) {
NetworkInfo activeNetwork = getActiveNetworkInfo(context);
return activeNetwork != null && activeNetwork.isConnected()
&& activeNetwork.getType() == ConnectivityManager.TYPE_WIFI;
}
/**
* Whether there is an active network connection and it is not metered, e.g. so large amounts of
* data may be transmitted.
*/
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public static boolean isUnmeteredNetworkConnected(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
if (connectivityManager == null) {
return false;
}
NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
return activeNetworkInfo != null && activeNetworkInfo.isConnected()
&& !connectivityManager.isActiveNetworkMetered();
}
/**
* Copies the contents of one file to the other using {@link FileChannel}s.
*
* @param src source {@link File}
* @param dst destination {@link File}
*/
public static void copyFile(File src, File dst) throws IOException {
FileInputStream in = new FileInputStream(src);
FileOutputStream out = new FileOutputStream(dst);
FileChannel inChannel = in.getChannel();
FileChannel outChannel = out.getChannel();
try {
inChannel.transferTo(0, inChannel.size(), outChannel);
} finally {
if (inChannel != null) {
inChannel.close();
}
if (outChannel != null) {
outChannel.close();
}
}
in.close();
out.close();
}
/**
* Copies data from one input stream to the other using an 8 KB buffer.
*
* @param input {@link InputStream}
* @param output {@link OutputStream}
* @return the number of bytes copied
*/
public static int copy(InputStream input, OutputStream output) throws IOException {
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int count = 0;
int n;
while (-1 != (n = input.read(buffer))) {
output.write(buffer, 0, n);
count += n;
}
return count;
}
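// Illustrative sketch (hypothetical file names): streaming one file into another with
// the copy() helper above, closing both streams via try-with-resources.
//
//   try (InputStream in = new FileInputStream(srcFile);
//        OutputStream out = new FileOutputStream(dstFile)) {
//       int bytesCopied = AndroidUtils.copy(in, out);
//   }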
/**
* Execute an {@link AsyncTask} on a thread pool.
*
* @param task Task to execute.
* @param args Optional arguments to pass to {@link AsyncTask#execute(Object[])}.
* @param <T> Task argument type.
*/
@SafeVarargs
@TargetApi(11)
public static <T> void executeOnPool(AsyncTask<T, ?, ?> task, T... args) {
// TODO figure out how to subclass abstract and generalized AsyncTask,
// then put this there
if (AndroidUtils.isHoneycombOrHigher()) {
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, args);
} else {
task.execute(args);
}
}
/**
* Executes the {@link android.os.AsyncTask} on the {@link android.os.AsyncTask#SERIAL_EXECUTOR},
* e.g. one after another.
* <p/>
* <p> This is useful for executing non-blocking operations (e.g. NO network activity, etc.).
*/
@SafeVarargs
public static <T> AsyncTask executeInOrder(AsyncTask<T, ?, ?> task, T... args) {
return task.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, args);
}
}
|
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.axis.utils.v201506.shopping;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.utils.AxisSerializer;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterionOperation;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterionPage;
import com.google.api.ads.adwords.axis.v201506.cm.BidSource;
import com.google.api.ads.adwords.axis.v201506.cm.BiddableAdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201506.cm.Bids;
import com.google.api.ads.adwords.axis.v201506.cm.CpcBid;
import com.google.api.ads.adwords.axis.v201506.cm.Money;
import com.google.api.ads.adwords.axis.v201506.cm.NegativeAdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.Operator;
import com.google.api.ads.adwords.axis.v201506.cm.ProductBrand;
import com.google.api.ads.adwords.axis.v201506.cm.ProductDimension;
import com.google.api.ads.adwords.axis.v201506.cm.ProductPartition;
import com.google.api.ads.adwords.axis.v201506.cm.ProductPartitionType;
import com.google.api.ads.adwords.axis.v201506.cm.UserStatus;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.testing.MockHttpIntegrationTest;
import com.google.api.client.auth.oauth2.BearerToken;
import com.google.api.client.auth.oauth2.Credential;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.axis.encoding.SerializationContext;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.xml.sax.Attributes;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import javax.xml.namespace.QName;
/**
* Tests for {@link ProductPartitionTree}.
*/
@RunWith(JUnit4.class)
public class ProductPartitionTreeTest extends MockHttpIntegrationTest {
private final BiddingStrategyConfiguration biddingStrategyConfig =
new BiddingStrategyConfiguration();
/**
* Tests creating an empty ad group tree. In this case, all operations generated should be ADD
* operations.
*/
@Test
public void testCreateEmptyTree() {
ProductPartitionTree tree = ProductPartitionTree.createAdGroupTree(-1L, biddingStrategyConfig,
Collections.<AdGroupCriterion>emptyList());
assertNotNull("Even an empty tree should automatically have a root node", tree.getRoot());
assertTrue("The root node for an empty tree should have a negative (temporary) ID",
tree.getRoot().getProductPartitionId().longValue() < 0L);
assertTrue("The root node for an empty tree should be a UNIT", tree.getRoot().isUnit());
List<AdGroupCriterionOperation> mutateOperations = tree.getMutateOperations();
assertEquals("Number of operations is incorrect", 1, mutateOperations.size());
AdGroupCriterionOperation operation = mutateOperations.iterator().next();
assertEquals("Should have a single operation to ADD the root node", Operator.ADD,
operation.getOperator());
BiddableAdGroupCriterion adGroupCriterion = (BiddableAdGroupCriterion) operation.getOperand();
assertNull("Product dimension of operation's operand should be null",
((ProductPartition) adGroupCriterion.getCriterion()).getCaseValue());
assertTrue("Partition ID of the operand should be negative",
adGroupCriterion.getCriterion().getId().longValue() < 0L);
}
/**
* Tests creating a tree that in its <em>final</em> state is just an empty tree.
*/
@Test
public void testCreateUltimatelyEmptyTree() {
ProductPartitionTree tree = ProductPartitionTree.createAdGroupTree(-1L, biddingStrategyConfig,
Collections.<AdGroupCriterion>emptyList());
ProductPartitionNode rootNode = tree.getRoot().asSubdivision();
ProductPartitionNode brand1 =
rootNode.addChild(ProductDimensions.createBrand("google")).asSubdivision();
brand1.addChild(ProductDimensions.createOfferId("A")).asBiddableUnit().setBid(1000000L);
brand1.addChild(ProductDimensions.createOfferId(null)).asExcludedUnit();
ProductPartitionNode brand2 =
rootNode.addChild(ProductDimensions.createBrand(null)).asExcludedUnit();
// Now remove the two child nodes under the root and set the root back to a UNIT. This should
// result in operations that simply create the root node.
rootNode.removeChild(brand1.getDimension());
rootNode.removeChild(brand2.getDimension());
rootNode = rootNode.asBiddableUnit();
List<AdGroupCriterionOperation> mutateOperations = tree.getMutateOperations();
assertEquals("Number of operations is incorrect", 1, mutateOperations.size());
AdGroupCriterionOperation operation = mutateOperations.iterator().next();
assertEquals("Should have a single operation to ADD the root node", Operator.ADD,
operation.getOperator());
BiddableAdGroupCriterion adGroupCriterion = (BiddableAdGroupCriterion) operation.getOperand();
assertNull("Product dimension of operation's operand should be null",
((ProductPartition) adGroupCriterion.getCriterion()).getCaseValue());
assertTrue("Partition ID of the operand should be negative",
adGroupCriterion.getCriterion().getId().longValue() < 0L);
}
/**
* Tests mutating an existing tree with multiple nodes.
*/
@Test
public void testMutateMultiNodeTree() {
List<AdGroupCriterion> adGroupCriteria = Lists.newArrayList();
List<CriterionDescriptor> descriptors = Lists.newArrayList();
descriptors.add(new CriterionDescriptor(false, false, null, null, 1L, null));
ProductBrand brandGoogle = ProductDimensions.createBrand("google");
descriptors.add(new CriterionDescriptor(false, false, brandGoogle, null, 2L, 1L));
descriptors.add(new CriterionDescriptor(true,
false,
ProductDimensions.createOfferId("A"),
1000000L,
3L,
2L));
Long offerBOriginalPartitionId = 4L;
descriptors.add(new CriterionDescriptor(true,
true,
ProductDimensions.createOfferId("B"),
null,
offerBOriginalPartitionId,
2L));
Long brandOtherOriginalPartitionId = 5L;
descriptors.add(new CriterionDescriptor(true,
true,
ProductDimensions.createBrand(null),
null,
brandOtherOriginalPartitionId,
1L));
ProductBrand brandMotorola = ProductDimensions.createBrand("motorola");
Long brandMotorolaOriginalPartitionId = 6L;
descriptors.add(new CriterionDescriptor(true,
true,
brandMotorola,
null,
brandMotorolaOriginalPartitionId,
1L));
for (CriterionDescriptor descriptor : descriptors) {
adGroupCriteria.add(descriptor.createCriterion());
}
Map<Long, Map<Long, CriterionDescriptor>> descriptorMap = buildDescriptorMap(descriptors);
ProductPartitionTree tree =
ProductPartitionTree.createAdGroupTree(-1L, biddingStrategyConfig, adGroupCriteria);
assertEquals("ad group ID is incorrect", -1L, tree.getAdGroupId().longValue());
Queue<ProductPartitionNode> nodes = Lists.newLinkedList();
nodes.add(tree.getRoot());
int nodesFound = 0;
while (!nodes.isEmpty()) {
ProductPartitionNode node = nodes.remove();
Long parentId = node.getParent() == null ? null : node.getParent().getProductPartitionId();
CriterionDescriptor expectedDescriptor =
descriptorMap.get(parentId).get(node.getProductPartitionId());
CriterionDescriptor actualDescriptor = new CriterionDescriptor(node);
expectedDescriptor.assertDescriptorEquals(actualDescriptor);
// Add children to process.
Iterables.addAll(nodes, node.getChildren());
nodesFound++;
}
assertEquals("Tree does not contain the expected # of nodes", adGroupCriteria.size(),
nodesFound);
// Change the bids on leaf nodes.
ProductPartitionNode brandGoogleNode = tree.getRoot().getChild(brandGoogle);
ProductPartitionNode offerANode =
brandGoogleNode.getChild(ProductDimensions.createOfferId("A"));
// This should produce 1 SET operation.
offerANode.setBid(offerANode.getBid() * 10);
// This should produce 1 REMOVE operation + 1 ADD operation.
ProductPartitionNode offerBNode =
brandGoogleNode.getChild(ProductDimensions.createOfferId("B"));
offerBNode.asBiddableUnit().setBid(5000000L);
// This should produce 1 REMOVE operation + 1 ADD operation.
ProductPartitionNode brandOtherNode =
tree.getRoot().getChild(ProductDimensions.createBrand(null));
brandOtherNode = brandOtherNode.asBiddableUnit();
// Add an offer C node. This should produce 1 ADD operation.
ProductPartitionNode offerCNode = brandGoogleNode.addChild(ProductDimensions.createOfferId("C"))
.asBiddableUnit().setBid(1500000L);
// Remove the brand Motorola node. This should produce 1 REMOVE operation.
tree.getRoot().removeChild(brandMotorola);
// Get the mutate operations generated by the modifications made to the tree.
List<AdGroupCriterionOperation> mutateOperations = tree.getMutateOperations();
assertEquals(7, mutateOperations.size());
// Put the mutate operations in a map keyed by partition ID.
Map<Long, CriterionDescriptor> opsDescriptorMap = Maps.newHashMap();
int i = 0;
for (AdGroupCriterionOperation mutateOperation : mutateOperations) {
CriterionDescriptor descriptor = new CriterionDescriptor(mutateOperation.getOperand(), i++);
opsDescriptorMap.put(descriptor.partitionId, descriptor);
}
// Check the node that simply had a bid update.
int setOpNumber = opsDescriptorMap.get(offerANode.getProductPartitionId()).operationNumber;
assertEquals("Offer A node with a bid update should have a SET operation", Operator.SET,
mutateOperations.get(setOpNumber).getOperator());
// Check the offer B node that went from excluded to biddable.
int addOfferBOpNumber =
opsDescriptorMap.get(offerBNode.getProductPartitionId()).operationNumber;
assertEquals("Offer B node with a biddable change should have an add operation for the new ID",
Operator.ADD, mutateOperations.get(addOfferBOpNumber).getOperator());
int removeOfferBOpNumber = opsDescriptorMap.get(offerBOriginalPartitionId).operationNumber;
assertEquals(
"Offer B node with a biddable change should have a remove operation for the original ID",
Operator.REMOVE, mutateOperations.get(removeOfferBOpNumber).getOperator());
// Check the offer C node that was added.
int addOfferCOpNumber =
opsDescriptorMap.get(offerCNode.getProductPartitionId()).operationNumber;
assertEquals("New offer C node should have an add operation for the new ID", Operator.ADD,
mutateOperations.get(addOfferCOpNumber).getOperator());
// Check the brand null node that went from excluded to biddable.
int addBrandOtherOpNumber =
opsDescriptorMap.get(brandOtherNode.getProductPartitionId()).operationNumber;
assertEquals(
"Brand null node with a biddable change should have an add operation for the new ID",
Operator.ADD, mutateOperations.get(addBrandOtherOpNumber).getOperator());
    int brandOtherOpNumber = opsDescriptorMap.get(brandOtherOriginalPartitionId).operationNumber;
assertEquals(
"Brand null node with a biddable change should have a remove operation for the original ID",
Operator.REMOVE, mutateOperations.get(brandOtherOpNumber).getOperator());
// Check the brand Motorola node that was removed.
int brandMotorolaOpNumber =
opsDescriptorMap.get(brandMotorolaOriginalPartitionId).operationNumber;
assertEquals("Removed node should have a remove operation", Operator.REMOVE,
mutateOperations.get(brandMotorolaOpNumber).getOperator());
}
/**
* Tests creating an empty tree and then adding several levels of nodes.
*/
@Test
public void testCreateMultiNodeTreeFromScratch() {
ProductPartitionTree tree = ProductPartitionTree.createAdGroupTree(-1L, biddingStrategyConfig,
Collections.<AdGroupCriterion>emptyList());
ProductPartitionNode rootNode = tree.getRoot().asSubdivision();
ProductPartitionNode brand1 =
rootNode.addChild(ProductDimensions.createBrand("google")).asSubdivision();
ProductPartitionNode brand1Offer1 =
brand1.addChild(ProductDimensions.createOfferId("A")).asBiddableUnit().setBid(1000000L);
ProductPartitionNode brand1Offer2 =
brand1.addChild(ProductDimensions.createOfferId(null)).asExcludedUnit();
ProductPartitionNode brand2 =
rootNode.addChild(ProductDimensions.createBrand(null)).asExcludedUnit();
int expectedOpCount = 5;
List<AdGroupCriterionOperation> mutateOperations = tree.getMutateOperations();
assertEquals("Number of operations is incorrect", expectedOpCount, mutateOperations.size());
List<CriterionDescriptor> nodeDescriptors = Lists.newArrayList();
for (ProductPartitionNode node :
Arrays.asList(rootNode, brand1, brand1Offer1, brand1Offer2, brand2)) {
nodeDescriptors.add(new CriterionDescriptor(node));
}
int opNum = 0;
List<CriterionDescriptor> opDescriptors = Lists.newArrayList();
Map<Long, CriterionDescriptor> opDescriptorsById = Maps.newHashMap();
for (AdGroupCriterionOperation op : mutateOperations) {
CriterionDescriptor opDescriptor = new CriterionDescriptor(op.getOperand(), opNum++);
opDescriptors.add(opDescriptor);
opDescriptorsById.put(opDescriptor.partitionId, opDescriptor);
}
Map<Long, Map<Long, CriterionDescriptor>> opDescriptorMap = buildDescriptorMap(opDescriptors);
for (CriterionDescriptor nodeDescriptor : nodeDescriptors) {
CriterionDescriptor opDescriptor =
opDescriptorMap.get(nodeDescriptor.parentPartitionId).get(nodeDescriptor.partitionId);
nodeDescriptor.assertDescriptorEquals(opDescriptor);
AdGroupCriterionOperation op = mutateOperations.get(opDescriptor.operationNumber);
assertEquals("operator is incorrect", Operator.ADD, op.getOperator());
if (nodeDescriptor.parentPartitionId != null) {
CriterionDescriptor parentOpDescriptor =
opDescriptorsById.get(nodeDescriptor.parentPartitionId);
assertNotNull("no operation found for parent", parentOpDescriptor);
assertThat("operation # for parent is > operation # for child",
opDescriptor.operationNumber, Matchers.greaterThan(parentOpDescriptor.operationNumber));
}
}
assertThat("Tree toString does not contain the root's detailed toString", tree.toString(),
Matchers.containsString(tree.getRoot().toDetailedString()));
assertThat("Tree toString does not contain the ad group ID", tree.toString(),
Matchers.containsString(tree.getAdGroupId().toString()));
}
/**
* Tests that the factory method ignores removed criteria.
*/
@Test
public void testRemovedCriteriaIgnored() {
CriterionDescriptor rootDescriptor =
new CriterionDescriptor(true, false, null, 1000000L, 1L, null);
List<AdGroupCriterion> criteria = Lists.newArrayList();
criteria.add(rootDescriptor.createCriterion());
    // Create a criterion for a child node and set its UserStatus to REMOVED.
ProductBrand brandGoogle = ProductDimensions.createBrand("google");
CriterionDescriptor removedDescriptor =
new CriterionDescriptor(true, false, brandGoogle, null, 2L, 1L);
AdGroupCriterion removedCriterion = removedDescriptor.createCriterion();
((BiddableAdGroupCriterion) removedCriterion).setUserStatus(UserStatus.REMOVED);
criteria.add(removedCriterion);
ProductPartitionTree tree =
ProductPartitionTree.createAdGroupTree(-1L, biddingStrategyConfig, criteria);
assertFalse("Brand = google criteria had status removed, but it is in the tree",
tree.getRoot().hasChild(brandGoogle));
}
/**
* Tests that the factory method that retrieves the tree using API services builds
* the correct tree and passes the correct paging arguments.
*/
@Test
public void testCreateTreeUsingService() throws Exception {
AdWordsServices adWordsServices = new AdWordsServices();
AdWordsSession session =
new AdWordsSession.Builder()
.withClientCustomerId("123-456-7890")
.withOAuth2Credential(new Credential(BearerToken.authorizationHeaderAccessMethod()))
.withDeveloperToken("devtoken")
.withUserAgent("test")
// Use the test server's endpoint
.withEndpoint(testHttpServer.getServerUrl())
.build();
// Extract the API version from this test's package.
List<String> packageComponents =
Lists.newArrayList(Splitter.on('.').split(getClass().getPackage().getName()));
final String apiVersion = packageComponents.get(packageComponents.size() - 2);
final int pageSize = 100;
final int numberOfCriteria = (pageSize * 5) + 1;
// Construct a list of CriterionDescriptors that will build a tree of the form:
// root
// OfferId = null EXCLUDED
// OfferId = 1 BIDDABLE
// OfferId = 2 BIDDABLE
// ...
    //   OfferId = numberOfCriteria - 2 BIDDABLE
List<CriterionDescriptor> descriptors = Lists.newArrayList();
long partitionId = 1L;
final long rootPartitionId = partitionId;
descriptors.add(new CriterionDescriptor(false, false, null, null, partitionId++, null));
descriptors.add(new CriterionDescriptor(
true, true, ProductDimensions.createOfferId(null), null, partitionId++, rootPartitionId));
for (int i = 1; i <= (numberOfCriteria - 2); i++) {
descriptors.add(
new CriterionDescriptor(true, false, ProductDimensions.createOfferId(Integer.toString(i)),
10000000L, partitionId++, rootPartitionId));
}
// Split the descriptor list into batches of size pageSize.
List<List<CriterionDescriptor>> descriptorBatches = Lists.partition(descriptors, pageSize);
List<String> responseBodies = Lists.newArrayList();
for (List<CriterionDescriptor> descriptorBatch : descriptorBatches) {
// For this batch of descriptors, manually construct the AdGroupCriterionPage
// to return. This is required because AdWordsServices is a final class, so this test
// cannot mock its behavior.
AdGroupCriterionPage mockPage = new AdGroupCriterionPage();
mockPage.setTotalNumEntries(numberOfCriteria);
mockPage.setEntries(new AdGroupCriterion[descriptorBatch.size()]);
int i = 0;
for (CriterionDescriptor descriptor : descriptorBatch) {
mockPage.setEntries(i++, descriptor.createCriterion());
}
// Serialize the page.
StringWriter writer = new StringWriter();
SerializationContext serializationContext = new SerializationContext(writer) {
/**
* Override the serialize method called by the Axis serializer and force it to
* pass {@code includeNull = false}.
*/
@SuppressWarnings("rawtypes")
@Override
public void serialize(QName elemQName, Attributes attributes, Object value, QName xmlType,
Class javaType) throws IOException {
super.serialize(elemQName, attributes, value, xmlType, javaType, false, null);
}
};
serializationContext.setSendDecl(false);
new AxisSerializer().serialize(mockPage, serializationContext);
// Wrap the serialized page in a SOAP envelope.
StringBuilder response = new StringBuilder();
response.append("<soap:Envelope xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">"
+ "<soap:Header/><soap:Body>");
response.append(String.format(
"<getResponse xmlns=\"https://adwords.google.com/api/adwords/cm/%s\">", apiVersion));
// Replace the element name AdGroupCriterionPage with the expected name rval in the
// serialized page.
response.append(writer.toString().replaceAll("AdGroupCriterionPage", "rval"));
response.append("</getResponse></soap:Body></soap:Envelope>");
responseBodies.add(response.toString());
}
// Set the test server to return the response bodies constructed above.
testHttpServer.setMockResponseBodies(responseBodies);
// Build the tree.
ProductPartitionTree tree = ProductPartitionTree.createAdGroupTree(
adWordsServices, session, 9999L/* dummy ad group ID */);
// First, confirm that the paging elements were correct in each request's selector.
int requestNumber = 0;
for (String requestBody : testHttpServer.getAllRequestBodies()) {
int expectedOffset = requestNumber * pageSize;
assertThat("numberResults paging element is missing or incorrect in request", requestBody,
Matchers.containsString("numberResults>" + pageSize + "</"));
if (requestNumber == 0) {
assertThat("startIndex paging element unexpectedly found in the first request", requestBody,
Matchers.not(Matchers.containsString("startIndex>")));
} else {
assertThat("startIndex paging element is missing or incorrect in request", requestBody,
Matchers.containsString("startIndex>" + expectedOffset + "</"));
}
requestNumber++;
}
// Confirm that the tree returned by the factory method matches the expected tree.
descriptors.get(0).assertDescriptorEquals(new CriterionDescriptor(tree.getRoot()));
// Get a map of all of the child descriptors for the root node.
Map<Long, CriterionDescriptor> descriptorMap =
buildDescriptorMap(descriptors).get(rootPartitionId);
// Confirm each ProductPartitionNode under the root node has a matching entry in the descriptor
// map.
int childrenFound = 0;
for (ProductPartitionNode childNode : tree.getRoot().getChildren()) {
CriterionDescriptor nodeDescriptor = new CriterionDescriptor(childNode);
nodeDescriptor.assertDescriptorEquals(descriptorMap.get(nodeDescriptor.partitionId));
childrenFound++;
}
assertEquals("Did not find an entry in the response for every expected child node",
descriptorMap.size(), childrenFound);
}
/**
* Returns a map from parentPartitionId to map of partitionId to CriterionDescriptor.
*/
private Map<Long, Map<Long, CriterionDescriptor>> buildDescriptorMap(
Iterable<CriterionDescriptor> descriptors) {
Map<Long, Map<Long, CriterionDescriptor>> descriptorMap = Maps.newHashMap();
for (CriterionDescriptor descriptor : descriptors) {
Map<Long, CriterionDescriptor> mapForParent = descriptorMap.get(descriptor.parentPartitionId);
if (mapForParent == null) {
mapForParent = Maps.newHashMap();
descriptorMap.put(descriptor.parentPartitionId, mapForParent);
}
CriterionDescriptor existingDescriptor = mapForParent.put(descriptor.partitionId, descriptor);
assertNull("Multiple descriptors found for parent ID " + descriptor.parentPartitionId
+ " and partition ID " + descriptor.partitionId, existingDescriptor);
}
return descriptorMap;
}
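  // Illustrative shape of the returned map for the descriptors in testMutateMultiNodeTree
  // (outer key = parent partition ID, inner key = partition ID):
  //   { null -> { 1 -> root },
  //     1    -> { 2 -> brand "google", 5 -> brand null, 6 -> brand "motorola" },
  //     2    -> { 3 -> offer "A", 4 -> offer "B" } }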
/**
* Helper class that describes an {@link AdGroupCriterion}.
*/
private static class CriterionDescriptor {
private final boolean isUnit;
private final ProductDimension dimension;
private final boolean isExcluded;
private final Long bid;
private final Long partitionId;
private final Long parentPartitionId;
private final Integer operationNumber;
/**
* Creates a new instance based on explicitly provided attribute values.
*/
CriterionDescriptor(boolean isUnit,
boolean isExcluded,
ProductDimension dimension,
Long bid,
Long partitionId,
Long parentPartitionId) {
// Add a few sanity checks to catch coding errors in tests as early as possible.
if (isExcluded) {
Preconditions.checkArgument(isUnit, "Cannot exclude a non-unit");
Preconditions.checkArgument(bid == null,
"Cannot specify a non-null bid for an excluded criterion");
}
Preconditions.checkNotNull(partitionId, "Null partition ID");
if (dimension == null) {
Preconditions.checkArgument(parentPartitionId == null,
"Dimension is null (root node) but parent ID specified");
} else {
Preconditions.checkNotNull(parentPartitionId,
"Dimension is not null but parent ID is null");
}
this.isUnit = isUnit;
this.dimension = dimension;
this.isExcluded = isExcluded;
this.bid = bid;
this.partitionId = partitionId;
this.parentPartitionId = parentPartitionId;
this.operationNumber = null;
}
/**
* Creates a new instance based on a ProductPartitionNode.
*/
CriterionDescriptor(ProductPartitionNode node) {
Preconditions.checkNotNull(node, "node is null");
this.isUnit = node.isUnit();
this.dimension = node.getDimension();
this.isExcluded = node.isExcludedUnit();
this.bid = node.getBid();
this.partitionId = node.getProductPartitionId();
if (node.getParent() != null) {
this.parentPartitionId = node.getParent().getProductPartitionId();
} else {
this.parentPartitionId = null;
}
this.operationNumber = null;
}
CriterionDescriptor(AdGroupCriterion adGroupCriterion, int operationNumber) {
Preconditions.checkNotNull(adGroupCriterion, "Null ad group criterion");
ProductPartition partition = (ProductPartition) adGroupCriterion.getCriterion();
this.isUnit = ProductPartitionType.UNIT.equals(partition.getPartitionType());
this.dimension = partition.getCaseValue();
this.partitionId = partition.getId();
this.parentPartitionId = partition.getParentCriterionId();
if (adGroupCriterion instanceof BiddableAdGroupCriterion) {
BiddableAdGroupCriterion biddableCriterion = (BiddableAdGroupCriterion) adGroupCriterion;
this.isExcluded = false;
BiddingStrategyConfiguration biddingConfig =
biddableCriterion.getBiddingStrategyConfiguration();
Long bidAmount = null;
if (biddingConfig != null) {
Bids[] bids = biddingConfig.getBids();
if (bids != null) {
for (Bids bid : bids) {
CpcBid cpcBid = (CpcBid) bid;
if (BidSource.CRITERION.equals(cpcBid.getCpcBidSource())) {
bidAmount = cpcBid.getBid().getMicroAmount();
break;
}
}
}
}
this.bid = bidAmount;
} else {
this.isExcluded = true;
this.bid = null;
}
this.operationNumber = operationNumber;
}
/**
* Asserts that this object matches {@code other} on each attribute.
*/
void assertDescriptorEquals(CriterionDescriptor other) {
assertNotNull("Null descriptor compared to: " + this, other);
assertEquals("dimension does not match", 0,
new ProductDimensionComparator().compare(this.dimension, other.dimension));
assertEquals("bid is incorrect", this.bid, other.bid);
assertEquals("isUnit is incorrect", this.isUnit, other.isUnit);
assertEquals("isExcludedUnit is incorrect", this.isExcluded, other.isExcluded);
}
/**
* Returns a new AdGroupCriterion based on this descriptor.
*/
AdGroupCriterion createCriterion() {
AdGroupCriterion adGroupCriterion;
ProductPartition partition = new ProductPartition();
partition.setId(partitionId);
partition.setParentCriterionId(parentPartitionId);
partition.setCaseValue(dimension);
partition.setPartitionType(
isUnit ? ProductPartitionType.UNIT : ProductPartitionType.SUBDIVISION);
if (isExcluded) {
NegativeAdGroupCriterion negative = new NegativeAdGroupCriterion();
adGroupCriterion = negative;
} else {
BiddableAdGroupCriterion biddable = new BiddableAdGroupCriterion();
biddable.setUserStatus(UserStatus.ENABLED);
BiddingStrategyConfiguration biddingConfig = new BiddingStrategyConfiguration();
if (isUnit && bid != null) {
CpcBid cpcBid = new CpcBid();
Money bidMoney = new Money();
bidMoney.setMicroAmount(bid);
cpcBid.setBid(bidMoney);
cpcBid.setCpcBidSource(BidSource.CRITERION);
biddingConfig.setBids(new Bids[] {cpcBid});
}
biddable.setBiddingStrategyConfiguration(biddingConfig);
adGroupCriterion = biddable;
}
adGroupCriterion.setCriterion(partition);
return adGroupCriterion;
}
}
}
|
|
/**
* Copyright 2015 David Karnok and Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package hu.akarnokd.rxjava2flow.internal.operators.nbp;
import java.util.concurrent.atomic.*;
import hu.akarnokd.rxjava2flow.NbpObservable;
import hu.akarnokd.rxjava2flow.NbpObservable.*;
import hu.akarnokd.rxjava2flow.disposables.Disposable;
import hu.akarnokd.rxjava2flow.internal.disposables.EmptyDisposable;
import hu.akarnokd.rxjava2flow.internal.subscriptions.SubscriptionHelper;
import hu.akarnokd.rxjava2flow.plugins.RxJavaPlugins;
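/**
 * NbpOnSubscribe implementation of the "amb" operator: relays the events of whichever source
 * signals first (via onNext, onError or onComplete) and disposes all of the others, so the
 * downstream subscriber observes exactly one of the given sources.
 */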
public final class NbpOnSubscribeAmb<T> implements NbpOnSubscribe<T> {
final NbpObservable<? extends T>[] sources;
final Iterable<? extends NbpObservable<? extends T>> sourcesIterable;
public NbpOnSubscribeAmb(NbpObservable<? extends T>[] sources, Iterable<? extends NbpObservable<? extends T>> sourcesIterable) {
this.sources = sources;
this.sourcesIterable = sourcesIterable;
}
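    /**
     * Gathers the sources (from the array if supplied, otherwise from the iterable), then
     * completes the subscriber immediately when there are none, subscribes directly when there
     * is exactly one, and otherwise starts an {@link AmbCoordinator} race among all of them.
     */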
@Override
@SuppressWarnings("unchecked")
public void accept(NbpSubscriber<? super T> s) {
NbpObservable<? extends T>[] sources = this.sources;
int count = 0;
if (sources == null) {
sources = new NbpObservable[8];
for (NbpObservable<? extends T> p : sourcesIterable) {
if (count == sources.length) {
NbpObservable<? extends T>[] b = new NbpObservable[count + (count >> 2)];
System.arraycopy(sources, 0, b, 0, count);
sources = b;
}
sources[count++] = p;
}
} else {
count = sources.length;
}
if (count == 0) {
EmptyDisposable.complete(s);
return;
} else
if (count == 1) {
sources[0].subscribe(s);
return;
}
AmbCoordinator<T> ac = new AmbCoordinator<>(s, count);
ac.subscribe(sources);
}
static final class AmbCoordinator<T> implements Disposable {
final NbpSubscriber<? super T> actual;
final AmbInnerSubscriber<T>[] subscribers;
volatile int winner;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<AmbCoordinator> WINNER =
AtomicIntegerFieldUpdater.newUpdater(AmbCoordinator.class, "winner");
@SuppressWarnings("unchecked")
public AmbCoordinator(NbpSubscriber<? super T> actual, int count) {
this.actual = actual;
this.subscribers = new AmbInnerSubscriber[count];
}
public void subscribe(NbpObservable<? extends T>[] sources) {
AmbInnerSubscriber<T>[] as = subscribers;
int len = as.length;
for (int i = 0; i < len; i++) {
as[i] = new AmbInnerSubscriber<>(this, i + 1, actual);
}
WINNER.lazySet(this, 0); // release the contents of 'as'
actual.onSubscribe(this);
for (int i = 0; i < len; i++) {
if (winner != 0) {
return;
}
sources[i].subscribe(as[i]);
}
}
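        /**
         * Tries to claim the race for the source at {@code index} (1-based): the first caller
         * to move {@code winner} from 0 to its own index wins and disposes every other inner
         * subscriber; subsequent callers simply learn whether their index already won.
         *
         * @return true if this index is the winner and may forward events downstream
         */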
public boolean win(int index) {
int w = winner;
if (w == 0) {
if (WINNER.compareAndSet(this, 0, index)) {
AmbInnerSubscriber<T>[] a = subscribers;
int n = a.length;
for (int i = 0; i < n; i++) {
if (i + 1 != index) {
a[i].dispose();
}
}
return true;
}
return false;
}
return w == index;
}
@Override
public void dispose() {
if (winner != -1) {
WINNER.lazySet(this, -1);
for (AmbInnerSubscriber<T> a : subscribers) {
a.dispose();
}
}
}
}
static final class AmbInnerSubscriber<T> extends AtomicReference<Disposable> implements NbpSubscriber<T>, Disposable {
/** */
private static final long serialVersionUID = -1185974347409665484L;
final AmbCoordinator<T> parent;
final int index;
final NbpSubscriber<? super T> actual;
boolean won;
static final Disposable CANCELLED = () -> { };
public AmbInnerSubscriber(AmbCoordinator<T> parent, int index, NbpSubscriber<? super T> actual) {
this.parent = parent;
this.index = index;
this.actual = actual;
}
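        /**
         * Stores the upstream Disposable exactly once; if one was already set (or this
         * subscriber has been cancelled), the incoming Disposable is disposed and a
         * double-subscription is reported unless the state is CANCELLED.
         */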
@Override
public void onSubscribe(Disposable s) {
if (!compareAndSet(null, s)) {
s.dispose();
if (get() != CANCELLED) {
SubscriptionHelper.reportDisposableSet();
}
return;
}
}
@Override
public void onNext(T t) {
if (won) {
actual.onNext(t);
} else {
if (parent.win(index)) {
won = true;
actual.onNext(t);
} else {
get().dispose();
}
}
}
@Override
public void onError(Throwable t) {
if (won) {
actual.onError(t);
} else {
if (parent.win(index)) {
won = true;
actual.onError(t);
} else {
get().dispose();
RxJavaPlugins.onError(t);
}
}
}
@Override
public void onComplete() {
if (won) {
actual.onComplete();
} else {
if (parent.win(index)) {
won = true;
actual.onComplete();
} else {
get().dispose();
}
}
}
@Override
public void dispose() {
Disposable s = get();
if (s != CANCELLED) {
s = getAndSet(CANCELLED);
if (s != CANCELLED && s != null) {
s.dispose();
}
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.catalog.internal.BasicBrooklynCatalog.BrooklynLoaderTracker;
import brooklyn.location.basic.SshMachineLocation;
import brooklyn.management.ManagementContext;
import brooklyn.management.classloading.BrooklynClassLoadingContext;
import brooklyn.management.classloading.JavaBrooklynClassLoadingContext;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.exceptions.Exceptions;
import brooklyn.util.javalang.Threads;
import brooklyn.util.net.Urls;
import brooklyn.util.os.Os;
import brooklyn.util.stream.Streams;
import brooklyn.util.text.DataUriSchemeParser;
import brooklyn.util.text.Strings;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
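/**
 * Helpers for resolving and reading resources identified by URLs or paths, including the
 * extended {@code classpath:}, {@code sftp:}, {@code data:} and home-relative ({@code ~/...})
 * forms handled below; each instance carries an optional class-loading context and a context
 * description that is included in error messages.
 */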
public class ResourceUtils {
private static final Logger log = LoggerFactory.getLogger(ResourceUtils.class);
private static final List<Function<Object,BrooklynClassLoadingContext>> classLoaderProviders = Lists.newCopyOnWriteArrayList();
private BrooklynClassLoadingContext loader = null;
private String context = null;
private Object contextObject = null;
/**
* Creates a {@link ResourceUtils} object with a specific class loader and context.
* <p>
* Use the provided {@link ClassLoader} object for class loading with the
* {@code contextObject} for context and the {@code contextMessage} string for
* error messages.
*
* @see ResourceUtils#create(Object, String)
* @see ResourceUtils#create(Object)
*/
public static final ResourceUtils create(ClassLoader loader, Object contextObject, String contextMessage) {
return new ResourceUtils(loader, contextObject, contextMessage);
}
/**
* Creates a {@link ResourceUtils} object with a specific class loader and context.
* <p>
* Use the provided {@link BrooklynClassLoadingContext} object for class loading with the
* {@code contextObject} for context and the {@code contextMessage} string for
* error messages.
*
* @see ResourceUtils#create(Object, String)
* @see ResourceUtils#create(Object)
*/
public static final ResourceUtils create(BrooklynClassLoadingContext loader, Object contextObject, String contextMessage) {
return new ResourceUtils(loader, contextObject, contextMessage);
}
/**
* Creates a {@link ResourceUtils} object with the given context.
* <p>
* Uses the {@link ClassLoader} of the given {@code contextObject} for class
* loading and the {@code contextMessage} string for error messages.
*
* @see ResourceUtils#create(ClassLoader, Object, String)
* @see ResourceUtils#create(Object)
*/
public static final ResourceUtils create(Object contextObject, String contextMessage) {
return new ResourceUtils(contextObject, contextMessage);
}
/**
* Creates a {@link ResourceUtils} object with the given context.
* <p>
* Uses the {@link ClassLoader} of the given {@code contextObject} for class
* loading and its {@link Object#toString()} (preceded by the word 'for') as
* the string used in error messages.
*
* @see ResourceUtils#create(ClassLoader, Object, String)
* @see ResourceUtils#create(Object)
*/
public static final ResourceUtils create(Object contextObject) {
return new ResourceUtils(contextObject);
}
/**
* Creates a {@link ResourceUtils} object with itself as the context.
*
* @see ResourceUtils#create(Object)
*/
public static final ResourceUtils create() {
return new ResourceUtils(null);
}
public ResourceUtils(ClassLoader loader, Object contextObject, String contextMessage) {
this(new JavaBrooklynClassLoadingContext(null, loader), contextObject, contextMessage);
}
public ResourceUtils(BrooklynClassLoadingContext loader, Object contextObject, String contextMessage) {
this.loader = loader;
this.contextObject = contextObject;
this.context = contextMessage;
}
public ResourceUtils(Object contextObject, String contextMessage) {
this(contextObject==null ? null : getClassLoadingContextForObject(contextObject), contextObject, contextMessage);
}
public ResourceUtils(Object contextObject) {
this(contextObject, Strings.toString(contextObject));
}
/** used to register custom mechanisms for getting classloaders given an object */
public static void addClassLoaderProvider(Function<Object,BrooklynClassLoadingContext> provider) {
classLoaderProviders.add(provider);
}
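    // Illustrative registration sketch (MyContextHolder is hypothetical); a provider should
    // return null for objects it does not recognise so the remaining providers and the default
    // class-loader fallback below are still consulted:
    //
    //   ResourceUtils.addClassLoaderProvider(new Function<Object, BrooklynClassLoadingContext>() {
    //       public BrooklynClassLoadingContext apply(Object input) {
    //           return (input instanceof MyContextHolder)
    //                   ? ((MyContextHolder) input).getLoadingContext() : null;
    //       }
    //   });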
public static BrooklynClassLoadingContext getClassLoadingContextForObject(Object contextObject) {
if (contextObject instanceof BrooklynClassLoadingContext)
return (BrooklynClassLoadingContext) contextObject;
for (Function<Object,BrooklynClassLoadingContext> provider: classLoaderProviders) {
BrooklynClassLoadingContext result = provider.apply(contextObject);
if (result!=null) return result;
}
ClassLoader cl = contextObject instanceof Class ? ((Class<?>)contextObject).getClassLoader() :
contextObject instanceof ClassLoader ? ((ClassLoader)contextObject) :
contextObject.getClass().getClassLoader();
return getClassLoadingContextForClassLoader(cl);
}
protected static BrooklynClassLoadingContext getClassLoadingContextForClassLoader(ClassLoader loader) {
ManagementContext mgmt = null;
BrooklynClassLoadingContext bl = BrooklynLoaderTracker.getLoader();
if (bl!=null) mgmt = bl.getManagementContext();
return new JavaBrooklynClassLoadingContext(mgmt, loader);
}
public BrooklynClassLoadingContext getLoader() {
return (loader!=null ? loader : getClassLoadingContextForClassLoader(getClass().getClassLoader()));
}
/**
* Takes a string which is treated as a URL (with some extended "schemes" also expected),
* or as a path to something either on the classpath (absolute only) or the local filesystem (relative or absolute, depending on leading slash)
* <p>
* URLs can be of the form <b>classpath://com/acme/Foo.properties</b>
* as well as <b>file:///home/...</b> and <b>http://acme.com/...</b>.
* <p>
* Throws exception if not found, using the context parameter passed into the constructor.
* <p>
* TODO may want OSGi, or typed object; should consider pax url
*
* @return a stream, or throws exception (never returns null)
*/
public InputStream getResourceFromUrl(String url) {
try {
if (url==null) throw new NullPointerException("Cannot read from null");
if (url=="") throw new NullPointerException("Cannot read from empty string");
String orig = url;
String protocol = Urls.getProtocol(url);
if (protocol!=null) {
if ("classpath".equals(protocol)) {
try {
return getResourceViaClasspath(url);
} catch (IOException e) {
//catch the above because both orig and modified url may be interesting
throw new IOException("Error accessing "+orig+": "+e, e);
}
}
if ("sftp".equals(protocol)) {
try {
return getResourceViaSftp(url);
} catch (IOException e) {
throw new IOException("Error accessing "+orig+": "+e, e);
}
}
if ("file".equals(protocol))
url = tidyFileUrl(url);
if ("data".equals(protocol)) {
return new DataUriSchemeParser(url).lax().parse().getDataAsInputStream();
}
return new URL(url).openStream();
}
try {
//try as classpath reference, then as file
URL u = getLoader().getResource(url);
if (u!=null) return u.openStream();
if (url.startsWith("/")) {
//some getResource calls fail if argument starts with /
String urlNoSlash = url;
while (urlNoSlash.startsWith("/")) urlNoSlash = urlNoSlash.substring(1);
u = getLoader().getResource(urlNoSlash);
if (u!=null) return u.openStream();
// //Class.getResource can require a / (else it attempts to be relative) but Class.getClassLoader doesn't
// u = getLoader().getResource("/"+urlNoSlash);
// if (u!=null) return u.openStream();
}
File f;
// but first, if it starts with tilde, treat specially
if (url.startsWith("~/")) {
f = new File(Os.home(), url.substring(2));
} else if (url.startsWith("~\\")) {
f = new File(Os.home(), url.substring(2));
} else {
f = new File(url);
}
if (f.exists()) return new FileInputStream(f);
} catch (IOException e) {
//catch the above because both u and modified url will be interesting
throw new IOException("Error accessing "+orig+": "+e, e);
}
throw new IOException("'"+orig+"' not found on classpath or filesystem");
} catch (Exception e) {
if (context!=null) {
throw new RuntimeException("Error getting resource '"+url+"' for "+context+": "+e, e);
} else {
throw Exceptions.propagate(e);
}
}
}
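    // A minimal usage sketch of the forms documented above (the context string and resource
    // name are illustrative only):
    //
    //   ResourceUtils utils = ResourceUtils.create(this, "my component");
    //   InputStream in = utils.getResourceFromUrl("classpath://com/acme/Foo.properties");
    //   try {
    //       String contents = Streams.readFullyString(in);
    //   } finally {
    //       Streams.closeQuietly(in);
    //   }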
private final static Pattern pattern = Pattern.compile("^file:/*~/+(.*)$");
public static URL tidy(URL url) {
// File class has helpful methods for URIs but not URLs. So we convert.
URI in;
try {
in = url.toURI();
} catch (URISyntaxException e) {
throw Exceptions.propagate(e);
}
URI out;
Matcher matcher = pattern.matcher(in.toString());
if (matcher.matches()) {
// home-relative
File home = new File(Os.home());
File file = new File(home, matcher.group(1));
out = file.toURI();
        } else if ("file".equals(in.getScheme())) {
// some other file, so canonicalize
File file = new File(in);
out = file.toURI();
} else {
// some other scheme, so no-op
out = in;
}
URL urlOut;
try {
urlOut = out.toURL();
} catch (MalformedURLException e) {
throw Exceptions.propagate(e);
}
        if (!out.equals(in) && log.isDebugEnabled()) {
log.debug("quietly changing " + url + " to " + urlOut);
}
return urlOut;
}
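    // For example, assuming the user's home directory is /home/alice:
    //   tidy(new URL("file://~/app/conf.xml"))  ->  file:/home/alice/app/conf.xml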
public static String tidyFileUrl(String url) {
try {
return tidy(new URL(url)).toString();
} catch (MalformedURLException e) {
throw Exceptions.propagate(e);
}
}
/** @deprecated since 0.7.0; use method {@link Os#mergePaths(String...)} */ @Deprecated
public static String mergeFilePaths(String... items) {
return Os.mergePaths(items);
}
/** @deprecated since 0.7.0; use method {@link Os#tidyPath(String)} */ @Deprecated
public static String tidyFilePath(String path) {
return Os.tidyPath(path);
}
/** @deprecated since 0.7.0; use method {@link Urls#getProtocol(String)} */ @Deprecated
public static String getProtocol(String url) {
return Urls.getProtocol(url);
}
private InputStream getResourceViaClasspath(String url) throws IOException {
assert url.startsWith("classpath:");
String subUrl = url.substring("classpath:".length());
while (subUrl.startsWith("/")) subUrl = subUrl.substring(1);
URL u = getLoader().getResource(subUrl);
if (u!=null) return u.openStream();
else throw new IOException(subUrl+" not found on classpath");
}
private InputStream getResourceViaSftp(String url) throws IOException {
assert url.startsWith("sftp://");
String subUrl = url.substring("sftp://".length());
String user;
String address;
String path;
int atIndex = subUrl.indexOf("@");
int colonIndex = subUrl.indexOf(":", (atIndex > 0 ? atIndex : 0));
if (colonIndex <= 0 || colonIndex <= atIndex) {
throw new IllegalArgumentException("Invalid sftp url ("+url+"); IP or hostname must be specified, such as sftp://localhost:/path/to/file");
}
if (subUrl.length() <= (colonIndex+1)) {
throw new IllegalArgumentException("Invalid sftp url ("+url+"); must specify path of remote file, such as sftp://localhost:/path/to/file");
}
if (atIndex >= 0) {
user = subUrl.substring(0, atIndex);
} else {
user = null;
}
address = subUrl.substring(atIndex + 1, colonIndex);
path = subUrl.substring(colonIndex+1);
// TODO messy way to get an SCP session
SshMachineLocation machine = new SshMachineLocation(MutableMap.builder()
.putIfNotNull("user", user)
.put("address", InetAddress.getByName(address))
.build());
try {
final File tempFile = Os.newTempFile("brooklyn-sftp", "tmp");
tempFile.setReadable(true, true);
machine.copyFrom(path, tempFile.getAbsolutePath());
return new FileInputStream(tempFile) {
@Override
public void close() throws IOException {
super.close();
tempFile.delete();
}
};
} finally {
Streams.closeQuietly(machine);
}
}
/** takes {@link #getResourceFromUrl(String)} and reads fully, into a string */
public String getResourceAsString(String url) {
try {
return readFullyString(getResourceFromUrl(url));
} catch (Exception e) {
log.debug("ResourceUtils got error reading "+url+(context==null?"":" "+context)+" (rethrowing): "+e);
throw Throwables.propagate(e);
}
}
/** allows failing-fast if URL cannot be read */
public String checkUrlExists(String url) {
if (url==null) throw new NullPointerException("URL must not be null");
InputStream s;
try {
s = getResourceFromUrl(url);
} catch (Exception e) {
Exceptions.propagateIfFatal(e);
throw new IllegalArgumentException("Unable to access URL "+url, e);
}
Streams.closeQuietly(s);
return url;
}
/** tests whether the url exists, returning true or false */
public boolean doesUrlExist(String url) {
InputStream s = null;
try {
s = getResourceFromUrl(url);
return true;
} catch (Exception e) {
return false;
} finally {
Streams.closeQuietly(s);
}
}
/** returns the first available URL */
public Optional<String> firstAvailableUrl(String ...urls) {
for (String url: urls) {
if (doesUrlExist(url)) return Optional.of(url);
}
return Optional.absent();
}
/** returns the base directory or JAR from which the context is class-loaded, if possible;
* throws exception if not found */
public String getClassLoaderDir() {
if (contextObject==null) throw new IllegalArgumentException("No suitable context ("+context+") to auto-detect classloader dir");
Class<?> cc = contextObject instanceof Class ? (Class<?>)contextObject : contextObject.getClass();
return getClassLoaderDir(cc.getCanonicalName().replace('.', '/')+".class");
}
public String getClassLoaderDir(String resourceInThatDir) {
resourceInThatDir = Strings.removeFromStart(resourceInThatDir, "/");
URL url = getLoader().getResource(resourceInThatDir);
if (url==null) throw new NoSuchElementException("Resource ("+resourceInThatDir+") not found");
//Switching from manual parsing of jar: and file: URLs to java provided functionality.
//The old code was breaking on any Windows path and instead of fixing it, using
//the provided Java APIs seemed like the better option since they are already tested
//on multiple platforms.
boolean isJar = "jar".equals(url.getProtocol());
if(isJar) {
try {
//let java handle the parsing of jar URL, no network connection is established.
//Strips the jar protocol:
// jar:file:/<path to jar>!<resourceInThatDir>
// becomes
// file:/<path to jar>
JarURLConnection connection = (JarURLConnection) url.openConnection();
url = connection.getJarFileURL();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} else {
            //Remove the trailing resourceInThatDir path from the URL, thus getting the parent folder.
String path = url.toString();
int i = path.indexOf(resourceInThatDir);
if (i==-1) throw new IllegalStateException("Resource path ("+resourceInThatDir+") not in url substring ("+url+")");
String parent = path.substring(0, i);
try {
url = new URL(parent);
} catch (MalformedURLException e) {
throw new IllegalStateException("Resource ("+resourceInThatDir+") found at invalid URL parent (" + parent + ")", e);
}
}
if (!"file".equals(url.getProtocol())) throw new IllegalStateException("Resource ("+resourceInThatDir+") not on file system (at "+url+")");
//convert from file: URL to File
File file;
try {
file = new File(url.toURI());
} catch (URISyntaxException e) {
throw new IllegalStateException("Resource ("+resourceInThatDir+") found at invalid URI (" + url + ")", e);
}
if (!file.exists()) throw new IllegalStateException("Context class url substring ("+url+") not found on filesystem");
return file.getPath();
}
    /** @deprecated since 0.7.0 use {@link Streams#readFullyString(InputStream)} */ @Deprecated
public static String readFullyString(InputStream is) throws IOException {
return Streams.readFullyString(is);
}
    /** @deprecated since 0.7.0 use {@link Streams#readFully(InputStream)} */ @Deprecated
public static byte[] readFullyBytes(InputStream is) throws IOException {
return Streams.readFully(is);
}
/** @deprecated since 0.7.0 use {@link Streams#copy(InputStream, OutputStream)} */ @Deprecated
public static void copy(InputStream input, OutputStream output) throws IOException {
Streams.copy(input, output);
}
/** @deprecated since 0.7.0; use same method in {@link Os} */ @Deprecated
public static File mkdirs(File dir) {
return Os.mkdirs(dir);
}
/** @deprecated since 0.7.0; use same method in {@link Os} */ @Deprecated
public static File writeToTempFile(InputStream is, String prefix, String suffix) {
return Os.writeToTempFile(is, prefix, suffix);
}
/** @deprecated since 0.7.0; use same method in {@link Os} */ @Deprecated
public static File writeToTempFile(InputStream is, File tempDir, String prefix, String suffix) {
return Os.writeToTempFile(is, tempDir, prefix, suffix);
}
/** @deprecated since 0.7.0; use method {@link Os#writePropertiesToTempFile(Properties, String, String)} */ @Deprecated
public static File writeToTempFile(Properties props, String prefix, String suffix) {
return Os.writePropertiesToTempFile(props, prefix, suffix);
}
/** @deprecated since 0.7.0; use method {@link Os#writePropertiesToTempFile(Properties, File, String, String)} */ @Deprecated
public static File writeToTempFile(Properties props, File tempDir, String prefix, String suffix) {
return Os.writePropertiesToTempFile(props, tempDir, prefix, suffix);
}
/** @deprecated since 0.7.0; use method {@link Threads#addShutdownHook(Runnable)} */ @Deprecated
public static Thread addShutdownHook(final Runnable task) {
return Threads.addShutdownHook(task);
}
/** @deprecated since 0.7.0; use method {@link Threads#removeShutdownHook(Thread)} */ @Deprecated
public static boolean removeShutdownHook(Thread hook) {
return Threads.removeShutdownHook(hook);
}
    /** returns the items with exactly one "/" between items (whether or not the individual items start or end with /),
     * except where the character before the / is a ':' (URL syntax), in which case multiple slashes are permitted (none are removed)
     * @deprecated since 0.7.0 use either {@link Os#mergePathsUnix(String...)} or {@link Urls#mergePaths(String...)} */ @Deprecated
public static String mergePaths(String ...items) {
return Urls.mergePaths(items);
}
}
|
|
/*******************************************************************************
*
* This file is part of iBioSim. Please visit <http://www.async.ece.utah.edu/ibiosim>
* for the latest version of iBioSim.
*
* Copyright (C) 2017 University of Utah
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the Apache License. A copy of the license agreement is provided
* in the file named "LICENSE.txt" included with this software distribution
* and also available online at <http://www.async.ece.utah.edu/ibiosim/License>.
*
*******************************************************************************/
package edu.utah.ece.async.ibiosim.analysis.properties;
import java.util.ArrayList;
import java.util.List;
import edu.utah.ece.async.ibiosim.dataModels.util.observe.CoreObservable;
/**
 * The simulation properties contain the information associated with the simulation options.
*
* @author Leandro Watanabe
* @author Chris Myers
* @author <a href="http://www.async.ece.utah.edu/ibiosim#Credits"> iBioSim Contributors </a>
* @version $Rev$
* @version %I%
*/
public final class SimulationProperties extends CoreObservable {
private int numSteps, run, startIndex;
private double initialTime, outputStartTime, minTimeStep, maxTimeStep, printInterval, timeLimit, absError, relError;
private String printer_id, printer_track_quantity, genStats;
private long rndSeed;
private List<String> intSpecies;
SimulationProperties() {
run = 1;
initialTime = 0;
outputStartTime = 0;
maxTimeStep = Double.POSITIVE_INFINITY;
minTimeStep = 0;
printInterval = 1;
absError = 1e-9;
relError = 1e-9;
rndSeed = 314159;
printer_id = "tsd.printer";
printer_track_quantity = "amount";
genStats = "false";
timeLimit = 100;
intSpecies = new ArrayList<>();
startIndex = 1;
}
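  // Illustrative configuration sketch using the setters below, from code in the same package
  // (the constructor is package-visible); the species id is hypothetical:
  //
  //   SimulationProperties properties = new SimulationProperties();
  //   properties.setTimeLimit(500);      // simulate until t = 500
  //   properties.setPrintInterval(0.5);  // report results every 0.5 time units
  //   properties.setRun(10);             // perform 10 runs
  //   properties.addIntSpecies("LacI");  // track this species in the reported results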
/**
* Getter for absolute error.
*
* @return the absolute error.
*/
public double getAbsError() {
return absError;
}
/**
* Getter for initial simulation time.
*
* @return the initial time.
*/
public double getInitialTime() {
return initialTime;
}
/**
* Getter for minimum time step.
*
* @return the minimum time step.
*/
public double getMinTimeStep() {
return minTimeStep;
}
/**
* Getter for maximum time step.
*
* @return the maximum time step.
*/
public double getMaxTimeStep() {
return maxTimeStep;
}
/**
* Getter for output start time.
*
* @return the output start time.
*/
public double getOutputStartTime() {
return outputStartTime;
}
/**
* Getter for printer id.
*
* @return the printer id
*/
public String getPrinter_id() {
return printer_id;
}
/**
* Getter for printer track quantity (amount or concentration).
*
* @return the printer quantity type.
*/
public String getPrinter_track_quantity() {
return printer_track_quantity;
}
/**
* Getter for print interval.
*
* @return the print interval
*/
public double getPrintInterval() {
return printInterval;
}
/**
* Getter for relative error.
*
* @return the relative error.
*/
public double getRelError() {
return relError;
}
/**
* Getter for random seed.
*
* @return the random seed.
*/
public long getRndSeed() {
return rndSeed;
}
/**
* Getter for simulation time limit.
*
* @return the simulation time limit.
*/
public double getTimeLimit() {
return timeLimit;
}
/**
* Getter for number of runs.
*
* @return the number of runs.
*/
public int getRun() {
return run;
}
/**
* Setter for absolute error.
*
* @param absError
* - a positive double corresponding to absolute error.
*/
public void setAbsError(double absError) {
this.absError = absError;
}
/**
* Setter for initial simulation time.
*
* @param initialTime
* - non-negative double corresponding to the initial time.
*/
public void setInitialTime(double initialTime) {
this.initialTime = initialTime;
}
/**
* Setter for minimum time step.
*
* @param minTimeStep
* - a positive double corresponding to the minimum time step.
*/
public void setMinTimeStep(double minTimeStep) {
this.minTimeStep = minTimeStep;
}
/**
* Setter for maximum time step.
*
* @param maxTimeStep
* - a positive double corresponding to the maximum time step.
*/
public void setMaxTimeStep(double maxTimeStep) {
this.maxTimeStep = maxTimeStep;
}
/**
* Setter for output start time, the time when simulation starts reporting results.
*
* @param outputStartTime
* - a non-negative double corresponding to the output start time.
*/
public void setOutputStartTime(double outputStartTime) {
this.outputStartTime = outputStartTime;
}
/**
* Setter for printer id (tsd or null).
*
* @param printer_id
* - the printer id.
*/
public void setPrinter_id(String printer_id) {
this.printer_id = printer_id;
}
/**
* Setter for the track quantity.
*
* @param printer_track_quantity
* - the printer track quantity (amount or concentration).
*/
public void setPrinter_track_quantity(String printer_track_quantity) {
this.printer_track_quantity = printer_track_quantity;
}
/**
* Setter for the print interval.
*
* @param printInterval
* - a positive double for the print interval.
*/
public void setPrintInterval(double printInterval) {
this.printInterval = printInterval;
}
/**
* Setter for the relative error.
*
* @param relError
* - a positive double for the relative error.
*/
public void setRelError(double relError) {
this.relError = relError;
}
/**
* Setter for the random seed.
*
* @param rndSeed
* - an arbitrary long to be used as the random seed.
*/
public void setRndSeed(long rndSeed) {
this.rndSeed = rndSeed;
}
/**
* Setter for the number of runs.
*
* @param run
* - a positive integer corresponding to the number of runs.
*/
public void setRun(int run) {
this.run = run;
}
/**
* Setter for the simulation time limit.
*
* @param timeLimit
* - a positive double corresponding to the time limit.
*/
public void setTimeLimit(double timeLimit) {
this.timeLimit = timeLimit;
}
/**
* Getter for the number of steps.
*
* @return the number of steps.
*/
public int getNumSteps() {
return numSteps;
}
/**
* Setter for the number of steps.
*
* @param numSteps
* - a positive integer corresponding to the number of steps.
*/
public void setNumSteps(int numSteps) {
this.numSteps = numSteps;
}
/**
   * Adds an interesting species that needs to be tracked when reporting results.
   *
   * @param species
   *          - the id of the species to track.
   */
public void addIntSpecies(String species) {
if (intSpecies == null) {
intSpecies = new ArrayList<>();
}
intSpecies.add(species);
}
/**
* Getter for the list of species that need to have the results printed.
*
* @return the list of interesting species
*/
public List<String> getIntSpecies() {
return intSpecies;
}
/**
* Getter for the generate statistics.
*
* @return the flag for generate statistics.
*/
public String getGenStats() {
return genStats;
}
/**
* Setter for generate statistics.
*
* @param genStats
* - the flag that indicates whether to print statistics or not.
*/
public void setGenStats(String genStats) {
this.genStats = genStats;
}
/**
* Gets the start index.
*
* @return the startIndex
*/
public int getStartIndex() {
return startIndex;
}
/**
* Sets the start index.
*
* @param startIndex
* - the startIndex to set
*/
public void setStartIndex(int startIndex) {
this.startIndex = startIndex;
}
}
|
|
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.controller;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.domain.Pipeline;
import com.thoughtworks.go.domain.Stage;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.functional.helpers.CSVResponse;
import com.thoughtworks.go.server.service.PipelineService;
import com.thoughtworks.go.server.service.PropertiesService;
import com.thoughtworks.go.server.service.RestfulService;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.SystemEnvironment;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.web.servlet.ModelAndView;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.util.Map;
import static com.thoughtworks.go.server.controller.RestfulActionTestHelper.assertContentStatusWithTextPlain;
import static javax.servlet.http.HttpServletResponse.*;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class RestfulPropertiesControllerTest {
@Autowired private PropertiesService propertiesService;
@Autowired private RestfulService restfulService;
@Autowired private GoConfigDao goConfigDao;
@Autowired private PipelineService pipelineService;
@Autowired private SystemEnvironment systemEnvironment;
private MockHttpServletResponse response;
private MockHttpServletRequest request;
@Autowired private DatabaseAccessHelper dbHelper;
private Pipeline oldPipeline;
private Pipeline newPipeline;
private PropertiesController propertiesController;
private Stage oldStage;
private Stage newStage;
private static GoConfigFileHelper configHelper = new GoConfigFileHelper();
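    // setup() saves two pipeline instances under the same name ("pipeline"), giving old/new
    // pipeline and stage handles so the property-history tests have two data points; the
    // "Confirm: True" header is added for the property-setting requests.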
@Before public void setup() throws Exception {
configHelper.onSetUp();
configHelper.usingCruiseConfigDao(goConfigDao);
response = new MockHttpServletResponse();
request = new MockHttpServletRequest();
dbHelper.onSetUp();
oldPipeline = dbHelper.saveTestPipeline("pipeline", "stage", "build");
oldStage = oldPipeline.getStages().byName("stage");
newPipeline = dbHelper.saveTestPipeline("pipeline", "stage", "build");
newStage = newPipeline.getStages().byName("stage");
configHelper.addPipeline("pipeline", "stage", "build");
propertiesController = new PropertiesController(propertiesService, restfulService, pipelineService, systemEnvironment);
request.addHeader("Confirm", "True");
}
@After public void teardown() throws Exception {
dbHelper.onTearDown();
configHelper.onTearDown();
}
@Test public void shouldGetPropertyRestfully() throws Exception {
setProperty("foo", "bar");
ModelAndView modelAndView = getProperty("foo", "json");
Map map = modelAndView.getModel();
String content = map.get("json").toString();
assertThat(content, containsString("bar"));
assertThat(response.getStatus(), is(SC_OK));
}
@Test public void shouldReturn404WhenPropertyNotSet() throws Exception {
getProperty("foo", "json");
assertValidJsonContentAndStatus(SC_NOT_FOUND, "Property 'foo' not found.");
}
@Test public void shouldReturn404WhenUnknownBuildOnGettingProperty() throws Exception {
String counter = String.valueOf(newStage.getCounter());
propertiesController.jobSearch("unknown", "latest", "stage", counter,
"build", "json", "foo", response);
assertValidJsonContentAndStatus(SC_NOT_FOUND, "Job unknown/latest/stage/" + counter + "/build not found.");
}
@Test public void shouldReturnCreatedWhenCreatingNewProperty() throws Exception {
setProperty("a", "b");
assertValidJsonContentAndStatus(SC_CREATED, "Property 'a' created with value 'b'");
ModelAndView modelAndView = getProperty("a", "json");
Map map = modelAndView.getModel();
String content = map.get("json").toString();
assertThat(content, containsString("b"));
assertThat(response.getStatus(), is(SC_OK));
}
@Test public void shouldNotCreatePropertyTwice() throws Exception {
setProperty("a", "b");
assertValidJsonContentAndStatus(SC_CREATED, "Property 'a' created with value 'b'");
setProperty("a", "c");
assertValidJsonContentAndStatus(SC_CONFLICT, "Property 'a' is already set.");
}
@Test public void shouldNotAllowCreatingPropertyWithKeyOrValueLargerThat255Characters() throws Exception {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < 200; i++) {
sb.append("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
}
String manyChars = sb.toString();
setProperty(manyChars, "a");
assertValidJsonContentAndStatus(SC_FORBIDDEN, "Unable to set property with key larger than 255 characters.");
setProperty("a", manyChars);
assertValidJsonContentAndStatus(SC_FORBIDDEN, "Unable to set property with value larger than 255 characters.");
}
@Test public void shouldAllowCreatingAPropertyContainingAURI() throws Exception {
URI uri = new URI("https", "user:password", "10.18.32.41", 986, "/foo/bar/baz", "a=b&c=d", "some_thing");
setProperty("uri1", uri.toString());
assertValidJsonContentAndStatus(SC_CREATED,
"Property 'uri1' created with value 'https://user:password@10.18.32.41:986/foo/bar/baz?a=b&c=d#some_thing'");
setProperty("uri2", uri.toString());
assertValidJsonContentAndStatus(SC_CREATED,
"Property 'uri2' created with value 'https://user:password@10.18.32.41:986/foo/bar/baz?a=b&c=d#some_thing'");
}
@Test public void shouldNotAllowCreatingPropertyWithKeyWithInvalidChars() throws Exception {
String valid = "4aZ_-./";
String invalid = "*aZ_-./,";
setProperty(invalid, valid);
assertValidJsonContentAndStatus(SC_FORBIDDEN, PropertiesController.INVALID_VALUE);
}
@Test public void shouldReturn404WhenUnknownBuildOnSettingProperty() throws Exception {
propertiesController.setProperty("unknown", "latest", "stage", "1", "build", "foo", "bar", response, request);
assertValidJsonContentAndStatus(SC_NOT_FOUND, "Job unknown/latest/stage/1/build not found.");
}
@Test public void shouldReturnOkListingAllPropertiesHistoryInCsvFormatBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
setProperty(newPipeline, "a/2", "400");
setProperty(newPipeline, "a/1", "300");
getAllPropertyHistoryListAsCsvBySearch();
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.isCSV(), is(true));
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsRow("a/1", "a/2"), is(true));
assertThat(csvResponse.containsColumn("a/1", "100", "300"), is(true));
assertThat(csvResponse.containsColumn("a/2", "200", "400"), is(true));
}
@Test public void shouldSupportLimitingHistoryBySearch() throws Exception {
setProperty(oldPipeline, "a", "100");
setProperty(newPipeline, "a", "300");
getAllPropertyHistoryListAsCsvBySearch(null, 1);
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.isCSV(), is(true));
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsColumn("a", "300"), is(true));
}
@Test public void shouldSupportLimitingHistoryBasedOnPipelineLabelBySearch() throws Exception {
setProperty(oldPipeline, "a", "100");
setProperty(newPipeline, "a", "300");
getAllPropertyHistoryListAsCsvBySearch(oldPipeline, 1);
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsColumn("a", "100"), is(true));
}
@Test public void shouldReturnOkListingAllPropertiesInCsvFormatBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
getPropertyHistoryListBySearch(oldStage.getCounter(), oldPipeline.getLabel(), "csv", null);
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.isCSV(), is(true));
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsRow("a/1", "a/2"), is(true));
assertThat(csvResponse.containsColumn("a/1", "100"), is(true));
assertThat(csvResponse.containsColumn("a/2", "200"), is(true));
}
@Test public void shouldReturnOkListingAllPropertiesInCsvFormatAsDefaultBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
getPropertyHistoryListBySearch(oldStage.getCounter(), oldPipeline.getLabel(), null, null);
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.isCSV(), is(true));
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsRow("a/1", "a/2"), is(true));
assertThat(csvResponse.containsColumn("a/1", "100"), is(true));
assertThat(csvResponse.containsColumn("a/2", "200"), is(true));
}
@Test public void shouldReturnOkSpecificPropertyInCsvFormatBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
getPropertyHistoryListBySearch(oldStage.getCounter(), oldPipeline.getLabel(), null, "a/2");
CSVResponse csvResponse = new CSVResponse(response);
assertThat(csvResponse.isCSV(), is(true));
assertThat(csvResponse.statusEquals(SC_OK), is(true));
assertThat(csvResponse.containsRow("a/2"), is(true));
assertThat(csvResponse.containsColumn("a/2", "200"), is(true));
assertThat(csvResponse.containsColumn("a/1", "100"), is(false));
}
@Test public void shouldReturnOkSpecificPropertyInJSONFormatBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
response = new MockHttpServletResponse();
ModelAndView modelAndView = propertiesController.jobSearch("pipeline", oldPipeline.getLabel(), "stage",
String.valueOf(oldStage.getCounter()), "build",
"json", "a/2", response);
Map map = modelAndView.getModel();
String content = map.get("json").toString();
assertThat(content, containsString("a/2"));
assertThat(content, containsString("200"));
assertThat(content, not(containsString("a/1")));
assertThat(content, not(containsString("100")));
}
@Test public void shouldReturnOkListingAllPropertiesInJsonFormatBySearch() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
response = new MockHttpServletResponse();
ModelAndView modelAndView = propertiesController.jobSearch("pipeline", oldPipeline.getLabel(), "stage",
String.valueOf(oldStage.getCounter()), "build",
"json", null, response);
Map map = modelAndView.getModel();
String content = map.get("json").toString();
assertThat(content, containsString("a/2"));
assertThat(content, containsString("a/1"));
assertThat(content, containsString("200"));
assertThat(content, containsString("100"));
}
@Test public void shouldReturn404WhenUnknownBuildOnList() throws Exception {
setProperty(oldPipeline, "a/2", "200");
setProperty(oldPipeline, "a/1", "100");
response = new MockHttpServletResponse();
propertiesController.jobSearch("unknown", oldPipeline.getLabel(), "stage",
String.valueOf(oldStage.getCounter()), "build",
"json", null, response);
assertThat(response.getStatus(), is(SC_NOT_FOUND));
}
private void getAllPropertyHistoryListAsCsvBySearch() throws Exception {
getAllPropertyHistoryListAsCsvBySearch(null, null);
}
private ModelAndView getPropertyHistoryListBySearch(Integer counter, String label, String type, String propertyKey)
throws Exception {
response = new MockHttpServletResponse();
return propertiesController.jobSearch("pipeline", label, "stage", String.valueOf(counter), "build", type,
propertyKey, response);
}
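    // Exercises PropertiesController.jobsSearch across all jobs; startFrom (nullable) and count are
    // the controller's optional limiting parameters, as exercised by the "limiting" tests above.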
private void getAllPropertyHistoryListAsCsvBySearch(Pipeline startFrom, Integer count) throws Exception {
response = new MockHttpServletResponse();
String limitLabel = startFrom == null ? null : startFrom.getLabel();
propertiesController.jobsSearch("pipeline", "stage", "build", limitLabel, count, response);
}
private ModelAndView getProperty(String property, String type) throws Exception {
response = new MockHttpServletResponse();
return propertiesController.jobSearch("pipeline", "latest", "stage", String.valueOf(newStage.getCounter()),
"build", type, property, response);
}
private void setProperty(String property, String value) throws Exception {
response = new MockHttpServletResponse();
propertiesController.setProperty("pipeline", "latest", "stage", null, "build", property, value, response, request);
}
private void setProperty(Pipeline pipeline, String property, String value) throws Exception {
response = new MockHttpServletResponse();
propertiesController.setProperty("pipeline", pipeline.getLabel(), "stage", null, "build",
property, value, response, request);
assertThat(response.getContentAsString(), response.getStatus(), is(SC_CREATED));
}
private void assertValidJsonContentAndStatus(int status, String content) throws UnsupportedEncodingException {
assertContentStatusWithTextPlain(response, status, content);
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.refactoring;
import com.intellij.JavaTestUtil;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.ui.TypeSelectorManagerImpl;
import com.intellij.testFramework.LightCodeInsightTestCase;
import com.intellij.testFramework.TestDataPath;
import com.intellij.util.VisibilityUtil;
import junit.framework.Assert;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
@TestDataPath("$CONTENT_ROOT/testData")
public class IntroduceConstantTest extends LightCodeInsightTestCase {
@NonNls private static final String BASE_PATH = "/refactoring/introduceConstant/";
@NotNull
@Override
protected String getTestDataPath() {
return JavaTestUtil.getJavaTestDataPath();
}
public void testInNonNls() {
doTest(false);
}
private void doTest(boolean makeEnumConstant) {
configureByFile(BASE_PATH + getTestName(false) + ".java");
convertLocal(makeEnumConstant);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testFromEnumConstantInitializer() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testUnresolvedReferenceInEnum() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testFromEnumConstantInitializer1() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testFromEnumConstantInitializer2() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testEnumConstant() {
doTest(true);
}
public void testAnonymousClassWithThrownClause() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testAnnotationDescription() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testTailingErrorUnacceptableWholeLineSelection() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
private static void convertLocal(final boolean makeEnumConstant) {
PsiLocalVariable local = PsiTreeUtil.getParentOfType(getFile().findElementAt(getEditor().getCaretModel().getOffset()), PsiLocalVariable.class);
new MockLocalToFieldHandler(getProject(), true, makeEnumConstant).convertLocalToField(local, getEditor());
}
public void testPartialStringLiteral() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testPartialStringLiteralConvertibleToInt() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testStringLiteralConvertibleToInt() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testPartialStringLiteralQualified() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
final PsiClass psiClass = ((PsiJavaFile)getFile()).getClasses()[0];
Assert.assertNotNull(psiClass);
final PsiClass targetClass = psiClass.findInnerClassByName("D", false);
Assert.assertNotNull(targetClass);
new MockIntroduceConstantHandler(targetClass).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testPartialStringLiteralAnchor() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testPartialStringLiteralAnchorFromAnnotation() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testIntroduceConstantFromThisCall() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testForwardReferences() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testArrayFromVarargs() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testWithMethodReferenceBySecondSearch() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null).invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testComments() {
doTestExpr();
}
private void doTestExpr() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
checkDefaultType(CommonClassNames.JAVA_LANG_STRING);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testContainingClass() {
doTestExpr();
}
public void testEscalateVisibility() {
configureByFile(BASE_PATH + getTestName(false) + ".java");
final PsiClass[] classes = ((PsiJavaFile)getFile()).getClasses();
Assert.assertTrue(classes.length == 2);
final PsiClass targetClass = classes[1];
Assert.assertNotNull(targetClass);
new MockIntroduceConstantHandler(targetClass){
@Override
protected String getVisibility() {
return VisibilityUtil.ESCALATE_VISIBILITY;
}
}.invoke(getProject(), getEditor(), getFile(), null);
checkResultByFile(BASE_PATH + getTestName(false) + "_after.java");
}
public void testResultedType() {
checkDefaultType(CommonClassNames.JAVA_LANG_OBJECT);
}
public void testResultedTypeWhenNonLocal() {
checkDefaultType("Test.C");
}
private void checkDefaultType(final String expectedType) {
configureByFile(BASE_PATH + getTestName(false) + ".java");
new MockIntroduceConstantHandler(null){
@Override
protected Settings showRefactoringDialog(Project project,
Editor editor,
PsiClass parentClass,
PsiExpression expr,
PsiType type,
PsiExpression[] occurrences,
PsiElement anchorElement,
PsiElement anchorElementIfAll) {
final TypeSelectorManagerImpl selectorManager =
new TypeSelectorManagerImpl(project, type, PsiTreeUtil.getParentOfType(anchorElement, PsiMethod.class), expr, occurrences);
final PsiType psiType = selectorManager.getDefaultType();
Assert.assertEquals(expectedType, psiType.getCanonicalText());
return new Settings("xxx", expr, occurrences, true, true, true,
InitializationPlace.IN_FIELD_DECLARATION, getVisibility(), null, psiType, false,
parentClass, false, false);
}
}.invoke(getProject(), getEditor(), getFile(), null);
}
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2beta3/cloudtasks.proto
package com.google.cloud.tasks.v2beta3;
/**
*
*
* <pre>
* Request message for forcing a task to run now using
* [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta3.RunTaskRequest}
*/
public final class RunTaskRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta3.RunTaskRequest)
RunTaskRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RunTaskRequest.newBuilder() to construct.
private RunTaskRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RunTaskRequest() {
name_ = "";
responseView_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RunTaskRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private RunTaskRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 16:
{
int rawValue = input.readEnum();
responseView_ = rawValue;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2beta3.CloudTasksProto
.internal_static_google_cloud_tasks_v2beta3_RunTaskRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2beta3.CloudTasksProto
.internal_static_google_cloud_tasks_v2beta3_RunTaskRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2beta3.RunTaskRequest.class,
com.google.cloud.tasks.v2beta3.RunTaskRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESPONSE_VIEW_FIELD_NUMBER = 2;
private int responseView_;
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @return The enum numeric value on the wire for responseView.
*/
@java.lang.Override
public int getResponseViewValue() {
return responseView_;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @return The responseView.
*/
@java.lang.Override
public com.google.cloud.tasks.v2beta3.Task.View getResponseView() {
@SuppressWarnings("deprecation")
com.google.cloud.tasks.v2beta3.Task.View result =
com.google.cloud.tasks.v2beta3.Task.View.valueOf(responseView_);
return result == null ? com.google.cloud.tasks.v2beta3.Task.View.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (responseView_ != com.google.cloud.tasks.v2beta3.Task.View.VIEW_UNSPECIFIED.getNumber()) {
output.writeEnum(2, responseView_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (responseView_ != com.google.cloud.tasks.v2beta3.Task.View.VIEW_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, responseView_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.tasks.v2beta3.RunTaskRequest)) {
return super.equals(obj);
}
com.google.cloud.tasks.v2beta3.RunTaskRequest other =
(com.google.cloud.tasks.v2beta3.RunTaskRequest) obj;
if (!getName().equals(other.getName())) return false;
if (responseView_ != other.responseView_) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + RESPONSE_VIEW_FIELD_NUMBER;
hash = (53 * hash) + responseView_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.tasks.v2beta3.RunTaskRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for forcing a task to run now using
* [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta3.RunTaskRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta3.RunTaskRequest)
com.google.cloud.tasks.v2beta3.RunTaskRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2beta3.CloudTasksProto
.internal_static_google_cloud_tasks_v2beta3_RunTaskRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2beta3.CloudTasksProto
.internal_static_google_cloud_tasks_v2beta3_RunTaskRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2beta3.RunTaskRequest.class,
com.google.cloud.tasks.v2beta3.RunTaskRequest.Builder.class);
}
// Construct using com.google.cloud.tasks.v2beta3.RunTaskRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
responseView_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.tasks.v2beta3.CloudTasksProto
.internal_static_google_cloud_tasks_v2beta3_RunTaskRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta3.RunTaskRequest getDefaultInstanceForType() {
return com.google.cloud.tasks.v2beta3.RunTaskRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.tasks.v2beta3.RunTaskRequest build() {
com.google.cloud.tasks.v2beta3.RunTaskRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta3.RunTaskRequest buildPartial() {
com.google.cloud.tasks.v2beta3.RunTaskRequest result =
new com.google.cloud.tasks.v2beta3.RunTaskRequest(this);
result.name_ = name_;
result.responseView_ = responseView_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.tasks.v2beta3.RunTaskRequest) {
return mergeFrom((com.google.cloud.tasks.v2beta3.RunTaskRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.tasks.v2beta3.RunTaskRequest other) {
if (other == com.google.cloud.tasks.v2beta3.RunTaskRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.responseView_ != 0) {
setResponseViewValue(other.getResponseViewValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.tasks.v2beta3.RunTaskRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.tasks.v2beta3.RunTaskRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private int responseView_ = 0;
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @return The enum numeric value on the wire for responseView.
*/
@java.lang.Override
public int getResponseViewValue() {
return responseView_;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @param value The enum numeric value on the wire for responseView to set.
* @return This builder for chaining.
*/
public Builder setResponseViewValue(int value) {
responseView_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @return The responseView.
*/
@java.lang.Override
public com.google.cloud.tasks.v2beta3.Task.View getResponseView() {
@SuppressWarnings("deprecation")
com.google.cloud.tasks.v2beta3.Task.View result =
com.google.cloud.tasks.v2beta3.Task.View.valueOf(responseView_);
return result == null ? com.google.cloud.tasks.v2beta3.Task.View.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @param value The responseView to set.
* @return This builder for chaining.
*/
public Builder setResponseView(com.google.cloud.tasks.v2beta3.Task.View value) {
if (value == null) {
throw new NullPointerException();
}
responseView_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta3.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta3.Task.View response_view = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearResponseView() {
responseView_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta3.RunTaskRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.RunTaskRequest)
private static final com.google.cloud.tasks.v2beta3.RunTaskRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta3.RunTaskRequest();
}
public static com.google.cloud.tasks.v2beta3.RunTaskRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RunTaskRequest> PARSER =
new com.google.protobuf.AbstractParser<RunTaskRequest>() {
@java.lang.Override
public RunTaskRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RunTaskRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<RunTaskRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RunTaskRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta3.RunTaskRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
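// Editor's sketch (not part of the generated file): a minimal example of constructing a
// RunTaskRequest using only the builder methods defined above. The task name is the documented
// placeholder format, and Task.View.FULL is the view referenced by the response_view comments;
// treat both as illustrative assumptions rather than real resource names.
class RunTaskRequestUsageSketch {
  static RunTaskRequest newFullViewRequest() {
    return RunTaskRequest.newBuilder()
        .setName("projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID")
        .setResponseView(com.google.cloud.tasks.v2beta3.Task.View.FULL)
        .build();
  }
}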
|
|
/*
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.imagepipeline.datasource;
import com.facebook.common.references.CloseableReference;
import com.facebook.common.references.ResourceReleaser;
import com.facebook.datasource.DataSource;
import com.facebook.datasource.DataSubscriber;
import com.facebook.imagepipeline.listener.RequestListener;
import com.facebook.imagepipeline.producers.Consumer;
import com.facebook.imagepipeline.producers.Producer;
import com.facebook.imagepipeline.producers.SettableProducerContext;
import com.facebook.common.executors.CallerThreadExecutor;
import org.junit.*;
import org.junit.runner.*;
import org.mockito.*;
import org.robolectric.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@RunWith(RobolectricTestRunner.class)
public class CloseableProducerToDataSourceAdapterTest {
@Mock public RequestListener mRequestListener;
private static final boolean FINISHED = true;
private static final boolean NOT_FINISHED = false;
private static final boolean WITH_RESULT = true;
private static final boolean WITHOUT_RESULT = false;
private static final boolean FAILED = true;
private static final boolean NOT_FAILED = false;
private static final boolean LAST = true;
private static final boolean INTERMEDIATE = false;
private static final int NO_INTERACTIONS = 0;
private static final int ON_NEW_RESULT = 1;
private static final int ON_FAILURE = 2;
private static final Exception NPE = new NullPointerException();
private static final String mRequestId = "requestId";
private ResourceReleaser mResourceReleaser;
private CloseableReference<Object> mResultRef1;
private CloseableReference<Object> mResultRef2;
private CloseableReference<Object> mResultRef3;
private Exception mException;
private DataSubscriber<CloseableReference<Object>> mDataSubscriber1;
private DataSubscriber<CloseableReference<Object>> mDataSubscriber2;
private SettableProducerContext mSettableProducerContext;
private Producer<CloseableReference<Object>> mProducer;
private Consumer<CloseableReference<Object>> mInternalConsumer;
private DataSource<CloseableReference<Object>> mDataSource;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
mResourceReleaser = mock(ResourceReleaser.class);
mResultRef1 = CloseableReference.of(new Object(), mResourceReleaser);
mResultRef2 = CloseableReference.of(new Object(), mResourceReleaser);
mResultRef3 = CloseableReference.of(new Object(), mResourceReleaser);
mException = mock(Exception.class);
mDataSubscriber1 = mock(DataSubscriber.class);
mDataSubscriber2 = mock(DataSubscriber.class);
mSettableProducerContext = mock(SettableProducerContext.class);
when(mSettableProducerContext.getId()).thenReturn(mRequestId);
when(mSettableProducerContext.isPrefetch()).thenReturn(false);
mProducer = mock(Producer.class);
mDataSource = CloseableProducerToDataSourceAdapter.create(
mProducer,
mSettableProducerContext,
mRequestListener);
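    // Capture the internal Consumer handed to the producer so each test can push
    // intermediate/last results and failures into the data source directly.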
ArgumentCaptor<Consumer> captor = ArgumentCaptor.forClass(Consumer.class);
verify(mRequestListener).onRequestStart(
mSettableProducerContext.getImageRequest(),
mSettableProducerContext.getCallerContext(),
mRequestId,
mSettableProducerContext.isPrefetch());
verify(mProducer).produceResults(captor.capture(), any(SettableProducerContext.class));
mInternalConsumer = captor.getValue();
mDataSource.subscribe(mDataSubscriber1, CallerThreadExecutor.getInstance());
}
/* reference assertions */
private static <T> void assertReferenceCount(int expectedCount, CloseableReference<T> ref) {
assertEquals(expectedCount, ref.getUnderlyingReferenceTestOnly().getRefCountTestOnly());
}
private static <T> void assertReferencesSame(
String errorMessage,
CloseableReference<T> expectedRef,
CloseableReference<T> actualRef) {
if (expectedRef == null) {
assertNull(errorMessage, actualRef);
} else {
assertSame(errorMessage, expectedRef.get(), actualRef.get());
}
}
/* verification helpers */
private void verifyState(
boolean isFinished,
boolean hasResult,
CloseableReference<Object> resultRef,
boolean hasFailed,
Throwable failureCause) {
DataSource<CloseableReference<Object>> dataSource = mDataSource;
assertEquals("isFinished", isFinished, dataSource.isFinished());
assertEquals("hasResult", hasResult, dataSource.hasResult());
CloseableReference<Object> dataSourceRef = dataSource.getResult();
assertReferencesSame("getResult", resultRef, dataSourceRef);
CloseableReference.closeSafely(dataSourceRef);
assertEquals("hasFailed", hasFailed, dataSource.hasFailed());
if (failureCause == NPE) {
assertNotNull("failure", dataSource.getFailureCause());
assertSame("failure", NullPointerException.class, dataSource.getFailureCause().getClass());
} else {
assertSame("failure", failureCause, dataSource.getFailureCause());
}
}
private void verifyReferenceCount(CloseableReference<Object> resultRef) {
// this unit test class keeps references alive, so their ref count must be 1,
// except for the current result, which has a ref count of 2 because the data source also holds it
assertReferenceCount((resultRef == mResultRef1) ? 2 : 1, mResultRef1);
assertReferenceCount((resultRef == mResultRef2) ? 2 : 1, mResultRef2);
assertReferenceCount((resultRef == mResultRef3) ? 2 : 1, mResultRef3);
}
private void verifyNoMoreInteractionsAndReset() {
verifyNoMoreInteractions(mRequestListener, mDataSubscriber1, mDataSubscriber2);
reset(mRequestListener, mDataSubscriber1, mDataSubscriber2);
}
/* state verification methods */
private void verifyInitial() {
verifyState(NOT_FINISHED, WITHOUT_RESULT, null, NOT_FAILED, null);
verifyReferenceCount(null);
verifyNoMoreInteractionsAndReset();
}
private void verifyWithResult(CloseableReference<Object> resultRef, boolean isLast) {
verifyState(isLast, resultRef != null, resultRef, NOT_FAILED, null);
verifyReferenceCount(resultRef);
verifyNoMoreInteractionsAndReset();
}
private void verifyFailed(CloseableReference<Object> resultRef, Throwable throwable) {
verifyState(FINISHED, resultRef != null, resultRef, FAILED, throwable);
verifyReferenceCount(resultRef);
verifyNoMoreInteractionsAndReset();
}
private void verifyClosed(boolean isFinished, Throwable throwable) {
verifyState(isFinished, WITHOUT_RESULT, null, throwable != null, throwable);
verifyReferenceCount(null);
verifyNoMoreInteractionsAndReset();
}
/* event testing helpers */
private void testSubscribe(int expected) {
mDataSource.subscribe(mDataSubscriber2, CallerThreadExecutor.getInstance());
switch (expected) {
case NO_INTERACTIONS:
break;
case ON_NEW_RESULT:
verify(mDataSubscriber2).onNewResult(mDataSource);
break;
case ON_FAILURE:
verify(mDataSubscriber2).onFailure(mDataSource);
break;
}
verifyNoMoreInteractionsAndReset();
}
private void testNewResult(
CloseableReference<Object> resultRef,
boolean isLast,
int numSubscribers) {
mInternalConsumer.onNewResult(resultRef, isLast);
if (isLast) {
verify(mRequestListener).onRequestSuccess(
mSettableProducerContext.getImageRequest(),
mRequestId,
mSettableProducerContext.isPrefetch());
}
if (numSubscribers >= 1) {
verify(mDataSubscriber1).onNewResult(mDataSource);
}
if (numSubscribers >= 2) {
verify(mDataSubscriber2).onNewResult(mDataSource);
}
verifyWithResult(resultRef, isLast);
}
private void testFailure(CloseableReference<Object> resultRef, int numSubscribers) {
mInternalConsumer.onFailure(mException);
verify(mRequestListener).onRequestFailure(
mSettableProducerContext.getImageRequest(),
mRequestId,
mException,
mSettableProducerContext.isPrefetch());
if (numSubscribers >= 1) {
verify(mDataSubscriber1).onFailure(mDataSource);
}
if (numSubscribers >= 2) {
verify(mDataSubscriber2).onFailure(mDataSource);
}
verifyFailed(resultRef, mException);
}
private void testClose(Throwable throwable) {
mDataSource.close();
verifyClosed(FINISHED, throwable);
}
private void testClose(boolean isFinished, int numSubscribers) {
mDataSource.close();
if (!isFinished) {
verify(mRequestListener).onRequestCancellation(mRequestId);
if (numSubscribers >= 1) {
verify(mDataSubscriber1).onCancellation(mDataSource);
}
if (numSubscribers >= 2) {
verify(mDataSubscriber2).onCancellation(mDataSource);
}
}
verifyClosed(isFinished, null);
}
@Test
public void testInitialState() {
verifyInitial();
}
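  // Test name legend (inferred from the test bodies): C = close the data source,
  // I = intermediate result, L = last result, F = failure, NI/NL = null intermediate/last result,
  // a = attach the second subscriber at that point in the sequence.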
@Test
public void test_C_a() {
testClose(NOT_FINISHED, 1);
testSubscribe(NO_INTERACTIONS);
}
@Test
public void test_C_I_a() {
testClose(NOT_FINISHED, 1);
mInternalConsumer.onNewResult(mResultRef2, INTERMEDIATE);
verifyClosed(NOT_FINISHED, null);
testSubscribe(NO_INTERACTIONS);
}
@Test
public void test_C_L_a() {
testClose(NOT_FINISHED, 1);
mInternalConsumer.onNewResult(mResultRef2, LAST);
verifyClosed(NOT_FINISHED, null);
testSubscribe(NO_INTERACTIONS);
}
@Test
public void test_C_F_a() {
testClose(NOT_FINISHED, 1);
mInternalConsumer.onFailure(mException);
verifyClosed(NOT_FINISHED, null);
testSubscribe(NO_INTERACTIONS);
}
@Test
public void test_I_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testSubscribe(ON_NEW_RESULT);
testClose(NOT_FINISHED, 2);
}
@Test
public void test_I_I_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testNewResult(mResultRef2, INTERMEDIATE, 1);
testSubscribe(ON_NEW_RESULT);
testClose(NOT_FINISHED, 2);
}
@Test
public void test_I_I_L_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testNewResult(mResultRef2, INTERMEDIATE, 1);
testNewResult(mResultRef3, LAST, 1);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_I_I_F_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testNewResult(mResultRef2, INTERMEDIATE, 1);
testFailure(mResultRef2, 1);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_I_L_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testNewResult(mResultRef2, LAST, 1);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_I_F_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
testFailure(mResultRef1, 1);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_L_a_C() {
testNewResult(mResultRef1, LAST, 1);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_L_I_a_C() {
testNewResult(mResultRef1, LAST, 1);
mInternalConsumer.onNewResult(mResultRef2, INTERMEDIATE);
verifyWithResult(mResultRef1, LAST);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_L_L_a_C() {
testNewResult(mResultRef1, LAST, 1);
mInternalConsumer.onNewResult(mResultRef2, LAST);
verifyWithResult(mResultRef1, LAST);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_L_F_a_C() {
testNewResult(mResultRef1, LAST, 1);
mInternalConsumer.onFailure(mException);
verifyWithResult(mResultRef1, LAST);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_F_a_C() {
testFailure(null, 1);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_F_I_a_C() {
testFailure(null, 1);
mInternalConsumer.onNewResult(mResultRef1, INTERMEDIATE);
verifyFailed(null, mException);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_F_L_a_C() {
testFailure(null, 1);
mInternalConsumer.onNewResult(mResultRef1, LAST);
verifyFailed(null, mException);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_F_F_a_C() {
testFailure(null, 1);
mInternalConsumer.onFailure(mock(Throwable.class));
verifyFailed(null, mException);
testSubscribe(ON_FAILURE);
testClose(mException);
}
@Test
public void test_NI_S_a_C() {
mInternalConsumer.onNewResult(null, INTERMEDIATE);
verify(mDataSubscriber1).onNewResult(mDataSource);
verifyWithResult(null, INTERMEDIATE);
testNewResult(mResultRef1, LAST, 1);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
@Test
public void test_NI_a_NL_C() {
mInternalConsumer.onNewResult(null, INTERMEDIATE);
verify(mDataSubscriber1).onNewResult(mDataSource);
verifyWithResult(null, INTERMEDIATE);
testSubscribe(NO_INTERACTIONS);
mInternalConsumer.onNewResult(null, LAST);
verify(mRequestListener).onRequestSuccess(
mSettableProducerContext.getImageRequest(),
mRequestId,
mSettableProducerContext.isPrefetch());
verify(mDataSubscriber1).onNewResult(mDataSource);
verify(mDataSubscriber2).onNewResult(mDataSource);
verifyWithResult(null, LAST);
testClose(FINISHED, 2);
}
@Test
public void test_I_NL_a_C() {
testNewResult(mResultRef1, INTERMEDIATE, 1);
mInternalConsumer.onNewResult(null, LAST);
verify(mRequestListener).onRequestSuccess(
mSettableProducerContext.getImageRequest(),
mRequestId,
mSettableProducerContext.isPrefetch());
verify(mDataSubscriber1).onNewResult(mDataSource);
verifyWithResult(null, LAST);
testSubscribe(ON_NEW_RESULT);
testClose(FINISHED, 2);
}
}
|
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.rules.genrule;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.testutil.TestConstants.GENRULE_SETUP;
import static com.google.devtools.build.lib.testutil.TestConstants.GENRULE_SETUP_PATH;
import static org.junit.Assert.fail;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ShellConfiguration;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.configuredtargets.FileConfiguredTarget;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests of {@link BazelGenRule}. */
@RunWith(JUnit4.class)
public final class GenRuleConfiguredTargetTest extends BuildViewTestCase {
private static final Pattern SETUP_COMMAND_PATTERN =
Pattern.compile(".*/genrule-setup.sh;\\s+(?<command>.*)");
private static void assertCommandEquals(String expected, String command) {
// Ensure the command after the genrule setup is correct.
Matcher m = SETUP_COMMAND_PATTERN.matcher(command);
if (m.matches()) {
command = m.group("command");
}
assertThat(command).isEqualTo(expected);
}
public void createFiles() throws Exception {
scratch.file(
"hello/BUILD",
"genrule(",
" name = 'z',",
" outs = ['x/y'],",
" cmd = 'echo hi > $(@D)/y',",
")",
"genrule(",
" name = 'w',",
" outs = ['a/b', 'c/d'],",
" cmd = 'echo hi | tee $(@D)/a/b $(@D)/c/d',",
")");
}
@Override
protected ConfiguredRuleClassProvider createRuleClassProvider() {
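    // In addition to the standard rules, register the make_variable_tester rule so that
    // tests such as testToolchainOverridesJavabase can supply custom Make variables.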
ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
TestRuleClassProvider.addStandardRules(builder);
return builder.addRuleDefinition(new TestRuleClassProvider.MakeVariableTesterRule()).build();
}
@Test
public void testToolchainOverridesJavabase() throws Exception {
scratch.file("a/BUILD",
"genrule(name='gr', srcs=[], outs=['out'], cmd='JAVABASE=$(JAVABASE)', toolchains=[':v'])",
"make_variable_tester(name='v', variables={'JAVABASE': 'REPLACED'})");
String cmd = getCommand("//a:gr");
assertThat(cmd).endsWith("JAVABASE=REPLACED");
}
@Test
public void testD() throws Exception {
createFiles();
ConfiguredTarget z = getConfiguredTarget("//hello:z");
Artifact y = getFilesToBuild(z).getSingleton();
assertThat(y.getRootRelativePath()).isEqualTo(PathFragment.create("hello/x/y"));
}
@Test
public void testDMultiOutput() throws Exception {
createFiles();
ConfiguredTarget z = getConfiguredTarget("//hello:w");
List<Artifact> files = getFilesToBuild(z).toList();
assertThat(files).hasSize(2);
assertThat(files.get(0).getRootRelativePath()).isEqualTo(PathFragment.create("hello/a/b"));
assertThat(files.get(1).getRootRelativePath()).isEqualTo(PathFragment.create("hello/c/d"));
}
@Test
public void testOutsWithSameNameAsRule() throws Exception {
// The error was demoted to a warning.
// Re-enable after June 1 2008 when we make it an error again.
checkWarning(
"genrule2",
"hello_world",
"target 'hello_world' is both a rule and a file;",
"genrule(name = 'hello_world',",
"srcs = ['ignore_me.txt'],",
"outs = ['message.txt', 'hello_world'],",
"cmd = 'echo \"Hello, world.\" >$(location message.txt)')");
}
@Test
public void testFilesToBuildIsOuts() throws Exception {
scratch.file(
"genrule1/BUILD",
"genrule(name = 'hello_world',",
"srcs = ['ignore_me.txt'],",
"outs = ['message.txt'],",
"cmd = 'echo \"Hello, world.\" >$(location message.txt)')");
Artifact messageArtifact = getFileConfiguredTarget("//genrule1:message.txt").getArtifact();
assertThat(getFilesToBuild(getConfiguredTarget("//genrule1:hello_world")).toList())
.containsExactly(messageArtifact);
}
@Test
public void testActionIsShellCommand() throws Exception {
scratch.file(
"genrule1/BUILD",
"genrule(name = 'hello_world',",
"srcs = ['ignore_me.txt'],",
"outs = ['message.txt'],",
"cmd = 'echo \"Hello, world.\" >$(location message.txt)')");
Artifact messageArtifact = getFileConfiguredTarget("//genrule1:message.txt").getArtifact();
SpawnAction shellAction = (SpawnAction) getGeneratingAction(messageArtifact);
Artifact ignoreMeArtifact = getFileConfiguredTarget("//genrule1:ignore_me.txt").getArtifact();
Artifact genruleSetupArtifact = getFileConfiguredTarget(GENRULE_SETUP).getArtifact();
assertThat(shellAction).isNotNull();
assertThat(shellAction.getInputs().toList())
.containsExactly(ignoreMeArtifact, genruleSetupArtifact);
assertThat(shellAction.getOutputs()).containsExactly(messageArtifact);
String expected = "echo \"Hello, world.\" >" + messageArtifact.getExecPathString();
assertThat(shellAction.getArguments().get(0)).isEqualTo(
targetConfig.getFragment(ShellConfiguration.class).getShellExecutable().getPathString());
assertThat(shellAction.getArguments().get(1)).isEqualTo("-c");
assertCommandEquals(expected, shellAction.getArguments().get(2));
}
@Test
public void testDependentGenrule() throws Exception {
scratch.file(
"genrule1/BUILD",
"genrule(name = 'hello_world',",
"srcs = ['ignore_me.txt'],",
"outs = ['message.txt'],",
"cmd = 'echo \"Hello, world.\" >$(location message.txt)')");
scratch.file(
"genrule2/BUILD",
"genrule(name = 'goodbye_world',",
"srcs = ['goodbye.txt', '//genrule1:hello_world'],",
"outs = ['farewell.txt'],",
"cmd = 'echo $(SRCS) >$(location farewell.txt)')");
getConfiguredTarget("//genrule2:goodbye_world");
Artifact farewellArtifact = getFileConfiguredTarget("//genrule2:farewell.txt").getArtifact();
Artifact goodbyeArtifact = getFileConfiguredTarget("//genrule2:goodbye.txt").getArtifact();
Artifact messageArtifact = getFileConfiguredTarget("//genrule1:message.txt").getArtifact();
Artifact genruleSetupArtifact = getFileConfiguredTarget(GENRULE_SETUP).getArtifact();
SpawnAction shellAction = (SpawnAction) getGeneratingAction(farewellArtifact);
// inputs = { "goodbye.txt", "//genrule1:message.txt" }
assertThat(shellAction.getInputs().toList())
.containsExactly(goodbyeArtifact, messageArtifact, genruleSetupArtifact);
// outputs = { "farewell.txt" }
assertThat(shellAction.getOutputs()).containsExactly(farewellArtifact);
String expected =
"echo "
+ goodbyeArtifact.getExecPathString()
+ " "
+ messageArtifact.getExecPathString()
+ " >"
+ farewellArtifact.getExecPathString();
assertCommandEquals(expected, shellAction.getArguments().get(2));
}
/**
* Ensure that the actions / artifacts created by genrule dependencies allow us to follow the
* chain of generated files backward.
*/
@Test
public void testDependenciesViaFiles() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name = 'bar',",
" srcs = ['bar_in.txt'],",
" cmd = 'touch $(OUTS)',",
" outs = ['bar_out.txt'])",
"genrule(name = 'baz',",
" srcs = ['bar_out.txt'],",
" cmd = 'touch $(OUTS)',",
" outs = ['baz_out.txt'])");
FileConfiguredTarget bazOutTarget = getFileConfiguredTarget("//foo:baz_out.txt");
Action bazAction = getGeneratingAction(bazOutTarget.getArtifact());
Artifact barOut = bazAction.getInputs().toList().get(0);
assertThat(barOut.getExecPath().endsWith(PathFragment.create("foo/bar_out.txt"))).isTrue();
Action barAction = getGeneratingAction(barOut);
Artifact barIn = barAction.getInputs().toList().get(0);
assertThat(barIn.getExecPath().endsWith(PathFragment.create("foo/bar_in.txt"))).isTrue();
}
/** Ensure that variable $(@D) gets expanded correctly in the genrule cmd. */
@Test
public void testOutputDirExpansion() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name = 'bar',",
" srcs = ['bar_in.txt'],",
" cmd = 'touch $(@D)',",
" outs = ['bar/bar_out.txt'])",
"genrule(name = 'baz',",
" srcs = ['bar/bar_out.txt'],",
" cmd = 'touch $(@D)',",
" outs = ['logs/baz_out.txt', 'logs/baz.log'])");
getConfiguredTarget("//foo:bar");
FileConfiguredTarget bazOutTarget = getFileConfiguredTarget("//foo:logs/baz_out.txt");
SpawnAction bazAction = (SpawnAction) getGeneratingAction(bazOutTarget.getArtifact());
// Make sure the expansion for $(@D) results in the
// directory of the BUILD file ("foo"), not the common parent
// directory of the output files ("logs")
String bazExpected =
"touch "
+ bazOutTarget
.getArtifact()
.getExecPath()
.getParentDirectory()
.getParentDirectory()
.getPathString();
assertCommandEquals(bazExpected, bazAction.getArguments().get(2));
assertThat(bazAction.getArguments().get(2)).endsWith("/foo");
getConfiguredTarget("//foo:bar");
Artifact barOut = bazAction.getInputs().toList().get(0);
assertThat(barOut.getExecPath().endsWith(PathFragment.create("foo/bar/bar_out.txt"))).isTrue();
SpawnAction barAction = (SpawnAction) getGeneratingAction(barOut);
String barExpected = "touch " + barOut.getExecPath().getParentDirectory().getPathString();
assertCommandEquals(barExpected, barAction.getArguments().get(2));
assertThat(bazExpected.equals(barExpected)).isFalse();
}
/** Ensure that variable $(RULEDIR) gets expanded correctly in the genrule cmd. */
@Test
public void testRuleDirExpansion() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name = 'bar',",
" srcs = ['bar_in.txt'],",
" cmd = 'touch $(RULEDIR)',",
" outs = ['bar/bar_out.txt'])",
"genrule(name = 'baz',",
" srcs = ['bar/bar_out.txt'],",
" cmd = 'touch $(RULEDIR)',",
" outs = ['baz/baz_out.txt', 'logs/baz.log'])");
// Make sure the expansion for $(RULEDIR) results in the directory of the BUILD file ("foo")
String expectedRegex = "touch b.{4}-out.*foo";
assertThat(getCommand("//foo:bar")).containsMatch(expectedRegex);
assertThat(getCommand("//foo:baz")).containsMatch(expectedRegex);
}
// Returns the expansion of 'cmd' for the specified genrule.
private String getCommand(String label) throws Exception {
return getSpawnAction(label).getArguments().get(2);
}
// Returns the SpawnAction for the specified genrule.
private SpawnAction getSpawnAction(String label) throws Exception {
return (SpawnAction)
getGeneratingAction(getFilesToBuild(getConfiguredTarget(label)).toList().get(0));
}
@Test
public void testMessage() throws Exception {
scratch.file(
"genrule3/BUILD",
"genrule(name = 'hello_world',",
" srcs = ['ignore_me.txt'],",
" outs = ['hello.txt'],",
" cmd = 'echo \"Hello, world.\" >hello.txt')",
"genrule(name = 'goodbye_world',",
" srcs = ['ignore_me.txt'],",
" outs = ['goodbye.txt'],",
" message = 'Generating message',",
" cmd = 'echo \"Goodbye, world.\" >goodbye.txt')");
assertThat(getSpawnAction("//genrule3:hello_world").getProgressMessage())
.isEqualTo("Executing genrule //genrule3:hello_world");
assertThat(getSpawnAction("//genrule3:goodbye_world").getProgressMessage())
.isEqualTo("Generating message //genrule3:goodbye_world");
}
/** Ensure that labels from binary targets expand to the executable. */
@Test
public void testBinaryTargetsExpandToExecutable() throws Exception {
scratch.file(
"genrule3/BUILD",
"genrule(name = 'hello_world',",
" srcs = ['ignore_me.txt'],",
" tools = ['echo'],",
" outs = ['message.txt'],",
" cmd = '$(location :echo) \"Hello, world.\" >message.txt')",
"cc_binary(name = 'echo',",
" srcs = ['echo.cc'])");
String regex = "b.{4}-out/.*/bin/genrule3/echo(\\.exe)? \"Hello, world.\" >message.txt";
assertThat(getCommand("//genrule3:hello_world")).containsMatch(regex);
}
@Test
public void testOutputToBindir() throws Exception {
scratch.file(
"x/BUILD",
"genrule(name='bin', ",
" outs=['bin.out'],",
" cmd=':',",
" output_to_bindir=1)",
"genrule(name='genfiles', ",
" outs=['genfiles.out'],",
" cmd=':',",
" output_to_bindir=0)");
assertThat(getFileConfiguredTarget("//x:bin.out").getArtifact())
.isEqualTo(getBinArtifact("bin.out", getConfiguredTarget("//x:bin")));
assertThat(getFileConfiguredTarget("//x:genfiles.out").getArtifact())
.isEqualTo(getGenfilesArtifact("genfiles.out", "//x:genfiles"));
}
@Test
public void testMultipleOutputsToBindir() throws Exception {
scratch.file(
"x/BUILD",
"genrule(name='bin', ",
" outs=['bin_a.out', 'bin_b.out'],",
" cmd=':',",
" output_to_bindir=1)",
"genrule(name='genfiles', ",
" outs=['genfiles_a.out', 'genfiles_b.out'],",
" cmd=':',",
" output_to_bindir=0)");
ConfiguredTarget binCt = getConfiguredTarget("//x:bin");
ConfiguredTarget genCt = getConfiguredTarget("//x:genfiles");
assertThat(getFileConfiguredTarget("//x:bin_a.out").getArtifact())
.isEqualTo(getBinArtifact("bin_a.out", binCt));
assertThat(getFileConfiguredTarget("//x:bin_b.out").getArtifact())
.isEqualTo(getBinArtifact("bin_b.out", binCt));
assertThat(getFileConfiguredTarget("//x:genfiles_a.out").getArtifact())
.isEqualTo(getGenfilesArtifact("genfiles_a.out", genCt));
assertThat(getFileConfiguredTarget("//x:genfiles_b.out").getArtifact())
.isEqualTo(getGenfilesArtifact("genfiles_b.out", genCt));
}
@Test
public void testMultipleOutsPreservesOrdering() throws Exception {
scratch.file(
"multiple/outs/BUILD",
"genrule(name='test', ",
" outs=['file1.out', 'file2.out'],",
" cmd='touch $(OUTS)')");
String regex =
"touch b.{4}-out/.*/multiple/outs/file1.out "
+ "b.{4}-out/.*/multiple/outs/file2.out";
assertThat(getCommand("//multiple/outs:test")).containsMatch(regex);
}
@Test
public void testToolsAreHostConfiguration() throws Exception {
scratch.file(
"config/BUILD",
"genrule(name='src', outs=['src.out'], cmd=':')",
"genrule(name='tool', outs=['tool.out'], cmd=':')",
"genrule(name='config', ",
" srcs=[':src'], tools=[':tool'], outs=['out'],",
" cmd='$(location :tool)')");
ConfiguredTarget parentTarget = getConfiguredTarget("//config");
Iterable<ConfiguredTarget> prereqs = getDirectPrerequisites(parentTarget);
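// The srcs should keep the target configuration, tools should be built in the host
// configuration, and the genrule setup script should have no configuration at all.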
boolean foundSrc = false;
boolean foundTool = false;
boolean foundSetup = false;
for (ConfiguredTarget prereq : prereqs) {
String name = prereq.getLabel().getName();
switch (name) {
case "src":
assertConfigurationsEqual(getConfiguration(parentTarget), getConfiguration(prereq));
foundSrc = true;
break;
case "tool":
assertConfigurationsEqual(getHostConfiguration(), getConfiguration(prereq));
foundTool = true;
break;
case GENRULE_SETUP_PATH:
assertThat(getConfiguration(prereq)).isNull();
foundSetup = true;
break;
default:
fail("unexpected prerequisite " + prereq + " (name: " + name + ")");
}
}
assertThat(foundSrc).isTrue();
assertThat(foundTool).isTrue();
assertThat(foundSetup).isTrue();
}
@Test
public void testLabelsContainingAtDAreExpanded() throws Exception {
scratch.file(
"puck/BUILD",
"genrule(name='gen', ",
" tools=['puck'],",
" outs=['out'],",
" cmd='echo $(@D)')");
String regex = "echo b.{4}-out/.*/puck";
assertThat(getCommand("//puck:gen")).containsMatch(regex);
}
@Test
public void testGetExecutable() throws Exception {
ConfiguredTarget turtle =
scratchConfiguredTarget(
"java/com/google/turtle",
"turtle_bootstrap",
"genrule(name = 'turtle_bootstrap',",
" srcs = ['Turtle.java'],",
" outs = ['turtle'],",
" executable = 1,",
" cmd = 'touch $(OUTS)')");
assertThat(getExecutable(turtle).getExecPath().getBaseName()).isEqualTo("turtle");
}
@Test
public void testGetExecutableForNonExecutableOut() throws Exception {
ConfiguredTarget turtle =
scratchConfiguredTarget(
"java/com/google/turtle",
"turtle_bootstrap",
"genrule(name = 'turtle_bootstrap',",
" srcs = ['Turtle.java'],",
" outs = ['debugdata.txt'],",
" cmd = 'touch $(OUTS)')");
assertThat(getExecutable(turtle)).isNull();
}
@Test
public void testGetExecutableForMultipleOuts() throws Exception {
ConfiguredTarget turtle =
scratchConfiguredTarget(
"java/com/google/turtle",
"turtle_bootstrap",
"genrule(name = 'turtle_bootstrap',",
" srcs = ['Turtle.java'],",
" outs = ['turtle', 'debugdata.txt'],",
" cmd = 'touch $(OUTS)')");
assertThat(getExecutable(turtle)).isNull();
}
@Test
public void testGetExecutableFailsForMultipleOutputs() throws Exception {
// Multiple output files are invalid when executable=1.
checkError(
"bad",
"bad",
"in executable attribute of genrule rule //bad:bad: "
+ "if genrules produce executables, they are allowed only one output. "
+ "If you need the executable=1 argument, then you should split this genrule into "
+ "genrules producing single outputs",
"genrule(name = 'bad',",
" outs = [ 'bad_out1', 'bad_out2' ],",
" executable = 1,",
" cmd = 'touch $(OUTS)')");
}
@Test
public void testEmptyOutsError() throws Exception {
checkError(
"x",
"x",
"Genrules without outputs don't make sense",
"genrule(name = 'x', outs = [], cmd='echo')");
}
@Test
public void testGenruleSetup() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name = 'foo_sh',",
" outs = [ 'foo.sh' ],", // Shell script files are known to be executable.
" cmd = 'touch $@')");
assertThat(getCommand("//foo:foo_sh")).contains(GENRULE_SETUP_PATH);
}
private void createStampingTargets() throws Exception {
scratch.file(
"u/BUILD",
"genrule(name='foo_stamp', srcs=[], outs=['uu'], stamp=1, cmd='')",
"genrule(name='foo_nostamp', srcs=[], outs=['vv'], stamp=0, cmd='')",
"genrule(name='foo_default', srcs=[], outs=['xx'], cmd='')");
}
private void assertStamped(String target) throws Exception {
assertStamped(getConfiguredTarget(target));
}
private void assertStamped(ConfiguredTarget target) {
Artifact out = getFilesToBuild(target).toList().get(0);
List<String> inputs = ActionsTestUtil.baseArtifactNames(getGeneratingAction(out).getInputs());
assertThat(inputs).containsAtLeast("build-info.txt", "build-changelist.txt");
}
private void assertNotStamped(String target) throws Exception {
assertNotStamped(getConfiguredTarget(target));
}
private void assertNotStamped(ConfiguredTarget target) {
Artifact out = getFilesToBuild(target).toList().get(0);
List<String> inputs = ActionsTestUtil.baseArtifactNames(getGeneratingAction(out).getInputs());
assertThat(inputs).doesNotContain("build-info.txt");
assertThat(inputs).doesNotContain("build-changelist.txt");
}
@Test
public void testStampingWithNoStamp() throws Exception {
useConfiguration("--nostamp");
createStampingTargets();
assertStamped("//u:foo_stamp");
assertStamped(getHostConfiguredTarget("//u:foo_stamp"));
assertNotStamped("//u:foo_nostamp");
assertNotStamped(getHostConfiguredTarget("//u:foo_nostamp"));
assertNotStamped("//u:foo_default");
}
@Test
public void testStampingWithStamp() throws Exception {
useConfiguration("--stamp");
createStampingTargets();
assertStamped("//u:foo_stamp");
assertStamped(getHostConfiguredTarget("//u:foo_stamp"));
//assertStamped("//u:foo_nostamp");
assertNotStamped(getHostConfiguredTarget("//u:foo_nostamp"));
assertNotStamped("//u:foo_default");
}
@Test
public void testRequiresDarwin() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name='darwin', srcs=[], outs=['macout'], cmd='', tags=['requires-darwin'])");
SpawnAction action = getSpawnAction("//foo:darwin");
assertThat(action.getExecutionInfo().keySet()).contains("requires-darwin");
// requires-darwin causes /bin/bash to be hard-coded, see CommandHelper.shellPath().
assertThat(action.getCommandFilename())
.isEqualTo("/bin/bash");
}
@Test
public void testJarError() throws Exception {
checkError(
"foo",
"grj",
"in cmd attribute of genrule rule //foo:grj: $(JAR) not defined",
"genrule(name='grj',"
+ " srcs = [],"
+ " outs=['grj'],"
+ " cmd='$(JAR) foo bar')");
}
/** Regression test for b/15589451. */
@Test
public void testDuplicateLocalFlags() throws Exception {
scratch.file(
"foo/BUILD",
"genrule(name='g',"
+ " srcs = [],"
+ " outs = ['grj'],"
+ " cmd ='echo g',"
+ " local = 1,"
+ " tags = ['local'])");
getConfiguredTarget("//foo:g");
assertNoEvents();
}
@Test
public void testExecToolsAreExecConfiguration() throws Exception {
scratch.file(
"config/BUILD",
"genrule(name='src', outs=['src.out'], cmd=':')",
"genrule(name='exec_tool', outs=['exec_tool.out'], cmd=':')",
"genrule(name='config', ",
" srcs=[':src'], exec_tools=[':exec_tool'], outs=['out'],",
" cmd='$(location :exec_tool)')");
ConfiguredTarget parentTarget = getConfiguredTarget("//config");
// Cannot use getDirectPrerequisites, as this re-configures that target incorrectly.
Artifact out = getFilesToBuild(parentTarget).toList().get(0);
assertThat(getGeneratingAction(out).getTools().toList()).hasSize(1);
Artifact execTool = getGeneratingAction(out).getTools().getSingleton();
// This is the output dir fragment for the execution transition.
assertThat(execTool.getExecPathString()).contains("-exec-");
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor.storage;
import com.facebook.presto.orc.OrcDataSource;
import com.facebook.presto.orc.OrcRecordReader;
import com.facebook.presto.raptor.RaptorColumnHandle;
import com.facebook.presto.raptor.backup.BackupManager;
import com.facebook.presto.raptor.backup.BackupStore;
import com.facebook.presto.raptor.backup.FileBackupStore;
import com.facebook.presto.raptor.metadata.ColumnStats;
import com.facebook.presto.raptor.metadata.ShardDelta;
import com.facebook.presto.raptor.metadata.ShardInfo;
import com.facebook.presto.raptor.metadata.ShardManager;
import com.facebook.presto.raptor.metadata.ShardRecorder;
import com.facebook.presto.raptor.storage.InMemoryShardRecorder.RecordedShard;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.NodeManager;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.SqlDate;
import com.facebook.presto.spi.type.SqlTimestamp;
import com.facebook.presto.spi.type.SqlVarbinary;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.TestingNodeManager;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.chrono.ISOChronology;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.IDBI;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.BitSet;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.raptor.metadata.TestDatabaseShardManager.createShardManager;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.createReader;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.octets;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static com.facebook.presto.testing.TestingConnectorSession.SESSION;
import static com.google.common.hash.Hashing.md5;
import static com.google.common.io.Files.createTempDir;
import static com.google.common.io.Files.hash;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
import static io.airlift.testing.FileUtils.deleteRecursively;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.lang.String.format;
import static org.joda.time.DateTimeZone.UTC;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import static org.testng.FileAssert.assertFile;
@Test(singleThreaded = true)
public class TestOrcStorageManager
{
private static final JsonCodec<ShardDelta> SHARD_DELTA_CODEC = jsonCodec(ShardDelta.class);
private static final ISOChronology UTC_CHRONOLOGY = ISOChronology.getInstance(UTC);
private static final DateTime EPOCH = new DateTime(0, UTC_CHRONOLOGY);
private static final String CURRENT_NODE = "node";
private static final String CONNECTOR_ID = "test";
private static final long TRANSACTION_ID = 123;
private static final int DELETION_THREADS = 2;
private static final Duration SHARD_RECOVERY_TIMEOUT = new Duration(30, TimeUnit.SECONDS);
private static final int MAX_SHARD_ROWS = 100;
private static final DataSize MAX_FILE_SIZE = new DataSize(1, MEGABYTE);
private static final Duration MISSING_SHARD_DISCOVERY = new Duration(5, TimeUnit.MINUTES);
private static final ReaderAttributes READER_ATTRIBUTES = new ReaderAttributes(new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE));
private final NodeManager nodeManager = new TestingNodeManager();
private Handle dummyHandle;
private File temporary;
private StorageService storageService;
private ShardRecoveryManager recoveryManager;
private FileBackupStore fileBackupStore;
private Optional<BackupStore> backupStore;
private InMemoryShardRecorder shardRecorder;
@BeforeMethod
public void setup()
throws Exception
{
temporary = createTempDir();
File directory = new File(temporary, "data");
storageService = new FileStorageService(directory);
storageService.start();
File backupDirectory = new File(temporary, "backup");
fileBackupStore = new FileBackupStore(backupDirectory);
fileBackupStore.start();
backupStore = Optional.of(fileBackupStore);
IDBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime());
dummyHandle = dbi.open();
ShardManager shardManager = createShardManager(dbi);
Duration discoveryInterval = new Duration(5, TimeUnit.MINUTES);
recoveryManager = new ShardRecoveryManager(storageService, backupStore, nodeManager, shardManager, discoveryInterval, 10);
shardRecorder = new InMemoryShardRecorder();
}
@AfterMethod(alwaysRun = true)
public void tearDown()
throws Exception
{
if (dummyHandle != null) {
dummyHandle.close();
}
deleteRecursively(temporary);
}
@Test
public void testWriter()
throws Exception
{
OrcStorageManager manager = createOrcStorageManager();
List<Long> columnIds = ImmutableList.of(3L, 7L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10));
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello")
.row(456L, "bye")
.build();
sink.appendPages(pages);
// shard is not recorded until flush
assertEquals(shardRecorder.getShards().size(), 0);
sink.flush();
// shard is recorded after flush
List<RecordedShard> recordedShards = shardRecorder.getShards();
assertEquals(recordedShards.size(), 1);
List<ShardInfo> shards = sink.commit();
assertEquals(shards.size(), 1);
ShardInfo shardInfo = Iterables.getOnlyElement(shards);
UUID shardUuid = shardInfo.getShardUuid();
File file = storageService.getStorageFile(shardUuid);
File backupFile = fileBackupStore.getBackupFile(shardUuid);
assertEquals(recordedShards.get(0).getTransactionId(), TRANSACTION_ID);
assertEquals(recordedShards.get(0).getShardUuid(), shardUuid);
assertEquals(shardInfo.getRowCount(), 2);
assertEquals(shardInfo.getCompressedSize(), file.length());
// verify primary and backup shard exist
assertFile(file, "primary shard");
assertFile(backupFile, "backup shard");
assertFileEquals(file, backupFile);
// remove primary shard to force recovery from backup
assertTrue(file.delete());
assertTrue(file.getParentFile().delete());
assertFalse(file.exists());
recoveryManager.restoreFromBackup(shardUuid, OptionalLong.empty());
try (OrcDataSource dataSource = manager.openShard(shardUuid, READER_ATTRIBUTES)) {
OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
assertEquals(reader.nextBatch(), 2);
Block column0 = reader.readBlock(BIGINT, 0);
assertEquals(column0.isNull(0), false);
assertEquals(column0.isNull(1), false);
assertEquals(BIGINT.getLong(column0, 0), 123L);
assertEquals(BIGINT.getLong(column0, 1), 456L);
Block column1 = reader.readBlock(createVarcharType(10), 1);
assertEquals(createVarcharType(10).getSlice(column1, 0), utf8Slice("hello"));
assertEquals(createVarcharType(10).getSlice(column1, 1), utf8Slice("bye"));
assertEquals(reader.nextBatch(), -1);
}
}
@Test
public void testReader()
throws Exception
{
OrcStorageManager manager = createOrcStorageManager();
List<Long> columnIds = ImmutableList.of(2L, 4L, 6L, 7L, 8L, 9L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10), VARBINARY, DATE, BOOLEAN, DOUBLE);
byte[] bytes1 = octets(0x00, 0xFE, 0xFF);
byte[] bytes3 = octets(0x01, 0x02, 0x19, 0x80);
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
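// Extra rows covering edge-case double values: infinities, NaN, MIN_VALUE/MAX_VALUE and signed zeros.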
Object[][] doubles = {
{881L, "-inf", null, null, null, Double.NEGATIVE_INFINITY},
{882L, "+inf", null, null, null, Double.POSITIVE_INFINITY},
{883L, "nan", null, null, null, Double.NaN},
{884L, "min", null, null, null, Double.MIN_VALUE},
{885L, "max", null, null, null, Double.MAX_VALUE},
{886L, "pzero", null, null, null, 0.0},
{887L, "nzero", null, null, null, -0.0},
};
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello", wrappedBuffer(bytes1), sqlDate(2001, 8, 22).getDays(), true, 123.45)
.row(null, null, null, null, null, null)
.row(456L, "bye", wrappedBuffer(bytes3), sqlDate(2005, 4, 22).getDays(), false, 987.65)
.rows(doubles)
.build();
sink.appendPages(pages);
List<ShardInfo> shards = sink.commit();
assertEquals(shards.size(), 1);
UUID uuid = Iterables.getOnlyElement(shards).getShardUuid();
MaterializedResult expected = resultBuilder(SESSION, columnTypes)
.row(123L, "hello", sqlBinary(bytes1), sqlDate(2001, 8, 22), true, 123.45)
.row(null, null, null, null, null, null)
.row(456L, "bye", sqlBinary(bytes3), sqlDate(2005, 4, 22), false, 987.65)
.rows(doubles)
.build();
// no tuple domain (all)
TupleDomain<RaptorColumnHandle> tupleDomain = TupleDomain.all();
try (ConnectorPageSource pageSource = getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) {
MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
assertEquals(result.getRowCount(), expected.getRowCount());
assertEquals(result, expected);
}
// tuple domain within the column range
tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.<RaptorColumnHandle, NullableValue>builder()
.put(new RaptorColumnHandle("test", "c1", 2, BIGINT), NullableValue.of(BIGINT, 124L))
.build());
try (ConnectorPageSource pageSource = getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) {
MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
assertEquals(result.getRowCount(), expected.getRowCount());
}
// tuple domain outside the column range
tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.<RaptorColumnHandle, NullableValue>builder()
.put(new RaptorColumnHandle("test", "c1", 2, BIGINT), NullableValue.of(BIGINT, 122L))
.build());
try (ConnectorPageSource pageSource = getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) {
MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
assertEquals(result.getRowCount(), 0);
}
}
@Test
public void testRewriter()
throws Exception
{
OrcStorageManager manager = createOrcStorageManager();
long transactionId = TRANSACTION_ID;
List<Long> columnIds = ImmutableList.of(3L, 7L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10));
// create file with 2 rows
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello")
.row(456L, "bye")
.build();
sink.appendPages(pages);
List<ShardInfo> shards = sink.commit();
assertEquals(shardRecorder.getShards().size(), 1);
// delete one row
BitSet rowsToDelete = new BitSet();
rowsToDelete.set(0);
Collection<Slice> fragments = manager.rewriteShard(transactionId, OptionalInt.empty(), shards.get(0).getShardUuid(), rowsToDelete);
Slice shardDelta = Iterables.getOnlyElement(fragments);
ShardDelta shardDeltas = jsonCodec(ShardDelta.class).fromJson(shardDelta.getBytes());
ShardInfo shardInfo = Iterables.getOnlyElement(shardDeltas.getNewShards());
// check that output file has one row
assertEquals(shardInfo.getRowCount(), 1);
// check that storage file is same as backup file
File storageFile = storageService.getStorageFile(shardInfo.getShardUuid());
File backupFile = fileBackupStore.getBackupFile(shardInfo.getShardUuid());
assertFileEquals(storageFile, backupFile);
// verify recorded shard
List<RecordedShard> recordedShards = shardRecorder.getShards();
assertEquals(recordedShards.size(), 2);
assertEquals(recordedShards.get(1).getTransactionId(), TRANSACTION_ID);
assertEquals(recordedShards.get(1).getShardUuid(), shardInfo.getShardUuid());
}
@Test
public void testWriterRollback()
throws Exception
{
// verify staging directory does not exist
File staging = new File(new File(temporary, "data"), "staging");
assertFalse(staging.exists());
// create a shard in staging
OrcStorageManager manager = createOrcStorageManager();
List<Long> columnIds = ImmutableList.of(3L, 7L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10));
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello")
.row(456L, "bye")
.build();
sink.appendPages(pages);
sink.flush();
// verify shard exists in staging
String[] files = staging.list();
assertEquals(files.length, 1);
assertTrue(files[0].endsWith(".orc"));
// rollback should cleanup staging files
sink.rollback();
assertEquals(staging.list(), new String[] {});
}
@Test
public void testShardStatsBigint()
{
List<ColumnStats> stats = columnStats(types(BIGINT),
row(2L),
row(-3L),
row(5L));
assertColumnStats(stats, 1, -3L, 5L);
}
@Test
public void testShardStatsDouble()
{
List<ColumnStats> stats = columnStats(types(DOUBLE),
row(2.5),
row(-4.1),
row(6.6));
assertColumnStats(stats, 1, -4.1, 6.6);
}
@Test
public void testShardStatsBigintDouble()
{
List<ColumnStats> stats = columnStats(types(BIGINT, DOUBLE),
row(-3L, 6.6),
row(5L, -4.1));
assertColumnStats(stats, 1, -3L, 5L);
assertColumnStats(stats, 2, -4.1, 6.6);
}
@Test
public void testShardStatsDoubleMinMax()
{
List<ColumnStats> stats = columnStats(types(DOUBLE),
row(3.2),
row(Double.MIN_VALUE),
row(4.5));
assertColumnStats(stats, 1, Double.MIN_VALUE, 4.5);
stats = columnStats(types(DOUBLE),
row(3.2),
row(Double.MAX_VALUE),
row(4.5));
assertColumnStats(stats, 1, 3.2, Double.MAX_VALUE);
}
@Test
public void testShardStatsDoubleNotFinite()
{
List<ColumnStats> stats = columnStats(types(DOUBLE),
row(3.2),
row(Double.NEGATIVE_INFINITY),
row(4.5));
assertColumnStats(stats, 1, null, 4.5);
stats = columnStats(types(DOUBLE),
row(3.2),
row(Double.POSITIVE_INFINITY),
row(4.5));
assertColumnStats(stats, 1, 3.2, null);
stats = columnStats(types(DOUBLE),
row(3.2),
row(Double.NaN),
row(4.5));
assertColumnStats(stats, 1, 3.2, 4.5);
}
@Test
public void testShardStatsVarchar()
{
List<ColumnStats> stats = columnStats(
types(createVarcharType(10)),
row(utf8Slice("hello")),
row(utf8Slice("bye")),
row(utf8Slice("foo")));
assertColumnStats(stats, 1, "bye", "hello");
}
@Test
public void testShardStatsBigintVarbinary()
{
List<ColumnStats> stats = columnStats(types(BIGINT, VARBINARY),
row(5L, wrappedBuffer(octets(0x00))),
row(3L, wrappedBuffer(octets(0x01))));
assertColumnStats(stats, 1, 3L, 5L);
assertNoColumnStats(stats, 2);
}
@Test
public void testShardStatsDateTimestamp()
{
long minDate = sqlDate(2001, 8, 22).getDays();
long maxDate = sqlDate(2005, 4, 22).getDays();
long maxTimestamp = sqlTimestamp(2002, 4, 13, 6, 7, 8).getMillisUtc();
long minTimestamp = sqlTimestamp(2001, 3, 15, 9, 10, 11).getMillisUtc();
List<ColumnStats> stats = columnStats(types(DATE, TIMESTAMP),
row(minDate, maxTimestamp),
row(maxDate, minTimestamp));
assertColumnStats(stats, 1, minDate, maxDate);
assertColumnStats(stats, 2, minTimestamp, maxTimestamp);
}
@Test
public void testMaxShardRows()
throws Exception
{
OrcStorageManager manager = createOrcStorageManager(2, new DataSize(2, MEGABYTE));
List<Long> columnIds = ImmutableList.of(3L, 7L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10));
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello")
.row(456L, "bye")
.build();
sink.appendPages(pages);
assertTrue(sink.isFull());
}
@Test
public void testMaxFileSize()
throws Exception
{
List<Long> columnIds = ImmutableList.of(3L, 7L);
List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(5));
List<Page> pages = rowPagesBuilder(columnTypes)
.row(123L, "hello")
.row(456L, "bye")
.build();
// Set maxFileSize to 1 byte, so adding any page makes the StoragePageSink full
OrcStorageManager manager = createOrcStorageManager(20, new DataSize(1, BYTE));
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
sink.appendPages(pages);
assertTrue(sink.isFull());
}
private static ConnectorPageSource getPageSource(
OrcStorageManager manager,
List<Long> columnIds,
List<Type> columnTypes,
UUID uuid,
TupleDomain<RaptorColumnHandle> tupleDomain)
{
return manager.getPageSource(uuid, OptionalInt.empty(), columnIds, columnTypes, tupleDomain, READER_ATTRIBUTES);
}
private static StoragePageSink createStoragePageSink(StorageManager manager, List<Long> columnIds, List<Type> columnTypes)
{
long transactionId = TRANSACTION_ID;
return manager.createStoragePageSink(transactionId, OptionalInt.empty(), columnIds, columnTypes, false);
}
private OrcStorageManager createOrcStorageManager()
{
return createOrcStorageManager(MAX_SHARD_ROWS, MAX_FILE_SIZE);
}
private OrcStorageManager createOrcStorageManager(int maxShardRows, DataSize maxFileSize)
{
return createOrcStorageManager(storageService, backupStore, recoveryManager, shardRecorder, maxShardRows, maxFileSize);
}
public static OrcStorageManager createOrcStorageManager(IDBI dbi, File temporary)
throws IOException
{
return createOrcStorageManager(dbi, temporary, MAX_SHARD_ROWS);
}
public static OrcStorageManager createOrcStorageManager(IDBI dbi, File temporary, int maxShardRows)
throws IOException
{
File directory = new File(temporary, "data");
StorageService storageService = new FileStorageService(directory);
storageService.start();
File backupDirectory = new File(temporary, "backup");
FileBackupStore fileBackupStore = new FileBackupStore(backupDirectory);
fileBackupStore.start();
Optional<BackupStore> backupStore = Optional.of(fileBackupStore);
ShardManager shardManager = createShardManager(dbi);
ShardRecoveryManager recoveryManager = new ShardRecoveryManager(
storageService,
backupStore,
new TestingNodeManager(),
shardManager,
MISSING_SHARD_DISCOVERY,
10);
return createOrcStorageManager(
storageService,
backupStore,
recoveryManager,
new InMemoryShardRecorder(),
maxShardRows,
MAX_FILE_SIZE);
}
public static OrcStorageManager createOrcStorageManager(
StorageService storageService,
Optional<BackupStore> backupStore,
ShardRecoveryManager recoveryManager,
ShardRecorder shardRecorder,
int maxShardRows,
DataSize maxFileSize)
{
return new OrcStorageManager(
CURRENT_NODE,
storageService,
backupStore,
SHARD_DELTA_CODEC,
READER_ATTRIBUTES,
new BackupManager(backupStore, 1),
recoveryManager,
shardRecorder,
new TypeRegistry(),
CONNECTOR_ID,
DELETION_THREADS,
SHARD_RECOVERY_TIMEOUT,
maxShardRows,
maxFileSize,
new DataSize(0, BYTE));
}
private static void assertFileEquals(File actual, File expected)
throws IOException
{
assertEquals(hash(actual, md5()), hash(expected, md5()));
}
private static void assertColumnStats(List<ColumnStats> list, long columnId, Object min, Object max)
{
for (ColumnStats stats : list) {
if (stats.getColumnId() == columnId) {
assertEquals(stats.getMin(), min);
assertEquals(stats.getMax(), max);
return;
}
}
fail(format("no stats for column: %s: %s", columnId, list));
}
private static void assertNoColumnStats(List<ColumnStats> list, long columnId)
{
for (ColumnStats stats : list) {
assertNotEquals(stats.getColumnId(), columnId);
}
}
private static List<Type> types(Type... types)
{
return ImmutableList.copyOf(types);
}
private static Object[] row(Object... values)
{
return values;
}
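// Writes the given rows through a fresh storage manager (column ids 1..N) and returns the
// column stats of the single committed shard.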
private List<ColumnStats> columnStats(List<Type> columnTypes, Object[]... rows)
{
ImmutableList.Builder<Long> list = ImmutableList.builder();
for (long i = 1; i <= columnTypes.size(); i++) {
list.add(i);
}
List<Long> columnIds = list.build();
OrcStorageManager manager = createOrcStorageManager();
StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
sink.appendPages(rowPagesBuilder(columnTypes).rows(rows).build());
List<ShardInfo> shards = sink.commit();
assertEquals(shards.size(), 1);
return Iterables.getOnlyElement(shards).getColumnStats();
}
private static SqlVarbinary sqlBinary(byte[] bytes)
{
return new SqlVarbinary(bytes);
}
private static SqlDate sqlDate(int year, int month, int day)
{
DateTime date = new DateTime(year, month, day, 0, 0, 0, 0, UTC);
return new SqlDate(Days.daysBetween(EPOCH, date).getDays());
}
private static SqlTimestamp sqlTimestamp(int year, int month, int day, int hour, int minute, int second)
{
DateTime dateTime = new DateTime(year, month, day, hour, minute, second, 0, UTC);
return new SqlTimestamp(dateTime.getMillis(), UTC_KEY);
}
}
|
|
/*
* Copyright 2006 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.intelliLang;
import com.intellij.lang.Language;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.undo.GlobalUndoableAction;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.command.undo.UndoableAction;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.psi.PsiCompiledElement;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ArrayUtil;
import com.intellij.util.CachedValueImpl;
import com.intellij.util.FileContentUtil;
import com.intellij.util.PairProcessor;
import com.intellij.util.containers.*;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.intellij.plugins.intelliLang.inject.InjectorUtils;
import org.intellij.plugins.intelliLang.inject.LanguageInjectionConfigBean;
import org.intellij.plugins.intelliLang.inject.LanguageInjectionSupport;
import org.intellij.plugins.intelliLang.inject.config.BaseInjection;
import org.intellij.plugins.intelliLang.inject.config.InjectionPlace;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.*;
/**
* Configuration that holds the configured XML tag, attribute and method parameter
* injection settings, as well as the annotations to use for injection, pattern
* validation and for substituting non-compile-time constant expressions.
*
* Making it a service may result in FileContentUtil.reparseFiles being called at a random loading
* moment, which may cause mysterious PSI validity losses.
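*
* A typical lookup, as an illustrative sketch only (the "xml" injector id is a placeholder,
* see {@link LanguageInjectionSupport#getId()}):
* <pre>{@code
* Configuration app = Configuration.getInstance();
* Configuration prj = Configuration.getProjectInstance(project);
* List<BaseInjection> xmlInjections = prj.getInjections("xml");
* }</pre>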
*/
public class Configuration extends SimpleModificationTracker implements PersistentStateComponent<Element>, ModificationTracker {
static final Logger LOG = Logger.getInstance(Configuration.class.getName());
private static final Condition<BaseInjection> LANGUAGE_INJECTION_CONDITION =
o -> Language.findLanguageByID(o.getInjectedLanguageId()) != null;
@State(name = Configuration.COMPONENT_NAME, defaultStateAsResource = true, storages = @Storage("IntelliLang.xml"))
public static class App extends Configuration {
private final List<BaseInjection> myDefaultInjections;
private final AdvancedConfiguration myAdvancedConfiguration;
App() {
myDefaultInjections = loadDefaultInjections();
myAdvancedConfiguration = new AdvancedConfiguration();
}
@Override
public List<BaseInjection> getDefaultInjections() {
return myDefaultInjections;
}
@Override
public AdvancedConfiguration getAdvancedConfiguration() {
return myAdvancedConfiguration;
}
@Override
public void loadState(final Element element) {
myAdvancedConfiguration.loadState(element);
super.loadState(element);
}
@Override
public Element getState() {
final Element element = new Element(COMPONENT_NAME);
myAdvancedConfiguration.writeState(element);
return getState(element);
}
}
@State(name = Configuration.COMPONENT_NAME, defaultStateAsResource = true, storages = @Storage("IntelliLang.xml"))
public static class Prj extends Configuration {
private final Configuration myParentConfiguration;
Prj(final Configuration configuration) {
myParentConfiguration = configuration;
}
@Override
public AdvancedConfiguration getAdvancedConfiguration() {
return myParentConfiguration.getAdvancedConfiguration();
}
@Override
public List<BaseInjection> getDefaultInjections() {
return myParentConfiguration.getDefaultInjections();
}
@Override
protected Collection<BaseInjection> getAllInjections() {
Collection<BaseInjection> injections = super.getAllInjections();
injections.addAll(myParentConfiguration.getAllInjections());
return injections;
}
@NotNull
@Override
public List<BaseInjection> getInjections(final String injectorId) {
return ContainerUtil.concat(myParentConfiguration.getInjections(injectorId), getOwnInjections(injectorId));
}
public Configuration getParentConfiguration() {
return myParentConfiguration;
}
public List<BaseInjection> getOwnInjections(final String injectorId) {
return super.getInjections(injectorId);
}
@Override
public long getModificationCount() {
return super.getModificationCount() + myParentConfiguration.getModificationCount();
}
@Override
public boolean replaceInjections(List<? extends BaseInjection> newInjections,
List<? extends BaseInjection> originalInjections,
boolean forceLevel) {
if (!forceLevel && !originalInjections.isEmpty()) {
if (myParentConfiguration.replaceInjections(Collections.emptyList(), originalInjections, false)) {
myParentConfiguration.replaceInjections(newInjections, Collections.emptyList(), false);
return true;
}
}
return super.replaceInjections(newInjections, originalInjections, forceLevel);
}
}
public enum InstrumentationType {
NONE, ASSERT, EXCEPTION
}
public enum DfaOption {
OFF, RESOLVE, ASSIGNMENTS, DFA
}
@NonNls public static final String COMPONENT_NAME = "LanguageInjectionConfiguration";
// element names
@NonNls private static final String INSTRUMENTATION_TYPE_NAME = "INSTRUMENTATION";
@NonNls private static final String LANGUAGE_ANNOTATION_NAME = "LANGUAGE_ANNOTATION";
@NonNls private static final String PATTERN_ANNOTATION_NAME = "PATTERN_ANNOTATION";
@NonNls private static final String SUBST_ANNOTATION_NAME = "SUBST_ANNOTATION";
@NonNls private static final String RESOLVE_REFERENCES = "RESOLVE_REFERENCES";
@NonNls private static final String LOOK_FOR_VAR_ASSIGNMENTS = "LOOK_FOR_VAR_ASSIGNMENTS";
@NonNls private static final String USE_DFA_IF_AVAILABLE = "USE_DFA_IF_AVAILABLE";
@NonNls private static final String INCLUDE_UNCOMPUTABLES_AS_LITERALS = "INCLUDE_UNCOMPUTABLES_AS_LITERALS";
@NonNls private static final String SOURCE_MODIFICATION_ALLOWED = "SOURCE_MODIFICATION_ALLOWED";
private final Map<String, List<BaseInjection>> myInjections = new ConcurrentFactoryMap<String, List<BaseInjection>>() {
@Override
protected List<BaseInjection> create(final String key) {
return ContainerUtil.createLockFreeCopyOnWriteList();
}
};
protected Collection<BaseInjection> getAllInjections() {
List<BaseInjection> injections = new ArrayList<>();
for (List<BaseInjection> list : myInjections.values()) {
injections.addAll(list);
}
return injections;
}
private final CachedValue<MultiMap<String, BaseInjection>> myInjectionsById = new CachedValueImpl<>(() -> {
MultiMap<String, BaseInjection> map = new MultiMap<>();
for (BaseInjection injection : getAllInjections()) {
map.putValue(injection.getInjectedLanguageId(), injection);
}
return CachedValueProvider.Result.create(map, this);
});
public Configuration() {
}
public AdvancedConfiguration getAdvancedConfiguration() {
throw new UnsupportedOperationException("getAdvancedConfiguration should not be called");
}
@Override
public void loadState(final Element element) {
myInjections.clear();
List<Element> injectionElements = element.getChildren("injection");
if (!injectionElements.isEmpty()) {
final Map<String, LanguageInjectionSupport> supports = new THashMap<>();
for (LanguageInjectionSupport support : InjectorUtils.getActiveInjectionSupports()) {
supports.put(support.getId(), support);
}
for (Element child : injectionElements) {
final String key = child.getAttributeValue("injector-id");
final LanguageInjectionSupport support = supports.get(key);
final BaseInjection injection = support == null ? new BaseInjection(key) : support.createInjection(child);
injection.loadState(child);
InjectionPlace[] places = dropKnownInvalidPlaces(injection.getInjectionPlaces());
if (places != null) { // not all places were removed
injection.setInjectionPlaces(places);
myInjections.get(key).add(injection);
}
}
}
importPlaces(getDefaultInjections());
}
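// Drops injection places whose pattern text contains the known-invalid matches("[^${}/\\]+")
// construct; returns null when every place would be removed.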
@Nullable
private static InjectionPlace[] dropKnownInvalidPlaces(InjectionPlace[] places) {
InjectionPlace[] result = places;
for (InjectionPlace place : places) {
if (place.getText().contains("matches(\"[^${}/\\\\]+\")")) {
result = ArrayUtil.remove(result, place);
}
}
return places.length != 0 && result.length == 0 ? null : result;
}
private static boolean readBoolean(Element element, String key, boolean defValue) {
final String value = JDOMExternalizerUtil.readField(element, key);
if (value == null) return defValue;
return Boolean.parseBoolean(value);
}
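// Loads the default injections contributed through the CONFIG_EP_NAME extension point, skipping
// duplicate resource URLs, and keeps only injections for the currently active injection supports.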
private static List<BaseInjection> loadDefaultInjections() {
final List<Configuration> cfgList = new ArrayList<>();
final Set<Object> visited = new THashSet<>();
for (LanguageInjectionConfigBean configBean : Extensions.getExtensions(LanguageInjectionSupport.CONFIG_EP_NAME)) {
PluginDescriptor descriptor = configBean.getPluginDescriptor();
final ClassLoader loader = descriptor.getPluginClassLoader();
try {
final Enumeration<URL> enumeration = loader.getResources(configBean.getConfigUrl());
if (enumeration == null || !enumeration.hasMoreElements()) {
LOG.warn(descriptor.getPluginId() + ": " + configBean.getConfigUrl() + " was not found");
}
else {
while (enumeration.hasMoreElements()) {
URL url = enumeration.nextElement();
if (!visited.add(url.getFile())) continue; // for DEBUG mode
InputStream stream = null;
try {
stream = url.openStream();
cfgList.add(load(stream));
}
catch (Exception e) {
LOG.warn(e);
}
finally {
if (stream != null) {
stream.close();
}
}
}
}
}
catch (Exception e) {
LOG.warn(e);
}
}
final List<BaseInjection> defaultInjections = new ArrayList<>();
for (String supportId : InjectorUtils.getActiveInjectionSupportIds()) {
for (Configuration cfg : cfgList) {
final List<BaseInjection> imported = cfg.getInjections(supportId);
defaultInjections.addAll(imported);
}
}
return defaultInjections;
}
@Override
public Element getState() {
return getState(new Element(COMPONENT_NAME));
}
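// Serializes only non-default injections, ordered by display name and then by their injection
// places (enabled ones first) so the persisted XML stays deterministic.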
protected Element getState(Element element) {
Comparator<BaseInjection> comparator = (o1, o2) -> {
int rc = Comparing.compare(o1.getDisplayName(), o2.getDisplayName());
if (rc != 0) return rc;
return ContainerUtil.compareLexicographically(
Arrays.asList(o1.getInjectionPlaces()),
Arrays.asList(o2.getInjectionPlaces()),
(o11, o22) -> {
if (o11.isEnabled() && !o22.isEnabled()) return -1;
if (!o11.isEnabled() && o22.isEnabled()) return 1;
return Comparing.compare(o11.getElementPattern().toString(), o22.getElementPattern().toString());
});
};
for (String key : ContainerUtil.newTreeSet(myInjections.keySet())) {
Set<BaseInjection> injections = ContainerUtil.newHashSet(myInjections.get(key));
injections.removeAll(getDefaultInjections());
for (BaseInjection injection : ContainerUtil.sorted(injections, comparator)) {
element.addContent(injection.getState());
}
}
return element;
}
public static Configuration getInstance() {
return ServiceManager.getService(Configuration.class);
}
public static Configuration getProjectInstance(Project project) {
return ServiceManager.getService(project, Configuration.class);
}
public List<BaseInjection> getDefaultInjections() {
return Collections.emptyList();
}
public Collection<BaseInjection> getInjectionsByLanguageId(String languageId) {
return myInjectionsById.getValue().get(languageId);
}
@Nullable
public static Configuration load(final InputStream is) throws IOException, JDOMException {
final List<Element> elements = new ArrayList<>();
final Element rootElement = JDOMUtil.load(is);
final Element state;
if (rootElement.getName().equals(COMPONENT_NAME)) {
state = rootElement;
}
else {
elements.add(rootElement);
//noinspection unchecked
elements.addAll(rootElement.getChildren("component"));
state = ContainerUtil.find(elements, element -> "component".equals(element.getName()) && COMPONENT_NAME.equals(element.getAttributeValue("name")));
}
if (state != null) {
final Configuration cfg = new Configuration();
cfg.loadState(state);
return cfg;
}
return null;
}
private int importPlaces(final List<BaseInjection> injections) {
final Map<String, Set<BaseInjection>> map = ContainerUtil.classify(injections.iterator(), new Convertor<BaseInjection, String>() {
@Override
public String convert(final BaseInjection o) {
return o.getSupportId();
}
});
List<BaseInjection> originalInjections = new ArrayList<>();
List<BaseInjection> newInjections = new ArrayList<>();
for (String supportId : InjectorUtils.getActiveInjectionSupportIds()) {
final Set<BaseInjection> importingInjections = map.get(supportId);
if (importingInjections == null) continue;
importInjections(getInjections(supportId), importingInjections, originalInjections, newInjections);
}
if (!newInjections.isEmpty()) configurationModified();
replaceInjections(newInjections, originalInjections, true);
return newInjections.size();
}
static void importInjections(final Collection<BaseInjection> existingInjections, final Collection<BaseInjection> importingInjections,
final Collection<BaseInjection> originalInjections, final Collection<BaseInjection> newInjections) {
final MultiValuesMap<InjectionPlace, BaseInjection> placeMap = new MultiValuesMap<>();
for (BaseInjection existing : existingInjections) {
for (InjectionPlace place : existing.getInjectionPlaces()) {
placeMap.put(place, existing);
}
}
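// For each imported injection: add it when nothing matches its places, skip it when an equal
// injection already exists, otherwise merge its original places into the matching existing injection.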
main: for (BaseInjection other : importingInjections) {
InjectionPlace[] places = other.getInjectionPlaces();
if (places.length == 0) {
if (!existingInjections.contains(other)) newInjections.add(other);
continue;
}
final Set<BaseInjection> matchingInjections = JBIterable.of(places)
.flatten(o -> JBIterable.from(placeMap.get(o))).toSet();
if (matchingInjections.isEmpty()) {
newInjections.add(other);
}
else {
BaseInjection existing = null;
for (BaseInjection injection : matchingInjections) {
if (injection.equals(other)) continue main;
if (existing == null && injection.sameLanguageParameters(other)) {
existing = injection;
}
}
if (existing == null) continue main; // skip!! language changed
final BaseInjection newInjection = existing.copy();
newInjection.mergeOriginalPlacesFrom(other, true);
if (!newInjection.equals(existing)) {
originalInjections.add(existing);
newInjections.add(newInjection);
}
}
}
}
private void configurationModified() {
incModificationCount();
}
@Nullable
public BaseInjection findExistingInjection(@NotNull final BaseInjection injection) {
final List<BaseInjection> list = getInjections(injection.getSupportId());
for (BaseInjection cur : list) {
if (cur.intersectsWith(injection)) return cur;
}
return null;
}
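// Toggles, for the given host element, every configured injection place (across all active supports
// and the requested languages) whose pattern accepts the host or its parent; returns true if anything changed.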
public boolean setHostInjectionEnabled(final PsiLanguageInjectionHost host, final Collection<String> languages, final boolean enabled) {
List<BaseInjection> originalInjections = new ArrayList<>();
List<BaseInjection> newInjections = new ArrayList<>();
for (LanguageInjectionSupport support : InjectorUtils.getActiveInjectionSupports()) {
for (BaseInjection injection : getInjections(support.getId())) {
if (!languages.contains(injection.getInjectedLanguageId())) continue;
boolean replace = false;
final ArrayList<InjectionPlace> newPlaces = new ArrayList<>();
for (InjectionPlace place : injection.getInjectionPlaces()) {
if (place.isEnabled() != enabled && place.getElementPattern() != null &&
(place.getElementPattern().accepts(host) || place.getElementPattern().accepts(host.getParent()))) {
newPlaces.add(place.enabled(enabled));
replace = true;
}
else newPlaces.add(place);
}
if (replace) {
originalInjections.add(injection);
final BaseInjection newInjection = injection.copy();
newInjection.setInjectionPlaces(newPlaces.toArray(new InjectionPlace[newPlaces.size()]));
newInjections.add(newInjection);
}
}
}
if (!originalInjections.isEmpty()) {
replaceInjectionsWithUndo(host.getProject(), newInjections, originalInjections, Collections.emptyList());
return true;
}
return false;
}
protected void setInjections(Collection<BaseInjection> injections) {
for (BaseInjection injection : injections) {
myInjections.get(injection.getSupportId()).add(injection);
}
}
/**
* @param injectorId see {@link LanguageInjectionSupport#getId()}
*/
@NotNull
public List<BaseInjection> getInjections(final String injectorId) {
return Collections.unmodifiableList(myInjections.get(injectorId));
}
public void replaceInjectionsWithUndo(final Project project,
final List<? extends BaseInjection> newInjections,
final List<? extends BaseInjection> originalInjections,
final List<? extends PsiElement> psiElementsToRemove) {
replaceInjectionsWithUndo(project, newInjections, originalInjections, psiElementsToRemove,
(add, remove) -> {
replaceInjectionsWithUndoInner(add, remove);
if (ContainerUtil.find(add, LANGUAGE_INJECTION_CONDITION) != null || ContainerUtil.find(remove,
LANGUAGE_INJECTION_CONDITION) != null) {
FileContentUtil.reparseOpenedFiles();
}
return true;
});
}
protected void replaceInjectionsWithUndoInner(final List<? extends BaseInjection> add, final List<? extends BaseInjection> remove) {
replaceInjections(add, remove, false);
}
public static <T> void replaceInjectionsWithUndo(final Project project, final T add, final T remove,
final List<? extends PsiElement> psiElementsToRemove,
final PairProcessor<T, T> actualProcessor) {
final UndoableAction action = new GlobalUndoableAction() {
@Override
public void undo() {
actualProcessor.process(remove, add);
}
@Override
public void redo() {
actualProcessor.process(add, remove);
}
};
final List<PsiFile> psiFiles = ContainerUtil.mapNotNull(psiElementsToRemove, o -> o instanceof PsiCompiledElement ? null : o.getContainingFile());
new WriteCommandAction.Simple(project, "Language Injection Configuration Update", PsiUtilCore.toPsiFileArray(psiFiles)) {
@Override
public void run() {
for (PsiElement annotation : psiElementsToRemove) {
if (!annotation.isValid()) continue;
annotation.delete();
}
actualProcessor.process(add, remove);
UndoManager.getInstance(project).undoableActionPerformed(action);
}
@Override
protected UndoConfirmationPolicy getUndoConfirmationPolicy() {
return UndoConfirmationPolicy.REQUEST_CONFIRMATION;
}
}.execute();
}
public boolean replaceInjections(List<? extends BaseInjection> newInjections,
List<? extends BaseInjection> originalInjections,
boolean forceLevel) {
boolean changed = false;
for (BaseInjection injection : originalInjections) {
changed |= myInjections.get(injection.getSupportId()).remove(injection);
}
for (BaseInjection injection : newInjections) {
changed |= myInjections.get(injection.getSupportId()).add(injection);
}
if (changed) {
configurationModified();
}
return changed;
}
public static class AdvancedConfiguration {
// runtime pattern validation instrumentation
@NotNull private InstrumentationType myInstrumentationType = InstrumentationType.ASSERT;
// annotation class names
@NotNull private String myLanguageAnnotation;
@NotNull private String myPatternAnnotation;
@NotNull private String mySubstAnnotation;
private boolean myIncludeUncomputablesAsLiterals;
private DfaOption myDfaOption = DfaOption.RESOLVE;
private boolean mySourceModificationAllowed;
// cached annotation name pairs
private Pair<String, ? extends Set<String>> myLanguageAnnotationPair;
private Pair<String, ? extends Set<String>> myPatternAnnotationPair;
private Pair<String, ? extends Set<String>> mySubstAnnotationPair;
public AdvancedConfiguration() {
setLanguageAnnotation("org.intellij.lang.annotations.Language");
setPatternAnnotation("org.intellij.lang.annotations.Pattern");
setSubstAnnotation("org.intellij.lang.annotations.Subst");
}
public String getLanguageAnnotationClass() {
return myLanguageAnnotation;
}
public String getPatternAnnotationClass() {
return myPatternAnnotation;
}
public String getSubstAnnotationClass() {
return mySubstAnnotation;
}
public void setInstrumentationType(@Nullable String type) {
if (type != null) {
setInstrumentationType(InstrumentationType.valueOf(type));
}
}
public void setInstrumentationType(@NotNull InstrumentationType type) {
myInstrumentationType = type;
}
public void setLanguageAnnotation(@Nullable String languageAnnotation) {
if (languageAnnotation == null) return;
myLanguageAnnotation = languageAnnotation;
myLanguageAnnotationPair = Pair.create(languageAnnotation, Collections.singleton(languageAnnotation));
}
public Pair<String, ? extends Set<String>> getLanguageAnnotationPair() {
return myLanguageAnnotationPair;
}
public void setPatternAnnotation(@Nullable String patternAnnotation) {
if (patternAnnotation == null) return;
myPatternAnnotation = patternAnnotation;
myPatternAnnotationPair = Pair.create(patternAnnotation, Collections.singleton(patternAnnotation));
}
public Pair<String, ? extends Set<String>> getPatternAnnotationPair() {
return myPatternAnnotationPair;
}
public void setSubstAnnotation(@Nullable String substAnnotation) {
if (substAnnotation == null) return;
mySubstAnnotation = substAnnotation;
mySubstAnnotationPair = Pair.create(substAnnotation, Collections.singleton(substAnnotation));
}
public Pair<String, ? extends Set<String>> getSubstAnnotationPair() {
return mySubstAnnotationPair;
}
public boolean isIncludeUncomputablesAsLiterals() {
return myIncludeUncomputablesAsLiterals;
}
public void setIncludeUncomputablesAsLiterals(boolean flag) {
myIncludeUncomputablesAsLiterals = flag;
}
@NotNull
public DfaOption getDfaOption() {
return myDfaOption;
}
public void setDfaOption(@NotNull final DfaOption dfaOption) {
myDfaOption = dfaOption;
}
public boolean isSourceModificationAllowed() {
return mySourceModificationAllowed;
}
public void setSourceModificationAllowed(boolean sourceModificationAllowed) {
mySourceModificationAllowed = sourceModificationAllowed;
}
public InstrumentationType getInstrumentation() {
return myInstrumentationType;
}
private void writeState(final Element element) {
JDOMExternalizerUtil.writeField(element, INSTRUMENTATION_TYPE_NAME, myInstrumentationType.toString());
JDOMExternalizerUtil.writeField(element, LANGUAGE_ANNOTATION_NAME, myLanguageAnnotation);
JDOMExternalizerUtil.writeField(element, PATTERN_ANNOTATION_NAME, myPatternAnnotation);
JDOMExternalizerUtil.writeField(element, SUBST_ANNOTATION_NAME, mySubstAnnotation);
if (myIncludeUncomputablesAsLiterals) {
JDOMExternalizerUtil.writeField(element, INCLUDE_UNCOMPUTABLES_AS_LITERALS, "true");
}
if (mySourceModificationAllowed) {
JDOMExternalizerUtil.writeField(element, SOURCE_MODIFICATION_ALLOWED, "true");
}
switch (myDfaOption) {
case OFF:
break;
case RESOLVE:
JDOMExternalizerUtil.writeField(element, RESOLVE_REFERENCES, Boolean.TRUE.toString());
break;
case ASSIGNMENTS:
JDOMExternalizerUtil.writeField(element, LOOK_FOR_VAR_ASSIGNMENTS, Boolean.TRUE.toString());
break;
case DFA:
JDOMExternalizerUtil.writeField(element, USE_DFA_IF_AVAILABLE, Boolean.TRUE.toString());
break;
}
}
private void loadState(final Element element) {
setInstrumentationType(JDOMExternalizerUtil.readField(element, INSTRUMENTATION_TYPE_NAME));
setLanguageAnnotation(JDOMExternalizerUtil.readField(element, LANGUAGE_ANNOTATION_NAME));
setPatternAnnotation(JDOMExternalizerUtil.readField(element, PATTERN_ANNOTATION_NAME));
setSubstAnnotation(JDOMExternalizerUtil.readField(element, SUBST_ANNOTATION_NAME));
if (readBoolean(element, RESOLVE_REFERENCES, true)) {
setDfaOption(DfaOption.RESOLVE);
}
if (readBoolean(element, LOOK_FOR_VAR_ASSIGNMENTS, false)) {
setDfaOption(DfaOption.ASSIGNMENTS);
}
if (readBoolean(element, USE_DFA_IF_AVAILABLE, false)) {
setDfaOption(DfaOption.DFA);
}
setIncludeUncomputablesAsLiterals(readBoolean(element, INCLUDE_UNCOMPUTABLES_AS_LITERALS, false));
setSourceModificationAllowed(readBoolean(element, SOURCE_MODIFICATION_ALLOWED, false));
}
}
}
|
|
/*
* Copyright 2010-2012 Hannes Wallnoefer <hannes@helma.at>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ringojs.engine;
import org.mozilla.javascript.BaseFunction;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.NativeObject;
import org.mozilla.javascript.ScriptRuntime;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Undefined;
import org.mozilla.javascript.WrappedException;
import org.mozilla.javascript.Wrapper;
import org.ringojs.repository.FileRepository;
import org.ringojs.repository.Repository;
import org.ringojs.repository.ZipRepository;
import org.ringojs.util.ScriptUtils;
import org.ringojs.util.StringUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.ref.SoftReference;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class Require extends BaseFunction {
RhinoEngine engine;
RingoGlobal scope;
static Method getMain;
static {
try {
getMain = Require.class.getDeclaredMethod("getMain", ScriptableObject.class);
} catch (NoSuchMethodException nsm) {
throw new NoSuchMethodError("getMain");
}
}
public Require(RhinoEngine engine, RingoGlobal scope) {
super(scope, ScriptableObject.getClassPrototype(scope, "Function"));
this.engine = engine;
this.scope = scope;
// Set up require.main property as setter - note that accessing this will cause
// the main module to be loaded, which may result in problems if engine setup
// isn't finished yet. Alas, the CommonJS Modules spec requires us to do this.
int attr = DONTENUM | PERMANENT | READONLY;
defineProperty("main", this, getMain, null, attr);
defineProperty("paths", new ModulePath(), attr);
defineProperty("extensions", new Extensions(), attr);
}
@Override
public Object call(Context cx, Scriptable scope, Scriptable thisObj, Object[] args) {
if (args.length != 1 || !(args[0] instanceof CharSequence)) {
throw Context.reportRuntimeError(
"require() expects a single string argument");
}
ModuleScope moduleScope = thisObj instanceof ModuleScope ?
(ModuleScope) thisObj : null;
try {
RingoWorker worker = engine.getCurrentWorker();
if (worker == null) {
worker = engine.getMainWorker();
}
String arg = args[0].toString();
Scriptable module = worker.loadModule(cx, arg, moduleScope);
return module instanceof ModuleScope ?
((ModuleScope)module).getExports() : module;
} catch (FileNotFoundException notFound) {
throw Context.reportRuntimeError("Cannot find module '" + args[0] + "'");
} catch (IOException iox) {
throw Context.reportRuntimeError("Error loading module '" + args[0] + "': " + iox);
}
}
@Override
public int getArity() {
return 1;
}
public Object getMain(ScriptableObject thisObj) {
try {
ModuleScope main = engine.getMainModuleScope();
return main != null ? main.getModuleObject() : Undefined.instance;
} catch (Exception x) {
return Undefined.instance;
}
}
class Extensions extends NativeObject {
public Extensions() {
setParentScope(scope);
setPrototype(ScriptableObject.getClassPrototype(scope, "Object"));
}
@Override
public void put(String name, Scriptable start, Object value) {
engine.addModuleLoader(name, value);
super.put(name, start, value);
}
@Override
public void delete(String name) {
engine.removeModuleLoader(name);
super.delete(name);
}
}
class ModulePath extends ScriptableObject {
List<Repository> paths;
Map<String, SoftReference<Repository>> cache =
new HashMap<String, SoftReference<Repository>>();
public ModulePath() {
this.paths = engine.getRepositories();
for (Repository repo : paths) {
cache.put(repo.getPath(), new SoftReference<Repository>(repo));
}
setParentScope(scope);
setPrototype(ScriptableObject.getClassPrototype(scope, "Array"));
defineProperty("length", Integer.valueOf(this.paths.size()), DONTENUM);
}
@Override
public String getClassName() {
return "ModulePath";
}
@Override
public void put(int index, Scriptable start, Object value) {
if (paths != null) {
Repository repo;
try {
repo = toRepository(value);
} catch (IOException iox) {
throw new WrappedException(iox);
}
while (index >= paths.size()) {
paths.add(null);
}
paths.set(index, repo);
defineProperty("length", Integer.valueOf(paths.size()), DONTENUM);
} else {
super.put(index, start, value);
}
}
@Override
public void put(String id, Scriptable start, Object value) {
if (paths != null && "length".equals(id)) {
int length = ScriptUtils.toInt(value, -1);
if (length < 0) {
throw Context.reportRuntimeError("Invalid length value: " + value);
}
while (length > paths.size()) {
paths.add(null);
}
while (length < paths.size()) {
paths.remove(length);
}
}
super.put(id, start, value);
}
@Override
public Object get(int index, Scriptable start) {
if (paths != null) {
Repository value = index < paths.size() ? paths.get(index) : null;
return value == null ? NOT_FOUND : value.getPath();
}
return super.get(index, start);
}
@Override
public boolean has(int index, Scriptable start) {
if (paths != null) {
return index >= 0 && index < paths.size();
}
return super.has(index, start);
}
@Override
public Object[] getIds() {
if (paths != null) {
Object[] ids = new Object[paths.size()];
for (int i = 0; i < ids.length; i++) {
ids[i] = Integer.valueOf(i);
}
return ids;
}
return super.getIds();
}
private Repository toRepository(Object value) throws IOException {
if (value instanceof Wrapper) {
value = ((Wrapper) value).unwrap();
}
Repository repo = null;
if (value instanceof Repository) {
repo = (Repository) value;
// repositories in module search path must be configured as root repository
repo.setRoot();
cache.put(repo.getPath(), new SoftReference<Repository>(repo));
} else if (value != null && value != Undefined.instance) {
String str = ScriptRuntime.toString(value);
SoftReference<Repository> ref = cache.get(str);
repo = ref == null ? null : ref.get();
if (repo == null) {
File file = new File(str);
if (file.isFile() && StringUtils.isZipOrJarFile(str)) {
repo = new ZipRepository(str);
} else {
repo = new FileRepository(str);
}
cache.put(repo.getPath(), new SoftReference<Repository>(repo));
}
} else {
throw Context.reportRuntimeError("Invalid module path item: " + value);
}
return repo;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.data.binary;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.DecimalDataUtils;
import org.apache.flink.table.runtime.util.SegmentsUtil;
import org.apache.flink.table.runtime.util.StringUtf8Utils;
import org.apache.flink.table.utils.EncodingUtils;
import java.math.BigDecimal;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
import static org.apache.flink.table.data.binary.BinaryStringData.fromAddress;
import static org.apache.flink.table.data.binary.BinaryStringData.fromBytes;
import static org.apache.flink.table.data.binary.BinaryStringData.fromString;
import static org.apache.flink.table.data.binary.BinaryStringData.numBytesForFirstByte;
/** Util for {@link BinaryStringData}. */
public class BinaryStringDataUtil {
public static final BinaryStringData[] EMPTY_STRING_ARRAY = new BinaryStringData[0];
private static final List<BinaryStringData> TRUE_STRINGS =
Stream.of("t", "true", "y", "yes", "1")
.map(BinaryStringData::fromString)
.peek(BinaryStringData::ensureMaterialized)
.collect(Collectors.toList());
private static final List<BinaryStringData> FALSE_STRINGS =
Stream.of("f", "false", "n", "no", "0")
.map(BinaryStringData::fromString)
.peek(BinaryStringData::ensureMaterialized)
.collect(Collectors.toList());
private static byte[] getTmpBytes(BinaryStringData str, int sizeInBytes) {
byte[] bytes = SegmentsUtil.allocateReuseBytes(sizeInBytes);
SegmentsUtil.copyToBytes(str.getSegments(), str.getOffset(), bytes, 0, sizeInBytes);
return bytes;
}
/**
* Splits the provided text into an array, separator string specified.
*
* <p>The separator is not included in the returned String array. Adjacent separators are
* treated as separators for empty tokens.
*
* <p>A {@code null} separator splits on whitespace.
*
* <pre>
* "".splitByWholeSeparatorPreserveAllTokens(*) = []
* "ab de fg".splitByWholeSeparatorPreserveAllTokens(null) = ["ab", "de", "fg"]
 * "ab   de fg".splitByWholeSeparatorPreserveAllTokens(null) = ["ab", "", "", "de", "fg"]
* "ab:cd:ef".splitByWholeSeparatorPreserveAllTokens(":") = ["ab", "cd", "ef"]
* "ab-!-cd-!-ef".splitByWholeSeparatorPreserveAllTokens("-!-") = ["ab", "cd", "ef"]
* </pre>
*
* <p>Note: returned binary strings reuse memory segments from the input str.
*
* @param separator String containing the String to be used as a delimiter, {@code null} splits
* on whitespace
* @return an array of parsed Strings, {@code null} if null String was input
*/
public static BinaryStringData[] splitByWholeSeparatorPreserveAllTokens(
BinaryStringData str, BinaryStringData separator) {
str.ensureMaterialized();
final int sizeInBytes = str.getSizeInBytes();
MemorySegment[] segments = str.getSegments();
int offset = str.getOffset();
if (sizeInBytes == 0) {
return EMPTY_STRING_ARRAY;
}
if (separator == null || EMPTY_UTF8.equals(separator)) {
// Split on whitespace.
return splitByWholeSeparatorPreserveAllTokens(str, fromString(" "));
}
separator.ensureMaterialized();
int sepSize = separator.getSizeInBytes();
MemorySegment[] sepSegs = separator.getSegments();
int sepOffset = separator.getOffset();
final ArrayList<BinaryStringData> substrings = new ArrayList<>();
int beg = 0;
int end = 0;
while (end < sizeInBytes) {
end =
SegmentsUtil.find(
segments,
offset + beg,
sizeInBytes - beg,
sepSegs,
sepOffset,
sepSize)
- offset;
if (end > -1) {
if (end > beg) {
// The following is OK, because String.substring( beg, end ) excludes
// the character at the position 'end'.
substrings.add(fromAddress(segments, offset + beg, end - beg));
// Set the starting point for the next search.
// The following is equivalent to beg = end + (separatorLength - 1) + 1,
// which is the right calculation:
beg = end + sepSize;
} else {
// We found a consecutive occurrence of the separator.
substrings.add(EMPTY_UTF8);
beg = end + sepSize;
}
} else {
// String.substring( beg ) goes from 'beg' to the end of the String.
substrings.add(fromAddress(segments, offset + beg, sizeInBytes - beg));
end = sizeInBytes;
}
}
return substrings.toArray(new BinaryStringData[0]);
}
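    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). It exercises the splitting contract described in the Javadoc above:
    // the separator is dropped and adjacent separators produce empty tokens.
    private static void splitByWholeSeparatorExample() {
        BinaryStringData[] parts =
                splitByWholeSeparatorPreserveAllTokens(fromString("ab-!-cd-!-ef"), fromString("-!-"));
        // parts is expected to be ["ab", "cd", "ef"]
        for (BinaryStringData part : parts) {
            System.out.println(part.toString());
        }
    }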
/** Decide boolean representation of a string. */
public static Boolean toBooleanSQL(BinaryStringData str) {
BinaryStringData lowerCase = str.toLowerCase();
return TRUE_STRINGS.contains(lowerCase)
? Boolean.TRUE
: (FALSE_STRINGS.contains(lowerCase) ? Boolean.FALSE : null);
}
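    // Illustrative usage sketch (not part of the original file; method name is hypothetical).
    // The recognized literals are exactly those in TRUE_STRINGS and FALSE_STRINGS above,
    // matched case-insensitively; anything else yields null.
    private static void toBooleanSQLExample() {
        Boolean yes = toBooleanSQL(fromString("YES"));    // Boolean.TRUE
        Boolean no = toBooleanSQL(fromString("0"));       // Boolean.FALSE
        Boolean unknown = toBooleanSQL(fromString("ja")); // null: not a recognized literal
        System.out.println(yes + " " + no + " " + unknown);
    }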
/** Calculate the hash value of a given string use {@link MessageDigest}. */
public static BinaryStringData hash(BinaryStringData str, MessageDigest md) {
return fromString(EncodingUtils.hex(md.digest(str.toBytes())));
}
public static BinaryStringData hash(BinaryStringData str, String algorithm)
throws NoSuchAlgorithmException {
return hash(str, MessageDigest.getInstance(algorithm));
}
/**
* Parses this BinaryStringData to DecimalData.
*
 * @return DecimalData value if the parsing was successful, or null if the parsing failed or the
 *     value does not fit into the given precision
*/
public static DecimalData toDecimal(BinaryStringData str, int precision, int scale) {
str.ensureMaterialized();
if (DecimalDataUtils.isByteArrayDecimal(precision)
|| DecimalDataUtils.isByteArrayDecimal(str.getSizeInBytes())) {
return toBigPrecisionDecimal(str, precision, scale);
}
int sizeInBytes = str.getSizeInBytes();
return toDecimalFromBytes(precision, scale, getTmpBytes(str, sizeInBytes), 0, sizeInBytes);
}
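    // Illustrative usage sketch (not part of the original file; method name and the chosen
    // precision/scale are hypothetical). It shows the compact parsing path handling sign,
    // decimal point, exponent and surrounding whitespace, and the null result on overflow.
    private static void toDecimalExample() {
        DecimalData parsed = toDecimal(fromString(" -12.345e1 "), 10, 2);  // -123.45 as DECIMAL(10, 2)
        DecimalData overflow = toDecimal(fromString("12345678901"), 5, 0); // null: does not fit DECIMAL(5, 0)
        System.out.println(parsed + " " + overflow);
    }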
private static DecimalData toDecimalFromBytes(
int precision, int scale, byte[] bytes, int offset, int sizeInBytes) {
// Data in DecimalData is stored by one long value if `precision` <=
// DecimalData.MAX_LONG_DIGITS.
// In this case we can directly extract the value from memory segment.
int i = 0;
// Remove white spaces at the beginning
byte b = 0;
while (i < sizeInBytes) {
b = bytes[offset + i];
if (b != ' ' && b != '\n' && b != '\t') {
break;
}
i++;
}
if (i == sizeInBytes) {
// all whitespaces
return null;
}
// ======= begin significand part =======
final boolean negative = b == '-';
if (negative || b == '+') {
i++;
if (i == sizeInBytes) {
// only contains prefix plus/minus
return null;
}
}
long significand = 0;
int exp = 0;
int significandLen = 0, pointPos = -1;
while (i < sizeInBytes) {
b = bytes[offset + i];
i++;
if (b >= '0' && b <= '9') {
// No need to worry about overflow, because sizeInBytes <=
// DecimalData.MAX_LONG_DIGITS
significand = significand * 10 + (b - '0');
significandLen++;
} else if (b == '.') {
if (pointPos >= 0) {
// More than one decimal point
return null;
}
pointPos = significandLen;
} else {
break;
}
}
if (pointPos < 0) {
pointPos = significandLen;
}
if (negative) {
significand = -significand;
}
// ======= end significand part =======
// ======= begin exponential part =======
if ((b == 'e' || b == 'E') && i < sizeInBytes) {
b = bytes[offset + i];
final boolean expNegative = b == '-';
if (expNegative || b == '+') {
i++;
if (i == sizeInBytes) {
return null;
}
}
int expDigits = 0;
// As `precision` <= 18, value absolute range is limited to 10^-18 ~ 10^18.
// The worst case is <18-digits>E-36
final int expStopValue = 40;
while (i < sizeInBytes) {
b = bytes[offset + i];
i++;
if (b >= '0' && b <= '9') {
// No need to worry about larger exponents,
// because they will produce overflow or underflow
if (expDigits < expStopValue) {
expDigits = expDigits * 10 + (b - '0');
}
} else {
break;
}
}
if (expNegative) {
expDigits = -expDigits;
}
exp += expDigits;
}
exp -= significandLen - pointPos;
// ======= end exponential part =======
// Check for invalid character at the end
while (i < sizeInBytes) {
b = bytes[offset + i];
i++;
// White spaces are allowed at the end
if (b != ' ' && b != '\n' && b != '\t') {
return null;
}
}
// Round exp to scale
int change = exp + scale;
if (significandLen + change > precision) {
// Overflow
return null;
}
if (change >= 0) {
significand *= DecimalDataUtils.power10(change);
} else {
int k = negative ? -5 : 5;
significand =
(significand + k * DecimalDataUtils.power10(-change - 1))
/ DecimalDataUtils.power10(-change);
}
return DecimalData.fromUnscaledLong(significand, precision, scale);
}
private static DecimalData toBigPrecisionDecimal(
BinaryStringData str, int precision, int scale) {
// As data in DecimalData is currently stored by BigDecimal if `precision` >
// DecimalData.MAX_LONG_DIGITS,
// and BigDecimal only supports String or char[] for its constructor,
// we can't directly extract the value from BinaryStringData.
//
// As BigDecimal(char[], int, int) is faster than BigDecimal(String, int, int),
// we extract char[] from the memory segment and pass it to the constructor of BigDecimal.
int sizeInBytes = str.getSizeInBytes();
int offset = str.getOffset();
MemorySegment[] segments = str.getSegments();
char[] chars = SegmentsUtil.allocateReuseChars(sizeInBytes);
int len;
if (segments.length == 1) {
len = StringUtf8Utils.decodeUTF8Strict(segments[0], offset, sizeInBytes, chars);
} else {
byte[] bytes = SegmentsUtil.allocateReuseBytes(sizeInBytes);
SegmentsUtil.copyToBytes(segments, offset, bytes, 0, sizeInBytes);
len = StringUtf8Utils.decodeUTF8Strict(bytes, 0, sizeInBytes, chars);
}
if (len < 0) {
return null;
} else {
// Trim white spaces
int start = 0, end = len;
for (int i = 0; i < len; i++) {
if (chars[i] != ' ' && chars[i] != '\n' && chars[i] != '\t') {
start = i;
break;
}
}
for (int i = len - 1; i >= 0; i--) {
if (chars[i] != ' ' && chars[i] != '\n' && chars[i] != '\t') {
end = i + 1;
break;
}
}
try {
BigDecimal bd = new BigDecimal(chars, start, end - start);
return DecimalData.fromBigDecimal(bd, precision, scale);
} catch (NumberFormatException nfe) {
return null;
}
}
}
/**
* Parses this BinaryStringData to Long.
*
 * <p>Note that in this method we accumulate the result in negative form and convert it to
 * positive form at the end if the string does not start with '-'. This is because the absolute
 * value of Long.MIN_VALUE is larger than Long.MAX_VALUE, e.g. Long.MAX_VALUE is
 * '9223372036854775807' while Long.MIN_VALUE is '-9223372036854775808'.
*
* <p>This code is mostly copied from LazyLong.parseLong in Hive.
*
* @return Long value if the parsing was successful else null.
*/
public static Long toLong(BinaryStringData str) {
int sizeInBytes = str.getSizeInBytes();
byte[] tmpBytes = getTmpBytes(str, sizeInBytes);
if (sizeInBytes == 0) {
return null;
}
int i = 0;
byte b = tmpBytes[i];
final boolean negative = b == '-';
if (negative || b == '+') {
i++;
if (sizeInBytes == 1) {
return null;
}
}
long result = 0;
final byte separator = '.';
final int radix = 10;
final long stopValue = Long.MIN_VALUE / radix;
while (i < sizeInBytes) {
b = tmpBytes[i];
i++;
if (b == separator) {
// We allow decimals and will return a truncated integral in that case.
// Therefore we won't throw an exception here (checking the fractional
// part happens below.)
break;
}
int digit;
if (b >= '0' && b <= '9') {
digit = b - '0';
} else {
return null;
}
// We are going to process the new digit and accumulate the result. However, before
// doing this, if the result is already smaller than the
// stopValue(Long.MIN_VALUE / radix), then result * 10 will definitely be smaller
// than minValue, and we can stop.
if (result < stopValue) {
return null;
}
result = result * radix - digit;
// Since the previous result is less than or equal to
// stopValue(Long.MIN_VALUE / radix), we can just use `result > 0` to check overflow.
// If result overflows, we should stop.
if (result > 0) {
return null;
}
}
// This is the case when we've encountered a decimal separator. The fractional
// part will not change the number, but we will verify that the fractional part
// is well formed.
while (i < sizeInBytes) {
byte currentByte = tmpBytes[i];
if (currentByte < '0' || currentByte > '9') {
return null;
}
i++;
}
if (!negative) {
result = -result;
if (result < 0) {
return null;
}
}
return result;
}
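    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). It shows the behavior described above: an optional sign, truncation at a
    // well-formed fractional part, and null for overflow or trailing garbage.
    private static void toLongExample() {
        Long truncated = toLong(fromString("-42.99"));             // -42L, fraction is ignored
        Long overflow = toLong(fromString("9223372036854775808")); // null: one past Long.MAX_VALUE
        Long invalid = toLong(fromString("12a"));                  // null: trailing non-digit
        System.out.println(truncated + " " + overflow + " " + invalid);
    }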
/**
* Parses this BinaryStringData to Int.
*
 * <p>Note that in this method we accumulate the result in negative form and convert it to
 * positive form at the end if the string does not start with '-'. This is because the absolute
 * value of Integer.MIN_VALUE is larger than Integer.MAX_VALUE, e.g. Integer.MAX_VALUE is
 * '2147483647' while Integer.MIN_VALUE is '-2147483648'.
*
* <p>This code is mostly copied from LazyInt.parseInt in Hive.
*
* <p>Note that, this method is almost same as `toLong`, but we leave it duplicated for
* performance reasons, like Hive does.
*
* @return Integer value if the parsing was successful else null.
*/
public static Integer toInt(BinaryStringData str) {
int sizeInBytes = str.getSizeInBytes();
byte[] tmpBytes = getTmpBytes(str, sizeInBytes);
if (sizeInBytes == 0) {
return null;
}
int i = 0;
byte b = tmpBytes[i];
final boolean negative = b == '-';
if (negative || b == '+') {
i++;
if (sizeInBytes == 1) {
return null;
}
}
int result = 0;
final byte separator = '.';
final int radix = 10;
final long stopValue = Integer.MIN_VALUE / radix;
while (i < sizeInBytes) {
b = tmpBytes[i];
i++;
if (b == separator) {
// We allow decimals and will return a truncated integral in that case.
// Therefore we won't throw an exception here (checking the fractional
// part happens below.)
break;
}
int digit;
if (b >= '0' && b <= '9') {
digit = b - '0';
} else {
return null;
}
// We are going to process the new digit and accumulate the result. However, before
// doing this, if the result is already smaller than the
// stopValue(Integer.MIN_VALUE / radix), then result * 10 will definitely be smaller
// than minValue, and we can stop.
if (result < stopValue) {
return null;
}
result = result * radix - digit;
// Since the previous result is less than or equal to
// stopValue(Integer.MIN_VALUE / radix), we can just use `result > 0` to check overflow.
// If result overflows, we should stop.
if (result > 0) {
return null;
}
}
// This is the case when we've encountered a decimal separator. The fractional
// part will not change the number, but we will verify that the fractional part
// is well formed.
while (i < sizeInBytes) {
byte currentByte = tmpBytes[i];
if (currentByte < '0' || currentByte > '9') {
return null;
}
i++;
}
if (!negative) {
result = -result;
if (result < 0) {
return null;
}
}
return result;
}
public static Short toShort(BinaryStringData str) {
Integer intValue = toInt(str);
if (intValue != null) {
short result = intValue.shortValue();
if (result == intValue) {
return result;
}
}
return null;
}
public static Byte toByte(BinaryStringData str) {
Integer intValue = toInt(str);
if (intValue != null) {
byte result = intValue.byteValue();
if (result == intValue) {
return result;
}
}
return null;
}
public static Double toDouble(BinaryStringData str) {
try {
return Double.valueOf(str.toString());
} catch (NumberFormatException e) {
return null;
}
}
public static Float toFloat(BinaryStringData str) {
try {
return Float.valueOf(str.toString());
} catch (NumberFormatException e) {
return null;
}
}
/**
 * Parse the target string as a key-value string and return the value that matches the key name.
 * If any argument is null, return null. Examples: keyvalue('k1=v1;k2=v2', ';', '=', 'k2') = 'v2',
 * keyvalue('k1:v1,k2:v2', ',', ':', 'k3') = NULL.
*
* @param split1 separator between key-value tuple.
* @param split2 separator between key and value.
* @param keyName name of the key whose value you want return.
* @return target value.
*/
public static BinaryStringData keyValue(
BinaryStringData str, byte split1, byte split2, BinaryStringData keyName) {
str.ensureMaterialized();
if (keyName == null || keyName.getSizeInBytes() == 0) {
return null;
}
if (str.inFirstSegment() && keyName.inFirstSegment()) {
// position in byte
int byteIdx = 0;
// position of last split1
int lastSplit1Idx = -1;
while (byteIdx < str.getSizeInBytes()) {
// If the next split1 is found in str, process the current kv pair
if (str.getSegments()[0].get(str.getOffset() + byteIdx) == split1) {
int currentKeyIdx = lastSplit1Idx + 1;
// If key of current kv is keyName, return the value directly
BinaryStringData value =
findValueOfKey(str, split2, keyName, currentKeyIdx, byteIdx);
if (value != null) {
return value;
}
lastSplit1Idx = byteIdx;
}
byteIdx++;
}
// process the remainder of the string, which does not end with split1
int currentKeyIdx = lastSplit1Idx + 1;
return findValueOfKey(str, split2, keyName, currentKeyIdx, str.getSizeInBytes());
} else {
return keyValueSlow(str, split1, split2, keyName);
}
}
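    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). It mirrors the Javadoc examples above: the value of a matching key is
    // returned, and a missing key yields null.
    private static void keyValueExample() {
        BinaryStringData kv = fromString("k1=v1;k2=v2");
        BinaryStringData hit = keyValue(kv, (byte) ';', (byte) '=', fromString("k2"));  // "v2"
        BinaryStringData miss = keyValue(kv, (byte) ';', (byte) '=', fromString("k3")); // null
        System.out.println(hit + " " + miss);
    }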
private static BinaryStringData findValueOfKey(
BinaryStringData str, byte split, BinaryStringData keyName, int start, int end) {
int keyNameLen = keyName.getSizeInBytes();
for (int idx = start; idx < end; idx++) {
if (str.getSegments()[0].get(str.getOffset() + idx) == split) {
if (idx == start + keyNameLen
&& str.getSegments()[0].equalTo(
keyName.getSegments()[0],
str.getOffset() + start,
keyName.getOffset(),
keyNameLen)) {
int valueIdx = idx + 1;
int valueLen = end - valueIdx;
byte[] bytes = new byte[valueLen];
str.getSegments()[0].get(str.getOffset() + valueIdx, bytes, 0, valueLen);
return fromBytes(bytes, 0, valueLen);
} else {
return null;
}
}
}
return null;
}
private static BinaryStringData keyValueSlow(
BinaryStringData str, byte split1, byte split2, BinaryStringData keyName) {
// position in byte
int byteIdx = 0;
// position of last split1
int lastSplit1Idx = -1;
while (byteIdx < str.getSizeInBytes()) {
// If the next split1 is found in str, process the current kv pair
if (str.byteAt(byteIdx) == split1) {
int currentKeyIdx = lastSplit1Idx + 1;
BinaryStringData value =
findValueOfKeySlow(str, split2, keyName, currentKeyIdx, byteIdx);
if (value != null) {
return value;
}
lastSplit1Idx = byteIdx;
}
byteIdx++;
}
int currentKeyIdx = lastSplit1Idx + 1;
return findValueOfKeySlow(str, split2, keyName, currentKeyIdx, str.getSizeInBytes());
}
private static BinaryStringData findValueOfKeySlow(
BinaryStringData str, byte split, BinaryStringData keyName, int start, int end) {
int keyNameLen = keyName.getSizeInBytes();
for (int idx = start; idx < end; idx++) {
if (str.byteAt(idx) == split) {
if (idx == start + keyNameLen
&& SegmentsUtil.equals(
str.getSegments(),
str.getOffset() + start,
keyName.getSegments(),
keyName.getOffset(),
keyNameLen)) {
int valueIdx = idx + 1;
byte[] bytes =
SegmentsUtil.copyToBytes(
str.getSegments(), str.getOffset() + valueIdx, end - valueIdx);
return fromBytes(bytes);
} else {
return null;
}
}
}
return null;
}
public static BinaryStringData substringSQL(BinaryStringData str, int pos) {
return substringSQL(str, pos, Integer.MAX_VALUE);
}
public static BinaryStringData substringSQL(BinaryStringData str, int pos, int length) {
if (length < 0) {
return null;
}
str.ensureMaterialized();
if (str.equals(EMPTY_UTF8)) {
return EMPTY_UTF8;
}
int start;
int end;
int numChars = str.numChars();
if (pos > 0) {
start = pos - 1;
if (start >= numChars) {
return EMPTY_UTF8;
}
} else if (pos < 0) {
start = numChars + pos;
if (start < 0) {
return EMPTY_UTF8;
}
} else {
start = 0;
}
if ((numChars - start) < length) {
end = numChars;
} else {
end = start + length;
}
return str.substring(start, end);
}
/**
* Concatenates input strings together into a single string. Returns NULL if any argument is
* NULL.
*/
public static BinaryStringData concat(BinaryStringData... inputs) {
return concat(Arrays.asList(inputs));
}
public static BinaryStringData concat(Iterable<BinaryStringData> inputs) {
// Compute the total length of the result.
int totalLength = 0;
for (BinaryStringData input : inputs) {
if (input == null) {
return null;
}
input.ensureMaterialized();
totalLength += input.getSizeInBytes();
}
// Allocate a new byte array, and copy the inputs one by one into it.
final byte[] result = new byte[totalLength];
int offset = 0;
for (BinaryStringData input : inputs) {
if (input != null) {
int len = input.getSizeInBytes();
SegmentsUtil.copyToBytes(
input.getSegments(), input.getOffset(), result, offset, len);
offset += len;
}
}
return fromBytes(result);
}
/**
 * Concatenates input strings together into a single string using the separator. Returns NULL if
* the separator is NULL.
*
* <p>Note: CONCAT_WS() does not skip any empty strings, however it does skip any NULL values
* after the separator. For example, concat_ws(",", "a", null, "c") would yield "a,c".
*/
public static BinaryStringData concatWs(
BinaryStringData separator, BinaryStringData... inputs) {
return concatWs(separator, Arrays.asList(inputs));
}
public static BinaryStringData concatWs(
BinaryStringData separator, Iterable<BinaryStringData> inputs) {
if (null == separator) {
return null;
}
separator.ensureMaterialized();
int numInputBytes = 0; // total number of bytes from the inputs
int numInputs = 0; // number of non-null inputs
for (BinaryStringData input : inputs) {
if (input != null) {
input.ensureMaterialized();
numInputBytes += input.getSizeInBytes();
numInputs++;
}
}
if (numInputs == 0) {
// Return an empty string if there is no input, or all the inputs are null.
return EMPTY_UTF8;
}
// Allocate a new byte array, and copy the inputs one by one into it.
// The size of the new array is the size of all inputs, plus the separators.
final byte[] result =
new byte[numInputBytes + (numInputs - 1) * separator.getSizeInBytes()];
int offset = 0;
int j = 0;
for (BinaryStringData input : inputs) {
if (input != null) {
int len = input.getSizeInBytes();
SegmentsUtil.copyToBytes(
input.getSegments(), input.getOffset(), result, offset, len);
offset += len;
j++;
// Add separator if this is not the last input.
if (j < numInputs) {
SegmentsUtil.copyToBytes(
separator.getSegments(),
separator.getOffset(),
result,
offset,
separator.getSizeInBytes());
offset += separator.getSizeInBytes();
}
}
}
return fromBytes(result);
}
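    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). It contrasts concat(), which returns null as soon as any input is null,
    // with concatWs(), which skips null inputs as described in the Javadoc above.
    private static void concatExample() {
        BinaryStringData strict = concat(fromString("a"), null, fromString("c"));                    // null
        BinaryStringData joined = concatWs(fromString(","), fromString("a"), null, fromString("c")); // "a,c"
        System.out.println(strict + " " + joined);
    }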
/**
 * Reverse the character order of the current string.
 *
 * @return a new string whose character order is the reverse of the current string.
*/
public static BinaryStringData reverse(BinaryStringData str) {
str.ensureMaterialized();
if (str.inFirstSegment()) {
byte[] result = new byte[str.getSizeInBytes()];
// position in byte
int byteIdx = 0;
while (byteIdx < str.getSizeInBytes()) {
int charBytes = numBytesForFirstByte(str.getByteOneSegment(byteIdx));
str.getSegments()[0].get(
str.getOffset() + byteIdx,
result,
result.length - byteIdx - charBytes,
charBytes);
byteIdx += charBytes;
}
return BinaryStringData.fromBytes(result);
} else {
return reverseMultiSegs(str);
}
}
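    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). Reversal works character by character, so multi-byte UTF-8 characters
    // stay intact.
    private static void reverseExample() {
        BinaryStringData ascii = reverse(fromString("abc"));      // "cba"
        BinaryStringData accented = reverse(fromString("héllo")); // "olléh", the two-byte 'é' is kept whole
        System.out.println(ascii + " " + accented);
    }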
private static BinaryStringData reverseMultiSegs(BinaryStringData str) {
byte[] result = new byte[str.getSizeInBytes()];
// position in byte
int byteIdx = 0;
int segSize = str.getSegments()[0].size();
BinaryStringData.SegmentAndOffset index = str.firstSegmentAndOffset(segSize);
while (byteIdx < str.getSizeInBytes()) {
int charBytes = numBytesForFirstByte(index.value());
SegmentsUtil.copyMultiSegmentsToBytes(
str.getSegments(),
str.getOffset() + byteIdx,
result,
result.length - byteIdx - charBytes,
charBytes);
byteIdx += charBytes;
index.skipBytes(charBytes, segSize);
}
return BinaryStringData.fromBytes(result);
}
/**
 * Walk the characters of the current string from both ends and remove each character that is in
 * the trim string. Return the new substring with the trim characters removed from both ends.
 *
 * @param trimStr the trim string
 * @return a substring with the trim characters removed from both ends.
*/
public static BinaryStringData trim(BinaryStringData str, BinaryStringData trimStr) {
if (trimStr == null) {
return null;
}
return trimRight(trimLeft(str, trimStr), trimStr);
}
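    // Illustrative usage sketch (not part of the original file; method name and inputs are
    // hypothetical). A custom trim string removes matching characters from both ends only;
    // a single-space trim string falls back to plain whitespace trimming.
    private static void trimExample() {
        BinaryStringData custom = trim(fromString("xxhello-worldxx"), fromString("x")); // "hello-world"
        BinaryStringData spaces = trim(fromString("  padded  "), fromString(" "));      // "padded"
        System.out.println(custom + " " + spaces);
    }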
public static BinaryStringData trimLeft(BinaryStringData str) {
str.ensureMaterialized();
if (str.inFirstSegment()) {
int s = 0;
// skip all of the space (0x20) in the left side
while (s < str.getSizeInBytes() && str.getByteOneSegment(s) == 0x20) {
s++;
}
if (s == str.getSizeInBytes()) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryStringInOneSeg(s, str.getSizeInBytes() - s);
}
} else {
return trimLeftSlow(str);
}
}
private static BinaryStringData trimLeftSlow(BinaryStringData str) {
int s = 0;
int segSize = str.getSegments()[0].size();
BinaryStringData.SegmentAndOffset front = str.firstSegmentAndOffset(segSize);
// skip all of the space (0x20) in the left side
while (s < str.getSizeInBytes() && front.value() == 0x20) {
s++;
front.nextByte(segSize);
}
if (s == str.getSizeInBytes()) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryString(s, str.getSizeInBytes() - 1);
}
}
public static boolean isSpaceString(BinaryStringData str) {
if (str.javaObject != null) {
return str.javaObject.equals(" ");
} else {
return str.byteAt(0) == ' ';
}
}
/**
 * Walk the characters of the current string from the left end and remove each character that is
 * in the trim string, stopping at the first character that is not in the trim string. Return the
 * new substring.
 *
 * @param trimStr the trim string
 * @return a substring with all leading characters that are in the trim string removed.
*/
public static BinaryStringData trimLeft(BinaryStringData str, BinaryStringData trimStr) {
str.ensureMaterialized();
if (trimStr == null) {
return null;
}
trimStr.ensureMaterialized();
if (isSpaceString(trimStr)) {
return trimLeft(str);
}
if (str.inFirstSegment()) {
int searchIdx = 0;
while (searchIdx < str.getSizeInBytes()) {
int charBytes = numBytesForFirstByte(str.getByteOneSegment(searchIdx));
BinaryStringData currentChar = str.copyBinaryStringInOneSeg(searchIdx, charBytes);
// try to find the matching for the character in the trimString characters.
if (trimStr.contains(currentChar)) {
searchIdx += charBytes;
} else {
break;
}
}
// empty string
if (searchIdx >= str.getSizeInBytes()) {
return EMPTY_UTF8;
} else {
return str.copyBinaryStringInOneSeg(searchIdx, str.getSizeInBytes() - searchIdx);
}
} else {
return trimLeftSlow(str, trimStr);
}
}
private static BinaryStringData trimLeftSlow(BinaryStringData str, BinaryStringData trimStr) {
int searchIdx = 0;
int segSize = str.getSegments()[0].size();
BinaryStringData.SegmentAndOffset front = str.firstSegmentAndOffset(segSize);
while (searchIdx < str.getSizeInBytes()) {
int charBytes = numBytesForFirstByte(front.value());
BinaryStringData currentChar =
str.copyBinaryString(searchIdx, searchIdx + charBytes - 1);
if (trimStr.contains(currentChar)) {
searchIdx += charBytes;
front.skipBytes(charBytes, segSize);
} else {
break;
}
}
if (searchIdx == str.getSizeInBytes()) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryString(searchIdx, str.getSizeInBytes() - 1);
}
}
public static BinaryStringData trimRight(BinaryStringData str) {
str.ensureMaterialized();
if (str.inFirstSegment()) {
int e = str.getSizeInBytes() - 1;
// skip all of the space (0x20) in the right side
while (e >= 0 && str.getByteOneSegment(e) == 0x20) {
e--;
}
if (e < 0) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryStringInOneSeg(0, e + 1);
}
} else {
return trimRightSlow(str);
}
}
private static BinaryStringData trimRightSlow(BinaryStringData str) {
int e = str.getSizeInBytes() - 1;
int segSize = str.getSegments()[0].size();
BinaryStringData.SegmentAndOffset behind = str.lastSegmentAndOffset(segSize);
// skip all of the space (0x20) in the right side
while (e >= 0 && behind.value() == 0x20) {
e--;
behind.previousByte(segSize);
}
if (e < 0) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryString(0, e);
}
}
/**
 * Walk the characters of the current string from the right end and remove each character that is
 * in the trim string, stopping at the first character that is not in the trim string. Return the
 * new substring.
 *
 * @param trimStr the trim string
 * @return a substring with all trailing characters that are in the trim string removed.
*/
public static BinaryStringData trimRight(BinaryStringData str, BinaryStringData trimStr) {
str.ensureMaterialized();
if (trimStr == null) {
return null;
}
trimStr.ensureMaterialized();
if (isSpaceString(trimStr)) {
return trimRight(str);
}
if (str.inFirstSegment()) {
int charIdx = 0;
int byteIdx = 0;
// each element in charLens is length of character in the source string
int[] charLens = new int[str.getSizeInBytes()];
// each element in charStartPos is start position of first byte in the source string
int[] charStartPos = new int[str.getSizeInBytes()];
while (byteIdx < str.getSizeInBytes()) {
charStartPos[charIdx] = byteIdx;
charLens[charIdx] = numBytesForFirstByte(str.getByteOneSegment(byteIdx));
byteIdx += charLens[charIdx];
charIdx++;
}
// searchIdx points to the first character which is not in trim string from the right
// end.
int searchIdx = str.getSizeInBytes() - 1;
charIdx -= 1;
while (charIdx >= 0) {
BinaryStringData currentChar =
str.copyBinaryStringInOneSeg(charStartPos[charIdx], charLens[charIdx]);
if (trimStr.contains(currentChar)) {
searchIdx -= charLens[charIdx];
} else {
break;
}
charIdx--;
}
if (searchIdx < 0) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryStringInOneSeg(0, searchIdx + 1);
}
} else {
return trimRightSlow(str, trimStr);
}
}
private static BinaryStringData trimRightSlow(BinaryStringData str, BinaryStringData trimStr) {
int charIdx = 0;
int byteIdx = 0;
int segSize = str.getSegments()[0].size();
BinaryStringData.SegmentAndOffset index = str.firstSegmentAndOffset(segSize);
// each element in charLens is length of character in the source string
int[] charLens = new int[str.getSizeInBytes()];
// each element in charStartPos is start position of first byte in the source string
int[] charStartPos = new int[str.getSizeInBytes()];
while (byteIdx < str.getSizeInBytes()) {
charStartPos[charIdx] = byteIdx;
int charBytes = numBytesForFirstByte(index.value());
charLens[charIdx] = charBytes;
byteIdx += charBytes;
charIdx++;
index.skipBytes(charBytes, segSize);
}
// searchIdx points to the first character which is not in trim string from the right
// end.
int searchIdx = str.getSizeInBytes() - 1;
charIdx -= 1;
while (charIdx >= 0) {
BinaryStringData currentChar =
str.copyBinaryString(
charStartPos[charIdx], charStartPos[charIdx] + charLens[charIdx] - 1);
if (trimStr.contains(currentChar)) {
searchIdx -= charLens[charIdx];
} else {
break;
}
charIdx--;
}
if (searchIdx < 0) {
// empty string
return EMPTY_UTF8;
} else {
return str.copyBinaryString(0, searchIdx);
}
}
public static BinaryStringData trim(
BinaryStringData str, boolean leading, boolean trailing, BinaryStringData seek) {
str.ensureMaterialized();
if (seek == null) {
return null;
}
if (leading && trailing) {
return trim(str, seek);
} else if (leading) {
return trimLeft(str, seek);
} else if (trailing) {
return trimRight(str, seek);
} else {
return str;
}
}
public static String safeToString(BinaryStringData str) {
if (str == null) {
return null;
} else {
return str.toString();
}
}
}
|
|
/**
* Copyright (C) 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja.cache;
import java.io.NotSerializableException;
import java.io.Serializable;
import java.util.Map;
import net.sf.ehcache.CacheException;
import ninja.utils.TimeUtil;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
 * A convenience class to access the underlying cache implementation.
*
* Makes getting and setting of objects a lot simpler.
*
* This class originates from Play 1.2.5's excellent cache implementation.
*
* @author ra
*
*/
@Singleton
public class NinjaCache {
final Cache cache;
@Inject
public NinjaCache(Cache cache) {
this.cache = cache;
}
/**
* Add an element only if it doesn't exist.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
*/
public void add(String key, Object value, String expiration) {
checkSerializable(value);
cache.add(key, value, TimeUtil.parseDuration(expiration));
}
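    /**
     * Illustrative usage sketch (not part of the original class; the method name, keys and
     * values are hypothetical). Duration strings follow the format shown above, e.g.
     * "10s", "3mn", "8h".
     */
    void addExample() {
        add("user.42.profile", "cached-profile-data", "30mn"); // kept for 30 minutes
        add("startup.banner", "hello");                        // no expiration: stored indefinitely
    }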
/**
* Add an element only if it doesn't exist, and return only when
* the element is effectively cached.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
 * @return whether the element has eventually been cached
*/
public boolean safeAdd(String key, Object value, String expiration) {
checkSerializable(value);
return cache.safeAdd(key, value, TimeUtil.parseDuration(expiration));
}
/**
* Add an element only if it doesn't exist and store it indefinitely.
* @param key Element key
* @param value Element value
*/
public void add(String key, Object value) {
checkSerializable(value);
cache.add(key, value, TimeUtil.parseDuration(null));
}
/**
* Set an element.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
*/
public void set(String key, Object value, String expiration) {
checkSerializable(value);
cache.set(key, value, TimeUtil.parseDuration(expiration));
}
/**
* Set an element and return only when the element is effectively cached.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
 * @return whether the element has eventually been cached
*/
public boolean safeSet(String key, Object value, String expiration) {
checkSerializable(value);
return cache.safeSet(key, value, TimeUtil.parseDuration(expiration));
}
/**
* Set an element and store it indefinitely.
* @param key Element key
* @param value Element value
*/
public void set(String key, Object value) {
checkSerializable(value);
cache.set(key, value, TimeUtil.parseDuration(null));
}
/**
* Replace an element only if it already exists.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
*/
public void replace(String key, Object value, String expiration) {
checkSerializable(value);
cache.replace(key, value, TimeUtil.parseDuration(expiration));
}
/**
* Replace an element only if it already exists and return only when the
* element is effectively cached.
* @param key Element key
* @param value Element value
* @param expiration Ex: 10s, 3mn, 8h
 * @return whether the element has eventually been cached
*/
public boolean safeReplace(String key, Object value, String expiration) {
checkSerializable(value);
return cache.safeReplace(key, value, TimeUtil.parseDuration(expiration));
}
/**
* Replace an element only if it already exists and store it indefinitely.
* @param key Element key
* @param value Element value
*/
public void replace(String key, Object value) {
checkSerializable(value);
cache.replace(key, value, TimeUtil.parseDuration(null));
}
/**
* Increment the element value (must be a Number).
* @param key Element key
* @param by The incr value
* @return The new value
*/
public long incr(String key, int by) {
return cache.incr(key, by);
}
/**
* Increment the element value (must be a Number) by 1.
* @param key Element key
* @return The new value
*/
public long incr(String key) {
return cache.incr(key, 1);
}
/**
* Decrement the element value (must be a Number).
* @param key Element key
* @param by The decr value
* @return The new value
*/
public long decr(String key, int by) {
return cache.decr(key, by);
}
/**
* Decrement the element value (must be a Number) by 1.
* @param key Element key
* @return The new value
*/
public long decr(String key) {
return cache.decr(key, 1);
}
/**
* Retrieve an object.
* @param key The element key
* @return The element value or null
*/
public Object get(String key) {
return cache.get(key);
}
/**
* Bulk retrieve.
* @param key List of keys
* @return Map of keys & values
*/
public Map<String, Object> get(String... key) {
return cache.get(key);
}
/**
* Delete an element from the cache.
* @param key The element key
*/
public void delete(String key) {
cache.delete(key);
}
/**
* Delete an element from the cache and return only when the
* element is effectively removed.
* @param key The element key
 * @return whether the element has eventually been deleted
*/
public boolean safeDelete(String key) {
return cache.safeDelete(key);
}
/**
* Clear all data from cache.
*/
public void clear() {
cache.clear();
}
/**
 * Convenience method to get a cached value cast to the given class type.
* @param <T> The needed type
* @param key The element key
* @param clazz The type class
* @return The element value or null
*/
@SuppressWarnings("unchecked")
public <T> T get(String key, Class<T> clazz) {
return (T) cache.get(key);
}
/**
 * Utility that checks that an object is serializable.
*/
void checkSerializable(Object value) {
if (value != null && !(value instanceof Serializable)) {
throw new CacheException(
"Cannot cache a non-serializable value of type "
+ value.getClass().getName(),
new NotSerializableException(value.getClass().getName()));
}
}
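    /**
     * Illustrative sketch (not part of the original class; the method name and keys are
     * hypothetical). Caching a value that does not implement Serializable fails fast with a
     * CacheException wrapping a NotSerializableException.
     */
    void checkSerializableExample() {
        set("answer", Integer.valueOf(42)); // fine: Integer is Serializable
        try {
            set("not.ok", new Object());    // rejected before reaching the cache
        } catch (CacheException expected) {
            // expected for non-serializable values
        }
    }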
}
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.cesarvaliente.tldrlegal;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.support.v17.leanback.app.BackgroundManager;
import android.support.v17.leanback.app.BrowseFragment;
import android.support.v17.leanback.widget.ArrayObjectAdapter;
import android.support.v17.leanback.widget.HeaderItem;
import android.support.v17.leanback.widget.ImageCardView;
import android.support.v17.leanback.widget.ListRow;
import android.support.v17.leanback.widget.ListRowPresenter;
import android.support.v17.leanback.widget.OnItemViewClickedListener;
import android.support.v17.leanback.widget.OnItemViewSelectedListener;
import android.support.v17.leanback.widget.Presenter;
import android.support.v17.leanback.widget.Row;
import android.support.v17.leanback.widget.RowPresenter;
import android.support.v4.app.ActivityOptionsCompat;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
public class MainFragment extends BrowseFragment {
private static final String TAG = "MainFragment";
private static final int BACKGROUND_UPDATE_DELAY = 300;
private static final int GRID_ITEM_WIDTH = 200;
private static final int GRID_ITEM_HEIGHT = 200;
private static final int NUM_ROWS = 6;
private static final int NUM_COLS = 15;
private ArrayObjectAdapter mRowsAdapter;
private Drawable mDefaultBackground;
private Target mBackgroundTarget;
private DisplayMetrics mMetrics;
private Timer mBackgroundTimer;
private final Handler mHandler = new Handler();
private URI mBackgroundURI;
Movie mMovie;
CardPresenter mCardPresenter;
@Override
public void onActivityCreated(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onActivityCreated(savedInstanceState);
prepareBackgroundManager();
setupUIElements();
loadRows();
setupEventListeners();
}
@Override
public void onDestroy() {
super.onDestroy();
if (null != mBackgroundTimer) {
Log.d(TAG, "onDestroy: " + mBackgroundTimer.toString());
mBackgroundTimer.cancel();
}
}
private void loadRows() {
List<Movie> list = MovieList.setupMovies();
mRowsAdapter = new ArrayObjectAdapter(new ListRowPresenter());
mCardPresenter = new CardPresenter();
int i;
for (i = 0; i < NUM_ROWS; i++) {
if (i != 0) {
Collections.shuffle(list);
}
ArrayObjectAdapter listRowAdapter = new ArrayObjectAdapter(mCardPresenter);
for (int j = 0; j < NUM_COLS; j++) {
listRowAdapter.add(list.get(j % 5));
}
HeaderItem header = new HeaderItem(i, MovieList.MOVIE_CATEGORY[i]);
mRowsAdapter.add(new ListRow(header, listRowAdapter));
}
HeaderItem gridHeader = new HeaderItem(i, "PREFERENCES");
GridItemPresenter mGridPresenter = new GridItemPresenter();
ArrayObjectAdapter gridRowAdapter = new ArrayObjectAdapter(mGridPresenter);
gridRowAdapter.add(getResources().getString(R.string.grid_view));
gridRowAdapter.add(getString(R.string.error_fragment));
gridRowAdapter.add(getResources().getString(R.string.personal_settings));
mRowsAdapter.add(new ListRow(gridHeader, gridRowAdapter));
setAdapter(mRowsAdapter);
}
private void prepareBackgroundManager() {
BackgroundManager backgroundManager = BackgroundManager.getInstance(getActivity());
backgroundManager.attach(getActivity().getWindow());
mBackgroundTarget = new PicassoBackgroundManagerTarget(backgroundManager);
mDefaultBackground = getResources().getDrawable(R.drawable.default_background);
mMetrics = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(mMetrics);
}
private void setupUIElements() {
// setBadgeDrawable(getActivity().getResources().getDrawable(
// R.drawable.videos_by_google_banner));
setTitle(getString(R.string.browse_title)); // Badge, when set, takes precedence over title
setHeadersState(HEADERS_ENABLED);
setHeadersTransitionOnBackEnabled(true);
// set fastLane (or headers) background color
setBrandColor(getResources().getColor(R.color.fastlane_background));
// set search icon color
setSearchAffordanceColor(getResources().getColor(R.color.search_opaque));
}
private void setupEventListeners() {
setOnSearchClickedListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getActivity(), "Implement your own in-app search", Toast.LENGTH_LONG)
.show();
}
});
setOnItemViewClickedListener(new ItemViewClickedListener());
setOnItemViewSelectedListener(new ItemViewSelectedListener());
}
private final class ItemViewClickedListener implements OnItemViewClickedListener {
@Override
public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item,
RowPresenter.ViewHolder rowViewHolder, Row row) {
if (item instanceof Movie) {
Movie movie = (Movie) item;
Log.d(TAG, "Item: " + item.toString());
Intent intent = new Intent(getActivity(), DetailsActivity.class);
intent.putExtra(DetailsActivity.MOVIE, movie);
Bundle bundle = ActivityOptionsCompat.makeSceneTransitionAnimation(
getActivity(),
((ImageCardView) itemViewHolder.view).getMainImageView(),
DetailsActivity.SHARED_ELEMENT_NAME).toBundle();
getActivity().startActivity(intent, bundle);
} else if (item instanceof String) {
if (((String) item).indexOf(getString(R.string.error_fragment)) >= 0) {
Intent intent = new Intent(getActivity(), BrowseErrorActivity.class);
startActivity(intent);
} else {
Toast.makeText(getActivity(), ((String) item), Toast.LENGTH_SHORT)
.show();
}
}
}
}
private final class ItemViewSelectedListener implements OnItemViewSelectedListener {
@Override
public void onItemSelected(Presenter.ViewHolder itemViewHolder, Object item,
RowPresenter.ViewHolder rowViewHolder, Row row) {
if (item instanceof Movie) {
mBackgroundURI = ((Movie) item).getBackgroundImageURI();
startBackgroundTimer();
}
}
}
protected void setDefaultBackground(Drawable background) {
mDefaultBackground = background;
}
protected void setDefaultBackground(int resourceId) {
mDefaultBackground = getResources().getDrawable(resourceId);
}
protected void updateBackground(URI uri) {
Picasso.with(getActivity())
.load(uri.toString())
.resize(mMetrics.widthPixels, mMetrics.heightPixels)
.centerCrop()
.error(mDefaultBackground)
.into(mBackgroundTarget);
}
protected void updateBackground(Drawable drawable) {
BackgroundManager.getInstance(getActivity()).setDrawable(drawable);
}
protected void clearBackground() {
BackgroundManager.getInstance(getActivity()).setDrawable(mDefaultBackground);
}
private void startBackgroundTimer() {
if (null != mBackgroundTimer) {
mBackgroundTimer.cancel();
}
mBackgroundTimer = new Timer();
mBackgroundTimer.schedule(new UpdateBackgroundTask(), BACKGROUND_UPDATE_DELAY);
}
private class UpdateBackgroundTask extends TimerTask {
@Override
public void run() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (mBackgroundURI != null) {
updateBackground(mBackgroundURI);
}
}
});
}
}
private class GridItemPresenter extends Presenter {
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent) {
TextView view = new TextView(parent.getContext());
view.setLayoutParams(new ViewGroup.LayoutParams(GRID_ITEM_WIDTH, GRID_ITEM_HEIGHT));
view.setFocusable(true);
view.setFocusableInTouchMode(true);
view.setBackgroundColor(getResources().getColor(R.color.default_background));
view.setTextColor(Color.WHITE);
view.setGravity(Gravity.CENTER);
return new ViewHolder(view);
}
@Override
public void onBindViewHolder(ViewHolder viewHolder, Object item) {
((TextView) viewHolder.view).setText((String) item);
}
@Override
public void onUnbindViewHolder(ViewHolder viewHolder) {
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util.future;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.AbstractQueuedSynchronizer;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.IgniteFutureCancelledCheckedException;
import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteInClosure;
import org.jetbrains.annotations.Nullable;
/**
* Future adapter.
*/
public class GridFutureAdapter<R> extends AbstractQueuedSynchronizer implements IgniteInternalFuture<R> {
/** */
private static final long serialVersionUID = 0L;
/** Initial state. */
private static final int INIT = 0;
/** Cancelled state. */
private static final int CANCELLED = 1;
/** Done state. */
private static final int DONE = 2;
/** */
private static final byte ERR = 1;
/** */
private static final byte RES = 2;
/** */
private byte resFlag;
/** Result. */
@GridToStringInclude
private Object res;
/** Future start time. */
private final long startTime = U.currentTimeMillis();
/** Future end time. */
private volatile long endTime;
/** */
private boolean ignoreInterrupts;
/** */
@GridToStringExclude
private IgniteInClosure<? super IgniteInternalFuture<R>> lsnr;
/** {@inheritDoc} */
@Override public long startTime() {
return startTime;
}
/** {@inheritDoc} */
@Override public long duration() {
long endTime = this.endTime;
return endTime == 0 ? U.currentTimeMillis() - startTime : endTime - startTime;
}
/**
* @param ignoreInterrupts Ignore interrupts flag.
*/
public void ignoreInterrupts(boolean ignoreInterrupts) {
this.ignoreInterrupts = ignoreInterrupts;
}
/**
* @return Future end time.
*/
public long endTime() {
return endTime;
}
/** {@inheritDoc} */
@Override public Throwable error() {
return (resFlag == ERR) ? (Throwable)res : null;
}
/** {@inheritDoc} */
@Override public R result() {
return resFlag == RES ? (R)res : null;
}
/** {@inheritDoc} */
@Override public R get() throws IgniteCheckedException {
return get0(ignoreInterrupts);
}
/** {@inheritDoc} */
@Override public R getUninterruptibly() throws IgniteCheckedException {
return get0(true);
}
/** {@inheritDoc} */
@Override public R get(long timeout) throws IgniteCheckedException {
// Do not replace with static import, as it may not compile.
return get(timeout, TimeUnit.MILLISECONDS);
}
/** {@inheritDoc} */
@Override public R get(long timeout, TimeUnit unit) throws IgniteCheckedException {
A.ensure(timeout >= 0, "timeout cannot be negative: " + timeout);
A.notNull(unit, "unit");
try {
return get0(unit.toNanos(timeout));
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IgniteInterruptedCheckedException("Got interrupted while waiting for future to complete.", e);
}
}
/**
* Internal get routine.
*
* @param ignoreInterrupts Whether to ignore interrupts.
* @return Result.
* @throws IgniteCheckedException If failed.
*/
private R get0(boolean ignoreInterrupts) throws IgniteCheckedException {
try {
if (endTime == 0) {
if (ignoreInterrupts)
acquireShared(0);
else
acquireSharedInterruptibly(0);
}
if (getState() == CANCELLED)
throw new IgniteFutureCancelledCheckedException("Future was cancelled: " + this);
assert resFlag != 0;
if (resFlag == ERR)
throw U.cast((Throwable)res);
return (R)res;
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IgniteInterruptedCheckedException(e);
}
}
/**
* @param nanosTimeout Timeout (nanoseconds).
* @return Result.
* @throws InterruptedException If interrupted.
* @throws IgniteFutureTimeoutCheckedException If timeout reached before computation completed.
* @throws IgniteCheckedException If error occurred.
*/
@Nullable protected R get0(long nanosTimeout) throws InterruptedException, IgniteCheckedException {
if (endTime == 0 && !tryAcquireSharedNanos(0, nanosTimeout))
throw new IgniteFutureTimeoutCheckedException("Timeout was reached before computation completed.");
if (getState() == CANCELLED)
throw new IgniteFutureCancelledCheckedException("Future was cancelled: " + this);
assert resFlag != 0;
if (resFlag == ERR)
throw U.cast((Throwable)res);
return (R)res;
}
/** {@inheritDoc} */
@Override public void listen(IgniteInClosure<? super IgniteInternalFuture<R>> lsnr0) {
assert lsnr0 != null;
boolean done = isDone();
if (!done) {
synchronized (this) {
done = isDone(); // Double check.
if (!done) {
if (lsnr == null)
lsnr = lsnr0;
else if (lsnr instanceof ArrayListener)
((ArrayListener)lsnr).add(lsnr0);
else
lsnr = (IgniteInClosure)new ArrayListener<IgniteInternalFuture>(lsnr, lsnr0);
return;
}
}
}
assert done;
notifyListener(lsnr0);
}
/** {@inheritDoc} */
@Override public <T> IgniteInternalFuture<T> chain(final IgniteClosure<? super IgniteInternalFuture<R>, T> doneCb) {
return new ChainFuture<>(this, doneCb);
}
/**
* Notifies all registered listeners.
*/
private void notifyListeners() {
IgniteInClosure<? super IgniteInternalFuture<R>> lsnr0;
synchronized (this) {
lsnr0 = lsnr;
if (lsnr0 == null)
return;
lsnr = null;
}
assert lsnr0 != null;
notifyListener(lsnr0);
}
/**
* Notifies single listener.
*
* @param lsnr Listener.
*/
private void notifyListener(IgniteInClosure<? super IgniteInternalFuture<R>> lsnr) {
assert lsnr != null;
try {
lsnr.apply(this);
}
catch (IllegalStateException e) {
U.error(logger(), "Failed to notify listener (is grid stopped?) [fut=" + this +
", lsnr=" + lsnr + ", err=" + e.getMessage() + ']', e);
}
catch (RuntimeException | Error e) {
U.error(logger(), "Failed to notify listener: " + lsnr, e);
throw e;
}
}
/**
* Default no-op implementation that always returns {@code false}.
* Futures that do support cancellation should override this method
* and call {@link #onCancelled()} callback explicitly if cancellation
* indeed did happen.
*/
@Override public boolean cancel() throws IgniteCheckedException {
return false;
}
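    /**
     * Illustrative sketch only, not part of the original adapter: per the contract documented on
     * {@link #cancel()}, a future that does support cancellation overrides it and reports the
     * cancellation through {@link #onCancelled()}.
     */
    private static class CancellableFutureSketch<T> extends GridFutureAdapter<T> {
        /** */
        private static final long serialVersionUID = 0L;
        /** {@inheritDoc} */
        @Override public boolean cancel() throws IgniteCheckedException {
            // Attempts the INIT -> CANCELLED transition; returns false if the future is already done.
            return onCancelled();
        }
    }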
/** {@inheritDoc} */
@Override public boolean isDone() {
// Don't check for "valid" here, as "done" flag can be read
// even in invalid state.
return endTime != 0;
}
/**
* @return {@code True} if the future completed with an exception.
*/
public boolean isFailed() {
// Must read endTime first.
return endTime != 0 && resFlag == ERR;
}
/** {@inheritDoc} */
@Override public boolean isCancelled() {
return getState() == CANCELLED;
}
/**
* Callback to notify that future is finished with {@code null} result.
* This method must delegate to {@link #onDone(Object, Throwable)} method.
*
* @return {@code True} if result was set by this call.
*/
public final boolean onDone() {
return onDone(null, null);
}
/**
* Callback to notify that future is finished.
* This method must delegate to {@link #onDone(Object, Throwable)} method.
*
* @param res Result.
* @return {@code True} if result was set by this call.
*/
public final boolean onDone(@Nullable R res) {
return onDone(res, null);
}
/**
* Callback to notify that future is finished.
* This method must delegate to {@link #onDone(Object, Throwable)} method.
*
* @param err Error.
* @return {@code True} if result was set by this call.
*/
public final boolean onDone(@Nullable Throwable err) {
return onDone(null, err);
}
/**
* Callback to notify that future is finished. Note that if a non-{@code null} exception is passed
* in, the result value will be ignored.
*
* @param res Optional result.
* @param err Optional error.
* @return {@code True} if result was set by this call.
*/
public boolean onDone(@Nullable R res, @Nullable Throwable err) {
return onDone(res, err, false);
}
/**
* @param res Result.
* @param err Error.
* @param cancel {@code True} if future is being cancelled.
* @return {@code True} if result was set by this call.
*/
private boolean onDone(@Nullable R res, @Nullable Throwable err, boolean cancel) {
boolean notify = false;
try {
if (compareAndSetState(INIT, cancel ? CANCELLED : DONE)) {
if (err != null) {
resFlag = ERR;
this.res = err;
}
else {
resFlag = RES;
this.res = res;
}
notify = true;
releaseShared(0);
return true;
}
return false;
}
finally {
if (notify)
notifyListeners();
}
}
/**
* Callback to notify that future is cancelled.
*
* @return {@code True} if cancel flag was set by this call.
*/
public boolean onCancelled() {
return onDone(null, null, true);
}
/** {@inheritDoc} */
@Override protected final int tryAcquireShared(int ignore) {
return endTime != 0 ? 1 : -1;
}
/** {@inheritDoc} */
@Override protected final boolean tryReleaseShared(int ignore) {
endTime = U.currentTimeMillis();
// Always signal after setting final done status.
return true;
}
/**
* @return String representation of state.
*/
private String state() {
int s = getState();
return s == INIT ? "INIT" : s == CANCELLED ? "CANCELLED" : "DONE";
}
/**
* @return Logger instance.
*/
@Nullable public IgniteLogger logger() {
return null;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridFutureAdapter.class, this, "state", state());
}
/**
*
*/
private static class ArrayListener<R> implements IgniteInClosure<IgniteInternalFuture<R>> {
/** */
private static final long serialVersionUID = 0L;
/** */
private IgniteInClosure<? super IgniteInternalFuture<R>>[] arr;
/**
* @param lsnrs Listeners.
*/
private ArrayListener(IgniteInClosure... lsnrs) {
this.arr = lsnrs;
}
/** {@inheritDoc} */
@Override public void apply(IgniteInternalFuture<R> fut) {
for (int i = 0; i < arr.length; i++)
arr[i].apply(fut);
}
/**
* @param lsnr Listener.
*/
void add(IgniteInClosure<? super IgniteInternalFuture<R>> lsnr) {
arr = Arrays.copyOf(arr, arr.length + 1);
arr[arr.length - 1] = lsnr;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(ArrayListener.class, this, "arrSize", arr.length);
}
}
/**
*
*/
private static class ChainFuture<R, T> extends GridFutureAdapter<T> {
/** */
private static final long serialVersionUID = 0L;
/** */
private GridFutureAdapter<R> fut;
/** */
private IgniteClosure<? super IgniteInternalFuture<R>, T> doneCb;
/**
*
*/
public ChainFuture() {
// No-op.
}
/**
* @param fut Future.
* @param doneCb Closure.
*/
ChainFuture(
GridFutureAdapter<R> fut,
IgniteClosure<? super IgniteInternalFuture<R>, T> doneCb
) {
this.fut = fut;
this.doneCb = doneCb;
fut.listen(new GridFutureChainListener<>(this, doneCb));
}
/** {@inheritDoc} */
@Override public String toString() {
return "ChainFuture [orig=" + fut + ", doneCb=" + doneCb + ']';
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.user.query;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import javax.jcr.NamespaceRegistry;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.QueryBuilder;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.QueryUtils;
import org.apache.jackrabbit.oak.namepath.impl.LocalNameMapper;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.namepath.impl.NamePathMapperImpl;
import org.apache.jackrabbit.oak.plugins.value.jcr.PartialValueFactory;
import org.apache.jackrabbit.oak.spi.query.QueryConstants;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.user.AuthorizableType;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryUtilTest {
private final PartialValueFactory valueFactory = new PartialValueFactory(NamePathMapper.DEFAULT);
private static void assertSearchRoot(@NotNull Map<AuthorizableType, String> mapping, @NotNull ConfigurationParameters params) {
mapping.forEach((key, s) -> {
String expected = (PathUtils.denotesRoot(s)) ? QueryConstants.SEARCH_ROOT_PATH : QueryConstants.SEARCH_ROOT_PATH + s;
assertEquals(expected, QueryUtil.getSearchRoot(key, params));
});
}
@Test
public void testGetSearchRootDefault() {
Map<AuthorizableType, String> defaultPaths = ImmutableMap.of(
AuthorizableType.USER, UserConstants.DEFAULT_USER_PATH,
AuthorizableType.GROUP, UserConstants.DEFAULT_GROUP_PATH,
AuthorizableType.AUTHORIZABLE, "/rep:security/rep:authorizables");
assertSearchRoot(defaultPaths, ConfigurationParameters.EMPTY);
}
@Test
public void testGetSearchRootSingleConfiguredPath() {
String path = "/configured/user_and_group/path";
for (AuthorizableType type : AuthorizableType.values()) {
assertEquals(QueryConstants.SEARCH_ROOT_PATH + path, QueryUtil.getSearchRoot(type, ConfigurationParameters.of(UserConstants.PARAM_USER_PATH, path, UserConstants.PARAM_GROUP_PATH, path)));
}
}
@Test
public void testGetSearchRootUserPathParentOfGroup() {
ConfigurationParameters params = ConfigurationParameters.of(
UserConstants.PARAM_USER_PATH, "/configured/users",
UserConstants.PARAM_GROUP_PATH, "/configured/users/groups");
Map<AuthorizableType, String> paths = ImmutableMap.of(
AuthorizableType.USER, "/configured/users",
AuthorizableType.GROUP, "/configured/users/groups",
AuthorizableType.AUTHORIZABLE, "/configured/users");
assertSearchRoot(paths, params);
}
@Test
public void testGetSearchRootGroupPathParentOfUser() {
ConfigurationParameters params = ConfigurationParameters.of(
UserConstants.PARAM_USER_PATH, "/configured/groups/users",
UserConstants.PARAM_GROUP_PATH, "/configured/groups");
Map<AuthorizableType, String> paths = ImmutableMap.of(
AuthorizableType.USER, "/configured/groups/users",
AuthorizableType.GROUP, "/configured/groups",
AuthorizableType.AUTHORIZABLE, "/configured/groups");
assertSearchRoot(paths, params);
}
@Test
public void testGetSearchRootNoCommonAncestor() {
ConfigurationParameters params = ConfigurationParameters.of(
UserConstants.PARAM_USER_PATH, "/users",
UserConstants.PARAM_GROUP_PATH, "/groups");
Map<AuthorizableType, String> paths = ImmutableMap.of(
AuthorizableType.USER, "/users",
AuthorizableType.GROUP, "/groups",
AuthorizableType.AUTHORIZABLE, "/");
assertSearchRoot(paths, params);
}
@Test
public void testGetSearchRootConfiguredPathDenotesRoot() {
ConfigurationParameters params = ConfigurationParameters.of(
UserConstants.PARAM_USER_PATH, "/users",
UserConstants.PARAM_GROUP_PATH, "/");
Map<AuthorizableType, String> paths = ImmutableMap.of(
AuthorizableType.USER, "/users",
AuthorizableType.GROUP, "/",
AuthorizableType.AUTHORIZABLE, "/");
assertSearchRoot(paths, params);
}
@Test
public void testGetSearchRoot() {
ConfigurationParameters params = ConfigurationParameters.of(
UserConstants.PARAM_USER_PATH, "/configured/user/path",
UserConstants.PARAM_GROUP_PATH, "/configured/group/path");
Map<AuthorizableType, String> paths = ImmutableMap.of(
AuthorizableType.USER, "/configured/user/path",
AuthorizableType.GROUP, "/configured/group/path",
AuthorizableType.AUTHORIZABLE, "/configured");
assertSearchRoot(paths, params);
}
@Test
public void testNodeTypeName() {
Map<AuthorizableType, String> ntNames = ImmutableMap.of(
AuthorizableType.USER, UserConstants.NT_REP_USER,
AuthorizableType.GROUP, UserConstants.NT_REP_GROUP,
AuthorizableType.AUTHORIZABLE, UserConstants.NT_REP_AUTHORIZABLE);
ntNames.forEach((key, value) -> assertEquals(value, QueryUtil.getNodeTypeName(key)));
}
@Test
public void testEscapeNodeName() {
List<String> names = ImmutableList.of("name", JcrConstants.JCR_CREATED, "%name", "a%name", "name%");
for (String name : names) {
assertEquals(QueryUtils.escapeNodeName(name), QueryUtil.escapeNodeName(name));
}
}
@Test
public void testFormatString() throws Exception {
String value = "'string\\value";
assertEquals("'"+QueryUtils.escapeForQuery(value)+"'", QueryUtil.format(valueFactory.createValue(value)));
}
@Test
public void testFormatBoolean() throws Exception {
assertEquals("'"+Boolean.TRUE.toString()+"'", QueryUtil.format(valueFactory.createValue(true)));
}
@Test
public void testFormatLong() throws Exception {
Value longV = valueFactory.createValue(Long.MAX_VALUE);
assertEquals(String.valueOf(Long.MAX_VALUE), QueryUtil.format(longV));
}
@Test
public void testFormatDouble() throws Exception {
Value doubleV = valueFactory.createValue(2.3);
assertEquals(String.valueOf(2.3), QueryUtil.format(doubleV));
}
@Test
public void testFormatDate() throws Exception {
Value dateV = valueFactory.createValue(Calendar.getInstance());
String dateString = dateV.getString();
assertEquals("xs:dateTime('" + dateString + "')", QueryUtil.format(dateV));
}
@Test(expected = RepositoryException.class)
public void testFormatOtherTypes() throws Exception {
Value nameValue = valueFactory.createValue(JcrConstants.JCR_CREATED, PropertyType.NAME);
QueryUtil.format(nameValue);
}
@Test
public void testEscapeForQuery() {
NamePathMapper namePathMapper = new NamePathMapperImpl(new LocalNameMapper(
ImmutableMap.of(NamespaceRegistry.PREFIX_JCR, NamespaceRegistry.NAMESPACE_JCR),
ImmutableMap.of("myPrefix", NamespaceRegistry.NAMESPACE_JCR)));
String value = "'string\\value";
assertEquals(QueryUtils.escapeForQuery("myPrefix:"+value), QueryUtil.escapeForQuery("jcr:"+value, namePathMapper));
}
@Test
public void testGetCollation() {
assertSame(RelationOp.LT, QueryUtil.getCollation(QueryBuilder.Direction.DESCENDING));
assertSame(RelationOp.GT, QueryUtil.getCollation(QueryBuilder.Direction.ASCENDING));
}
@Test
public void testGetIDNullAuthorizable() {
assertNull(QueryUtil.getID(null));
}
@Test
public void testGetID() throws RepositoryException {
Authorizable a = when(mock(Authorizable.class).getID()).thenReturn("id").getMock();
assertEquals("id", QueryUtil.getID(a));
}
@Test
public void testGetIDThrowing() throws RepositoryException {
Authorizable a = when(mock(Authorizable.class).getID()).thenThrow(new RepositoryException()).getMock();
assertNull(QueryUtil.getID(a));
}
}
|
|
package org.dashbuilder.common.client.validation.editors;
import com.github.gwtbootstrap.client.ui.*;
import com.github.gwtbootstrap.client.ui.Image;
import com.github.gwtbootstrap.client.ui.base.IconAnchor;
import com.github.gwtbootstrap.client.ui.constants.ButtonType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.editor.client.EditorError;
import com.google.gwt.editor.client.HasEditorErrors;
import com.google.gwt.editor.client.IsEditor;
import com.google.gwt.editor.client.adapters.TakesValueEditor;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiConstructor;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.*;
import java.util.*;
/**
* <p>Editor component that accepts multiple values and displays each one using a given image button in a dropdown button group.</p>
* <p>Validation error messages are displayed by changing the border color to RED and showing the message in a tooltip.</p>
* <p>This component is ideal for handling enums.</p>
*
* @param <T> The type of the value handled by the editor widget
*/
public class DropDownImageListEditor<T> extends Composite implements
HasConstrainedValue<T>, HasEditorErrors<T>, IsEditor<TakesValueEditor<T>> {
interface Binder extends UiBinder<Widget, DropDownImageListEditor> {
Binder BINDER = GWT.create(Binder.class);
}
interface DropDownImageListEditorStyle extends CssResource {
String errorPanel();
String errorPanelError();
String imagePointer();
}
private final List<T> values = new ArrayList<T>();
private TakesValueEditor<T> editor;
private final Map<T ,Image> images = new LinkedHashMap<T, Image>();
private T value;
private boolean isEditMode;
private int width = -1;
private int height = -1;
protected boolean fireEvents = false;
@UiField
DropDownImageListEditorStyle style;
@UiField
HTMLPanel errorPanel;
@UiField
Image currentTypeImage;
@UiField(provided = true)
DropdownButton dropDownButton;
@UiField
Tooltip errorTooltip;
private IconAnchor trigger;
@UiConstructor
public DropDownImageListEditor() {
// Create and configure the dropdown button.
dropDownButton = new DropdownButton() {
@Override
protected IconAnchor createTrigger() {
DropDownImageListEditor.this.trigger = super.createTrigger();
return DropDownImageListEditor.this.trigger;
}
};
dropDownButton.setType(ButtonType.LINK);
// UI binding.
initWidget(Binder.BINDER.createAndBindUi(this));
}
public HandlerRegistration addValueChangeHandler(final ValueChangeHandler<T> handler) {
return addHandler(handler, ValueChangeEvent.getType());
}
/**
* Returns a {@link com.google.gwt.editor.client.adapters.TakesValueEditor} backed by this editor.
*/
public TakesValueEditor<T> asEditor() {
if (editor == null) {
editor = TakesValueEditor.of(this);
}
return editor;
}
public T getValue() {
return value;
}
public void setAcceptableValues(Map<T, Image> newValues) {
values.clear();
images.clear();
if (newValues != null && !newValues.isEmpty()) {
for (Map.Entry<T, Image> entry : newValues.entrySet()) {
final T _value = entry.getKey();
final Image _image = entry.getValue();
if (width > 0 && height > 0) _image.setSize(width+"px", height+"px");
_image.addStyleName(style.imagePointer());
_image.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
if (isEditMode) setValue(_value, fireEvents);
}
});
values.add(_value);
images.put(_value, _image);
}
}
// Configure drop down button trigger.
if (values.size() == 1) {
trigger.setActive(false);
trigger.setCaret(false);
}
}
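/**
 * Illustrative usage sketch only, not part of the original widget: it shows the typical wiring
 * for an enum. The {@code ChartType} enum and the icon URLs are hypothetical.
 */
private static final class UsageSketch {
    enum ChartType { BAR, PIE }
    static DropDownImageListEditor<ChartType> build() {
        final Image barIcon = new Image();
        barIcon.setUrl("images/bar.png"); // hypothetical icon URL
        final Image pieIcon = new Image();
        pieIcon.setUrl("images/pie.png"); // hypothetical icon URL
        final Map<ChartType, Image> options = new LinkedHashMap<ChartType, Image>();
        options.put(ChartType.BAR, barIcon);
        options.put(ChartType.PIE, pieIcon);
        final DropDownImageListEditor<ChartType> editor = new DropDownImageListEditor<ChartType>();
        editor.setSize(16, 16);              // size applied to each option image by setAcceptableValues
        editor.setAcceptableValues(options); // registers the values and their images
        editor.setEditMode(true);            // allow the user to change the selection by clicking an image
        editor.setValue(ChartType.BAR);      // initial selection; no ValueChangeEvent is fired
        return editor;
    }
}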
private void buildUIDropDown() {
dropDownButton.clear();
dropDownButton.addCustomTrigger(currentTypeImage);
dropDownButton.getMenuWiget().setVisible(false);
if (images!= null && images.size() > 1) {
dropDownButton.getMenuWiget().setVisible(true);
for (Map.Entry<T, Image> entry : images.entrySet()) {
if (this.value != null && !this.value.equals(entry.getKey())) dropDownButton.add(entry.getValue());
}
}
}
public void setAcceptableValues(final Collection<T> newValues) {
values.clear();
if (newValues != null) {
for (T nextNewValue : newValues) {
values.add(nextNewValue);
}
}
}
/**
* Sets the value and displays its associated image as the current selection. The value is not
* added to the acceptable set automatically; register it via {@link #setAcceptableValues(Map)} first.
*/
public void setValue(final T value) {
setValue(value, false);
}
public void setValue(final T value, final boolean fireEvents) {
// Disable current error markers, if present.
disableError();
if (value == this.value || (this.value != null && this.value.equals(value))) {
return;
}
T before = this.value;
this.value = value;
for (T entry : values) {
final Image image = images.get(entry);
// if (entry.equals(value)) dropDownButton.setText(value.toString());
if (entry.equals(value)) {
currentTypeImage.setUrl(image.getUrl());
currentTypeImage.setSize("16px", "16px");
}
}
// Build the drop down button.
buildUIDropDown();
if (fireEvents) {
ValueChangeEvent.fireIfNotEqual(this, before, value);
}
}
@Override
public void showErrors(List<EditorError> errors) {
boolean hasErrors = errors != null && !errors.isEmpty();
String toolTipText = null;
if (hasErrors) {
StringBuilder sb = new StringBuilder();
for (EditorError error : errors) {
sb.append("\n").append(error.getMessage());
}
if (sb.length() > 0) toolTipText = sb.substring(1);
}
if (toolTipText != null) {
enableError(toolTipText);
} else {
disableError();
}
}
private void enableError(String text) {
setTooltipText(text);
markErrorPanel(true);
}
private void disableError() {
setTooltipText(null);
markErrorPanel(false);
}
public void setEditMode(final boolean isEditMode) {
this.isEditMode = isEditMode;
}
public void setSize(final int w, final int h) {
this.width = w;
this.height = h;
}
public void clear() {
setValue(null);
}
private void markErrorPanel(boolean error) {
if (error) {
errorPanel.addStyleName(style.errorPanelError());
} else {
errorPanel.removeStyleName(style.errorPanelError());
}
}
private void setTooltipText(String text) {
if (text == null || text.trim().length() == 0) {
errorTooltip.setText("");
} else {
errorTooltip.setText(text);
}
// See issue https://github.com/gwtbootstrap/gwt-bootstrap/issues/287
errorTooltip.reconfigure();
}
}
|
|
package org.redisson.spring.support;
import java.util.Arrays;
import java.util.Collection;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.redisson.BaseTest;
import org.redisson.RedisRunner;
import org.redisson.RedissonFairLock;
import org.redisson.RedissonLiveObjectServiceTest.TestREntity;
import org.redisson.RedissonMultiLock;
import org.redisson.RedissonReadLock;
import org.redisson.RedissonRedLock;
import org.redisson.RedissonRuntimeEnvironment;
import org.redisson.RedissonWriteLock;
import org.redisson.api.LocalCachedMapOptions;
import org.redisson.api.RAtomicDouble;
import org.redisson.api.RAtomicLong;
import org.redisson.api.RBinaryStream;
import org.redisson.api.RBitSet;
import org.redisson.api.RBlockingDeque;
import org.redisson.api.RBlockingQueue;
import org.redisson.api.RBloomFilter;
import org.redisson.api.RBoundedBlockingQueue;
import org.redisson.api.RBucket;
import org.redisson.api.RBuckets;
import org.redisson.api.RCountDownLatch;
import org.redisson.api.RDelayedQueue;
import org.redisson.api.RDeque;
import org.redisson.api.RExecutorService;
import org.redisson.api.RGeo;
import org.redisson.api.RHyperLogLog;
import org.redisson.api.RKeys;
import org.redisson.api.RLexSortedSet;
import org.redisson.api.RList;
import org.redisson.api.RListMultimap;
import org.redisson.api.RLiveObject;
import org.redisson.api.RLiveObjectService;
import org.redisson.api.RLocalCachedMap;
import org.redisson.api.RLock;
import org.redisson.api.RMap;
import org.redisson.api.RMapCache;
import org.redisson.api.RObject;
import org.redisson.api.RPatternTopic;
import org.redisson.api.RPermitExpirableSemaphore;
import org.redisson.api.RPriorityDeque;
import org.redisson.api.RPriorityQueue;
import org.redisson.api.RQueue;
import org.redisson.api.RReadWriteLock;
import org.redisson.api.RRemoteService;
import org.redisson.api.RScoredSortedSet;
import org.redisson.api.RScript;
import org.redisson.api.RSemaphore;
import org.redisson.api.RSet;
import org.redisson.api.RSetCache;
import org.redisson.api.RSetMultimap;
import org.redisson.api.RSetMultimapCache;
import org.redisson.api.RSortedSet;
import org.redisson.api.RTopic;
import org.redisson.api.RemoteInvocationOptions;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
*
* @author Rui Gu (https://github.com/jackygurui)
*/
@RunWith(Parameterized.class)
public class SpringNamespaceObjectTest extends BaseTest {
private static ApplicationContext context;
@BeforeClass
public static void setupClass() throws Exception {
if (!RedissonRuntimeEnvironment.isTravis) {
startContext();
}
}
@AfterClass
public static void shutDownClass() {
if (!RedissonRuntimeEnvironment.isTravis) {
stopContext();
}
}
@Before
public void setup() throws Exception {
if (RedissonRuntimeEnvironment.isTravis) {
startContext();
}
}
@After
public void shutDown() {
if (RedissonRuntimeEnvironment.isTravis) {
stopContext();
}
}
@Override
protected boolean flushBetweenTests() {
return false;
}
public static void startContext() {
TestREntity entity = new TestREntity("live-object");
entity.setValue("1");
defaultRedisson.getLiveObjectService().merge(entity);
entity = new TestREntity("live-object-ext");
entity.setValue("1");
defaultRedisson.getLiveObjectService().merge(entity);
System.setProperty("redisAddress", RedisRunner.getDefaultRedisServerBindAddressAndPort());
context = new ClassPathXmlApplicationContext("classpath:org/redisson/spring/support/redisson_objects.xml");
}
public static void stopContext() {
((ConfigurableApplicationContext) context).close();
}
@Parameters(name = "{index}: key=[{0}], class=[{1}], parent=[{2}]")
public static Collection<Object[]> tests() {
return Arrays.asList(new Object[][]{
{"binary-stream", RBinaryStream.class, null},
{"geo", RGeo.class, null},
{"set-cache", RSetCache.class, null},
{"map-cache", RMapCache.class, null},
{"bucket", RBucket.class, null},
{"buckets", RBuckets.class, null},
{"hyper-log-log", RHyperLogLog.class, null},
{"list", RList.class, null},
{"list-multimap", RListMultimap.class, null},
{"local-cached-map", RLocalCachedMap.class, null},
{"local-options", LocalCachedMapOptions.class, null},
{"map", RMap.class, null},
{"set-multimap", RSetMultimap.class, null},
{"set-multimap-cache", RSetMultimapCache.class, null},
{"semaphore", RSemaphore.class, null},
{"permit-expirable-semaphore", RPermitExpirableSemaphore.class, null},
{"lock", RLock.class, null},
{"fair-lock", RedissonFairLock.class, null},
{"read-write-lock", RReadWriteLock.class, null},
{"read-lock", RedissonReadLock.class, "read-write-lock"},
{"write-lock", RedissonWriteLock.class, "read-write-lock"},
{"multi-lock", RedissonMultiLock.class, null},
{"lock-1", RLock.class, null},
{"fair-lock-1", RedissonFairLock.class, null},
{"read-lock-1", RedissonReadLock.class, "read-write-lock"},
{"write-lock-1", RedissonWriteLock.class, "read-write-lock"},
{"red-lock", RedissonRedLock.class, null},
{"lock-2", RLock.class, null},
{"fair-lock-2", RedissonFairLock.class, null},
{"read-lock-2", RedissonReadLock.class, "read-write-lock"},
{"write-lock-2", RedissonWriteLock.class, "read-write-lock"},
{"set", RSet.class, null},
{"sorted-set", RSortedSet.class, null},
{"scored-sorted-set", RScoredSortedSet.class, null},
{"lex-sorted-set", RLexSortedSet.class, null},
{"topic", RTopic.class, null},
{"pattern-topic", RPatternTopic.class, null},
{"queue", RQueue.class, null},
{"delayed-queue", RDelayedQueue.class, "queue"},
{"priority-queue", RPriorityQueue.class, null},
{"priority-deque", RPriorityDeque.class, null},
{"blocking-queue", RBlockingQueue.class, null},
{"bounded-blocking-queue", RBoundedBlockingQueue.class, null},
{"deque", RDeque.class, null},
{"blocking-deque", RBlockingDeque.class, null},
{"atomic-long", RAtomicLong.class, null},
{"atomic-double", RAtomicDouble.class, null},
{"count-down-latch", RCountDownLatch.class, null},
{"bit-set", RBitSet.class, null},
{"bloom-filter", RBloomFilter.class, null},
{"script", RScript.class, null},
{"executor-service", RExecutorService.class, null},
{"remote-service", RRemoteService.class, null},
{"rpc-client", org.redisson.RedissonRemoteServiceTest.RemoteInterface.class, null},
{"options", RemoteInvocationOptions.class, null},
{"keys", RKeys.class, null},
{"live-object-service", RLiveObjectService.class, null},
{"live-object", RLiveObject.class, null},
{"binary-stream-ext", RBinaryStream.class, null},
{"geo-ext", RGeo.class, null},
{"set-cache-ext", RSetCache.class, null},
{"map-cache-ext", RMapCache.class, null},
{"bucket-ext", RBucket.class, null},
{"buckets-ext", RBuckets.class, null},
{"hyper-log-log-ext", RHyperLogLog.class, null},
{"list-ext", RList.class, null},
{"list-multimap-ext", RListMultimap.class, null},
{"local-cached-map-ext", RLocalCachedMap.class, null},
{"local-options-ext", LocalCachedMapOptions.class, null},
{"map-ext", RMap.class, null},
{"set-multimap-ext", RSetMultimap.class, null},
{"set-multimap-cache-ext", RSetMultimapCache.class, null},
{"semaphore-ext", RSemaphore.class, null},
{"permit-expirable-semaphore-ext", RPermitExpirableSemaphore.class, null},
{"lock-ext", RLock.class, null},
{"fair-lock-ext", RedissonFairLock.class, null},
{"read-write-lock-ext", RReadWriteLock.class, null},
{"read-lock-ext", RedissonReadLock.class, "read-write-lock-ext"},
{"write-lock-ext", RedissonWriteLock.class, "read-write-lock-ext"},
{"multi-lock-ext", RedissonMultiLock.class, null},
{"lock-1-ext", RLock.class, null},
{"fair-lock-1-ext", RedissonFairLock.class, null},
{"read-lock-1-ext", RedissonReadLock.class, "read-write-lock-ext"},
{"write-lock-1-ext", RedissonWriteLock.class, "read-write-lock-ext"},
{"red-lock-ext", RedissonRedLock.class, null},
{"lock-2-ext", RLock.class, null},
{"fair-lock-2-ext", RedissonFairLock.class, null},
{"read-lock-2-ext", RedissonReadLock.class, "read-write-lock-ext"},
{"write-lock-2-ext", RedissonWriteLock.class, "read-write-lock-ext"},
{"set-ext", RSet.class, null},
{"sorted-set-ext", RSortedSet.class, null},
{"scored-sorted-set-ext", RScoredSortedSet.class, null},
{"lex-sorted-set-ext", RLexSortedSet.class, null},
{"topic-ext", RTopic.class, null},
{"pattern-topic-ext", RPatternTopic.class, null},
{"queue-ext", RQueue.class, null},
{"delayed-queue-ext", RDelayedQueue.class, "queue-ext"},
{"priority-queue-ext", RPriorityQueue.class, null},
{"priority-deque-ext", RPriorityDeque.class, null},
{"blocking-queue-ext", RBlockingQueue.class, null},
{"bounded-blocking-queue-ext", RBoundedBlockingQueue.class, null},
{"deque-ext", RDeque.class, null},
{"blocking-deque-ext", RBlockingDeque.class, null},
{"atomic-long-ext", RAtomicLong.class, null},
{"atomic-double-ext", RAtomicDouble.class, null},
{"count-down-latch-ext", RCountDownLatch.class, null},
{"bit-set-ext", RBitSet.class, null},
{"bloom-filter-ext", RBloomFilter.class, null},
{"script-ext", RScript.class, null},
{"executor-service-ext", RExecutorService.class, null},
{"remote-service-ext", RRemoteService.class, null},
{"rpc-client-ext", org.redisson.RedissonRemoteServiceTest.RemoteInterface.class, null},
{"options-ext", RemoteInvocationOptions.class, null},
{"keys-ext", RKeys.class, null},
{"live-object-service-ext", RLiveObjectService.class, null},
{"live-object-ext", RLiveObject.class, null},
});
}
@Parameter
public String key;
@Parameter(1)
public Class cls;
@Parameter(2)
public String parentKey;
@Test
public void testRObjects() {
Object bean = context.getBean(key);
assertTrue(cls.isInstance(bean));
if (RObject.class.isAssignableFrom(cls)) {
assertEquals(parentKey == null ? key : parentKey, RObject.class.cast(bean).getName());
}
if (RTopic.class.isAssignableFrom(cls)) {
assertEquals(key, RTopic.class.cast(bean).getChannelNames().get(0));
}
if (RPatternTopic.class.isAssignableFrom(cls)) {
assertEquals(key, RPatternTopic.class.cast(bean).getPatternNames().get(0));
}
if (RLiveObject.class.isAssignableFrom(cls)) {
assertEquals(key, RLiveObject.class.cast(bean).getLiveObjectId());
}
}
}
|
|
/*
* Copyright 2014 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.auto.common;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.getOnlyElement;
import static javax.lang.model.type.TypeKind.ARRAY;
import static javax.lang.model.type.TypeKind.DECLARED;
import static javax.lang.model.type.TypeKind.EXECUTABLE;
import static javax.lang.model.type.TypeKind.INTERSECTION;
import static javax.lang.model.type.TypeKind.TYPEVAR;
import static javax.lang.model.type.TypeKind.WILDCARD;
import com.google.common.base.Equivalence;
import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ErrorType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.IntersectionType;
import javax.lang.model.type.NoType;
import javax.lang.model.type.NullType;
import javax.lang.model.type.PrimitiveType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.Elements;
import javax.lang.model.util.SimpleTypeVisitor8;
import javax.lang.model.util.Types;
/**
* Utilities related to {@link TypeMirror} instances.
*
* @author Gregory Kick
* @since 2.0
*/
public final class MoreTypes {
private static final class TypeEquivalence extends Equivalence<TypeMirror> {
private static final TypeEquivalence INSTANCE = new TypeEquivalence();
@Override
protected boolean doEquivalent(TypeMirror a, TypeMirror b) {
return MoreTypes.equal(a, b, ImmutableSet.<ComparedElements>of());
}
@Override
protected int doHash(TypeMirror t) {
return MoreTypes.hash(t, ImmutableSet.<Element>of());
}
}
/**
* Returns an {@link Equivalence} that can be used to compare types. The standard way to compare
* types is {@link javax.lang.model.util.Types#isSameType Types.isSameType}, but this alternative
* may be preferred in a number of cases:
*
* <ul>
* <li>If you don't have an instance of {@code Types}.
* <li>If you want a reliable {@code hashCode()} for the types, for example to construct a set
* of types using {@link java.util.HashSet} with {@link Equivalence#wrap(Object)}.
* <li>If you want distinct type variables to be considered equal if they have the same names
* and bounds.
* <li>If you want wildcard types to compare equal if they have the same bounds. {@code
* Types.isSameType} never considers wildcards equal, even when comparing a type to itself.
* </ul>
*/
public static Equivalence<TypeMirror> equivalence() {
return TypeEquivalence.INSTANCE;
}
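  /**
   * Illustrative sketch only, not part of the original API: as the Javadoc above suggests, the
   * equivalence gives reliable set semantics for {@link TypeMirror}s by storing wrapped instances.
   */
  private static Set<Equivalence.Wrapper<TypeMirror>> wrapForSetSketch(
      Iterable<? extends TypeMirror> types) {
    Set<Equivalence.Wrapper<TypeMirror>> wrapped = new HashSet<Equivalence.Wrapper<TypeMirror>>();
    for (TypeMirror type : types) {
      // The wrapper's equals()/hashCode() delegate to MoreTypes.equal()/hash() rather than to the
      // unspecified TypeMirror implementations.
      wrapped.add(equivalence().wrap(type));
    }
    return wrapped;
  }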
// So EQUAL_VISITOR can be a singleton, we maintain visiting state, in particular which types
// have been seen already, in this object.
// The logic for handling recursive types like Comparable<T extends Comparable<T>> is very tricky.
// If we're not careful we'll end up with an infinite recursion. So we record the types that
// we've already seen during the recursion, and if we see the same pair of types again we just
// return true provisionally. But "the same pair of types" is itself poorly-defined. We can't
// just say that it is an equal pair of TypeMirrors, because of course if we knew how to
// determine that then we wouldn't need the complicated type visitor at all. On the other hand,
// we can't say that it is an identical pair of TypeMirrors either, because there's no
// guarantee that the TypeMirrors for the two Ts in Comparable<T extends Comparable<T>> will be
// represented by the same object, and indeed with the Eclipse compiler they aren't. We could
// compare the corresponding Elements, since equality is well-defined there, but that's not enough
// either, because the Element for Set<Object> is the same as the one for Set<String>. So we
// approximate by comparing the Elements and, if there are any type arguments, requiring them to
// be identical. This may not be foolproof either but it is sufficient for all the cases we've
// encountered so far.
private static final class EqualVisitorParam {
TypeMirror type;
Set<ComparedElements> visiting;
}
private static class ComparedElements {
final Element a;
final ImmutableList<TypeMirror> aArguments;
final Element b;
final ImmutableList<TypeMirror> bArguments;
ComparedElements(
Element a,
ImmutableList<TypeMirror> aArguments,
Element b,
ImmutableList<TypeMirror> bArguments) {
this.a = a;
this.aArguments = aArguments;
this.b = b;
this.bArguments = bArguments;
}
@Override
public boolean equals(Object o) {
if (o instanceof ComparedElements) {
ComparedElements that = (ComparedElements) o;
int nArguments = aArguments.size();
if (!this.a.equals(that.a)
|| !this.b.equals(that.b)
|| nArguments != bArguments.size()) {
// The arguments must be the same size, but we check anyway.
return false;
}
for (int i = 0; i < nArguments; i++) {
if (aArguments.get(i) != bArguments.get(i)) {
return false;
}
}
return true;
} else {
return false;
}
}
@Override
public int hashCode() {
return a.hashCode() * 31 + b.hashCode();
}
}
private static final class EqualVisitor extends SimpleTypeVisitor8<Boolean, EqualVisitorParam> {
private static final EqualVisitor INSTANCE = new EqualVisitor();
@Override
protected Boolean defaultAction(TypeMirror a, EqualVisitorParam p) {
return a.getKind().equals(p.type.getKind());
}
@Override
public Boolean visitArray(ArrayType a, EqualVisitorParam p) {
if (p.type.getKind().equals(ARRAY)) {
ArrayType b = (ArrayType) p.type;
return equal(a.getComponentType(), b.getComponentType(), p.visiting);
}
return false;
}
@Override
public Boolean visitDeclared(DeclaredType a, EqualVisitorParam p) {
if (p.type.getKind().equals(DECLARED)) {
DeclaredType b = (DeclaredType) p.type;
Element aElement = a.asElement();
Element bElement = b.asElement();
Set<ComparedElements> newVisiting =
visitingSetPlus(
p.visiting, aElement, a.getTypeArguments(), bElement, b.getTypeArguments());
if (newVisiting.equals(p.visiting)) {
// We're already visiting this pair of elements.
// This can happen for example with Enum in Enum<E extends Enum<E>>. Return a
// provisional true value since if the Elements are not in fact equal the original
// visitor of Enum will discover that. We have to check both Elements being compared
// though to avoid missing the fact that one of the types being compared
// differs at exactly this point.
return true;
}
return aElement.equals(bElement)
&& equal(enclosingType(a), enclosingType(b), newVisiting)
&& equalLists(a.getTypeArguments(), b.getTypeArguments(), newVisiting);
}
return false;
}
@Override
@SuppressWarnings("TypeEquals")
public Boolean visitError(ErrorType a, EqualVisitorParam p) {
return a.equals(p.type);
}
@Override
public Boolean visitExecutable(ExecutableType a, EqualVisitorParam p) {
if (p.type.getKind().equals(EXECUTABLE)) {
ExecutableType b = (ExecutableType) p.type;
return equalLists(a.getParameterTypes(), b.getParameterTypes(), p.visiting)
&& equal(a.getReturnType(), b.getReturnType(), p.visiting)
&& equalLists(a.getThrownTypes(), b.getThrownTypes(), p.visiting)
&& equalLists(a.getTypeVariables(), b.getTypeVariables(), p.visiting);
}
return false;
}
@Override
public Boolean visitIntersection(IntersectionType a, EqualVisitorParam p) {
if (p.type.getKind().equals(INTERSECTION)) {
IntersectionType b = (IntersectionType) p.type;
return equalLists(a.getBounds(), b.getBounds(), p.visiting);
}
return false;
}
@Override
public Boolean visitTypeVariable(TypeVariable a, EqualVisitorParam p) {
if (p.type.getKind().equals(TYPEVAR)) {
TypeVariable b = (TypeVariable) p.type;
TypeParameterElement aElement = (TypeParameterElement) a.asElement();
TypeParameterElement bElement = (TypeParameterElement) b.asElement();
Set<ComparedElements> newVisiting = visitingSetPlus(p.visiting, aElement, bElement);
if (newVisiting.equals(p.visiting)) {
// We're already visiting this pair of elements.
// This can happen with our friend Eclipse when looking at <T extends Comparable<T>>.
// It incorrectly reports the upper bound of T as T itself.
return true;
}
// We use aElement.getBounds() instead of a.getUpperBound() to avoid having to deal with
// the different way intersection types (like <T extends Number & Comparable<T>>) are
// represented before and after Java 8. We do have an issue that this code may consider
// that <T extends Foo & Bar> is different from <T extends Bar & Foo>, but it's very
// hard to avoid that, and not likely to be much of a problem in practice.
return equalLists(aElement.getBounds(), bElement.getBounds(), newVisiting)
&& equal(a.getLowerBound(), b.getLowerBound(), newVisiting)
&& a.asElement().getSimpleName().equals(b.asElement().getSimpleName());
}
return false;
}
@Override
public Boolean visitWildcard(WildcardType a, EqualVisitorParam p) {
if (p.type.getKind().equals(WILDCARD)) {
WildcardType b = (WildcardType) p.type;
return equal(a.getExtendsBound(), b.getExtendsBound(), p.visiting)
&& equal(a.getSuperBound(), b.getSuperBound(), p.visiting);
}
return false;
}
@Override
public Boolean visitUnknown(TypeMirror a, EqualVisitorParam p) {
throw new UnsupportedOperationException();
}
private Set<ComparedElements> visitingSetPlus(
Set<ComparedElements> visiting, Element a, Element b) {
ImmutableList<TypeMirror> noArguments = ImmutableList.of();
return visitingSetPlus(visiting, a, noArguments, b, noArguments);
}
private Set<ComparedElements> visitingSetPlus(
Set<ComparedElements> visiting,
Element a,
List<? extends TypeMirror> aArguments,
Element b,
List<? extends TypeMirror> bArguments) {
ComparedElements comparedElements =
new ComparedElements(
a, ImmutableList.<TypeMirror>copyOf(aArguments),
b, ImmutableList.<TypeMirror>copyOf(bArguments));
Set<ComparedElements> newVisiting = new HashSet<ComparedElements>(visiting);
newVisiting.add(comparedElements);
return newVisiting;
}
}
@SuppressWarnings("TypeEquals")
private static boolean equal(TypeMirror a, TypeMirror b, Set<ComparedElements> visiting) {
// TypeMirror.equals is not guaranteed to return true for types that are equal, but we can
// assume that if it does return true then the types are equal. This check also avoids getting
// stuck in infinite recursion when Eclipse decrees that the upper bound of the second K in
// <K extends Comparable<K>> is a distinct but equal K.
// The javac implementation of ExecutableType, at least in some versions, does not take thrown
// exceptions into account in its equals implementation, so avoid this optimization for
// ExecutableType.
if (Objects.equal(a, b) && !(a instanceof ExecutableType)) {
return true;
}
EqualVisitorParam p = new EqualVisitorParam();
p.type = b;
p.visiting = visiting;
return (a == b) || (a != null && b != null && a.accept(EqualVisitor.INSTANCE, p));
}
/**
* Returns the type of the innermost enclosing instance, or null if there is none. This is the
* same as {@link DeclaredType#getEnclosingType()} except that it returns null rather than
* NoType for a static type. We need this because of
* <a href="https://bugs.eclipse.org/bugs/show_bug.cgi?id=508222">this bug</a> whereby
* the Eclipse compiler returns a value for static classes that is not NoType.
*/
private static TypeMirror enclosingType(DeclaredType t) {
TypeMirror enclosing = t.getEnclosingType();
if (enclosing.getKind().equals(TypeKind.NONE)
|| t.asElement().getModifiers().contains(Modifier.STATIC)) {
return null;
}
return enclosing;
}
private static boolean equalLists(
List<? extends TypeMirror> a, List<? extends TypeMirror> b, Set<ComparedElements> visiting) {
int size = a.size();
if (size != b.size()) {
return false;
}
// Use iterators in case the Lists aren't RandomAccess
Iterator<? extends TypeMirror> aIterator = a.iterator();
Iterator<? extends TypeMirror> bIterator = b.iterator();
while (aIterator.hasNext()) {
if (!bIterator.hasNext()) {
return false;
}
TypeMirror nextMirrorA = aIterator.next();
TypeMirror nextMirrorB = bIterator.next();
if (!equal(nextMirrorA, nextMirrorB, visiting)) {
return false;
}
}
return !aIterator.hasNext();
}
private static final int HASH_SEED = 17;
private static final int HASH_MULTIPLIER = 31;
private static final class HashVisitor extends SimpleTypeVisitor8<Integer, Set<Element>> {
private static final HashVisitor INSTANCE = new HashVisitor();
int hashKind(int seed, TypeMirror t) {
int result = seed * HASH_MULTIPLIER;
result += t.getKind().hashCode();
return result;
}
@Override
protected Integer defaultAction(TypeMirror e, Set<Element> visiting) {
return hashKind(HASH_SEED, e);
}
@Override
public Integer visitArray(ArrayType t, Set<Element> visiting) {
int result = hashKind(HASH_SEED, t);
result *= HASH_MULTIPLIER;
result += t.getComponentType().accept(this, visiting);
return result;
}
@Override
public Integer visitDeclared(DeclaredType t, Set<Element> visiting) {
Element element = t.asElement();
if (visiting.contains(element)) {
return 0;
}
Set<Element> newVisiting = new HashSet<Element>(visiting);
newVisiting.add(element);
int result = hashKind(HASH_SEED, t);
result *= HASH_MULTIPLIER;
result += t.asElement().hashCode();
result *= HASH_MULTIPLIER;
result += t.getEnclosingType().accept(this, newVisiting);
result *= HASH_MULTIPLIER;
result += hashList(t.getTypeArguments(), newVisiting);
return result;
}
@Override
public Integer visitExecutable(ExecutableType t, Set<Element> visiting) {
int result = hashKind(HASH_SEED, t);
result *= HASH_MULTIPLIER;
result += hashList(t.getParameterTypes(), visiting);
result *= HASH_MULTIPLIER;
result += t.getReturnType().accept(this, visiting);
result *= HASH_MULTIPLIER;
result += hashList(t.getThrownTypes(), visiting);
result *= HASH_MULTIPLIER;
result += hashList(t.getTypeVariables(), visiting);
return result;
}
@Override
public Integer visitTypeVariable(TypeVariable t, Set<Element> visiting) {
int result = hashKind(HASH_SEED, t);
result *= HASH_MULTIPLIER;
result += t.getLowerBound().accept(this, visiting);
TypeParameterElement element = (TypeParameterElement) t.asElement();
for (TypeMirror bound : element.getBounds()) {
result *= HASH_MULTIPLIER;
result += bound.accept(this, visiting);
}
return result;
}
@Override
public Integer visitWildcard(WildcardType t, Set<Element> visiting) {
int result = hashKind(HASH_SEED, t);
result *= HASH_MULTIPLIER;
result += (t.getExtendsBound() == null) ? 0 : t.getExtendsBound().accept(this, visiting);
result *= HASH_MULTIPLIER;
result += (t.getSuperBound() == null) ? 0 : t.getSuperBound().accept(this, visiting);
return result;
}
@Override
public Integer visitUnknown(TypeMirror t, Set<Element> visiting) {
throw new UnsupportedOperationException();
}
};
private static int hashList(List<? extends TypeMirror> mirrors, Set<Element> visiting) {
int result = HASH_SEED;
for (TypeMirror mirror : mirrors) {
result *= HASH_MULTIPLIER;
result += hash(mirror, visiting);
}
return result;
}
private static int hash(TypeMirror mirror, Set<Element> visiting) {
return mirror == null ? 0 : mirror.accept(HashVisitor.INSTANCE, visiting);
}
/**
* Returns the set of {@linkplain TypeElement types} that are referenced by the given {@link
* TypeMirror}.
*/
public static ImmutableSet<TypeElement> referencedTypes(TypeMirror type) {
checkNotNull(type);
ImmutableSet.Builder<TypeElement> elements = ImmutableSet.builder();
type.accept(ReferencedTypes.INSTANCE, elements);
return elements.build();
}
private static final class ReferencedTypes
extends SimpleTypeVisitor8<Void, ImmutableSet.Builder<TypeElement>> {
private static final ReferencedTypes INSTANCE = new ReferencedTypes();
@Override
public Void visitArray(ArrayType t, ImmutableSet.Builder<TypeElement> p) {
t.getComponentType().accept(this, p);
return null;
}
@Override
public Void visitDeclared(DeclaredType t, ImmutableSet.Builder<TypeElement> p) {
p.add(MoreElements.asType(t.asElement()));
for (TypeMirror typeArgument : t.getTypeArguments()) {
typeArgument.accept(this, p);
}
return null;
}
@Override
public Void visitTypeVariable(TypeVariable t, ImmutableSet.Builder<TypeElement> p) {
t.getLowerBound().accept(this, p);
t.getUpperBound().accept(this, p);
return null;
}
@Override
public Void visitWildcard(WildcardType t, ImmutableSet.Builder<TypeElement> p) {
TypeMirror extendsBound = t.getExtendsBound();
if (extendsBound != null) {
extendsBound.accept(this, p);
}
TypeMirror superBound = t.getSuperBound();
if (superBound != null) {
superBound.accept(this, p);
}
return null;
}
}
/**
   * An alternate implementation of {@link Types#asElement} that does not require a {@link Types}
   * instance, with the notable difference that it throws {@link IllegalArgumentException} instead
   * of returning {@code null} if the {@link TypeMirror} cannot be converted to an {@link Element}.
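   *
   * <p>A minimal sketch, assuming {@code fieldType} is the {@link TypeMirror} of a field whose
   * type is a class or interface:
   *
   * <pre>{@code
   * Element element = MoreTypes.asElement(fieldType);        // the type's element
   * TypeElement typeElement = MoreElements.asType(element);  // narrow to TypeElement
   * }</pre>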
*
* @throws NullPointerException if {@code typeMirror} is {@code null}
* @throws IllegalArgumentException if {@code typeMirror} cannot be converted to an {@link
* Element}
*/
public static Element asElement(TypeMirror typeMirror) {
return typeMirror.accept(AsElementVisitor.INSTANCE, null);
}
private static final class AsElementVisitor extends SimpleTypeVisitor8<Element, Void> {
private static final AsElementVisitor INSTANCE = new AsElementVisitor();
@Override
protected Element defaultAction(TypeMirror e, Void p) {
throw new IllegalArgumentException(e + " cannot be converted to an Element");
}
@Override
public Element visitDeclared(DeclaredType t, Void p) {
return t.asElement();
}
@Override
public Element visitError(ErrorType t, Void p) {
return t.asElement();
}
@Override
public Element visitTypeVariable(TypeVariable t, Void p) {
return t.asElement();
}
};
// TODO(gak): consider removing these two methods as they're pretty trivial now
public static TypeElement asTypeElement(TypeMirror mirror) {
return MoreElements.asType(asElement(mirror));
}
public static ImmutableSet<TypeElement> asTypeElements(Iterable<? extends TypeMirror> mirrors) {
checkNotNull(mirrors);
ImmutableSet.Builder<TypeElement> builder = ImmutableSet.builder();
for (TypeMirror mirror : mirrors) {
builder.add(asTypeElement(mirror));
}
return builder.build();
}
/**
   * Returns an {@link ArrayType} if the {@link TypeMirror} represents an array, or throws an {@link
* IllegalArgumentException}.
*/
public static ArrayType asArray(TypeMirror maybeArrayType) {
return maybeArrayType.accept(ArrayTypeVisitor.INSTANCE, null);
}
private static final class ArrayTypeVisitor extends CastingTypeVisitor<ArrayType> {
private static final ArrayTypeVisitor INSTANCE = new ArrayTypeVisitor();
ArrayTypeVisitor() {
super("array");
}
@Override
public ArrayType visitArray(ArrayType type, Void ignore) {
return type;
}
}
/**
* Returns a {@link DeclaredType} if the {@link TypeMirror} represents a declared type such as a
   * class, interface, union/compound, or enum, or throws an {@link IllegalArgumentException}.
*/
public static DeclaredType asDeclared(TypeMirror maybeDeclaredType) {
return maybeDeclaredType.accept(DeclaredTypeVisitor.INSTANCE, null);
}
private static final class DeclaredTypeVisitor extends CastingTypeVisitor<DeclaredType> {
private static final DeclaredTypeVisitor INSTANCE = new DeclaredTypeVisitor();
DeclaredTypeVisitor() {
super("declared type");
}
@Override
public DeclaredType visitDeclared(DeclaredType type, Void ignore) {
return type;
}
}
/**
   * Returns an {@link ErrorType} if the {@link TypeMirror} represents an error type, such as may
   * result from missing code or a bad compile, or throws an {@link IllegalArgumentException}.
*/
public static ErrorType asError(TypeMirror maybeErrorType) {
return maybeErrorType.accept(ErrorTypeVisitor.INSTANCE, null);
}
private static final class ErrorTypeVisitor extends CastingTypeVisitor<ErrorType> {
private static final ErrorTypeVisitor INSTANCE = new ErrorTypeVisitor();
ErrorTypeVisitor() {
super("error type");
}
@Override
public ErrorType visitError(ErrorType type, Void ignore) {
return type;
}
}
/**
   * Returns an {@link ExecutableType} if the {@link TypeMirror} represents an executable type such
   * as a method, constructor, or initializer, or throws an {@link IllegalArgumentException}.
*/
public static ExecutableType asExecutable(TypeMirror maybeExecutableType) {
return maybeExecutableType.accept(ExecutableTypeVisitor.INSTANCE, null);
}
private static final class ExecutableTypeVisitor extends CastingTypeVisitor<ExecutableType> {
private static final ExecutableTypeVisitor INSTANCE = new ExecutableTypeVisitor();
ExecutableTypeVisitor() {
super("executable type");
}
@Override
public ExecutableType visitExecutable(ExecutableType type, Void ignore) {
return type;
}
}
/**
   * Returns an {@link IntersectionType} if the {@link TypeMirror} represents an intersection type,
* or throws an {@link IllegalArgumentException}.
*/
public static IntersectionType asIntersection(TypeMirror maybeIntersectionType) {
return maybeIntersectionType.accept(IntersectionTypeVisitor.INSTANCE, null);
}
private static final class IntersectionTypeVisitor extends CastingTypeVisitor<IntersectionType> {
private static final IntersectionTypeVisitor INSTANCE = new IntersectionTypeVisitor();
IntersectionTypeVisitor() {
super("intersection type");
}
@Override
public IntersectionType visitIntersection(IntersectionType type, Void ignore) {
return type;
}
}
/**
   * Returns a {@link NoType} if the {@link TypeMirror} represents a non-type, such as {@code void}
   * or a package, or throws an {@link IllegalArgumentException}.
*/
public static NoType asNoType(TypeMirror maybeNoType) {
return maybeNoType.accept(NoTypeVisitor.INSTANCE, null);
}
private static final class NoTypeVisitor extends CastingTypeVisitor<NoType> {
private static final NoTypeVisitor INSTANCE = new NoTypeVisitor();
NoTypeVisitor() {
super("non-type");
}
@Override
public NoType visitNoType(NoType type, Void ignore) {
return type;
}
}
/**
* Returns a {@link NullType} if the {@link TypeMirror} represents the null type or throws an
* {@link IllegalArgumentException}.
*/
public static NullType asNullType(TypeMirror maybeNullType) {
return maybeNullType.accept(NullTypeVisitor.INSTANCE, null);
}
private static final class NullTypeVisitor extends CastingTypeVisitor<NullType> {
private static final NullTypeVisitor INSTANCE = new NullTypeVisitor();
NullTypeVisitor() {
super("null");
}
@Override
public NullType visitNull(NullType type, Void ignore) {
return type;
}
}
/**
* Returns a {@link PrimitiveType} if the {@link TypeMirror} represents a primitive type or throws
* an {@link IllegalArgumentException}.
*/
public static PrimitiveType asPrimitiveType(TypeMirror maybePrimitiveType) {
return maybePrimitiveType.accept(PrimitiveTypeVisitor.INSTANCE, null);
}
private static final class PrimitiveTypeVisitor extends CastingTypeVisitor<PrimitiveType> {
private static final PrimitiveTypeVisitor INSTANCE = new PrimitiveTypeVisitor();
PrimitiveTypeVisitor() {
super("primitive type");
}
@Override
public PrimitiveType visitPrimitive(PrimitiveType type, Void ignore) {
return type;
}
}
//
// visitUnionType would go here, but isn't relevant for annotation processors
//
/**
* Returns a {@link TypeVariable} if the {@link TypeMirror} represents a type variable or throws
* an {@link IllegalArgumentException}.
*/
public static TypeVariable asTypeVariable(TypeMirror maybeTypeVariable) {
return maybeTypeVariable.accept(TypeVariableVisitor.INSTANCE, null);
}
private static final class TypeVariableVisitor extends CastingTypeVisitor<TypeVariable> {
private static final TypeVariableVisitor INSTANCE = new TypeVariableVisitor();
TypeVariableVisitor() {
super("type variable");
}
@Override
public TypeVariable visitTypeVariable(TypeVariable type, Void ignore) {
return type;
}
}
/**
* Returns a {@link WildcardType} if the {@link TypeMirror} represents a wildcard type or throws
* an {@link IllegalArgumentException}.
*/
public static WildcardType asWildcard(TypeMirror maybeWildcardType) {
return maybeWildcardType.accept(WildcardTypeVisitor.INSTANCE, null);
}
private static final class WildcardTypeVisitor extends CastingTypeVisitor<WildcardType> {
private static final WildcardTypeVisitor INSTANCE = new WildcardTypeVisitor();
WildcardTypeVisitor() {
super("wildcard type");
}
@Override
public WildcardType visitWildcard(WildcardType type, Void ignore) {
return type;
}
}
/**
* Returns true if the raw type underlying the given {@link TypeMirror} represents a type that can
* be referenced by a {@link Class}. If this returns true, then {@link #isTypeOf} is guaranteed to
* not throw.
*/
public static boolean isType(TypeMirror type) {
return type.accept(IsTypeVisitor.INSTANCE, null);
}
private static final class IsTypeVisitor extends SimpleTypeVisitor8<Boolean, Void> {
private static final IsTypeVisitor INSTANCE = new IsTypeVisitor();
@Override
protected Boolean defaultAction(TypeMirror type, Void ignored) {
return false;
}
@Override
public Boolean visitNoType(NoType noType, Void p) {
return noType.getKind().equals(TypeKind.VOID);
}
@Override
public Boolean visitPrimitive(PrimitiveType type, Void p) {
return true;
}
@Override
public Boolean visitArray(ArrayType array, Void p) {
return true;
}
@Override
public Boolean visitDeclared(DeclaredType type, Void ignored) {
return MoreElements.isType(type.asElement());
}
}
/**
* Returns true if the raw type underlying the given {@link TypeMirror} represents the same raw
   * type as the given {@link Class}, and throws an {@link IllegalArgumentException} if the {@link
   * TypeMirror} does not represent a type that can be referenced by a {@link Class}.
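   *
   * <p>For example, assuming {@code stringMirror} is a {@link TypeMirror} for {@code
   * java.lang.String} obtained from the processing environment:
   *
   * <pre>{@code
   * MoreTypes.isTypeOf(String.class, stringMirror);        // true
   * MoreTypes.isTypeOf(CharSequence.class, stringMirror);  // false; raw types must match exactly
   * }</pre>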
*/
public static boolean isTypeOf(final Class<?> clazz, TypeMirror type) {
checkNotNull(clazz);
return type.accept(new IsTypeOf(clazz), null);
}
private static final class IsTypeOf extends SimpleTypeVisitor8<Boolean, Void> {
private final Class<?> clazz;
IsTypeOf(Class<?> clazz) {
this.clazz = clazz;
}
@Override
protected Boolean defaultAction(TypeMirror type, Void ignored) {
throw new IllegalArgumentException(type + " cannot be represented as a Class<?>.");
}
@Override
public Boolean visitNoType(NoType noType, Void p) {
if (noType.getKind().equals(TypeKind.VOID)) {
return clazz.equals(Void.TYPE);
}
throw new IllegalArgumentException(noType + " cannot be represented as a Class<?>.");
}
@Override
public Boolean visitPrimitive(PrimitiveType type, Void p) {
switch (type.getKind()) {
case BOOLEAN:
return clazz.equals(Boolean.TYPE);
case BYTE:
return clazz.equals(Byte.TYPE);
case CHAR:
return clazz.equals(Character.TYPE);
case DOUBLE:
return clazz.equals(Double.TYPE);
case FLOAT:
return clazz.equals(Float.TYPE);
case INT:
return clazz.equals(Integer.TYPE);
case LONG:
return clazz.equals(Long.TYPE);
case SHORT:
return clazz.equals(Short.TYPE);
default:
throw new IllegalArgumentException(type + " cannot be represented as a Class<?>.");
}
}
@Override
public Boolean visitArray(ArrayType array, Void p) {
return clazz.isArray() && isTypeOf(clazz.getComponentType(), array.getComponentType());
}
@Override
public Boolean visitDeclared(DeclaredType type, Void ignored) {
TypeElement typeElement;
try {
typeElement = MoreElements.asType(type.asElement());
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException(type + " does not represent a class or interface.");
}
return typeElement.getQualifiedName().contentEquals(clazz.getCanonicalName());
}
}
/**
   * Returns the non-{@code Object} superclass of the type, with its type parameters properly
   * resolved. An absent {@link Optional} is returned if there is no non-{@code Object} superclass.
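   *
   * <p>A minimal sketch, assuming {@code types} and {@code elements} come from the processing
   * environment and {@code arrayListOfString} is a {@link DeclaredType} for {@code
   * ArrayList<String>}:
   *
   * <pre>{@code
   * Optional<DeclaredType> superclass =
   *     MoreTypes.nonObjectSuperclass(types, elements, arrayListOfString);
   * // superclass is present and represents AbstractList<String>.
   * }</pre>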
*/
public static Optional<DeclaredType> nonObjectSuperclass(final Types types, Elements elements,
DeclaredType type) {
checkNotNull(types);
checkNotNull(elements);
checkNotNull(type);
final TypeMirror objectType =
elements.getTypeElement(Object.class.getCanonicalName()).asType();
    // It's guaranteed there's only a single CLASS superclass because Java doesn't have multiple
    // class inheritance.
TypeMirror superclass =
getOnlyElement(
FluentIterable.from(types.directSupertypes(type))
.filter(
new Predicate<TypeMirror>() {
@Override
public boolean apply(TypeMirror input) {
return input.getKind().equals(TypeKind.DECLARED)
&& (MoreElements.asType(MoreTypes.asDeclared(input).asElement()))
.getKind()
.equals(ElementKind.CLASS)
&& !types.isSameType(objectType, input);
}
}),
null);
return superclass != null
? Optional.of(MoreTypes.asDeclared(superclass))
: Optional.<DeclaredType>absent();
}
/**
   * Resolves a {@link VariableElement}, which is either a parameter of a method or constructor or
   * a member of a class, against the given container type. For method and constructor parameters,
   * the type enclosing the variable must be a supertype of the container type. For example, given a
* {@code container} of type {@code Set<String>}, and a variable corresponding to the {@code E e}
* parameter in the {@code Set.add(E e)} method, this will return a TypeMirror for {@code String}.
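   *
   * <p>Roughly, continuing that example ({@code setOfString} and {@code addParameter} are assumed
   * to have been obtained from the processing environment):
   *
   * <pre>{@code
   * // setOfString:  DeclaredType for Set<String>
   * // addParameter: VariableElement for the "E e" parameter of Set.add(E e)
   * TypeMirror resolved = MoreTypes.asMemberOf(types, setOfString, addParameter);
   * // resolved is the TypeMirror for String.
   * }</pre>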
*/
public static TypeMirror asMemberOf(Types types, DeclaredType container,
VariableElement variable) {
if (variable.getKind().equals(ElementKind.PARAMETER)) {
ExecutableElement methodOrConstructor =
MoreElements.asExecutable(variable.getEnclosingElement());
ExecutableType resolvedMethodOrConstructor =
MoreTypes.asExecutable(types.asMemberOf(container, methodOrConstructor));
List<? extends VariableElement> parameters = methodOrConstructor.getParameters();
List<? extends TypeMirror> parameterTypes = resolvedMethodOrConstructor.getParameterTypes();
checkState(parameters.size() == parameterTypes.size());
for (int i = 0; i < parameters.size(); i++) {
        // Find the resolved type of the specific parameter we were asked about. We have to match
        // parameters by position because types.asMemberOf cannot be invoked directly on a method
        // or constructor parameter.
if (parameters.get(i).equals(variable)) {
return parameterTypes.get(i);
}
}
throw new IllegalStateException("Could not find variable: " + variable);
} else {
return types.asMemberOf(container, variable);
}
}
private abstract static class CastingTypeVisitor<T> extends SimpleTypeVisitor8<T, Void> {
private final String label;
CastingTypeVisitor(String label) {
this.label = label;
}
@Override
protected T defaultAction(TypeMirror e, Void v) {
throw new IllegalArgumentException(e + " does not represent a " + label);
}
}
private MoreTypes() {}
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
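 * <p>
 * A minimal usage sketch; the baseline ID and page size below are illustrative only:
 * </p>
 * 
 * <pre>{@code
 * DescribeEffectivePatchesForPatchBaselineRequest request =
 *     new DescribeEffectivePatchesForPatchBaselineRequest()
 *         .withBaselineId("pb-0123456789abcdef0")
 *         .withMaxResults(50);
 * }</pre>
 * 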
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/DescribeEffectivePatchesForPatchBaseline"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeEffectivePatchesForPatchBaselineRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the patch baseline to retrieve the effective patches for.
* </p>
*/
private String baselineId;
/**
* <p>
* The maximum number of patches to return (per page).
* </p>
*/
private Integer maxResults;
/**
* <p>
* The token for the next set of items to return. (You received this token from a previous call.)
* </p>
*/
private String nextToken;
/**
* <p>
* The ID of the patch baseline to retrieve the effective patches for.
* </p>
*
* @param baselineId
* The ID of the patch baseline to retrieve the effective patches for.
*/
public void setBaselineId(String baselineId) {
this.baselineId = baselineId;
}
/**
* <p>
* The ID of the patch baseline to retrieve the effective patches for.
* </p>
*
* @return The ID of the patch baseline to retrieve the effective patches for.
*/
public String getBaselineId() {
return this.baselineId;
}
/**
* <p>
* The ID of the patch baseline to retrieve the effective patches for.
* </p>
*
* @param baselineId
* The ID of the patch baseline to retrieve the effective patches for.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEffectivePatchesForPatchBaselineRequest withBaselineId(String baselineId) {
setBaselineId(baselineId);
return this;
}
/**
* <p>
* The maximum number of patches to return (per page).
* </p>
*
* @param maxResults
* The maximum number of patches to return (per page).
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of patches to return (per page).
* </p>
*
* @return The maximum number of patches to return (per page).
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of patches to return (per page).
* </p>
*
* @param maxResults
* The maximum number of patches to return (per page).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEffectivePatchesForPatchBaselineRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* The token for the next set of items to return. (You received this token from a previous call.)
* </p>
*
* @param nextToken
* The token for the next set of items to return. (You received this token from a previous call.)
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The token for the next set of items to return. (You received this token from a previous call.)
* </p>
*
* @return The token for the next set of items to return. (You received this token from a previous call.)
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The token for the next set of items to return. (You received this token from a previous call.)
* </p>
*
* @param nextToken
* The token for the next set of items to return. (You received this token from a previous call.)
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEffectivePatchesForPatchBaselineRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getBaselineId() != null)
sb.append("BaselineId: ").append(getBaselineId()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeEffectivePatchesForPatchBaselineRequest == false)
return false;
DescribeEffectivePatchesForPatchBaselineRequest other = (DescribeEffectivePatchesForPatchBaselineRequest) obj;
if (other.getBaselineId() == null ^ this.getBaselineId() == null)
return false;
if (other.getBaselineId() != null && other.getBaselineId().equals(this.getBaselineId()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getBaselineId() == null) ? 0 : getBaselineId().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public DescribeEffectivePatchesForPatchBaselineRequest clone() {
return (DescribeEffectivePatchesForPatchBaselineRequest) super.clone();
}
}
|
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.cede.guis;
import com.cede.lib.ProductModel;
import com.cede.lib.ProviderModel;
import com.cede.models.Provider;
import java.awt.Toolkit;
import javax.swing.ImageIcon;
import javax.swing.table.DefaultTableModel;
/**
*
* @author MHERNANDEZ
*/
public class ProveedoresDetalle extends javax.swing.JFrame {
private final ImageIcon icon;
private ProviderModel pm;
private Provider p;
private ProductModel product;
private int id;
/**
* Creates new form ProveedoresDetalle
*/
public ProveedoresDetalle(int id) {
this.id = id;
initComponents();
pm = new ProviderModel();
product = new ProductModel();
this.setLocationRelativeTo(null);
icon = new ImageIcon(getClass().getResource("/com/cede/img/header-logo.png"));
setIconImage(Toolkit.getDefaultToolkit().getImage(this.getClass().getResource("/com/cede/img/icono.png")));
fillFields();
}
public void fillFields(){
p = pm.providerDetail(id);
nombreField.setText(p.getNombre());
rfcField.setText(p.getRfc());
domicilioField.setText(p.getDomicilio());
telefonoField.setText(p.getTelefono());
idField.setText(""+p.getIdProvider());
product.ProductsProvider((DefaultTableModel)productsTable.getModel(), p.getIdProvider());
}
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
nombreField = new javax.swing.JTextField();
jLabel3 = new javax.swing.JLabel();
rfcField = new javax.swing.JTextField();
jLabel4 = new javax.swing.JLabel();
domicilioField = new javax.swing.JTextField();
jLabel5 = new javax.swing.JLabel();
telefonoField = new javax.swing.JTextField();
jLabel6 = new javax.swing.JLabel();
idField = new javax.swing.JTextField();
jScrollPane1 = new javax.swing.JScrollPane();
productsTable = new javax.swing.JTable();
jLabel7 = new javax.swing.JLabel();
jButton1 = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
jLabel1.setFont(new java.awt.Font("Arial Black", 1, 14)); // NOI18N
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel1.setText("Detalles Proveedor");
jLabel2.setText("Nombre: ");
nombreField.setEditable(false);
jLabel3.setText("RFC: ");
rfcField.setEditable(false);
jLabel4.setText("Domicilio:");
domicilioField.setEditable(false);
jLabel5.setText("Tel:");
telefonoField.setEditable(false);
jLabel6.setText("Id:");
idField.setEditable(false);
productsTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
}
));
jScrollPane1.setViewportView(productsTable);
jLabel7.setFont(new java.awt.Font("Arial Black", 1, 12)); // NOI18N
jLabel7.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel7.setText("Productos");
jButton1.setText("Cerrar");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
.addComponent(jLabel2)
.addGap(18, 18, 18)
.addComponent(nombreField, javax.swing.GroupLayout.PREFERRED_SIZE, 150, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel4)
.addComponent(jLabel6))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(domicilioField)
.addComponent(idField))))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel3)
.addComponent(jLabel5))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(rfcField)
.addComponent(telefonoField, javax.swing.GroupLayout.DEFAULT_SIZE, 150, Short.MAX_VALUE))
.addGap(0, 0, Short.MAX_VALUE))
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 461, Short.MAX_VALUE)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGap(0, 0, Short.MAX_VALUE)
.addComponent(jButton1)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel1)
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel2)
.addComponent(nombreField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel3)
.addComponent(rfcField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel4)
.addComponent(domicilioField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel5)
.addComponent(telefonoField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel6)
.addComponent(idField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addComponent(jLabel7)
.addGap(18, 18, 18)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 320, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addComponent(jButton1)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        /* Close this window when the Cerrar button is clicked */
this.dispose();
}//GEN-LAST:event_jButton1ActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTextField domicilioField;
private javax.swing.JTextField idField;
private javax.swing.JButton jButton1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTextField nombreField;
private javax.swing.JTable productsTable;
private javax.swing.JTextField rfcField;
private javax.swing.JTextField telefonoField;
// End of variables declaration//GEN-END:variables
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.complex.impl;
import java.math.BigDecimal;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.ZeroVector;
import org.apache.arrow.vector.complex.AbstractStructVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.complex.UnionVector;
import org.apache.arrow.vector.complex.writer.FieldWriter;
import org.apache.arrow.vector.holders.DecimalHolder;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.TransferPair;
import io.netty.buffer.ArrowBuf;
/**
* This FieldWriter implementation delegates all FieldWriter API calls to an inner FieldWriter. This inner field writer
* can start as a specific type, and this class will promote the writer to a UnionWriter if a call is made that the
* specifically typed writer cannot handle. A new UnionVector is created, wrapping the original vector, and replaces the
* original vector in the parent vector, which can be either an AbstractStructVector or a ListVector.
*
* <p>The writer used can either be for single elements (struct) or lists.</p>
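 *
 * <p>A minimal sketch of the promotion behavior (the {@code allocator} is assumed to exist; the
 * particular writer calls are illustrative):</p>
 *
 * <pre>{@code
 * try (ListVector listVector = ListVector.empty("list", allocator)) {
 *   UnionListWriter writer = listVector.getWriter();
 *   writer.startList();
 *   writer.bigInt().writeBigInt(1L);    // inner writer starts out typed as BIGINT
 *   writer.float8().writeFloat8(2.0);   // a different type promotes the inner vector to a union
 *   writer.endList();
 * }
 * }</pre>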
*/
public class PromotableWriter extends AbstractPromotableFieldWriter {
private final AbstractStructVector parentContainer;
private final ListVector listVector;
private final NullableStructWriterFactory nullableStructWriterFactory;
private int position;
private static final int MAX_DECIMAL_PRECISION = 38;
private enum State {
UNTYPED, SINGLE, UNION
}
private MinorType type;
private ArrowType arrowType;
private ValueVector vector;
private UnionVector unionVector;
private State state;
private FieldWriter writer;
/**
* Constructs a new instance.
*
* @param v The vector to write.
* @param parentContainer The parent container for the vector.
*/
public PromotableWriter(ValueVector v, AbstractStructVector parentContainer) {
this(v, parentContainer, NullableStructWriterFactory.getNullableStructWriterFactoryInstance());
}
/**
   * Constructs a new instance.
*
* @param v The vector to initialize the writer with.
* @param parentContainer The parent container for the vector.
* @param nullableStructWriterFactory The factory to create the delegate writer.
*/
public PromotableWriter(
ValueVector v,
AbstractStructVector parentContainer,
NullableStructWriterFactory nullableStructWriterFactory) {
this.parentContainer = parentContainer;
this.listVector = null;
this.nullableStructWriterFactory = nullableStructWriterFactory;
init(v);
}
/**
* Constructs a new instance.
*
* @param v The vector to initialize the writer with.
* @param listVector The vector that serves as a parent of v.
*/
public PromotableWriter(ValueVector v, ListVector listVector) {
this(v, listVector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance());
}
/**
* Constructs a new instance.
*
* @param v The vector to initialize the writer with.
* @param listVector The vector that serves as a parent of v.
* @param nullableStructWriterFactory The factory to create the delegate writer.
*/
public PromotableWriter(
ValueVector v,
ListVector listVector,
NullableStructWriterFactory nullableStructWriterFactory) {
this.listVector = listVector;
this.parentContainer = null;
this.nullableStructWriterFactory = nullableStructWriterFactory;
init(v);
}
private void init(ValueVector v) {
if (v instanceof UnionVector) {
state = State.UNION;
unionVector = (UnionVector) v;
writer = new UnionWriter(unionVector, nullableStructWriterFactory);
} else if (v instanceof ZeroVector) {
state = State.UNTYPED;
} else {
setWriter(v);
}
}
@Override
public void setAddVectorAsNullable(boolean nullable) {
super.setAddVectorAsNullable(nullable);
if (writer instanceof AbstractFieldWriter) {
((AbstractFieldWriter) writer).setAddVectorAsNullable(nullable);
}
}
private void setWriter(ValueVector v) {
setWriter(v, null);
}
private void setWriter(ValueVector v, ArrowType arrowType) {
state = State.SINGLE;
vector = v;
type = v.getMinorType();
this.arrowType = arrowType;
switch (type) {
case STRUCT:
writer = nullableStructWriterFactory.build((StructVector) vector);
break;
case LIST:
writer = new UnionListWriter((ListVector) vector, nullableStructWriterFactory);
break;
case UNION:
writer = new UnionWriter((UnionVector) vector, nullableStructWriterFactory);
break;
default:
writer = type.getNewFieldWriter(vector);
break;
}
}
@Override
public void setPosition(int index) {
super.setPosition(index);
FieldWriter w = getWriter();
if (w == null) {
position = index;
} else {
w.setPosition(index);
}
}
protected FieldWriter getWriter(MinorType type) {
return getWriter(type, null);
}
protected FieldWriter getWriter(MinorType type, ArrowType arrowType) {
if (state == State.UNION) {
((UnionWriter) writer).getWriter(type);
} else if (state == State.UNTYPED) {
if (type == null) {
        // still untyped and no target type requested, so there is no writer to return yet
return null;
}
if (arrowType == null) {
arrowType = type.getType();
}
FieldType fieldType = new FieldType(addVectorAsNullable, arrowType, null, null);
ValueVector v = listVector.addOrGetVector(fieldType).getVector();
v.allocateNew();
setWriter(v, arrowType);
writer.setPosition(position);
} else if (type != this.type) {
promoteToUnion();
((UnionWriter) writer).getWriter(type);
}
return writer;
}
@Override
public boolean isEmptyStruct() {
return writer.isEmptyStruct();
}
protected FieldWriter getWriter() {
return writer;
}
private FieldWriter promoteToUnion() {
String name = vector.getField().getName();
TransferPair tp = vector.getTransferPair(vector.getMinorType().name().toLowerCase(), vector.getAllocator());
tp.transfer();
if (parentContainer != null) {
// TODO allow dictionaries in complex types
unionVector = parentContainer.addOrGetUnion(name);
unionVector.allocateNew();
} else if (listVector != null) {
unionVector = listVector.promoteToUnion();
}
unionVector.addVector((FieldVector) tp.getTo());
writer = new UnionWriter(unionVector, nullableStructWriterFactory);
writer.setPosition(idx());
for (int i = 0; i <= idx(); i++) {
unionVector.setType(i, vector.getMinorType());
}
vector = null;
state = State.UNION;
return writer;
}
@Override
public void write(DecimalHolder holder) {
// Infer decimal scale and precision
if (arrowType == null) {
arrowType = new ArrowType.Decimal(MAX_DECIMAL_PRECISION, holder.scale);
}
getWriter(MinorType.DECIMAL, arrowType).write(holder);
}
@Override
public void writeDecimal(int start, ArrowBuf buffer) {
// Cannot infer decimal scale and precision
if (arrowType == null) {
throw new IllegalStateException("Cannot infer decimal scale and precision");
}
getWriter(MinorType.DECIMAL, arrowType).writeDecimal(start, buffer);
}
@Override
public void writeDecimal(BigDecimal value) {
// Infer decimal scale and precision
if (arrowType == null) {
arrowType = new ArrowType.Decimal(MAX_DECIMAL_PRECISION, value.scale());
}
getWriter(MinorType.DECIMAL, arrowType).writeDecimal(value);
}
@Override
public void allocate() {
getWriter().allocate();
}
@Override
public void clear() {
getWriter().clear();
}
@Override
public Field getField() {
return getWriter().getField();
}
@Override
public int getValueCapacity() {
return getWriter().getValueCapacity();
}
@Override
public void close() throws Exception {
getWriter().close();
}
}
|
|
/*
* Copyright 2013 bits of proof zrt.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bitsofproof.supernode.misc;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.bouncycastle.util.Arrays;
import com.bitsofproof.supernode.common.ByteUtils;
import com.bitsofproof.supernode.common.ECKeyPair;
import com.bitsofproof.supernode.common.ValidationException;
// WORK IN PROGRESS!
public class BIPShamirSecret
{
private static final BigInteger prime16 = BigInteger.ONE.shiftLeft (16).subtract (BigInteger.valueOf (15));
private static final BIPShamirSecret ss128 = new BIPShamirSecret (16, BigInteger.ONE.shiftLeft (128).subtract (BigInteger.valueOf (159)));
private static final BIPShamirSecret ss192 = new BIPShamirSecret (24, BigInteger.ONE.shiftLeft (192).subtract (BigInteger.valueOf (237)));
private static final BIPShamirSecret ss256 = new BIPShamirSecret (32, BigInteger.ONE.shiftLeft (256).subtract (BigInteger.valueOf (189)));
private static final BIPShamirSecret ss384 = new BIPShamirSecret (48, BigInteger.ONE.shiftLeft (384).subtract (BigInteger.valueOf (317)));
private static final BIPShamirSecret ss512 = new BIPShamirSecret (64, BigInteger.ONE.shiftLeft (512).subtract (BigInteger.valueOf (569)));
private BigInteger secretModulo;
private int secretLength;
public BIPShamirSecret (int l, BigInteger m)
{
this.secretLength = l;
this.secretModulo = m;
}
public static class SecretShare
{
public int needed;
public int shareNumber;
public BigInteger share;
public byte[] fingerprint;
}
private static final byte[] legacy = { (byte) 0x1a, (byte) 0x46 };
private static final byte[] legacyShort = { (byte) 0x26, (byte) 0xf4 };
private static final byte[] compressed = { (byte) 0x1a, (byte) 0x47 };
private static final byte[] compressedShort = { (byte) 0x26, (byte) 0xf6 };
private static final byte[] bip32seed128 = { (byte) 0x0e, (byte) 0x53 };
private static final byte[] bip32seed128Short = { (byte) 0x15, (byte) 0x3d };
private static final byte[] bip32seed256 = { (byte) 0x1a, (byte) 0x49 };
private static final byte[] bip32seed256Short = { (byte) 0x26, (byte) 0xf8 };
private static final byte[] bip32seed512 = { (byte) 0x58, (byte) 0x7e };
	private static final byte[] bip32seed512Short = { (byte) 0x83, (byte) 0xa3 };
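	/**
	 * A minimal round-trip sketch (the {@code key} below is assumed to be an existing compressed
	 * {@link ECKeyPair}; obtaining one is outside the scope of this class):
	 *
	 * <pre>
	 * String s1 = BIPShamirSecret.getShare (key, 0, 2, true); // share #0 of a 2-of-n split
	 * String s2 = BIPShamirSecret.getShare (key, 1, 2, true); // share #1
	 * ECKeyPair restored = BIPShamirSecret.reconstruct (new String[] { s1, s2 });
	 * </pre>
	 */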
public static String getShare (ECKeyPair key, int share, int needed, boolean verbose) throws ValidationException
{
SecretShare ss = ss256.getShare (key.getPrivate (), share, needed);
return ss256.serialize (key.isCompressed () ? verbose ? compressed : compressedShort :
verbose ? legacy : legacyShort, ss, verbose);
}
private static byte[] toArray (BigInteger n, int len)
{
byte[] p = n.toByteArray ();
if ( p.length != len )
{
byte[] tmp = new byte[len];
System.arraycopy (p, Math.max (0, p.length - len), tmp, Math.max (0, len - p.length), Math.min (len, p.length));
return tmp;
}
return p;
}
private String serialize (byte[] secretType, SecretShare s, boolean verbose)
{
byte[] raw;
if ( verbose )
{
raw = new byte[6 + secretLength];
}
else
{
raw = new byte[3 + secretLength];
}
System.arraycopy (secretType, 0, raw, 0, 2);
if ( verbose )
{
System.arraycopy (s.fingerprint, 0, raw, 2, 2);
			raw[4] = (byte) ((s.needed - 2) & 0xff);
raw[5] = (byte) s.shareNumber;
}
else
{
raw[2] = (byte) s.shareNumber;
}
		System.arraycopy (toArray (s.share, secretLength), 0, raw, verbose ? 6 : 3, secretLength);
return ByteUtils.toBase58WithChecksum (raw);
}
public static ECKeyPair reconstruct (String[] shares) throws ValidationException
{
SecretShare ss[] = new SecretShare[shares.length];
boolean comp = true;
for ( int i = 0; i < shares.length; ++i )
{
byte[] raw = ByteUtils.fromBase58WithChecksum (shares[i]);
byte[] prefix = Arrays.copyOfRange (raw, 0, 2);
			boolean verbose = Arrays.areEqual (prefix, compressed) || Arrays.areEqual (prefix, legacy);
if ( !verbose && !Arrays.areEqual (prefix, compressedShort) && !Arrays.areEqual (prefix, legacyShort) )
{
throw new ValidationException ("Not a key share");
}
ss[i] = new SecretShare ();
			ss[i].shareNumber = raw[verbose ? 5 : 2] & 0xff;
			ss[i].share = new BigInteger (1, Arrays.copyOfRange (raw, verbose ? 6 : 3, (verbose ? 6 : 3) + 32));
			comp = Arrays.areEqual (prefix, compressed) || Arrays.areEqual (prefix, compressedShort);
}
return new ECKeyPair (ss256.reconstruct (ss), comp);
}
private static byte[] hash (byte[] d, BigInteger mod, int length) throws ValidationException
{
MessageDigest digest;
try
{
digest = MessageDigest.getInstance ("SHA-512");
}
catch ( NoSuchAlgorithmException e )
{
throw new ValidationException (e);
}
return toArray (new BigInteger (1, digest.digest (d)).mod (mod), length);
}
private byte[] hash (byte[] d) throws ValidationException
{
return hash (d, secretModulo, secretLength);
}
public SecretShare getShare (byte[] secret, int share, int needed) throws ValidationException
{
if ( secret.length != secretLength )
{
throw new ValidationException ("Secret must be " + secretLength + " bytes");
}
if ( new BigInteger (1, secret).compareTo (secretModulo) >= 0 )
{
throw new ValidationException ("Secret is too big");
}
BigInteger[] a = new BigInteger[needed];
byte[] r = toArray (new BigInteger (1, secret), secretLength);
for ( int i = 0; i < a.length; ++i )
{
a[i] = new BigInteger (1, r);
r = hash (r);
}
int x = share + 1;
BigInteger y = a[0];
for ( int i = 1; i < needed; ++i )
{
y = y.add (BigInteger.valueOf (x).pow (i).multiply (a[i]));
}
SecretShare ss = new SecretShare ();
ss.shareNumber = (byte) share;
ss.share = y.mod (secretModulo);
ss.needed = needed;
ss.fingerprint = hash (secret, prime16, 2);
return ss;
}
public BigInteger reconstruct (SecretShare[] shares) throws ValidationException
{
for ( int i = 0; i < shares.length - 1; ++i )
{
for ( int j = 0; j < shares.length; ++j )
{
if ( i != j && shares[i].shareNumber == shares[j].shareNumber )
{
throw new ValidationException ("Shares are not unique");
}
}
}
BigInteger[] y = new BigInteger[shares.length];
for ( int i = 0; i < shares.length; ++i )
{
y[i] = shares[i].share;
}
int d, i;
for ( d = 1; d < shares.length; d++ )
{
for ( i = 0; i < shares.length - d; i++ )
{
int j = i + d;
BigInteger xi = BigInteger.valueOf (shares[i].shareNumber + 1);
BigInteger xj = BigInteger.valueOf (shares[j].shareNumber + 1);
y[i] = xj.multiply (y[i]).subtract (xi.multiply (y[i + 1])).multiply (xj.subtract (xi).modInverse (secretModulo)).mod (secretModulo);
}
}
return y[0];
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.cli;
import com.facebook.presto.client.ClientSession;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import io.airlift.airline.Option;
import io.airlift.http.client.spnego.KerberosConfig;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.CharsetEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TimeZone;
import static com.google.common.base.Preconditions.checkArgument;
import static java.nio.charset.StandardCharsets.US_ASCII;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
public class ClientOptions
{
@Option(name = "--server", title = "server", description = "Presto server location (default: localhost:8080)")
public String server = "localhost:8080";
@Option(name = "--enable-authentication", title = "enable authentication", description = "Enable client authentication")
public boolean authenticationEnabled;
@Option(name = "--krb5-remote-service-name", title = "krb5 remote service name", description = "Remote peer's kerberos service name")
public String krb5RemoteServiceName;
@Option(name = "--krb5-config-path", title = "krb5 config path", description = "Kerberos config file path (default: /etc/krb5.conf)")
public String krb5ConfigPath = "/etc/krb5.conf";
@Option(name = "--krb5-keytab-path", title = "krb5 keytab path", description = "Kerberos key table path")
public String krb5KeytabPath = "/etc/krb5.keytab";
@Option(name = "--krb5-credential-cache-path", title = "krb5 credential cache path", description = "Kerberos credential cache path")
public String krb5CredentialCachePath = defaultCredentialCachePath();
@Option(name = "--krb5-principal", title = "krb5 principal", description = "Kerberos principal to be used")
public String krb5Principal;
@Option(name = "--keystore-path", title = "keystore path", description = "Keystore path")
public String keystorePath;
@Option(name = "--keystore-password", title = "keystore password", description = "Keystore password")
public String keystorePassword;
@Option(name = "--user", title = "user", description = "Username")
public String user = System.getProperty("user.name");
@Option(name = "--source", title = "source", description = "Name of source making query")
public String source = "presto-cli";
@Option(name = "--catalog", title = "catalog", description = "Default catalog")
public String catalog = "default";
@Option(name = "--schema", title = "schema", description = "Default schema")
public String schema = "default";
@Option(name = {"-f", "--file"}, title = "file", description = "Execute statements from file and exit")
public String file;
@Option(name = "--debug", title = "debug", description = "Enable debug information")
public boolean debug;
@Option(name = "--log-levels-file", title = "log levels", description = "Configure log levels for debugging")
public String logLevelsFile;
@Option(name = "--execute", title = "execute", description = "Execute specified statements and exit")
public String execute;
@Option(name = "--output-format", title = "output-format", description = "Output format for batch mode (default: CSV)")
public OutputFormat outputFormat = OutputFormat.CSV;
@Option(name = "--session", title = "session", description = "Session property (property can be used multiple times; format is key=value)")
public final List<ClientSessionProperty> sessionProperties = new ArrayList<>();
@Option(name = "--socks-proxy", title = "socks-proxy", description = "SOCKS proxy to use for server connections")
public HostAndPort socksProxy;
public enum OutputFormat
{
ALIGNED,
VERTICAL,
CSV,
TSV,
CSV_HEADER,
TSV_HEADER,
NULL
}
public ClientSession toClientSession()
{
return new ClientSession(
parseServer(server),
user,
source,
catalog,
schema,
TimeZone.getDefault().getID(),
Locale.getDefault(),
toProperties(sessionProperties),
debug);
}
public KerberosConfig toKerberosConfig()
{
KerberosConfig config = new KerberosConfig();
if (krb5ConfigPath != null) {
config.setConfig(new File(krb5ConfigPath));
}
if (krb5KeytabPath != null) {
config.setKeytab(new File(krb5KeytabPath));
}
if (krb5CredentialCachePath != null) {
config.setCredentialCache(new File(krb5CredentialCachePath));
}
return config;
}
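    // For example (hypothetical hosts): parseServer("example.com:8081") yields
    // http://example.com:8081, while parseServer("https://example.com") is used as given.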
public static URI parseServer(String server)
{
server = server.toLowerCase(ENGLISH);
if (server.startsWith("http://") || server.startsWith("https://")) {
return URI.create(server);
}
HostAndPort host = HostAndPort.fromString(server);
try {
return new URI("http", null, host.getHostText(), host.getPortOrDefault(80), null, null, null);
}
catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
public static Map<String, String> toProperties(List<ClientSessionProperty> sessionProperties)
{
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
for (ClientSessionProperty sessionProperty : sessionProperties) {
String name = sessionProperty.getName();
if (sessionProperty.getCatalog().isPresent()) {
name = sessionProperty.getCatalog().get() + "." + name;
}
builder.put(name, sessionProperty.getValue());
}
return builder.build();
}
private static String defaultCredentialCachePath()
{
String value = System.getenv("KRB5CCNAME");
if (value != null && value.startsWith("FILE:")) {
return value.substring("FILE:".length());
}
return null;
}
public static final class ClientSessionProperty
{
private static final Splitter NAME_VALUE_SPLITTER = Splitter.on('=').limit(2);
private static final Splitter NAME_SPLITTER = Splitter.on('.');
private final Optional<String> catalog;
private final String name;
private final String value;
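        // Parsing sketch (the property strings are illustrative, not taken from this file):
        //
        //   new ClientSessionProperty("hive.optimized_reader_enabled=true")
        //       -> catalog = Optional.of("hive"), name = "optimized_reader_enabled", value = "true"
        //   new ClientSessionProperty("query_max_memory=1GB")
        //       -> catalog = Optional.empty(), name = "query_max_memory", value = "1GB"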
public ClientSessionProperty(String property)
{
List<String> nameValue = NAME_VALUE_SPLITTER.splitToList(property);
checkArgument(nameValue.size() == 2, "Session property: %s", property);
List<String> nameParts = NAME_SPLITTER.splitToList(nameValue.get(0));
checkArgument(nameParts.size() == 1 || nameParts.size() == 2, "Invalid session property: %s", property);
if (nameParts.size() == 1) {
catalog = Optional.empty();
name = nameParts.get(0);
}
else {
catalog = Optional.of(nameParts.get(0));
name = nameParts.get(1);
}
value = nameValue.get(1);
verifyProperty(catalog, name, value);
}
public ClientSessionProperty(Optional<String> catalog, String name, String value)
{
this.catalog = requireNonNull(catalog, "catalog is null");
this.name = requireNonNull(name, "name is null");
this.value = requireNonNull(value, "value is null");
verifyProperty(catalog, name, value);
}
private static void verifyProperty(Optional<String> catalog, String name, String value)
{
checkArgument(!catalog.isPresent() || !catalog.get().isEmpty(), "Invalid session property: %s.%s:%s", catalog, name, value);
checkArgument(!name.isEmpty(), "Session property name is empty");
CharsetEncoder charsetEncoder = US_ASCII.newEncoder();
checkArgument(catalog.orElse("").indexOf('=') < 0, "Session property catalog must not contain '=': %s", name);
checkArgument(charsetEncoder.canEncode(catalog.orElse("")), "Session property catalog is not US_ASCII: %s", name);
checkArgument(name.indexOf('=') < 0, "Session property name must not contain '=': %s", name);
checkArgument(charsetEncoder.canEncode(name), "Session property name is not US_ASCII: %s", name);
checkArgument(charsetEncoder.canEncode(value), "Session property value is not US_ASCII: %s", value);
}
public Optional<String> getCatalog()
{
return catalog;
}
public String getName()
{
return name;
}
public String getValue()
{
return value;
}
@Override
public String toString()
{
return (catalog.isPresent() ? catalog.get() + '.' : "") + name + '=' + value;
}
@Override
public int hashCode()
{
return Objects.hash(catalog, name, value);
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
ClientSessionProperty other = (ClientSessionProperty) obj;
return Objects.equals(this.catalog, other.catalog) &&
Objects.equals(this.name, other.name) &&
Objects.equals(this.value, other.value);
}
}
}
|
|
/////////////////////////////////////////////////////////////////////////////////////////
//
// The MIT License (MIT)
//
// Copyright (c) 2014-2015 Keld Oelykke
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
/////////////////////////////////////////////////////////////////////////////////////////
package starkcoder.failfast.unit.objects.strings;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import starkcoder.failfast.FailFast;
import starkcoder.failfast.IFailFast;
import starkcoder.failfast.SFailFast;
import starkcoder.failfast.checks.Checker;
import starkcoder.failfast.checks.IChecker;
import starkcoder.failfast.contractors.CallContractor;
import starkcoder.failfast.contractors.ICallContractor;
import starkcoder.failfast.fails.FailFastException;
import starkcoder.failfast.fails.Failer;
import starkcoder.failfast.fails.IFailer;
/**
* Fail-fast unit test of {link:IObjectStringNotNullAndNotEmptyCheck} and
* {link:IObjectStringNotNullAndNotEmptyFail}.
*
* @author Keld Oelykke
*/
public class StringNotNullAndNotEmptyTest
{
private IChecker checker;
private IFailer failer;
private String toString = null;
@Override
public String toString()
{
return this.toString;
}
@Rule
public TestWatcher watcher = new TestWatcher()
{
protected void starting(Description description)
{
toString = description.getTestClass().getSimpleName() + "." + description.getMethodName();
}
};
/**
* Setup FailFast instances.
*/
@Before
public void setUp()
{
    // this would be in your application startup section
ICallContractor callContractor = new CallContractor();
IFailFast failFastOrNull = new FailFast(new Checker(callContractor),
new Failer(callContractor), callContractor);
SFailFast.setFailFastOrNull(failFastOrNull);
this.checker = SFailFast.getChecker();
this.failer = SFailFast.getFailer();
}
/**
* Clear FailFast instances.
*/
@After
public void tearDown()
{
    // this would be in your application shutdown section
SFailFast.setFailFastOrNull(null);
this.checker = null;
this.failer = null;
}
// 1st - caller checks
@Test(expected = IllegalArgumentException.class)
public void testStringNotNullAndNotEmptyCheckerCallerIsNull()
{
String referenceA = "foo";
if (checker.isStringNotNullAndNotEmpty(null, referenceA))
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
}
@Test(expected = IllegalArgumentException.class)
public void testStringNotNullAndNotEmptyFailerCallerIsNull()
{
String referenceA = "foo";
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNotNullAndNotEmpty(null, "referenceA");
}
}
@Test(expected = IllegalStateException.class)
public void testStringFailerCallerIsWrong()
{
String referenceA = "foo";
if (checker.isStringNotNullAndNotEmpty(new String("Foo"), referenceA))
{
failer.failStringNotNullAndNotEmpty(new String("Bar"), "referenceA");
}
}
// 2nd - mismatch calls
@Test(expected = IllegalStateException.class)
public void testStringNotNullAndNotEmptyMismatchCheckCheck()
{
String referenceA = "foo";
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
checker.isStringNotNullAndNotEmpty(this, referenceA);
}
}
@Test(expected = IllegalStateException.class)
public void testStringNotNullAndNotEmptyMismatchFail()
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
@Test(expected = IllegalStateException.class)
public void testStringNotNullAndNotEmptyMismatchWrongCheck()
{
String referenceA = "";
if (checker.isStringNullOrEmpty(this, referenceA)) // wrong call
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
}
@Test(expected = IllegalStateException.class)
public void testStringNotNullAndNotEmptyMismatchWrongFail()
{
String referenceA = "foo";
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNullOrEmpty(this, "referenceA"); // wrong call
}
}
// 3rd - normal cases
@Test(expected = FailFastException.class)
public void testStringNotNullAndNotEmptyFailNoMessage()
{
String referenceA = "foo";
try
{
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
}
catch (FailFastException failFastException)
{
assertEquals("Expected registered exception in failer", failFastException,
failer.getFailFastExceptionOrNull());
System.out.println(failFastException.getMessage());
throw failFastException;
}
}
@Test(expected = FailFastException.class)
public void testStringNotNullAndNotEmptyFailMessage()
{
String referenceA = "bar";
try
{
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNotNullAndNotEmpty(this, "referenceA", "Extra info goes here");
}
}
catch (FailFastException failFastException)
{
assertEquals("Expected registered exception in failer", failFastException,
failer.getFailFastExceptionOrNull());
System.out.println(failFastException.getMessage());
throw failFastException;
}
}
@Test
public void testStringNullNoFail()
{
String referenceA = null;
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
assertTrue("Expected referenceA not to pass the check", true);
assertNull("Expected no registered exception in failer", failer.getFailFastExceptionOrNull());
}
@Test
public void testStringEmptyNoFail()
{
String referenceA = "";
if (checker.isStringNotNullAndNotEmpty(this, referenceA))
{
failer.failStringNotNullAndNotEmpty(this, "referenceA");
}
assertTrue("Expected referenceA not to pass the check", true);
assertNull("Expected no registered exception in failer", failer.getFailFastExceptionOrNull());
}
}
|
|
/*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.parsepasses.contextautoesc;
import com.google.auto.value.AutoValue;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.template.soy.base.internal.SanitizedContentKind;
import com.google.template.soy.data.SanitizedContent.ContentKind;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.soytree.AbstractSoyNodeVisitor;
import com.google.template.soy.soytree.AutoescapeMode;
import com.google.template.soy.soytree.CallBasicNode;
import com.google.template.soy.soytree.CallDelegateNode;
import com.google.template.soy.soytree.CallNode;
import com.google.template.soy.soytree.CallParamContentNode;
import com.google.template.soy.soytree.EscapingMode;
import com.google.template.soy.soytree.ForIfemptyNode;
import com.google.template.soy.soytree.ForNode;
import com.google.template.soy.soytree.ForNonemptyNode;
import com.google.template.soy.soytree.HtmlAttributeNode;
import com.google.template.soy.soytree.HtmlAttributeValueNode;
import com.google.template.soy.soytree.HtmlCloseTagNode;
import com.google.template.soy.soytree.HtmlCommentNode;
import com.google.template.soy.soytree.HtmlContext;
import com.google.template.soy.soytree.HtmlOpenTagNode;
import com.google.template.soy.soytree.HtmlTagNode;
import com.google.template.soy.soytree.IfElseNode;
import com.google.template.soy.soytree.IfNode;
import com.google.template.soy.soytree.LetContentNode;
import com.google.template.soy.soytree.MsgFallbackGroupNode;
import com.google.template.soy.soytree.PrintDirectiveNode;
import com.google.template.soy.soytree.PrintNode;
import com.google.template.soy.soytree.RawTextNode;
import com.google.template.soy.soytree.SoyNode;
import com.google.template.soy.soytree.SoyNode.BlockNode;
import com.google.template.soy.soytree.SoyNode.CommandNode;
import com.google.template.soy.soytree.SoyNode.Kind;
import com.google.template.soy.soytree.SoyNode.ParentSoyNode;
import com.google.template.soy.soytree.SoyNode.RenderUnitNode;
import com.google.template.soy.soytree.SwitchDefaultNode;
import com.google.template.soy.soytree.SwitchNode;
import com.google.template.soy.soytree.TemplateNode;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Chooses appropriate escaping modes for <code>{print}</code> commands and derives templates as
* necessary.
*
* <p>For each template with {@code autoescape="contextual"}, assume that the template is used to
* produce an HTML fragment. Start walking the body with the {@link Context context} provided by the
* caller (typically {@link HtmlContext#HTML_PCDATA}).
*
* <ul>
* <li>For RawTextNodes, update the context based on the fragment, so seeing {@code <script>}
* will move us into a JavaScript context while {@code <!--} would move us into an HTML
* comment context.
* <li>For {@link PrintNode}s, choose an escaping convention appropriate to the current context.
* <li>For {@link IfNode}s, {@link SwitchNode}s, and looping constructs, propagate context
* separately along each path, and make sure they converge on a consistent context.
* <li>For {@link CallBasicNode}s, maybe derive the target based on current context, recursively
* propagate contexts through the derived template to compute an end context for the template.
* See fixed-point typing below for a discussion of reentrant templates and templates used in
* different contexts.
* </ul>
*
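* <p>A rough illustration (a hypothetical template, not taken from this file):
*
* <pre>
* {template .link autoescape="contextual"}
*   &lt;a href="{$url}" title="{$title}"&gt;{$text}&lt;/a&gt;
* {/template}
* </pre>
*
* Here the engine would infer escaping such as |filterNormalizeUri plus |escapeHtmlAttribute
* for {$url}, |escapeHtmlAttribute for {$title}, and |escapeHtml for {$text}, since each print
* node sits in a different HTML context.
*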
*/
final class InferenceEngine {
// States in which it is illegal to recontextualize a template.
// This is because the autoescaper relies on AST nodes produced by the parser for
// kind="attributes" and kind="html" templates. So if we recontextualize a template into an
// analogous state, escaping will fail since the html nodes will not be present.
private static final ImmutableSet<HtmlContext> ILLEGAL_RECONTEXTUALIZATIONS =
Sets.immutableEnumSet(
HtmlContext.HTML_TAG,
HtmlContext.HTML_TAG_NAME,
HtmlContext.HTML_PCDATA,
HtmlContext.HTML_COMMENT,
HtmlContext.HTML_ATTRIBUTE_NAME,
HtmlContext.HTML_BEFORE_OPEN_TAG_NAME,
HtmlContext.HTML_BEFORE_CLOSE_TAG_NAME);
/**
* Infer an end context for the given template and, if requested, choose escaping directives for
* any <code>{print}</code>.
*
* @param templateNode A template that is visited in {@code startContext} and no other. If a
* template can be reached from multiple contexts, then it should be cloned. This class
* automatically does that for called templates.
* @param inferences Receives all suggested changes and inferences for {@code templateNode}.
* @return The end context when the given template is reached from {@code startContext}.
*/
public static Context inferTemplateEndContext(
TemplateNode templateNode,
Context startContext,
Inferences inferences,
ErrorReporter errorReporter) {
Context endContext;
AutoescapeMode autoescapeMode = templateNode.getAutoescapeMode();
InferenceEngine inferenceEngine =
new InferenceEngine(autoescapeMode, autoescapeMode, inferences, errorReporter);
// The context started as startContext and has been propagated through all of the
// template's children, so it is now the template's end context.
endContext = inferenceEngine.infer(templateNode, startContext);
inferences.recordTemplateEndContext(templateNode.getTemplateName(), endContext);
return endContext;
}
/**
* Checks that the end context of a strict block is compatible with its start context.
*
* @throws SoyAutoescapeException if they mismatch.
*/
private static void checkStrictBlockEndContext(RenderUnitNode node, Context endContext) {
if (!endContext.isValidEndContextForContentKind(node.getContentKind())) {
String msg =
String.format(
"A strict block of kind=\"%s\" cannot end in context %s. Likely cause is %s.",
node.getContentKind().asAttributeValue(),
endContext,
endContext.getLikelyEndContextMismatchCause(node.getContentKind()));
throw SoyAutoescapeException.createWithNode(msg, node);
}
}
/**
* Applies strict contextual autoescaping to the given node's children.
*
* <p>The start context is the given node's declared {@link ContentKind}, and it is enforced that
* the block's inferred end context matches the start context.
*
* <p>This method is used to visit the content of {let} and {param} nodes with a {@code kind}
* attribute.
*/
static void inferStrictRenderUnitNode(
AutoescapeMode templateAutoescapeMode,
RenderUnitNode node,
Inferences inferences,
ErrorReporter errorReporter) {
InferenceEngine inferenceEngine =
new InferenceEngine(
AutoescapeMode.STRICT,
templateAutoescapeMode,
inferences,
errorReporter);
// The context starts at the content kind's start context and is propagated through all of
// the node's children, so it ends up as the node's end context.
Context endContext =
inferenceEngine.inferChildren(
node, Context.getStartContextForContentKind(node.getContentKind()));
// Check that the start and end contexts are the same.
checkStrictBlockEndContext(node, endContext);
}
/** The autoescaping mode in this current context. */
private final AutoescapeMode autoescapeMode;
/** The autoescape mode of the surrounding {template}. */
private final AutoescapeMode templateAutoescapeMode;
/** Receives modifications and typing inferences. */
private final Inferences inferences;
/** The escaping mode to assume when none is specified. */
private final EscapingMode defaultEscapingMode;
/** For reporting errors. */
private final ErrorReporter errorReporter;
private InferenceEngine(
AutoescapeMode autoescapeMode,
AutoescapeMode templateAutoescapeMode,
Inferences inferences,
ErrorReporter errorReporter) {
this.autoescapeMode = autoescapeMode;
this.templateAutoescapeMode = templateAutoescapeMode;
this.inferences = inferences;
this.defaultEscapingMode = EscapingMode.ESCAPE_HTML;
this.errorReporter = errorReporter;
}
private Context infer(SoyNode node, Context context) {
return new ContextPropagatingVisitor(context).exec(node);
}
private Context inferChildren(SoyNode node, Context context) {
ContextPropagatingVisitor contextPropagatingVisitor = new ContextPropagatingVisitor(context);
return contextPropagatingVisitor.execChildren(node);
}
/**
* A visitor that propagates context across a Soy AST to determine its end context. The end
* context of an AST is the one that would be reached by applying the {@link
* RawTextContextUpdater}'s HTML/CSS/JS grammar to any output of the template (where print
* commands produce innocuous strings). An innocuous string is one that is non-empty and that
* contains no special characters in HTML/CSS/JS. The string 'z' is a good example of an innocuous
* string.
*/
private final class ContextPropagatingVisitor extends AbstractSoyNodeVisitor<Context> {
private Context context;
public ContextPropagatingVisitor(Context context) {
this.context = context;
}
@Override
public Context exec(SoyNode node) {
visit(node);
return context;
}
/** Like {@link #exec(SoyNode)}, but only visits the current node's children, if any. */
public Context execChildren(SoyNode node) {
if (node instanceof ParentSoyNode<?>) {
visitChildren((ParentSoyNode<?>) node);
}
return context;
}
@Override
protected void visitTemplateNode(TemplateNode templateNode) {
Preconditions.checkState(
templateNode.getAutoescapeMode() == autoescapeMode,
"Same ContextPropagatingVisitor cannot be reused for multiple escaping modes.");
if (autoescapeMode == AutoescapeMode.STRICT) {
Preconditions.checkState(
context.isValidStartContextForContentKind(templateNode.getContentKind()),
"Strict templates may only be visited in the context for their declared content kind.");
// Normalize to the canonical context, even if we started in a similar but allowable
// context (e.g. single versus double quotes).
context = Context.getStartContextForContentKind(templateNode.getContentKind());
}
visitChildren(templateNode);
if (autoescapeMode == AutoescapeMode.STRICT) {
checkStrictBlockEndContext(templateNode, context);
}
}
/** Propagates context across raw chunks of HTML text. */
@Override
protected void visitRawTextNode(RawTextNode rawTextNode) {
context = RawTextContextUpdater.processRawText(rawTextNode, context);
}
@Override
protected void visitMsgFallbackGroupNode(MsgFallbackGroupNode node) {
if (autoescapeMode == AutoescapeMode.STRICT || autoescapeMode == AutoescapeMode.CONTEXTUAL) {
// (1) Determine the escaping we should do on the node itself, and the context we should
// parse the children in.
Optional<Context.MsgEscapingStrategy> maybeStrategy = context.getMsgEscapingStrategy(node);
if (!maybeStrategy.isPresent()) {
throw SoyAutoescapeException.createWithNode(
"Messages are not supported in this context, because it would mean asking "
+ "translators to write source code; if this is desired, try factoring the "
+ "message into a {let} block: "
+ context,
node);
}
Context.MsgEscapingStrategy strategy = maybeStrategy.get();
inferences.setEscapingDirectives(node, context, strategy.escapingModesForFullMessage);
// (2) Run the inference engine on the parts of the message in that context.
Context msgEndContext =
new InferenceEngine(
autoescapeMode,
templateAutoescapeMode,
inferences,
errorReporter)
.inferChildren(node, strategy.childContext);
// (3) Make sure the message didn't itself change context.
if (!msgEndContext.equals(strategy.childContext)) {
throw SoyAutoescapeException.createWithNode(
"Message text should not alter the escaping context. "
+ context
+ " != "
+ strategy.childContext,
node);
}
} else {
// In a non-contextual mode, we just descend into the children.
visitChildren(node);
}
}
/**
* {@link DerivedTemplateUtils Derive} a template from the given call's target if necessary, and
* figure out the template's end context.
*/
@Override
protected void visitCallNode(CallNode callNode) {
String calleeName;
if (callNode instanceof CallBasicNode) {
calleeName = ((CallBasicNode) callNode).getCalleeName();
} else {
calleeName = ((CallDelegateNode) callNode).getDelCalleeName();
}
DerivedNameAndContext derivedNameAndContext =
inferCallSite(callNode, context, calleeName, inferences);
String derivedCalleeName = derivedNameAndContext.derivedName();
if (!calleeName.equals(derivedCalleeName)) {
inferences.retargetCall(callNode, derivedCalleeName);
}
context = derivedNameAndContext.context();
visitChildren(callNode);
}
@Override
protected void visitCallParamContentNode(CallParamContentNode node) {
visitRenderUnitNode(node);
}
@Override
protected void visitLetContentNode(LetContentNode node) {
visitRenderUnitNode(node);
}
private void visitRenderUnitNode(RenderUnitNode node) {
switch (autoescapeMode) {
case CONTEXTUAL:
// if there is a kind and we are contextual, respect it, otherwise, html it is!
if (node.getContentKind() == null) {
inferInContextualModeForHtml(node);
} else {
inferInStrictMode(node);
}
break;
case STRICT:
// The CheckEscapingSanityVisitor ensures that node.getContentKind is non-null
inferInStrictMode(node);
break;
case NONCONTEXTUAL:
// Do nothing. A let content node with a {@code kind} attribute in a
// non-contextual template is handled by another visitor:
// ContextualAutoescaper.NonContextualTypedRenderUnitNodesVisitor called from
// ContextualAutoescaper.
break;
}
}
@Override
protected void visitIfNode(IfNode ifNode) {
propagateAcrossDisjunction(ifNode);
}
@Override
protected void visitSwitchNode(SwitchNode switchNode) {
propagateAcrossDisjunction(switchNode);
}
/**
* Do multiple inferences so we can make sure we get to a consistent context regardless of how
* many times the loop is entered.
*/
@Override
protected void visitForNode(ForNode forNode) {
List<BlockNode> foreachChildren = forNode.getChildren();
ForNonemptyNode neNode = (ForNonemptyNode) foreachChildren.get(0);
ForIfemptyNode ieNode;
if (foreachChildren.size() == 2) {
ieNode = (ForIfemptyNode) foreachChildren.get(1);
} else if (foreachChildren.size() == 1) {
ieNode = null;
} else {
throw new AssertionError();
}
Context afterBody = context;
if (neNode != null) {
afterBody = infer(neNode, context);
// Make sure that repeated invocations of the body end up in the same state.
Context elseContext = infer(neNode, afterBody);
Optional<Context> combined = Context.union(elseContext, afterBody);
if (!combined.isPresent()) {
throw SoyAutoescapeException.createWithNode(
"{"
+ forNode.getCommandName()
+ "} body does not end in the same context after repeated entries.",
forNode);
}
afterBody = combined.get();
}
Context ifemptyContext;
if (ieNode != null) {
ifemptyContext = infer(ieNode, context);
} else {
ifemptyContext = context;
}
Optional<Context> combined = Context.union(ifemptyContext, afterBody);
if (!combined.isPresent()) {
throw SoyAutoescapeException.createWithNode(
"{"
+ forNode.getCommandName()
+ "} body "
+ (ieNode == null
? "changes context."
: "does not end in the same context as {ifempty}."),
ieNode == null ? forNode : ieNode);
}
context = combined.get();
}
/**
* Pick an escaping mode for the print node if this is in an {@code autoescape="contextual"}
* template.
*/
@Override
protected void visitPrintNode(PrintNode printNode) {
// It is an error to use autoescape-canceling print directives in strict mode unless in a
// block of kind text.
if (autoescapeMode == AutoescapeMode.STRICT && context.state != HtmlContext.TEXT) {
for (PrintDirectiveNode printDirective : printNode.getChildren()) {
if (printDirective.getName().equals("|noAutoescape")) {
// Treat noAutoescape specially:
// - It is allowed in strict sub-contexts if the surrounding template is non-strict,
// to help with migration. This does not apply to other escaping directives since
// they are just as dangerous, but less obvious to auditors.
// - It deserves a more useful error message.
if (templateAutoescapeMode == AutoescapeMode.STRICT) {
// Help the user figure out the best content kind to use, using existing heuristics.
SanitizedContentKind recommendedKind = context.getMostAppropriateContentKind();
String recommendedKindStr =
(recommendedKind == SanitizedContentKind.TEXT)
? "appropriate kind=\"...\""
: ("kind=\"" + recommendedKind.asAttributeValue() + "\"");
throw SoyAutoescapeException.createWithNode(
"noAutoescape is not allowed in strict autoescaping mode. Instead, pass in a "
+ "{param} with "
+ recommendedKindStr
+ " or SanitizedContent.",
printNode);
}
} else if (printDirective.getPrintDirective() != null
&& printDirective.getPrintDirective().shouldCancelAutoescape()) {
throw SoyAutoescapeException.createWithNode(
"Autoescape-cancelling print directives like "
+ printDirective.getName()
+ " are only allowed in kind=\"text\" blocks. If you really want to "
+ "over-escape, try using a let block: "
+ "{let $foo kind=\"text\"}"
+ printNode.toSourceString()
+ "{/let}{$foo}.",
printNode);
}
}
}
List<EscapingMode> escapingModes = inferences.getEscapingMode(printNode);
Context prev = context;
if (escapingModes.isEmpty()) { // None specified.
// The inferences set below specify which nodes to change. In the non-contextual modes,
// we leave escapingModesToSet null since no changes are to be made to this print node.
List<EscapingMode> escapingModesToSet = null;
switch (autoescapeMode) {
case STRICT:
case CONTEXTUAL:
// Infer one.
escapingModes =
escapingModesToSet = context.getEscapingModes(printNode, printNode.getChildren());
break;
case NONCONTEXTUAL:
escapingModes = ImmutableList.of(defaultEscapingMode);
break;
}
inferences.setEscapingDirectives(printNode, prev, escapingModesToSet);
} else if (!context.isCompatibleWith(escapingModes.get(0))) {
String msg =
String.format("Escaping modes %s not compatible with %s.", escapingModes, context);
throw SoyAutoescapeException.createWithNode(msg, printNode);
}
// Figure out the context at the end.
if (!escapingModes.isEmpty()
|| autoescapeMode == AutoescapeMode.CONTEXTUAL
|| autoescapeMode == AutoescapeMode.STRICT) {
// If we know the escaping mode or we're supposed to choose one, then use that.
context = context.getContextAfterDynamicValue();
} else {
// If we are not in an autoescaping template, assume that the author knows what they're
// doing and simulate an innocuous value.
context =
RawTextContextUpdater.processRawText(
new RawTextNode(-1, "z", printNode.getSourceLocation()), context);
}
}
@Override
protected void visitHtmlOpenTagNode(HtmlOpenTagNode node) {
visitHtmlTagNode(node);
}
@Override
protected void visitHtmlCloseTagNode(HtmlCloseTagNode node) {
visitHtmlTagNode(node);
}
@Override
protected void visitHtmlCommentNode(HtmlCommentNode node) {
context = context.transitionToState(HtmlContext.HTML_COMMENT);
visitChildren(node);
context = context.transitionToState(HtmlContext.HTML_PCDATA);
}
private void visitHtmlTagNode(HtmlTagNode tag) {
context =
context.transitionToState(
tag.getKind() == Kind.HTML_OPEN_TAG_NODE
? HtmlContext.HTML_BEFORE_OPEN_TAG_NAME
: HtmlContext.HTML_BEFORE_CLOSE_TAG_NAME);
// if the tag name is a constant, transition to an appropriate tag state
if (tag.getTagName().isStatic()) {
context = context.transitionToTagName(tag);
} else {
// dynamic tag name
visit(tag.getChild(0));
}
// Make sure the element type was pre-determined when setting the tag name.
Preconditions.checkArgument(context.elType != Context.ElementType.NONE);
context = context.transitionToTagBody();
// 0 is the tag name
for (int i = 1; i < tag.numChildren(); i++) {
visit(tag.getChild(i));
}
context = context.transitionToAfterTag();
}
@Override
protected void visitHtmlAttributeNode(HtmlAttributeNode node) {
SoyNode first = node.getChild(0);
if (first.getKind() == SoyNode.Kind.RAW_TEXT_NODE) {
context = context.transitionToAttrName(((RawTextNode) first).getRawText());
} else {
visit(first);
}
if (node.hasValue()) {
visit(node.getChild(1));
}
context = context.transitionToTagBody();
}
@Override
protected void visitHtmlAttributeValueNode(HtmlAttributeValueNode node) {
Context.AttributeEndDelimiter delim;
switch (node.getQuotes()) {
case DOUBLE:
delim = Context.AttributeEndDelimiter.DOUBLE_QUOTE;
break;
case NONE:
delim = Context.AttributeEndDelimiter.SPACE_OR_TAG_END;
break;
case SINGLE:
delim = Context.AttributeEndDelimiter.SINGLE_QUOTE;
break;
default:
throw new AssertionError();
}
context = context.transitionToAttrValue(delim);
visitChildren(node);
context = context.transitionToTagBody();
}
/** Handle conjunction nodes. */
@Override
protected void visitSoyNode(SoyNode node) {
if (node instanceof ParentSoyNode<?>) {
visitChildren((ParentSoyNode<?>) node);
}
}
//
// Helper methods.
/**
* Determines the content kind of the templates.
*
* <p>This relies on CheckDelegatesVisitor to print friendly messages if the deltemplates differ
* in content kind.
*/
private SanitizedContentKind getCommonContentKindIfStrict(List<TemplateNode> templates) {
if (templates.isEmpty()) {
return null;
}
SanitizedContentKind contentKind = templates.get(0).getContentKind();
for (TemplateNode template : templates) {
Preconditions.checkArgument(template.getContentKind() == contentKind);
}
return contentKind;
}
/**
* Derives a template if necessary to compute a consistent end context for a call to the named
* template.
*
* @param callNode The call node.
* @param startContext The context before the call.
* @param templateName The name of the template being called.
* @param inferences Contains a mapping of templates visible to the call site, prior typing
* decisions, and derived templates. Will receive any templates successfully derived as a
* side-effect of this call.
* @return The name of the template to call (possibly derived from templateName) and the context
* after the call ends.
*/
private DerivedNameAndContext inferCallSite(
CallNode callNode, Context startContext, String templateName, Inferences inferences) {
inferences.recordTemplateChecked(templateName);
List<TemplateNode> targets = inferences.lookupTemplates(templateName);
SanitizedContentKind calleeStrictContentKind = getCommonContentKindIfStrict(targets);
if (autoescapeMode == AutoescapeMode.STRICT) {
// We're currently in a strict mode template. Check what kind of template is being called.
if (calleeStrictContentKind != null
&& startContext.isValidStartContextForContentKind(calleeStrictContentKind)) {
// As an optimization, don't escape the call site if the callee has the right content
// kind. Since all deltemplates with the same name must be of the same kind (checked
// elsewhere), we can make this optimization even if we can't see all the deltemplates.
return DerivedNameAndContext.create(
templateName, startContext.getContextAfterDynamicValue());
} else if (calleeStrictContentKind != null || targets.isEmpty()) {
// If a strict template calls another strict template (or an unknown extern), the result
// will be escaped, so the call statement behaves effectively like a print statement.
// No re-contextualization of the callee is done.
// TODO(gboyer): Throw an exception if the list of escaping modes is empty, which
// indicates that there's no valid escaper for this context. My plan is to actually have
// getEscapingModes() itself throw the exception, but this requires some weeding out of
// bad existing templates.
inferences.setEscapingDirectives(
callNode,
startContext,
startContext.getEscapingModes(callNode, ImmutableList.<PrintDirectiveNode>of()));
return DerivedNameAndContext.create(
templateName, startContext.getContextAfterDynamicValue());
} else if (startContext.state == HtmlContext.TEXT) {
// Contextualize the callee in TEXT mode. It's okay to call any template from TEXT mode
// since TEXT doesn't make any safety guarantees.
return contextualizeCallee(callNode, startContext, templateName, inferences);
} else {
// TODO: We could easily allow this in a future release. We can contextualize the callee
// and re-escape its output. There are two options. TEXT is nicer because there's no
// re-escaping in most cases. Markup won't be preserved, but at least there will be zero
// double-escaping. HTML is more consistent because externs behave the same as interns.
throw SoyAutoescapeException.createWithNode(
"Soy strict autoescaping currently forbids calls to non-strict templates, unless "
+ "the context is kind=\"text\", since there's no guarantee the callee is safe.",
callNode);
}
} else {
// In a non-strict mode template.
if (targets.isEmpty()) {
// External template not visible to compiler -- let's pray for the best! We might end up
// calling a Javascript-escaping template from HTML or vice versa.
return DerivedNameAndContext.create(templateName, startContext);
} else if (calleeStrictContentKind != null) {
// Non-strict templates may call strict templates, but only if the context is a match.
// NOTE: While contextual templates *might* do escaping like strict in this context, it
// would silently break if the template is compiled as an extern. By having this check,
// teams can do a single monolithic compilation for error checking to prevent this.
// We're a little loose in this check to allow calling URI templates within URI
// attributes, even though it's not technically valid HTML, in order to help migration.
if (!startContext.isValidStartContextForContentKindLoose(calleeStrictContentKind)) {
String msg =
String.format(
"Cannot call strictly autoescaped template %s of kind=\"%s\" from "
+ "incompatible context %s. Strict templates generate extra code to safely "
+ "call templates of other content kinds, but non-strict templates do not.",
templateName, calleeStrictContentKind.asAttributeValue(), startContext);
throw SoyAutoescapeException.createWithNode(msg, callNode);
}
return DerivedNameAndContext.create(templateName, startContext);
} else {
// Normal contextual-to-contextual propagation.
return contextualizeCallee(callNode, startContext, templateName, inferences);
}
}
}
/**
* Creates a contextual derivative of the specified template and infers the end context.
*
* @param callNode The call site.
* @param startContext The known context to start at.
* @param calleeName The non-contextualized callee name.
* @param inferences The inferences to write to.
* @return A pairing of the new derived name and the end context.
*/
private DerivedNameAndContext contextualizeCallee(
CallNode callNode, Context startContext, String calleeName, Inferences inferences) {
// Propagate the context into the callee contextual template.
String suffix = DerivedTemplateUtils.getSuffix(startContext);
String baseName = DerivedTemplateUtils.getBaseName(calleeName);
// The derived template name.
String newCalleeName = baseName + suffix;
// Clone the templates for this new context if needed.
if (inferences.lookupTemplates(newCalleeName).isEmpty()) {
if (ILLEGAL_RECONTEXTUALIZATIONS.contains(startContext.state)) {
throw SoyAutoescapeException.createWithNode(
"Attempting to call non-strict template '"
+ baseName
+ "' in context '"
+ startContext.state
+ "'. This is no longer allowed, please migrate the callee to strict and "
+ "specify a content kind by adding a "
+ "kind=\"(html|attributes|js|css|uri)\" attribute to the callee",
callNode);
}
inferences.cloneTemplates(baseName, newCalleeName, callNode);
}
try {
Context endContext = determineContextualization(startContext, newCalleeName, inferences);
return DerivedNameAndContext.create(newCalleeName, endContext);
} catch (SoyAutoescapeException e) {
throw SoyAutoescapeException.createCausedWithNode(
"Error while re-contextualizing template "
+ calleeName
+ " in context "
+ startContext
+ ":",
e,
callNode);
}
}
/**
* Determines the end context and a set of inferences for a template in a particular context.
*
* <p>This does not create new cloned templates, but just computes contextualization on existing
* ones.
*
* @param startContext The start context we're calling these templates in.
* @param calleeName The callee's name, already modified for context.
* @param inferences The inferences to modify.
*/
private Context determineContextualization(
Context startContext, String calleeName, Inferences inferences) {
Context endContext = inferences.getTemplateEndContext(calleeName);
if (endContext != null) {
// We've already computed this; return early.
return endContext;
}
List<TemplateNode> templateNodes = inferences.lookupTemplates(calleeName);
// Optimistically assume the new callee ends with the same context as it starts, and then
// verify that's the case.
InferencesAndContext hypothesis =
hypothesizeContextualization(
startContext, startContext, calleeName, templateNodes, inferences);
endContext = hypothesis.context();
Inferences subInferences = hypothesis.inferences();
if (!endContext.equals(startContext) && subInferences.wasTemplateChecked(calleeName)) {
// Try assuming endContext as the end context and see if that is a fixed point. If so, it
// is a valid end context since its output is the same regardless of whether
// recursive calls are properly typed. This allows us to gloss over minor differences in
// startContexts, e.g. JsFollowingSlash.
InferencesAndContext secondHypothesis =
hypothesizeContextualization(
startContext, endContext, calleeName, templateNodes, inferences);
Optional<Context> combined = Context.union(secondHypothesis.context(), endContext);
// See if the first and second hypothesis result in a compatible end context.
if (!combined.isPresent()) {
// Cannot identify an end context. Bail.
throw SoyAutoescapeException.createWithNode(
"Cannot determine end context for recursive template " + calleeName,
templateNodes.get(0));
}
endContext = combined.get();
}
subInferences.recordTemplateEndContext(calleeName, endContext);
subInferences.foldIntoParent();
return endContext;
}
/**
* Hypothesizes a particular end context and determines a potential end context, if any.
*
* <p>This returns the *actual* end context determined from this hypothesis. Hypotheses are
* needed to handle recursive templates, where the output context is needed to compute the
* context within the template.
*
* @param startContext The known context to start at.
* @param hypotheticalEndContext The end context to test.
* @param calleeName Name of the callee.
* @param templateNodes The templates and deltemplates of the same name.
* @param parentInferences The inferences to work from.
* @return A combination of the end context determined and the inferences that go along with
* them.
*/
private InferencesAndContext hypothesizeContextualization(
Context startContext,
Context hypotheticalEndContext,
String calleeName,
List<TemplateNode> templateNodes,
Inferences parentInferences) {
// Create a hypothetical world of inferences based on this hypothesis. It is up to the caller
// to fold these into the parent inferences if it chooses to use these.
Inferences inferences = new Inferences(parentInferences);
List<Context> endContexts = new ArrayList<Context>();
inferences.recordTemplateEndContext(calleeName, hypotheticalEndContext);
for (TemplateNode templateNode : templateNodes) {
endContexts.add(
inferTemplateEndContext(
templateNode,
startContext,
inferences,
errorReporter));
}
Optional<Context> combined = Context.union(endContexts);
if (!combined.isPresent()) {
throw SoyAutoescapeException.createWithNode(
"Deltemplates diverge when used with deprecated-contextual autoescaping."
+ " Based on the call site, assuming these templates all start in "
+ startContext
+ ", the different deltemplates end in incompatible contexts: "
+ Joiner.on(", ").join(endContexts),
templateNodes.get(0));
}
return InferencesAndContext.create(inferences, combined.get());
}
/** Consider the various branches separately and compute a union context for each branch. */
private void propagateAcrossDisjunction(ParentSoyNode<?> node) {
// All the branches of an {if} or {switch} should return compatible contexts, so that we can
// figure out the end context of the branch as a whole.
Iterator<? extends SoyNode> childIt = node.getChildren().iterator();
SoyNode firstBranch = childIt.next();
Context out = infer(firstBranch, context);
boolean sawElseOrDefault = false;
while (childIt.hasNext()) {
SoyNode branch = childIt.next();
Context brOut = infer(branch, context);
Optional<Context> combined = Context.union(out, brOut);
if (!combined.isPresent()) {
throw SoyAutoescapeException.createWithNode(
(node instanceof IfNode
? "{if} command branch ends in a different context than preceding branches:"
: "{switch} command case ends in a different context than preceding cases:")
+ " "
+ branch.toSourceString(),
branch);
}
out = combined.get();
if (branch instanceof IfElseNode || branch instanceof SwitchDefaultNode) {
sawElseOrDefault = true;
}
}
// If there is no else or default, then the end context has to be the compatible with the
// start context.
if (!sawElseOrDefault) {
Optional<Context> combined = Context.union(context, out);
if (!combined.isPresent()) {
throw SoyAutoescapeException.createWithNode(
(node instanceof IfNode
? "{if} command without {else} changes context."
: "{switch} command without {default} changes context."),
node);
}
out = combined.get();
}
context = out;
}
private void inferInStrictMode(RenderUnitNode node) {
inferStrictRenderUnitNode(
templateAutoescapeMode,
node,
inferences,
errorReporter);
}
/** Applies HTML contextual autoescaping on a legacy contextual parameter block. */
private void inferInContextualModeForHtml(CommandNode node) {
// NOTE: Previously this wouldn't do any contextual analysis, which resulted in subtle bugs
// such as the contextual autoescaper not seeing typed parameters in nested calls.
final Context paramContentNodeEndContext =
new InferenceEngine(
AutoescapeMode.CONTEXTUAL,
templateAutoescapeMode,
inferences,
errorReporter)
.inferChildren(node, Context.HTML_PCDATA);
if (!paramContentNodeEndContext.equals(Context.HTML_PCDATA)) {
throw SoyAutoescapeException.createWithNode(
"Blocks should start and end in HTML context.", node);
}
}
}
//
// Static helper methods (cannot be part of inner class).
@AutoValue
abstract static class DerivedNameAndContext {
static DerivedNameAndContext create(String derivedName, Context context) {
return new AutoValue_InferenceEngine_DerivedNameAndContext(derivedName, context);
}
abstract String derivedName();
abstract Context context();
}
@AutoValue
abstract static class InferencesAndContext {
static InferencesAndContext create(Inferences inferences, Context context) {
return new AutoValue_InferenceEngine_InferencesAndContext(inferences, context);
}
abstract Inferences inferences();
abstract Context context();
}
}
|
|
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.distributed;
import com.facebook.buck.distributed.thrift.BuildJobStateFileHashEntry;
import com.facebook.buck.distributed.thrift.BuildJobStateFileHashes;
import com.facebook.buck.distributed.thrift.PathWithUnixSeparators;
import com.facebook.buck.hashing.FileHashLoader;
import com.facebook.buck.io.ArchiveMemberPath;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.cache.FileHashCache;
import com.google.common.hash.HashCode;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
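/**
 * {@link FileHashLoader} that materializes source files on a distributed build worker. File
 * metadata arrives up front in {@link BuildJobStateFileHashes}; actual contents are fetched
 * lazily through a {@link FileContentsProvider} the first time a path is requested, and
 * directories and symlinks recorded in the entries are recreated on the local
 * {@link ProjectFilesystem}.
 */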
class DistBuildFileMaterializer implements FileHashLoader {
private static final Logger LOG = Logger.get(DistBuildFileMaterializer.class);
private final Map<Path, BuildJobStateFileHashEntry> remoteFileHashesByPath;
private final Set<Path> symlinkedPaths;
private final Set<Path> materializedPaths;
private final FileContentsProvider provider;
private final ProjectFilesystem projectFilesystem;
private final FileHashCache directFileHashCacheDelegate;
public DistBuildFileMaterializer(
final ProjectFilesystem projectFilesystem,
BuildJobStateFileHashes remoteFileHashes,
FileContentsProvider provider,
FileHashCache directFileHashCacheDelegate) {
this.directFileHashCacheDelegate = directFileHashCacheDelegate;
this.remoteFileHashesByPath = DistBuildFileHashes.indexEntriesByPath(
projectFilesystem,
remoteFileHashes);
this.symlinkedPaths = Collections.newSetFromMap(new ConcurrentHashMap<Path, Boolean>());
this.materializedPaths = Collections.newSetFromMap(new ConcurrentHashMap<Path, Boolean>());
this.provider = provider;
this.projectFilesystem = projectFilesystem;
}
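/**
 * Walks every known remote hash entry and prepares the local filesystem up front: entries
 * flagged for preloading are fully materialized, root symlinks are created, plain files are
 * touched as empty placeholders, and directories are created. Later {@link #get(Path)} calls
 * then only need to fetch contents that are actually read.
 */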
public void preloadAllFiles() throws IOException {
for (Path path : remoteFileHashesByPath.keySet()) {
LOG.info("Preloading: [%s]", path.toString());
BuildJobStateFileHashEntry fileHashEntry = remoteFileHashesByPath.get(path);
if (fileHashEntry == null || fileHashEntry.isPathIsAbsolute()) {
continue;
} else if (fileHashEntry.isSetMaterializeDuringPreloading() &&
fileHashEntry.isMaterializeDuringPreloading()) {
get(path);
} else if (fileHashEntry.isSetRootSymLink()) {
materializeSymlink(fileHashEntry, symlinkedPaths);
symlinkedPaths.add(path);
} else if (!fileHashEntry.isDirectory) {
// Touch file
projectFilesystem.createParentDirs(path);
projectFilesystem.touch(path);
} else {
// Create directory
// No need to materialize sub-dirs/files here, as there will be separate entries for those.
projectFilesystem.mkdirs(path);
}
}
}
private void materializeIfNeeded(Path path, Queue<Path> remainingPaths) throws IOException {
if (materializedPaths.contains(path)) {
return;
}
LOG.info("Materializing: [%s]", path.toString());
BuildJobStateFileHashEntry fileHashEntry = remoteFileHashesByPath.get(path);
if (fileHashEntry == null || fileHashEntry.isPathIsAbsolute()) {
materializedPaths.add(path);
return;
}
if (fileHashEntry.isSetRootSymLink()) {
if (!symlinkedPaths.contains(path)) {
materializeSymlink(fileHashEntry, materializedPaths);
}
symlinkIntegrityCheck(fileHashEntry);
materializedPaths.add(path);
return;
}
// TODO(alisdair04,ruibm,shivanker): materialize directories
if (fileHashEntry.isIsDirectory()) {
materializeDirectory(path, fileHashEntry, remainingPaths);
materializedPaths.add(path);
return;
}
// Download contents outside of sync block, so that fetches happen in parallel.
// For a few cases we might get duplicate fetches, but this is much better than single
// threaded fetches.
Optional<InputStream> fileContents = provider.getFileContents(fileHashEntry);
synchronized (this) {
// Double check this path hasn't been materialized,
// as previous check wasn't inside sync block.
if (materializedPaths.contains(path)) {
return;
}
projectFilesystem.createParentDirs(projectFilesystem.resolve(path));
// Write the actual file contents.
if (!fileContents.isPresent()) {
throw new HumanReadableException(
String.format(
"Input source file is missing from stampede. File=[%s]",
fileHashEntry.toString()));
}
try (InputStream sourceStream = fileContents.get()) {
Files.copy(sourceStream, path, StandardCopyOption.REPLACE_EXISTING);
path.toFile().setExecutable(fileHashEntry.isExecutable);
}
materializedPaths.add(path);
}
}
private synchronized void materializeDirectory(
Path path,
BuildJobStateFileHashEntry fileHashEntry,
Queue<Path> remainingPaths) throws IOException {
if (materializedPaths.contains(path)) {
return;
}
projectFilesystem.mkdirs(path);
for (PathWithUnixSeparators unixPath : fileHashEntry.getChildren()) {
remainingPaths.add(projectFilesystem.resolve(Paths.get(unixPath.getPath())));
}
}
private void symlinkIntegrityCheck(BuildJobStateFileHashEntry fileHashEntry) throws IOException {
Path symlink = projectFilesystem.resolve(fileHashEntry.getPath().getPath());
HashCode expectedHash = HashCode.fromString(fileHashEntry.getHashCode());
HashCode actualHash = directFileHashCacheDelegate.get(symlink);
if (!expectedHash.equals(actualHash)) {
throw new RuntimeException(String.format(
"Symlink [%s] had hashcode [%s] during scheduling, but [%s] during build.",
symlink.toAbsolutePath(),
expectedHash,
actualHash));
}
}
private synchronized void materializeSymlink(
BuildJobStateFileHashEntry fileHashEntry, Set<Path> processedPaths) {
Path rootSymlink = projectFilesystem.resolve(fileHashEntry.getRootSymLink().getPath());
if (symlinkedPaths.contains(rootSymlink)) {
processedPaths.add(rootSymlink);
}
if (processedPaths.contains(rootSymlink)) {
return;
}
processedPaths.add(rootSymlink);
if (!projectFilesystem.getPathRelativeToProjectRoot(rootSymlink).isPresent()) {
// RecordingFileHashLoader stored an absolute path (which was also a sym link).
throw new RuntimeException(
"Root symlink is not in project root: " + rootSymlink.toAbsolutePath());
}
Path rootSymlinkTarget =
projectFilesystem.resolve(fileHashEntry.getRootSymLinkTarget().getPath());
LOG.info(
"Materializing sym link [%s] with target [%s]",
rootSymlink.toAbsolutePath().toString(),
rootSymlinkTarget.toAbsolutePath().toString());
try {
projectFilesystem.createParentDirs(rootSymlink);
projectFilesystem.createSymLink(
rootSymlink,
rootSymlinkTarget,
true /* force creation */);
} catch (IOException e) {
LOG.error(e);
throw new RuntimeException(e);
}
}
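/**
 * Materializes the requested path instead of hashing it, processing any children queued up for
 * directories breadth-first. The returned hash is a constant placeholder since this loader is
 * only used to pull files onto disk.
 */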
@Override
public HashCode get(Path path) throws IOException {
Queue<Path> remainingPaths = new LinkedList<>();
remainingPaths.add(path);
while (remainingPaths.size() > 0) {
materializeIfNeeded(remainingPaths.remove(), remainingPaths);
}
return HashCode.fromInt(0);
}
@Override
public long getSize(Path path) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public HashCode get(ArchiveMemberPath archiveMemberPath) throws IOException {
materializeIfNeeded(archiveMemberPath.getArchivePath(), new LinkedList<>());
return HashCode.fromInt(0);
}
}
|
|
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlets.lookup;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.portlets.search.DisplayNameComparator;
import org.jasig.portal.security.IAuthorizationPrincipal;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.services.AuthorizationService;
import org.jasig.services.persondir.IPersonAttributeDao;
import org.jasig.services.persondir.IPersonAttributes;
import org.jasig.services.persondir.support.NamedPersonImpl;
import org.springframework.webflow.context.ExternalContext;
/**
* Implements logic and helper methods for the person-lookup web flow.
*
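* <p>A minimal wiring sketch (the DAO instance, searcher, and query attribute below are
* illustrative, not part of this class):
*
* <pre>
* PersonLookupHelperImpl helper = new PersonLookupHelperImpl();
* helper.setPersonAttributeDao(personAttributeDao); // an injected IPersonAttributeDao
* Map&lt;String, Object&gt; query = new HashMap&lt;String, Object&gt;();
* query.put("sn", "Smith"); // "sn" is just an example query attribute
* List&lt;IPersonAttributes&gt; results = helper.searchForPeople(searcher, query);
* </pre>
*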
* @author Eric Dalquist
* @version $Revision$
*/
public class PersonLookupHelperImpl implements IPersonLookupHelper {
protected final Log logger = LogFactory.getLog(this.getClass());
private IPersonAttributeDao personAttributeDao;
public IPersonAttributeDao getPersonAttributeDao() {
return personAttributeDao;
}
/**
* The {@link IPersonAttributeDao} used to perform lookups.
*/
public void setPersonAttributeDao(IPersonAttributeDao personLookupDao) {
this.personAttributeDao = personLookupDao;
}
/* (non-Javadoc)
* @see org.jasig.portal.portlets.swapper.IPersonLookupHelper#getQueryAttributes(org.springframework.webflow.context.ExternalContext)
*/
public Set<String> getQueryAttributes(ExternalContext externalContext) {
final PortletRequest portletRequest = (PortletRequest)externalContext.getNativeRequest();
final PortletPreferences preferences = portletRequest.getPreferences();
final Set<String> queryAttributes;
final String[] configuredAttributes = preferences.getValues(PERSON_LOOKUP_PERSON_LOOKUP_QUERY_ATTRIBUTES, null);
final String[] excludedAttributes = preferences.getValues(PERSON_LOOKUP_PERSON_LOOKUP_QUERY_ATTRIBUTES_EXCLUDES, null);
//If attributes are configured in portlet prefs just use them
if (configuredAttributes != null) {
queryAttributes = new LinkedHashSet<String>(Arrays.asList(configuredAttributes));
}
//Otherwise provide all available attributes from the IPersonAttributeDao
else {
final Set<String> availableAttributes = this.personAttributeDao.getAvailableQueryAttributes();
queryAttributes = new TreeSet<String>(availableAttributes);
}
//Remove excluded attributes
if (excludedAttributes != null) {
for (final String excludedAttribute : excludedAttributes) {
queryAttributes.remove(excludedAttribute);
}
}
return queryAttributes;
}
/* (non-Javadoc)
* @see org.jasig.portal.portlets.swapper.IPersonLookupHelper#getDisplayAttributes(org.springframework.webflow.context.ExternalContext)
*/
public Set<String> getDisplayAttributes(ExternalContext externalContext) {
final PortletRequest portletRequest = (PortletRequest)externalContext.getNativeRequest();
final PortletPreferences preferences = portletRequest.getPreferences();
final Set<String> displayAttributes;
final String[] configuredAttributes = preferences.getValues(PERSON_LOOKUP_PERSON_DETAILS_DETAILS_ATTRIBUTES, null);
final String[] excludedAttributes = preferences.getValues(PERSON_LOOKUP_PERSON_DETAILS_DETAILS_ATTRIBUTES_EXCLUDES, null);
//If attributes are configured in portlet prefs just use them
if (configuredAttributes != null) {
displayAttributes = new LinkedHashSet<String>();
displayAttributes.addAll(Arrays.asList(configuredAttributes));
}
//Otherwise provide all available attributes from the IPersonAttributeDao
else {
displayAttributes = new TreeSet<String>(personAttributeDao.getPossibleUserAttributeNames());
}
//Remove any excluded attributes
if (excludedAttributes != null) {
for (final String excludedAttribute : excludedAttributes) {
displayAttributes.remove(excludedAttribute);
}
}
return displayAttributes;
}
/* (non-Javadoc)
* @see org.jasig.portal.portlets.lookup.IPersonLookupHelper#getSelf(org.springframework.webflow.context.ExternalContext)
*/
public IPersonAttributes getSelf(ExternalContext externalContext) {
final PortletRequest portletRequest = (PortletRequest)externalContext.getNativeRequest();
final String username = portletRequest.getRemoteUser();
return this.personAttributeDao.getPerson(username);
}
/* (non-Javadoc)
* @see org.jasig.portal.portlets.lookup.IPersonLookupHelper#searchForPeople(org.jasig.portal.security.IPerson, java.util.Map)
*/
public List<IPersonAttributes> searchForPeople(final IPerson searcher, final Map<String, Object> query) {
// get the IAuthorizationPrincipal for the searching user
final IAuthorizationPrincipal principal = getPrincipalForUser(searcher);
// build a set of all possible user attributes the current user has
// permission to view
final Set<String> permittedAttributes = getAvailableAttributes(principal);
// remove any query attributes that the user does not have permission
// to view
final Map<String, Object> inUseQuery = new HashMap<String, Object>();
for (Map.Entry<String, Object> queryEntry : query.entrySet()) {
final String attr = queryEntry.getKey();
if (permittedAttributes.contains(attr)) {
inUseQuery.put(attr, queryEntry.getValue());
} else {
this.logger.warn("User '" + searcher.getName() + "' attempted searching on attribute '" + attr + "' which is not allowed in the current configuration. The attribute will be ignored.");
}
}
// ensure the query has at least one search attribute defined
if (inUseQuery.keySet().size() == 0) {
throw new IllegalArgumentException("Search query is empty");
}
// get the set of people matching the search query
final Set<IPersonAttributes> people = this.personAttributeDao.getPeople(inUseQuery);
if (people == null) {
return Collections.emptyList();
}
// for each returned match, check to see if the current user has
// permissions to view this user
List<IPersonAttributes> list = new ArrayList<IPersonAttributes>();
for (IPersonAttributes person : people) {
// if the current user has permission to view this person, construct
// a new representation of the person limited to attributes the
// searcher has permissions to view
final IPersonAttributes visiblePerson = getVisiblePerson(principal, person, permittedAttributes);
if (visiblePerson != null) {
list.add(visiblePerson);
}
}
// sort the list by display name
Collections.sort(list, new DisplayNameComparator());
// limit the list to a maximum of 10 returned results
// TODO: make this limit configurable
if (list.size() > 10) {
list = list.subList(0, 10);
}
return list;
}
/* (non-Javadoc)
* @see org.jasig.portal.portlets.lookup.IPersonLookupHelper#findPerson(org.jasig.portal.security.IPerson, java.lang.String)
*/
public IPersonAttributes findPerson(final IPerson searcher, final String username) {
// get the IAuthorizationPrincipal for the searching user
final IAuthorizationPrincipal principal = getPrincipalForUser(searcher);
// build a set of all possible user attributes the current user has
// permission to view
final Set<String> permittedAttributes = getAvailableAttributes(principal);
// get the set of people matching the search query
final IPersonAttributes person = this.personAttributeDao.getPerson(username);
if (person == null) {
logger.info("No user found with username matching " + username);
return null;
}
// if the current user has permission to view this person, construct
// a new representation of the person limited to attributes the
// searcher has permissions to view
return getVisiblePerson(principal, person, permittedAttributes);
}
/**
* Get the authorization principal matching the supplied IPerson.
*
* @param person
* @return
*/
protected IAuthorizationPrincipal getPrincipalForUser(final IPerson person) {
final EntityIdentifier ei = person.getEntityIdentifier();
return AuthorizationService.instance().newPrincipal(ei.getKey(), ei.getType());
}
/**
* Get the set of all user attribute names defined in the portal for which
* the specified principal has the attribute viewing permission.
*
* @param principal
* @return
*/
protected Set<String> getAvailableAttributes(final IAuthorizationPrincipal principal) {
final Set<String> attributeNames = this.personAttributeDao.getPossibleUserAttributeNames();
return getAvailableAttributes(principal, attributeNames);
}
/**
* Filter the provided set of user attribute names to contain only those
* the specified principal has permissions to view.
*
* @param principal
* @param attributeNames
* @return
*/
protected Set<String> getAvailableAttributes(final IAuthorizationPrincipal principal, final Set<String> attributeNames) {
final Set<String> permittedAttributes = new HashSet<String>();
for (String attr : attributeNames) {
if (principal.hasPermission(USERS_OWNER, VIEW_ATTRIBUTE_PERMISSION, attr)) {
permittedAttributes.add(attr);
}
}
return permittedAttributes;
}
/**
* Filter an IPersonAttributes for a specified viewing principal. The returned
* person will contain only the attributes provided in the permitted attributes
* list. Returns <code>null</code> if the principal does not have permission to
* view the user.
*
* @param principal
* @param person
* @param permittedAttributes
* @return
*/
protected IPersonAttributes getVisiblePerson(final IAuthorizationPrincipal principal,
final IPersonAttributes person, final Set<String> permittedAttributes) {
// first check to see if the principal has permission to view this user
if (person.getName() != null && principal.hasPermission(USERS_OWNER, VIEW_USER_PERMISSION, person.getName())) {
// if the user has permission, filter the person attributes according
// to the specified permitted attributes
final Map<String,List<Object>> visibleAttributes = new HashMap<String,List<Object>>();
for (String attr : person.getAttributes().keySet()) {
if (permittedAttributes.contains(attr)) {
visibleAttributes.put(attr, person.getAttributeValues(attr));
}
}
// use the filtered attribute list to create and return a new
// person object
final IPersonAttributes visiblePerson = new NamedPersonImpl(person.getName(), visibleAttributes);
return visiblePerson;
} else {
logger.debug("Principal " + principal.getKey() + " does not have permissions to view user " + person.getName());
return null;
}
}
}
|
|
/*
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.utils;
import com.github.ambry.clustermap.HelixVcrUtil;
import com.github.ambry.server.StatsReportType;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import org.apache.helix.HelixAdmin;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.zookeeper.zkclient.ZkServer;
import org.apache.helix.zookeeper.zkclient.exception.ZkException;
import org.apache.helix.zookeeper.zkclient.exception.ZkInterruptedException;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
/**
* A class consisting of common util methods useful for tests.
*/
public class TestUtils {
public static final long TTL_SECS = TimeUnit.DAYS.toSeconds(7);
public static final Random RANDOM = new Random();
public static final List<Boolean> BOOLEAN_VALUES = Collections.unmodifiableList(Arrays.asList(true, false));
private static final int CHECK_INTERVAL_IN_MS = 100;
private static final String CHARACTERS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
private static final Logger logger = LoggerFactory.getLogger(TestUtils.class);
/**
* Return the number of threads currently running with a name containing the given pattern.
* @param pattern the pattern to compare
* @return the number of threads currently running with a name containing the given pattern.
*/
public static int numThreadsByThisName(String pattern) {
int count = 0;
for (Thread t : Thread.getAllStackTraces().keySet()) {
if (t.getName().contains(pattern)) {
count++;
}
}
return count;
}
/**
* Return the thread with a name that contains the given pattern. If there are multiple such threads,
* return the first such thread.
* @param pattern the pattern to compare
* @return the first thread with a name that contains the given pattern.
*/
public static Thread getThreadByThisName(String pattern) {
Thread thread = null;
for (Thread t : Thread.getAllStackTraces().keySet()) {
if (t.getName().contains(pattern)) {
thread = t;
break;
}
}
return thread;
}
/**
* Return all the threads with a name that contains the given pattern.
* @param pattern the pattern to compare
* @return all the threads with a name that contains the given pattern.
*/
public static List<Thread> getAllThreadsByThisName(String pattern) {
List<Thread> threads = new ArrayList<>();
for (Thread t : Thread.getAllStackTraces().keySet()) {
if (t.getName().contains(pattern)) {
threads.add(t);
}
}
return threads;
}
/**
* Gets a byte array of length {@code size} with random bytes.
* @param size the required length of the random byte array.
* @return a byte array of length {@code size} with random bytes.
*/
public static byte[] getRandomBytes(int size) {
byte[] bytes = new byte[size];
RANDOM.nextBytes(bytes);
return bytes;
}
/**
* Gets a random element from the given array of elements.
* @param elements the array of elements.
* @param <T> the type of the elements.
* @return a randomly chosen element of {@code elements}.
*/
public static <T> T getRandomElement(T[] elements) {
return elements[RANDOM.nextInt(elements.length)];
}
/**
* Awaits on the passed-in {@link CountDownLatch}. If the wait times out, an exception is thrown.
* @param latch The latch to await on.
* @param timeoutMs Timeout in millisecond.
* @throws TimeoutException If the latch does not reach zero within the specified time.
* @throws InterruptedException If wait is interrupted.
*/
public static void awaitLatchOrTimeout(CountDownLatch latch, long timeoutMs)
throws TimeoutException, InterruptedException {
if (!latch.await(timeoutMs, TimeUnit.MILLISECONDS)) {
throw new TimeoutException("Too long time to complete operation.");
}
}
/**
* Waits until the given thread is in the {@code expectedState}, up to the specified {@code timeoutMs} time.
* @param thread the thread whose state needs to be checked.
* @param expectedState the expected thread state.
* @param timeoutMs time in ms after which the check is considered failed if {@code expectedState} is not reached.
* @return {@code true} if the thread reached the expected state within the timeout, {@code false} otherwise.
*/
public static boolean waitUntilExpectedState(Thread thread, Thread.State expectedState, long timeoutMs)
throws InterruptedException {
long timeSoFar = 0;
while (expectedState != thread.getState()) {
Thread.sleep(10);
timeSoFar += 10;
if (timeSoFar >= timeoutMs) {
return false;
}
}
return true;
}
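// Illustrative sketch (not part of the original class): waits up to one second for the first thread
// whose name contains the given pattern to reach the WAITING state. The pattern is supplied by the caller.
public static boolean exampleWaitForWaitingState(String pattern) throws InterruptedException {
Thread thread = getThreadByThisName(pattern);
return thread != null && waitUntilExpectedState(thread, Thread.State.WAITING, 1000);
}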
/**
* Succeed if the {@code body} throws an exception of type {@code exceptionClass}, otherwise fail.
* @param exceptionClass the type of exception that should occur.
* @param body the body to execute. This should throw an exception of type {@code exceptionClass}
* @param errorAction if non-null and the exception class matches, execute this action.
* @throws Exception when an unexpected exception occurs.
*/
public static <E extends Exception> void assertException(Class<E> exceptionClass, ThrowingRunnable body,
ThrowingConsumer<E> errorAction) throws Exception {
try {
body.run();
Assert.fail("Should have thrown exception");
} catch (Exception e) {
if (exceptionClass.isInstance(e)) {
if (errorAction != null) {
errorAction.accept(exceptionClass.cast(e));
}
} else {
throw e;
}
}
}
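// Illustrative usage sketch (not part of the original class): verifies that parsing a malformed number
// throws NumberFormatException and lets the error action inspect the thrown exception's message.
public static void exampleAssertExceptionUsage() throws Exception {
assertException(NumberFormatException.class, () -> Integer.parseInt("not-a-number"),
e -> assertTrue("unexpected message", e.getMessage().contains("not-a-number")));
}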
/**
* Asserts that {@code actual} and {@code expect} are equal. Checks that {@code actual}
* contains no extra data if {@code checkActualComplete} is {@code true}.
*/
public static void assertInputStreamEqual(InputStream expect, InputStream actual, int size,
boolean checkActualComplete) throws IOException {
byte[] actualBuf = Utils.readBytesFromStream(actual, size);
if (checkActualComplete) {
int finalRead = actual.read();
// some InputStream impls in Ambry return 0 instead of -1 when they end
assertTrue("Actual stream had more bytes than expected", finalRead == 0 || finalRead == -1);
}
byte[] expectBuf = Utils.readBytesFromStream(expect, size);
assertArrayEquals("Data from actual stream does not match expected", expectBuf, actualBuf);
}
/**
* Verify that the {@code inputStream} satisfies basic properties of the {@link InputStream#read(byte[], int, int)}
* contract: null and out-of-bounds arguments are rejected, and a zero-length read returns 0.
* @param inputStream the stream to check.
* @throws Exception if an unexpected exception occurs.
*/
public static void validateInputStreamContract(InputStream inputStream) throws Exception {
int numBytes = 8;
byte[] bytes = new byte[numBytes];
assertException(NullPointerException.class, () -> inputStream.read(null, 0, 5), null);
assertException(IndexOutOfBoundsException.class, () -> inputStream.read(bytes, -1, 5), null);
assertException(IndexOutOfBoundsException.class, () -> inputStream.read(bytes, 0, -1), null);
assertException(IndexOutOfBoundsException.class, () -> inputStream.read(bytes, numBytes, 1), null);
assertException(IndexOutOfBoundsException.class, () -> inputStream.read(bytes, 1, numBytes), null);
Assert.assertEquals(0, inputStream.read(bytes, 0, 0));
}
/**
* Read through the {@code inputStream} using the no-arg read method until {@code -1} is returned,
* and verify that the expected number of bytes {@code expectedLength} is read.
* @param inputStream
* @param expectedLength
* @throws IOException
*/
public static void readInputStreamAndValidateSize(InputStream inputStream, long expectedLength) throws IOException {
int readVal = 0;
long numRead = 0;
do {
readVal = inputStream.read();
numRead++;
} while (readVal != -1);
numRead--;
Assert.assertEquals("Unexpected inputstream read length", expectedLength, numRead);
}
/**
* Gets a temporary directory with the given prefix. The directory will be deleted when the virtual machine terminates.
* @param prefix The prefix for the name of the temporary directory.
* @return The absolute path of the generated temporary directory.
* @throws IOException
*/
public static String getTempDir(String prefix) throws IOException {
File tempDir = Files.createTempDirectory(prefix + RANDOM.nextInt(1000)).toFile();
tempDir.deleteOnExit();
return tempDir.getAbsolutePath();
}
/**
* Generates and returns a random Hex String of the specified size
* @param size the desired length of the hex string
* @return the hex string thus generated
*/
public static String getRandomKey(int size) {
StringBuilder sb = new StringBuilder();
while (sb.length() < size) {
sb.append(Integer.toHexString(TestUtils.RANDOM.nextInt()));
}
sb.setLength(size);
return sb.toString();
}
/**
* Generates and returns a random alphanumeric string of the specified length.
* @param length the desired length of the string.
* @return the generated random string.
*/
public static String getRandomString(int length) {
StringBuilder sb = new StringBuilder(length);
for (int i = 0; i < length; i++) {
sb.append(CHARACTERS.charAt(RANDOM.nextInt(CHARACTERS.length())));
}
return sb.toString();
}
/**
* A wrapper class to start and shut down a {@link ZooKeeperServer}. The code is adapted from
* {@link org.apache.helix.zookeeper.zkclient.ZkServer}. We maintain this class to speed up tests because
* {@link org.apache.helix.zookeeper.zkclient.ZkServer} calls NetworkUtil.getLocalHostNames(), which is slow on
* Mac OS; it does so only to log the local host names and to verify that "localhost" is among them, neither of
* which is necessary in tests.
*/
static class ZkServerWrapper {
private ZooKeeperServer zk;
private NIOServerCnxnFactory nioFactory;
private int port;
private File dataDir;
private File dataLogDir;
public ZkServerWrapper(String dataDir, String logDir, int port) {
this.dataDir = new File(dataDir);
this.dataLogDir = new File(logDir);
this.dataDir.mkdirs();
this.dataLogDir.mkdirs();
this.port = port;
}
public void start() {
try {
zk = new ZooKeeperServer(dataDir, dataLogDir, ZkServer.DEFAULT_TICK_TIME);
zk.setMinSessionTimeout(ZkServer.DEFAULT_MIN_SESSION_TIMEOUT);
nioFactory = new NIOServerCnxnFactory();
int maxClientConnections = 0; // 0 means unlimited
nioFactory.configure(new InetSocketAddress(port), maxClientConnections);
nioFactory.startup(zk);
} catch (IOException e) {
throw new ZkException("Unable to start single ZooKeeper server.", e);
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
}
logger.info("ZooKeeperServer started successfully.");
}
public void shutdown() {
logger.info("Shutting down ZkServer...");
if (nioFactory != null) {
nioFactory.shutdown();
try {
nioFactory.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
nioFactory = null;
}
if (zk != null) {
zk.shutdown();
zk = null;
}
logger.info("Shutting down ZooKeeperServer...done");
}
}
/**
* A class to initialize and hold information about each Zk Server.
*/
public static class ZkInfo {
private String dcName;
private byte id;
private int port;
private String dataDir;
private String logDir;
private ZkServerWrapper zkServer;
private boolean isZkServerStarted = false;
/**
* Instantiate, optionally starting a Zk server.
* @param tempDirPath the temporary directory string to use.
* @param dcName the name of the datacenter.
* @param id the id of the datacenter.
* @param port the port at which this Zk server should run on localhost.
* @param start whether to start the Zk server immediately.
*/
public ZkInfo(String tempDirPath, String dcName, byte id, int port, boolean start) {
this.dcName = dcName;
this.id = id;
this.port = port;
this.dataDir = tempDirPath + "/dataDir";
this.logDir = tempDirPath + "/logDir";
if (start) {
startZkServer(port, dataDir, logDir);
}
}
public void startZkServer() {
if (zkServer != null) {
zkServer.start();
isZkServerStarted = true;
logger.info("ZooKeeperServer started successfully.");
}
}
private void startZkServer(int port, String dataDir, String logDir) {
// start zookeeper
zkServer = new ZkServerWrapper(dataDir, logDir, port);
zkServer.start();
isZkServerStarted = true;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public String getDcName() {
return dcName;
}
public byte getId() {
return id;
}
public void shutdown() {
if (zkServer != null) {
zkServer.shutdown();
isZkServerStarted = false;
}
}
public boolean isZkServerStarted() {
return isZkServerStarted;
}
}
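// Illustrative sketch (not part of the original class): starts a local ZooKeeper for a hypothetical
// test datacenter on an arbitrary port, checks that it is up, and shuts it down.
public static void exampleZkInfoLifecycle() throws IOException {
ZkInfo zkInfo = new ZkInfo(getTempDir("zk-"), "DC1", (byte) 0, 2199, true);
try {
assertTrue("ZooKeeper server should have started", zkInfo.isZkServerStarted());
} finally {
zkInfo.shutdown();
}
}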
/**
* Similar to {@link Runnable}, but able to throw checked exceptions.
*/
public interface ThrowingRunnable {
/**
* Run the action.
* @throws Exception
*/
void run() throws Exception;
}
/**
* Periodically check whether the value supplied by {@code expressionToCheck} equals {@code expectedValue}, until timeout.
* @param expectedValue the expected value.
* @param expressionToCheck the expression to check.
* @param timeoutInMs the timeout in milliseconds.
* @return {@code true} if the values match before the timeout.
*/
public static <T> boolean checkAndSleep(T expectedValue, Supplier<T> expressionToCheck, int timeoutInMs) {
long startTime = System.currentTimeMillis();
try {
while (!Objects.equals(expectedValue, expressionToCheck.get())) {
if (System.currentTimeMillis() - startTime >= timeoutInMs) {
return false;
}
Thread.sleep(CHECK_INTERVAL_IN_MS);
}
} catch (InterruptedException e) {
return false;
}
return true;
}
/**
* Periodically check a boolean condition until timeout.
* @param conditionToCheck the condition to check.
* @param timeoutInMs the timeout in milliseconds.
* @return {@code true} if the condition becomes true before the timeout.
*/
public static boolean checkAndSleep(Supplier<Boolean> conditionToCheck, int timeoutInMs) {
return checkAndSleep(true, conditionToCheck, timeoutInMs);
}
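// Illustrative sketch (not part of the original class): waits up to two seconds for all threads whose
// names contain the given pattern to exit, by polling the thread count until it reaches zero.
public static boolean exampleWaitForThreadsToExit(String pattern) {
return checkAndSleep(0, () -> numThreadsByThisName(pattern), 2000);
}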
/**
* Create a container storage map. This map has two levels: the first level's key is the account id and the second
* level's key is the container id. Account ids and container ids both start from 1. The value is the storage usage,
* which will be greater than or equal to {@code minValue} and less than {@code maxValue}.
* @param numAccounts The number of accounts in the returned map.
* @param numContainerPerAccount The number of container under each account
* @param maxValue The maximum value for storage usage.
* @param minValue The minimum value for storage usage.
* @return A map representing the container storage usage.
*/
public static Map<String, Map<String, Long>> makeStorageMap(int numAccounts, int numContainerPerAccount,
long maxValue, long minValue) {
Random random = new Random();
Map<String, Map<String, Long>> accountMap = new HashMap<>();
short accountId = 1;
for (int i = 0; i < numAccounts; i++) {
Map<String, Long> containerMap = new HashMap<>();
accountMap.put(String.valueOf(accountId), containerMap);
short containerId = 1;
for (int j = 0; j < numContainerPerAccount; j++) {
long usage = Math.abs(random.nextLong()) % (maxValue - minValue) + minValue;
containerMap.put(String.valueOf(containerId), usage);
containerId++;
}
accountId++;
}
return accountMap;
}
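// Illustrative sketch (not part of the original class): builds a 2-account, 3-container storage map with
// usages in [100, 1000) and reads back one entry; the account/container keys are the generated "1", "2", ...
public static void exampleMakeStorageMap() {
Map<String, Map<String, Long>> usage = makeStorageMap(2, 3, 1000, 100);
logger.info("Account 1, container 1 usage: {}", usage.get("1").get("1"));
}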
/**
* Verify that the resources and partitions in the src cluster and the dest cluster are the same.
* @return {@code true} if the two clusters are in sync, {@code false} otherwise.
*/
public static boolean isSrcDestSync(String srcZkString, String srcClusterName, String destZkString,
String destClusterName) {
HelixAdmin srcAdmin = new ZKHelixAdmin(srcZkString);
Set<String> srcResources = new HashSet<>(srcAdmin.getResourcesInCluster(srcClusterName));
HelixAdmin destAdmin = new ZKHelixAdmin(destZkString);
Set<String> destResources = new HashSet<>(destAdmin.getResourcesInCluster(destClusterName));
for (String resource : srcResources) {
if (HelixVcrUtil.ignoreResourceKeyWords.stream().anyMatch(resource::contains)) {
System.out.println("Resource " + resource + " from src cluster is ignored");
continue;
}
if (destResources.contains(resource)) {
// check if every partition exist.
Set<String> srcPartitions = srcAdmin.getResourceIdealState(srcClusterName, resource).getPartitionSet();
Set<String> destPartitions = destAdmin.getResourceIdealState(destClusterName, resource).getPartitionSet();
for (String partition : srcPartitions) {
if (!destPartitions.contains(partition)) {
return false;
}
}
} else {
return false;
}
}
return true;
}
}
|
|
/*
* Copyright (c) 2016 CommonsWare, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.guardianproject.netcipher;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.support.test.runner.AndroidJUnit4;
import info.guardianproject.netcipher.client.StrongBuilder;
import info.guardianproject.netcipher.client.StrongOkHttpClientBuilder;
import info.guardianproject.netcipher.proxy.OrbotHelper;
import info.guardianproject.netcipher.proxy.StatusCallback;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static android.support.test.InstrumentationRegistry.getContext;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@RunWith(AndroidJUnit4.class)
public class StrongOkHttpClientBuilderTest {
private static final String TEST_URL =
"https://gitlab.com/guardianproject/NetCipher/raw/6006c45988/netciphertest/res/test.json";
private static final String EXPECTED = "{\"Hello\": \"world\"}\n";
private static AtomicBoolean initialized = new AtomicBoolean(false);
private static AtomicBoolean isOrbotInstalled = null;
private static CountDownLatch initLatch = new CountDownLatch(1);
private CountDownLatch responseLatch;
private Exception innerException = null;
private String testResult = null;
@Before
public void setUp() throws InterruptedException {
if (!initialized.get()) {
OrbotHelper
.get(getContext())
.statusTimeout(60000)
.addStatusCallback(
new StatusCallback() {
@Override
public void onEnabled(Intent statusIntent) {
isOrbotInstalled = new AtomicBoolean(true);
initLatch.countDown();
}
@Override
public void onStarting() {
}
@Override
public void onStopping() {
}
@Override
public void onDisabled() {
// we got a broadcast with a status of off, so keep waiting
}
@Override
public void onStatusTimeout() {
initLatch.countDown();
throw new RuntimeException("Orbot status request timed out");
}
@Override
public void onNotYetInstalled() {
isOrbotInstalled = new AtomicBoolean(false);
initLatch.countDown();
}
})
.init();
assertTrue("setup timeout", initLatch.await(600, TimeUnit.SECONDS));
initialized.set(true);
}
responseLatch = new CountDownLatch(1);
}
@Test
public void testOrbotInstalled() {
assertTrue("we were not initialized", initialized.get());
assertNotNull("we did not get an Orbot status", isOrbotInstalled);
try {
getContext().getPackageManager().getApplicationInfo("org.torproject.android", 0);
assertTrue("Orbot is installed, but NetCipher thinks it is not", isOrbotInstalled.get());
} catch (PackageManager.NameNotFoundException e) {
assertFalse("Orbot not installed, but NetCipher thinks it is", isOrbotInstalled.get());
}
}
@Test
public void testBuilder() throws Exception {
assertTrue("we were not initialized", initialized.get());
assertNotNull("we did not get an Orbot status", isOrbotInstalled);
if (isOrbotInstalled.get()) {
StrongOkHttpClientBuilder builder = StrongOkHttpClientBuilder.forMaxSecurity(getContext());
testStrongBuilder(builder, new TestBuilderCallback<OkHttpClient>() {
@Override
protected void loadResult(OkHttpClient client) throws Exception {
Request request = new Request.Builder().url(TEST_URL).build();
testResult = client.newCall(request).execute().body().string();
}
});
}
}
@Test
public void testValidatedBuilder() throws Exception {
assertTrue("we were not initialized", initialized.get());
assertNotNull("we did not get an Orbot status", isOrbotInstalled);
if (isOrbotInstalled.get()) {
StrongOkHttpClientBuilder builder =
StrongOkHttpClientBuilder.forMaxSecurity(getContext()).withTorValidation();
testStrongBuilder(builder, new TestBuilderCallback<OkHttpClient>() {
@Override
protected void loadResult(OkHttpClient client) throws Exception {
Request request = new Request.Builder().url(TEST_URL).build();
testResult = client.newCall(request).execute().body().string();
}
});
}
}
private void testStrongBuilder(StrongBuilder builder,
TestBuilderCallback callback)
throws Exception {
testResult = null;
builder.build(callback);
assertTrue(responseLatch.await(600, TimeUnit.SECONDS));
if (innerException != null) {
throw innerException;
}
assertEquals(EXPECTED, testResult);
}
private abstract class TestBuilderCallback<C>
implements StrongBuilder.Callback<C> {
abstract protected void loadResult(C connection)
throws Exception;
@Override
public void onConnected(C connection) {
try {
loadResult(connection);
responseLatch.countDown();
} catch (Exception e) {
innerException = e;
responseLatch.countDown();
}
}
@Override
public void onConnectionException(Exception e) {
innerException = e;
responseLatch.countDown();
}
@Override
public void onTimeout() {
responseLatch.countDown();
}
@Override
public void onInvalid() {
responseLatch.countDown();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm;
import org.apache.storm.utils.Utils;
import org.apache.storm.utils.Time;
import java.nio.channels.ClosedByInterruptException;
import java.util.Comparator;
import java.util.Random;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The timer defined in this file is very similar to java.util.Timer, except
* it integrates with Storm's time simulation capabilities. This lets us test
* code that does asynchronous work on the timer thread.
*/
public class StormTimer implements AutoCloseable {
public static class QueueEntry {
public final Long endTimeMs;
public final Runnable func;
public final String id;
public QueueEntry(Long endTimeMs, Runnable func, String id) {
this.endTimeMs = endTimeMs;
this.func = func;
this.id = id;
}
}
public static class StormTimerTask extends Thread {
// initialCapacity set to 11 since it's the default initial capacity of PriorityBlockingQueue
private PriorityBlockingQueue<QueueEntry> queue = new PriorityBlockingQueue<QueueEntry>(11, new Comparator<QueueEntry>() {
@Override
public int compare(QueueEntry o1, QueueEntry o2) {
// compare the boxed long values directly; subtracting truncated int values can overflow for millisecond timestamps
return Long.compare(o1.endTimeMs, o2.endTimeMs);
}
});
// boolean to indicate whether timer is active
private AtomicBoolean active = new AtomicBoolean(false);
// function to call when timer is killed
private Thread.UncaughtExceptionHandler onKill;
//random number generator
private Random random = new Random();
@Override
public void run() {
while (this.active.get()) {
QueueEntry queueEntry = null;
try {
queueEntry = this.queue.peek();
if ((queueEntry != null) && (Time.currentTimeMillis() >= queueEntry.endTimeMs)) {
// It is imperative to not run the function
// inside the timer lock. Otherwise, it is
// possible to deadlock if the fn deals with
// other locks, like the submit lock.
this.queue.remove(queueEntry);
queueEntry.func.run();
} else if (queueEntry != null) {
// If any events are scheduled, sleep until
// event generation. If any recurring events
// are scheduled then we will always go
// through this branch, sleeping only the
// exact necessary amount of time. We give
// an upper bound, e.g. 1000 millis, to the
// sleeping time, to limit the response time
// for detecting any new event within 1 secs.
Time.sleep(Math.min(1000, (queueEntry.endTimeMs - Time.currentTimeMillis())));
} else {
// Otherwise poll to see if any new event
// was scheduled. This is, in essence, the
// response time for detecting any new event
// schedulings when there are no scheduled
// events.
Time.sleep(1000);
}
} catch (Throwable e) {
if (!(Utils.exceptionCauseIsInstanceOf(InterruptedException.class, e))
&& !(Utils.exceptionCauseIsInstanceOf(ClosedByInterruptException.class, e))) {
this.onKill.uncaughtException(this, e);
this.setActive(false);
}
}
}
}
public void setOnKillFunc(Thread.UncaughtExceptionHandler onKill) {
this.onKill = onKill;
}
public void setActive(boolean flag) {
this.active.set(flag);
}
public boolean isActive() {
return this.active.get();
}
public void add(QueueEntry queueEntry) {
this.queue.add(queueEntry);
}
}
//task to run
private StormTimerTask task = new StormTimerTask();
/**
* Makes a timer backed by a StormTimerTask thread and starts it.
* @param name name of the timer
* @param onKill function to call when the timer is killed unexpectedly
*/
public StormTimer (String name, Thread.UncaughtExceptionHandler onKill) {
if (onKill == null) {
throw new RuntimeException("onKill func is null!");
}
if (name == null) {
this.task.setName("timer");
} else {
this.task.setName(name);
}
this.task.setOnKillFunc(onKill);
this.task.setActive(true);
this.task.setDaemon(true);
this.task.setPriority(Thread.MAX_PRIORITY);
this.task.start();
}
/**
* Schedule a function to be executed in the timer
* @param delaySecs the number of seconds to delay before running the function
* @param func the function to run
* @param checkActive whether to check if the timer is active
* @param jitterMs add jitter to the run
*/
public void schedule(int delaySecs, Runnable func, boolean checkActive, int jitterMs) {
if (func == null) {
throw new RuntimeException("function to schedule is null!");
}
if (checkActive) {
checkActive();
}
String id = Utils.uuid();
long endTimeMs = Time.currentTimeMillis() + Time.secsToMillisLong(delaySecs);
if (jitterMs > 0) {
endTimeMs = this.task.random.nextInt(jitterMs) + endTimeMs;
}
task.add(new QueueEntry(endTimeMs, func, id));
}
public void schedule(int delaySecs, Runnable func) {
schedule(delaySecs, func, true, 0);
}
/**
* Schedule a function to run recurrently
* @param delaySecs the number of seconds to delay before running the function
* @param recurSecs the time between each invocation
* @param func the function to run
*/
public void scheduleRecurring(int delaySecs, final int recurSecs, final Runnable func) {
schedule(delaySecs, new Runnable() {
@Override
public void run() {
func.run();
// This avoids a race condition with cancel-timer.
schedule(recurSecs, this, false, 0);
}
});
}
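// Illustrative usage sketch (not part of the original class), assuming the caller owns the timer;
// the handler and sendHeartbeat() below are hypothetical:
//
//   StormTimer timer = new StormTimer("heartbeat-timer", (t, e) -> e.printStackTrace());
//   timer.scheduleRecurring(5, 10, () -> sendHeartbeat()); // first run after 5s, then every 10s
//   ...
//   timer.close(); // stops the timer thread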
/**
* schedule a function to run recurrently with jitter
* @param delaySecs the number of seconds to delay before running the function
* @param recurSecs the time between each invocation
* @param jitterMs jitter added to the run
* @param func the function to run
*/
public void scheduleRecurringWithJitter(int delaySecs, final int recurSecs, final int jitterMs, final Runnable func) {
schedule(delaySecs, new Runnable() {
@Override
public void run() {
func.run();
// This avoids a race condition with cancel-timer.
schedule(recurSecs, this, false, jitterMs);
}
});
}
/**
* check if timer is active
*/
private void checkActive() {
if (!this.task.isActive()) {
throw new IllegalStateException("Timer is not active");
}
}
/**
* cancel timer
*/
@Override
public void close() throws InterruptedException {
if (this.task.isActive()) {
this.task.setActive(false);
this.task.interrupt();
this.task.join();
}
}
/**
* Check whether the timer thread is waiting. Used in Storm's time simulation.
*/
public boolean isTimerWaiting() {
return Time.isThreadWaiting(task);
}
}
|
|
package com.conveyal.r5.analyst;
import com.beust.jcommander.ParameterException;
import com.conveyal.r5.util.InputStreamProvider;
import com.csvreader.CsvReader;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Envelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static com.conveyal.r5.common.GeometryUtils.checkWgsEnvelopeSize;
import static com.conveyal.r5.streets.VertexStore.fixedDegreesToFloating;
/**
* These are points serving as origins or destinations in an accessibility analysis which are not constrained to
* a regular grid. Each point has an arbitrary latitude and longitude attached to it.
* This class re-uses some of the legacy code, which was removed in R5 PR #338.
*/
public class FreeFormPointSet extends PointSet {
private static final Logger LOG = LoggerFactory.getLogger(FreeFormPointSet.class);
/** A unique identifier for each feature. */
private final String[] ids;
/** The latitude of each point. */
private final double[] lats;
/** The longitude of each point. */
private final double[] lons;
/** The number of opportunities located at each point. */
private final double[] counts;
// TODO check that all identifiers are unique
/**
* Create a FreeFormPointset from a CSV file, which must have latitude and longitude columns with the values of
* latField and lonField in the header row. If idField is supplied, its column will be used to supply id values
* for the points; if not, row numbers will be used as the ids.
*/
public static FreeFormPointSet fromCsv (
InputStreamProvider csvInputStreamProvider,
String latField,
String lonField,
String idField,
String countField
) throws IOException {
/* First, scan through the file to count lines and check for rows with the wrong number of columns. */
int nRecs;
int latCol = -1;
int lonCol = -1;
int idCol = -1;
int countCol = -1;
try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) {
CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8);
reader.readHeaders();
int nCols = reader.getHeaderCount();
for (int c = 0; c < nCols; c++) {
String header = reader.getHeader(c);
// match the latitude header case-insensitively, consistent with the other columns below
if (header.equalsIgnoreCase(latField)) {
latCol = c;
} else if (header.equalsIgnoreCase(lonField)) {
lonCol = c;
} else if (header.equalsIgnoreCase(idField)) {
idCol = c;
} else if (header.equalsIgnoreCase(countField)) {
countCol = c;
}
}
if (latCol < 0 || lonCol < 0) {
throw new ParameterException("CSV file did not contain the specified latitude or longitude column.");
}
if (idField != null && idCol < 0) {
throw new ParameterException("CSV file did not contain the specified ID column.");
}
if (countField != null && countCol < 0) {
throw new ParameterException("CSV file did not contain the specified opportunity count column.");
}
while (reader.readRecord()) {
if (reader.getColumnCount() != nCols) {
String message = String.format(
"CSV header has %d fields, record %d has %d fields.",
nCols,
reader.getCurrentRecord(),
reader.getColumnCount()
);
throw new ParameterException(message);
}
}
// getCurrentRecord is zero-based and does not include headers or blank lines
// FIXME isn't this creating one record too many, and leaving it blank? Verify.
nRecs = (int) reader.getCurrentRecord() + 1;
}
/* If we reached here, the file is entirely readable. Re-read it from the beginning and record values. */
// Note that we're doing two passes just so we know the array size. We could just use TIntLists.
int rec = -1;
try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) {
CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8);
FreeFormPointSet ret = new FreeFormPointSet(nRecs);
ret.name = countField != null ? countField : "[COUNT]";
reader.readHeaders();
while (reader.readRecord()) {
rec = (int) reader.getCurrentRecord();
ret.lats[rec] = Double.parseDouble(reader.get(latCol));
ret.lons[rec] = Double.parseDouble(reader.get(lonCol));
// If ID column was specified and present, use it. Otherwise, use record number as ID.
ret.ids[rec] = idCol < 0 ? String.valueOf(rec) : reader.get(idCol);
// If count column was specified and present, use it. Otherwise, one opportunity per point.
ret.counts[rec] = countCol < 0 ? 1D : Double.parseDouble(reader.get(countCol));
}
checkWgsEnvelopeSize(ret.getWgsEnvelope(), "freeform pointset");
return ret;
} catch (NumberFormatException nfe) {
throw new ParameterException(
String.format("Improperly formatted floating point value on line %d of CSV input", rec)
);
}
}
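// Illustrative usage sketch (not part of the original class); the provider and column names below are
// hypothetical and must match the CSV header row:
//
//   FreeFormPointSet points = FreeFormPointSet.fromCsv(csvProvider, "lat", "lon", "id", "jobs");
//   double totalJobs = points.sumTotalOpportunities();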
/**
* @param capacity expected number of features to be added to this FreeFormPointSet.
*/
private FreeFormPointSet(int capacity) {
ids = new String[capacity];
lats = new double[capacity];
lons = new double[capacity];
counts = new double[capacity];
}
@Override
public int featureCount() {
return ids.length;
}
@Override
public double sumTotalOpportunities () {
return Arrays.stream(counts).sum();
}
@Override
public double getLat(int i) {
return lats[i];
}
@Override
public double getLon(int i) {
return lons[i];
}
/**
* Write coordinates for these points, in binary format.
* Note that this does not save any opportunity magnitudes or densities. We do not use those yet.
* Note also that if we ever intend to use these directly in the UI we should switch to a
* fixed-width little-endian representation or JSON.
*/
public void write (OutputStream outputStream) throws IOException {
DataOutputStream out = new DataOutputStream(outputStream);
// Header
// TODO add identifier / version for future sanity checking?
// Should name and description be here or in Mongo metadata?
out.writeInt(ids.length);
for (String id : ids) {
out.writeUTF(id);
}
for (double lat : lats) {
out.writeDouble(lat);
}
for (double lon : lons) {
out.writeDouble(lon);
}
for (double count : counts) {
out.writeDouble(count);
}
out.close();
}
public FreeFormPointSet (InputStream inputStream) throws IOException {
DataInputStream data = new DataInputStream(inputStream);
int nPoints = data.readInt();
this.ids = new String[nPoints];
this.lats = new double[nPoints];
this.lons = new double[nPoints];
this.counts = new double[nPoints];
for (int i = 0; i < nPoints; i++) {
ids[i] = data.readUTF();
}
for (int i = 0; i < nPoints; i++) {
lats[i] = data.readDouble();
}
for (int i = 0; i < nPoints; i++) {
lons[i] = data.readDouble();
}
for (int i = 0; i < nPoints; i++) {
counts[i] = data.readDouble();
}
data.close();
}
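// Illustrative sketch (not part of the original class): the binary format produced by write() is the same
// one consumed by the InputStream constructor, so a point set can be round-tripped through a byte buffer:
//
//   ByteArrayOutputStream buffer = new ByteArrayOutputStream();
//   original.write(buffer);
//   FreeFormPointSet copy = new FreeFormPointSet(new ByteArrayInputStream(buffer.toByteArray()));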
@Override
public TIntList getPointsInEnvelope (Envelope envelopeFixedDegrees) {
// Convert fixed-degree envelope to floating
double west = fixedDegreesToFloating(envelopeFixedDegrees.getMinX());
double east = fixedDegreesToFloating(envelopeFixedDegrees.getMaxX());
double north = fixedDegreesToFloating(envelopeFixedDegrees.getMaxY());
double south = fixedDegreesToFloating(envelopeFixedDegrees.getMinY());
TIntList pointsInEnvelope = new TIntArrayList();
// Pixels are truncated toward zero, and coords increase toward East and South in web Mercator, so <= south/east.
for (int i = 0; i < lats.length; i++) {
if (lats[i] < north && lats[i] > south && lons[i] < east && lons[i] > west) pointsInEnvelope.add(i);
}
return pointsInEnvelope;
}
@Override
public double getOpportunityCount (int i) {
return counts[i];
}
@Override
public String getId (int i) {
return ids[i];
}
@Override
public Envelope getWgsEnvelope () {
// an empty point set has no envelope (both arrays always have the same length)
if (lats.length == 0 || lons.length == 0) {
LOG.error("Attempt to create envelope from empty lat/lon array.");
return null;
}
double minLat = Arrays.stream(lats).min().getAsDouble();
double minLon = Arrays.stream(lons).min().getAsDouble();
double maxLat = Arrays.stream(lats).max().getAsDouble();
double maxLon = Arrays.stream(lons).max().getAsDouble();
Envelope envelope = new Envelope(minLon, maxLon, minLat, maxLat);
return envelope;
}
@Override
public WebMercatorExtents getWebMercatorExtents () {
final int DEFAULT_ZOOM = 9;
Envelope wgsEnvelope = this.getWgsEnvelope();
WebMercatorExtents webMercatorExtents = WebMercatorExtents.forWgsEnvelope(wgsEnvelope, DEFAULT_ZOOM);
return webMercatorExtents;
}
/** Construct a freeform point set containing one opportunity at each specified geographic coordinate. */
public FreeFormPointSet (Coordinate... coordinates) {
this(coordinates.length);
int i = 0;
for (Coordinate coordinate : coordinates) {
ids[i] = Integer.toString(i);
lons[i] = coordinate.x;
lats[i] = coordinate.y;
counts[i] = 1;
i++;
}
}
}
|
|
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sun.org.apache.xml.internal.security.algorithms;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.sun.org.apache.xml.internal.security.encryption.XMLCipher;
import com.sun.org.apache.xml.internal.security.signature.XMLSignature;
import org.w3c.dom.Element;
/**
* This class maps algorithm identifier URIs to JAVA JCE class names.
*/
public class JCEMapper {
/** {@link java.util.logging} logging facility */
private static java.util.logging.Logger log =
java.util.logging.Logger.getLogger(JCEMapper.class.getName());
private static Map<String, Algorithm> algorithmsMap =
new ConcurrentHashMap<String, Algorithm>();
private static String providerName = null;
/**
* Method register
*
* @param id
* @param algorithm
*/
public static void register(String id, Algorithm algorithm) {
algorithmsMap.put(id, algorithm);
}
/**
* This method registers the default algorithms.
*/
public static void registerDefaultAlgorithms() {
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_NOT_RECOMMENDED_MD5,
new Algorithm("", "MD5", "MessageDigest")
);
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_RIPEMD160,
new Algorithm("", "RIPEMD160", "MessageDigest")
);
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_SHA1,
new Algorithm("", "SHA-1", "MessageDigest")
);
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_SHA256,
new Algorithm("", "SHA-256", "MessageDigest")
);
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_SHA384,
new Algorithm("", "SHA-384", "MessageDigest")
);
algorithmsMap.put(
MessageDigestAlgorithm.ALGO_ID_DIGEST_SHA512,
new Algorithm("", "SHA-512", "MessageDigest")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_DSA,
new Algorithm("", "SHA1withDSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_NOT_RECOMMENDED_RSA_MD5,
new Algorithm("", "MD5withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_RSA_RIPEMD160,
new Algorithm("", "RIPEMD160withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_RSA_SHA1,
new Algorithm("", "SHA1withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_RSA_SHA256,
new Algorithm("", "SHA256withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_RSA_SHA384,
new Algorithm("", "SHA384withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_RSA_SHA512,
new Algorithm("", "SHA512withRSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_ECDSA_SHA1,
new Algorithm("", "SHA1withECDSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_ECDSA_SHA256,
new Algorithm("", "SHA256withECDSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_ECDSA_SHA384,
new Algorithm("", "SHA384withECDSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_SIGNATURE_ECDSA_SHA512,
new Algorithm("", "SHA512withECDSA", "Signature")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_NOT_RECOMMENDED_MD5,
new Algorithm("", "HmacMD5", "Mac")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_RIPEMD160,
new Algorithm("", "HMACRIPEMD160", "Mac")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_SHA1,
new Algorithm("", "HmacSHA1", "Mac")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_SHA256,
new Algorithm("", "HmacSHA256", "Mac")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_SHA384,
new Algorithm("", "HmacSHA384", "Mac")
);
algorithmsMap.put(
XMLSignature.ALGO_ID_MAC_HMAC_SHA512,
new Algorithm("", "HmacSHA512", "Mac")
);
algorithmsMap.put(
XMLCipher.TRIPLEDES,
new Algorithm("DESede", "DESede/CBC/ISO10126Padding", "BlockEncryption", 192)
);
algorithmsMap.put(
XMLCipher.AES_128,
new Algorithm("AES", "AES/CBC/ISO10126Padding", "BlockEncryption", 128)
);
algorithmsMap.put(
XMLCipher.AES_192,
new Algorithm("AES", "AES/CBC/ISO10126Padding", "BlockEncryption", 192)
);
algorithmsMap.put(
XMLCipher.AES_256,
new Algorithm("AES", "AES/CBC/ISO10126Padding", "BlockEncryption", 256)
);
algorithmsMap.put(
XMLCipher.AES_128_GCM,
new Algorithm("AES", "AES/GCM/NoPadding", "BlockEncryption", 128)
);
algorithmsMap.put(
XMLCipher.AES_192_GCM,
new Algorithm("AES", "AES/GCM/NoPadding", "BlockEncryption", 192)
);
algorithmsMap.put(
XMLCipher.AES_256_GCM,
new Algorithm("AES", "AES/GCM/NoPadding", "BlockEncryption", 256)
);
algorithmsMap.put(
XMLCipher.RSA_v1dot5,
new Algorithm("RSA", "RSA/ECB/PKCS1Padding", "KeyTransport")
);
algorithmsMap.put(
XMLCipher.RSA_OAEP,
new Algorithm("RSA", "RSA/ECB/OAEPPadding", "KeyTransport")
);
algorithmsMap.put(
XMLCipher.RSA_OAEP_11,
new Algorithm("RSA", "RSA/ECB/OAEPPadding", "KeyTransport")
);
algorithmsMap.put(
XMLCipher.DIFFIE_HELLMAN,
new Algorithm("", "", "KeyAgreement")
);
algorithmsMap.put(
XMLCipher.TRIPLEDES_KeyWrap,
new Algorithm("DESede", "DESedeWrap", "SymmetricKeyWrap", 192)
);
algorithmsMap.put(
XMLCipher.AES_128_KeyWrap,
new Algorithm("AES", "AESWrap", "SymmetricKeyWrap", 128)
);
algorithmsMap.put(
XMLCipher.AES_192_KeyWrap,
new Algorithm("AES", "AESWrap", "SymmetricKeyWrap", 192)
);
algorithmsMap.put(
XMLCipher.AES_256_KeyWrap,
new Algorithm("AES", "AESWrap", "SymmetricKeyWrap", 256)
);
}
/**
* Method translateURItoJCEID
*
* @param algorithmURI
* @return the JCE standard name corresponding to the given URI
*/
public static String translateURItoJCEID(String algorithmURI) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Request for URI " + algorithmURI);
}
Algorithm algorithm = algorithmsMap.get(algorithmURI);
if (algorithm != null) {
return algorithm.jceName;
}
return null;
}
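// Illustrative usage sketch (not part of the original class): after the defaults are registered, the
// SHA-256 digest URI resolves to the JCE standard name "SHA-256".
//
//   JCEMapper.registerDefaultAlgorithms();
//   String jceName = JCEMapper.translateURItoJCEID(MessageDigestAlgorithm.ALGO_ID_DIGEST_SHA256);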
/**
* Method getAlgorithmClassFromURI
* @param algorithmURI
* @return the class name that implements this algorithm
*/
public static String getAlgorithmClassFromURI(String algorithmURI) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Request for URI " + algorithmURI);
}
Algorithm algorithm = algorithmsMap.get(algorithmURI);
if (algorithm != null) {
return algorithm.algorithmClass;
}
return null;
}
/**
* Returns the key length in bits for a particular algorithm.
*
* @param algorithmURI
* @return The length of the key used in the algorithm
*/
public static int getKeyLengthFromURI(String algorithmURI) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Request for URI " + algorithmURI);
}
Algorithm algorithm = algorithmsMap.get(algorithmURI);
if (algorithm != null) {
return algorithm.keyLength;
}
return 0;
}
/**
* Method getJCEKeyAlgorithmFromURI
*
* @param algorithmURI
* @return The KeyAlgorithm for the given URI.
*/
public static String getJCEKeyAlgorithmFromURI(String algorithmURI) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, "Request for URI " + algorithmURI);
}
Algorithm algorithm = algorithmsMap.get(algorithmURI);
if (algorithm != null) {
return algorithm.requiredKey;
}
return null;
}
/**
* Gets the default Provider for obtaining the security algorithms
* @return the default providerId.
*/
public static String getProviderId() {
return providerName;
}
/**
* Sets the default Provider for obtaining the security algorithms
* @param provider the default providerId.
*/
public static void setProviderId(String provider) {
providerName = provider;
}
/**
* Represents the Algorithm xml element
*/
public static class Algorithm {
final String requiredKey;
final String jceName;
final String algorithmClass;
final int keyLength;
/**
* Gets data from element
* @param el
*/
public Algorithm(Element el) {
requiredKey = el.getAttribute("RequiredKey");
jceName = el.getAttribute("JCEName");
algorithmClass = el.getAttribute("AlgorithmClass");
if (el.hasAttribute("KeyLength")) {
keyLength = Integer.parseInt(el.getAttribute("KeyLength"));
} else {
keyLength = 0;
}
}
public Algorithm(String requiredKey, String jceName) {
this(requiredKey, jceName, null, 0);
}
public Algorithm(String requiredKey, String jceName, String algorithmClass) {
this(requiredKey, jceName, algorithmClass, 0);
}
public Algorithm(String requiredKey, String jceName, int keyLength) {
this(requiredKey, jceName, null, keyLength);
}
public Algorithm(String requiredKey, String jceName, String algorithmClass, int keyLength) {
this.requiredKey = requiredKey;
this.jceName = jceName;
this.algorithmClass = algorithmClass;
this.keyLength = keyLength;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static com.google.common.base.Preconditions.checkArgument;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.security.SecurityPermission;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.NamespaceNotFoundException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.NamespaceId;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.master.state.tables.TableState;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.singletons.SingletonManager;
import org.apache.accumulo.core.singletons.SingletonService;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.fate.zookeeper.ZooCache;
import org.apache.accumulo.fate.zookeeper.ZooCacheFactory;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
public class Tables {
public static final String VALID_NAME_REGEX = "^(\\w+\\.)?(\\w+)$";
private static final SecurityPermission TABLES_PERMISSION =
new SecurityPermission("tablesPermission");
// Per instance cache will expire after 10 minutes in case we encounter an instance not used
// frequently
private static Cache<String,TableMap> instanceToMapCache =
CacheBuilder.newBuilder().expireAfterAccess(10, TimeUnit.MINUTES).build();
static {
SingletonManager.register(new SingletonService() {
boolean enabled = false;
@Override
public synchronized boolean isEnabled() {
return enabled;
}
@Override
public synchronized void enable() {
enabled = true;
}
@Override
public synchronized void disable() {
try {
instanceToMapCache.invalidateAll();
} finally {
enabled = false;
}
}
});
}
/**
* Lookup table ID in ZK. Throw TableNotFoundException if not found. Also wraps
* NamespaceNotFoundException in TableNotFoundException if namespace is not found.
*/
public static TableId getTableId(ClientContext context, String tableName)
throws TableNotFoundException {
try {
return _getTableId(context, tableName);
} catch (NamespaceNotFoundException e) {
throw new TableNotFoundException(tableName, e);
}
}
/**
* Return the cached ZooCache for provided context. ZooCache is initially created with a watcher
* that will clear the TableMap cache for that instance when WatchedEvent occurs.
*/
private static ZooCache getZooCache(final ClientContext context) {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(TABLES_PERMISSION);
}
return new ZooCacheFactory().getZooCache(context.getZooKeepers(),
context.getZooKeepersSessionTimeOut());
}
/**
* Lookup table ID in ZK. If not found, clears cache and tries again.
*/
public static TableId _getTableId(ClientContext context, String tableName)
throws NamespaceNotFoundException, TableNotFoundException {
TableId tableId = getNameToIdMap(context).get(tableName);
if (tableId == null) {
// maybe the table exists, but the cache was not updated yet... so try to clear the cache and
// check again
clearCache(context);
tableId = getNameToIdMap(context).get(tableName);
if (tableId == null) {
String namespace = qualify(tableName).getFirst();
if (Namespaces.getNameToIdMap(context).containsKey(namespace))
throw new TableNotFoundException(null, tableName, null);
else
throw new NamespaceNotFoundException(null, namespace, null);
}
}
return tableId;
}
public static String getTableName(ClientContext context, TableId tableId)
throws TableNotFoundException {
String tableName = getIdToNameMap(context).get(tableId);
if (tableName == null)
throw new TableNotFoundException(tableId.canonical(), null, null);
return tableName;
}
public static String getTableOfflineMsg(ClientContext context, TableId tableId) {
if (tableId == null)
return "Table <unknown table> is offline";
try {
String tableName = Tables.getTableName(context, tableId);
return "Table " + tableName + " (" + tableId.canonical() + ") is offline";
} catch (TableNotFoundException e) {
return "Table <unknown table> (" + tableId.canonical() + ") is offline";
}
}
public static Map<String,TableId> getNameToIdMap(ClientContext context) {
return getTableMap(context).getNameToIdMap();
}
public static Map<TableId,String> getIdToNameMap(ClientContext context) {
return getTableMap(context).getIdtoNameMap();
}
/**
* Get the TableMap from the cache. A new one will be populated when needed. Cache is cleared
* manually by calling {@link #clearCache(ClientContext)}
*/
private static TableMap getTableMap(final ClientContext context) {
TableMap map;
final ZooCache zc = getZooCache(context);
map = getTableMap(context, zc);
if (!map.isCurrent(zc)) {
instanceToMapCache.invalidate(context.getInstanceID());
map = getTableMap(context, zc);
}
return map;
}
private static TableMap getTableMap(final ClientContext context, final ZooCache zc) {
try {
return instanceToMapCache.get(context.getInstanceID(), () -> new TableMap(context, zc));
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
public static boolean exists(ClientContext context, TableId tableId) {
ZooCache zc = getZooCache(context);
List<String> tableIds = zc.getChildren(context.getZooKeeperRoot() + Constants.ZTABLES);
return tableIds.contains(tableId.canonical());
}
public static void clearCache(ClientContext context) {
getZooCache(context).clear(context.getZooKeeperRoot() + Constants.ZTABLES);
getZooCache(context).clear(context.getZooKeeperRoot() + Constants.ZNAMESPACES);
instanceToMapCache.invalidate(context.getInstanceID());
}
/**
* Clears the zoo cache from instance/root/{PATH}
*
* @param context
* The Accumulo client context
* @param zooPath
* A zookeeper path
*/
public static void clearCacheByPath(ClientContext context, final String zooPath) {
String thePath = zooPath.startsWith("/") ? zooPath : "/" + zooPath;
getZooCache(context).clear(context.getZooKeeperRoot() + thePath);
instanceToMapCache.invalidate(context.getInstanceID());
}
public static String getPrintableTableInfoFromId(ClientContext context, TableId tableId) {
String tableName = null;
try {
tableName = getTableName(context, tableId);
} catch (TableNotFoundException e) {
// handled in the string formatting
}
return tableName == null ? String.format("?(ID:%s)", tableId.canonical())
: String.format("%s(ID:%s)", tableName, tableId.canonical());
}
public static String getPrintableTableInfoFromName(ClientContext context, String tableName) {
TableId tableId = null;
try {
tableId = getTableId(context, tableName);
} catch (TableNotFoundException e) {
// handled in the string formatting
}
return tableId == null ? String.format("%s(?)", tableName)
: String.format("%s(ID:%s)", tableName, tableId.canonical());
}
public static TableState getTableState(ClientContext context, TableId tableId) {
return getTableState(context, tableId, false);
}
/**
* Get the current state of the table using the tableId. The boolean clearCachedState, if true, will
* clear the cached table state before fetching it from zookeeper. Added with ACCUMULO-4574.
*
* @param context
* the Accumulo client context
* @param tableId
* the table id
* @param clearCachedState
* if true clear the table state in zookeeper before checking status
* @return the table state.
*/
public static TableState getTableState(ClientContext context, TableId tableId,
boolean clearCachedState) {
String statePath = context.getZooKeeperRoot() + Constants.ZTABLES + "/" + tableId.canonical()
+ Constants.ZTABLE_STATE;
if (clearCachedState) {
Tables.clearCacheByPath(context, statePath);
}
ZooCache zc = getZooCache(context);
byte[] state = zc.get(statePath);
if (state == null)
return TableState.UNKNOWN;
return TableState.valueOf(new String(state, UTF_8));
}
public static String qualified(String tableName) {
return qualified(tableName, Namespace.DEFAULT.name());
}
public static String qualified(String tableName, String defaultNamespace) {
Pair<String,String> qualifiedTableName = qualify(tableName, defaultNamespace);
if (Namespace.DEFAULT.name().equals(qualifiedTableName.getFirst()))
return qualifiedTableName.getSecond();
else
return qualifiedTableName.toString("", ".", "");
}
public static Pair<String,String> qualify(String tableName) {
return qualify(tableName, Namespace.DEFAULT.name());
}
public static Pair<String,String> qualify(String tableName, String defaultNamespace) {
if (!tableName.matches(VALID_NAME_REGEX))
throw new IllegalArgumentException("Invalid table name '" + tableName + "'");
if (MetadataTable.OLD_NAME.equals(tableName))
tableName = MetadataTable.NAME;
if (tableName.contains(".")) {
String[] s = tableName.split("\\.", 2);
return new Pair<>(s[0], s[1]);
}
return new Pair<>(defaultNamespace, tableName);
}
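// Illustrative sketch (not part of the original class): qualify() splits a table name into a
// (namespace, simple name) pair, falling back to the supplied default namespace.
//
//   Tables.qualify("myns.mytable");  // -> Pair("myns", "mytable")
//   Tables.qualify("mytable");       // -> Pair(Namespace.DEFAULT.name(), "mytable")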
/**
* Returns the namespace id for a given table ID.
*
* @param context
* The Accumulo client context
* @param tableId
* The tableId
* @return The namespace id which this table resides in.
* @throws IllegalArgumentException
* if the table doesn't exist in ZooKeeper
*/
public static NamespaceId getNamespaceId(ClientContext context, TableId tableId)
throws TableNotFoundException {
checkArgument(context != null, "instance is null");
checkArgument(tableId != null, "tableId is null");
ZooCache zc = getZooCache(context);
byte[] n = zc.get(context.getZooKeeperRoot() + Constants.ZTABLES + "/" + tableId
+ Constants.ZTABLE_NAMESPACE);
// We might get null out of ZooCache if this tableID doesn't exist
if (n == null) {
throw new TableNotFoundException(tableId.canonical(), null, null);
}
return NamespaceId.of(new String(n, UTF_8));
}
}
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.paint;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.JBColor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.graph.EdgePrintElement;
import com.intellij.vcs.log.graph.NodePrintElement;
import com.intellij.vcs.log.graph.PrintElement;
import com.intellij.vcs.log.graph.impl.print.elements.TerminalEdgePrintElement;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.awt.*;
import java.awt.geom.Ellipse2D;
import java.util.Collection;
import java.util.List;
/**
* @author erokhins
*/
public class SimpleGraphCellPainter implements GraphCellPainter {
private static final Color MARK_COLOR = JBColor.BLACK;
private static final double ARROW_ANGLE_COS2 = 0.7;
private static final double ARROW_LENGTH = 0.3;
@Nonnull
private final ColorGenerator myColorGenerator;
public SimpleGraphCellPainter(@Nonnull ColorGenerator colorGenerator) {
myColorGenerator = colorGenerator;
}
protected int getRowHeight() {
return PaintParameters.ROW_HEIGHT;
}
private float[] getDashLength(int edgeLength) {
int space = getRowHeight() / 4 - 1;
int dash = getRowHeight() / 4 + 1;
int count = edgeLength / (2 * (dash + space));
assert count != 0;
int dashApprox = (edgeLength / 2 - count * space) / count;
return new float[]{2 * dashApprox, 2 * space};
}
@Nonnull
private BasicStroke getOrdinaryStroke() {
return new BasicStroke(PaintParameters.getLineThickness(getRowHeight()), BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
}
@Nonnull
private BasicStroke getSelectedStroke() {
return new BasicStroke(PaintParameters.getSelectedLineThickness(getRowHeight()), BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
}
@Nonnull
private Stroke getDashedStroke(float[] dash) {
return new BasicStroke(PaintParameters.getLineThickness(getRowHeight()), BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL, 0, dash,
dash[0] / 2);
}
@Nonnull
private Stroke getSelectedDashedStroke(float[] dash) {
return new BasicStroke(PaintParameters.getSelectedLineThickness(getRowHeight()), BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL, 0, dash,
dash[0] / 2);
}
private void paintUpLine(@Nonnull Graphics2D g2,
@Nonnull Color color,
int from,
int to,
boolean hasArrow,
boolean isUsual,
boolean isSelected,
boolean isTerminal) {
// paint vertical lines normal size
// paint non-vertical lines twice the size to make them dock with each other well
int nodeWidth = PaintParameters.getNodeWidth(getRowHeight());
if (from == to) {
int x = nodeWidth * from + nodeWidth / 2;
int y1 = getRowHeight() / 2 - 1;
int y2 = isTerminal ? PaintParameters.getCircleRadius(getRowHeight()) / 2 + 1 : 0;
paintLine(g2, color, hasArrow, x, y1, x, y2, x, y2, isUsual, isSelected);
}
else {
assert !isTerminal;
int x1 = nodeWidth * from + nodeWidth / 2;
int y1 = getRowHeight() / 2;
int x2 = nodeWidth * to + nodeWidth / 2;
int y2 = -getRowHeight() / 2;
paintLine(g2, color, hasArrow, x1, y1, x2, y2, (x1 + x2) / 2, (y1 + y2) / 2, isUsual, isSelected);
}
}
private void paintDownLine(@Nonnull Graphics2D g2,
@Nonnull Color color,
int from,
int to,
boolean hasArrow,
boolean isUsual,
boolean isSelected,
boolean isTerminal) {
int nodeWidth = PaintParameters.getNodeWidth(getRowHeight());
if (from == to) {
int y2 = getRowHeight() - 1 - (isTerminal ? PaintParameters.getCircleRadius(getRowHeight()) / 2 + 1 : 0);
int y1 = getRowHeight() / 2;
int x = nodeWidth * from + nodeWidth / 2;
paintLine(g2, color, hasArrow, x, y1, x, y2, x, y2, isUsual, isSelected);
}
else {
assert !isTerminal;
int x1 = nodeWidth * from + nodeWidth / 2;
int y1 = getRowHeight() / 2;
int x2 = nodeWidth * to + nodeWidth / 2;
int y2 = getRowHeight() + getRowHeight() / 2;
paintLine(g2, color, hasArrow, x1, y1, x2, y2, (x1 + x2) / 2, (y1 + y2) / 2, isUsual, isSelected);
}
}
private void paintLine(@Nonnull Graphics2D g2,
@Nonnull Color color,
boolean hasArrow,
int x1,
int y1,
int x2,
int y2,
int startArrowX,
int startArrowY,
boolean isUsual,
boolean isSelected) {
g2.setColor(color);
int length = (x1 == x2) ? getRowHeight() : (int)Math.ceil(Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)));
setStroke(g2, isUsual || hasArrow, isSelected, length);
g2.drawLine(x1, y1, x2, y2);
if (hasArrow) {
Pair<Integer, Integer> rotate1 =
rotate(x1, y1, startArrowX, startArrowY, Math.sqrt(ARROW_ANGLE_COS2), Math.sqrt(1 - ARROW_ANGLE_COS2),
ARROW_LENGTH * getRowHeight());
Pair<Integer, Integer> rotate2 =
rotate(x1, y1, startArrowX, startArrowY, Math.sqrt(ARROW_ANGLE_COS2), -Math.sqrt(1 - ARROW_ANGLE_COS2),
ARROW_LENGTH * getRowHeight());
g2.drawLine(startArrowX, startArrowY, rotate1.first, rotate1.second);
g2.drawLine(startArrowX, startArrowY, rotate2.first, rotate2.second);
}
}
@Nonnull
private static Pair<Integer, Integer> rotate(double x,
double y,
double centerX,
double centerY,
double cos,
double sin,
double arrowLength) {
double translateX = (x - centerX);
double translateY = (y - centerY);
double d = Math.sqrt(translateX * translateX + translateY * translateY);
double scaleX = arrowLength * translateX / d;
double scaleY = arrowLength * translateY / d;
double rotateX = scaleX * cos - scaleY * sin;
double rotateY = scaleX * sin + scaleY * cos;
return Pair.create((int)Math.round(rotateX + centerX), (int)Math.round(rotateY + centerY));
}
private void paintCircle(@Nonnull Graphics2D g2, int position, @Nonnull Color color, boolean select) {
int nodeWidth = PaintParameters.getNodeWidth(getRowHeight());
int circleRadius = PaintParameters.getCircleRadius(getRowHeight());
int selectedCircleRadius = PaintParameters.getSelectedCircleRadius(getRowHeight());
int x0 = nodeWidth * position + nodeWidth / 2;
int y0 = getRowHeight() / 2;
int r = circleRadius;
if (select) {
r = selectedCircleRadius;
}
Ellipse2D.Double circle = new Ellipse2D.Double(x0 - r + 0.5, y0 - r + 0.5, 2 * r, 2 * r);
g2.setColor(color);
g2.fill(circle);
}
private void setStroke(@Nonnull Graphics2D g2, boolean usual, boolean select, int edgeLength) {
if (usual) {
if (select) {
g2.setStroke(getSelectedStroke());
}
else {
g2.setStroke(getOrdinaryStroke());
}
}
else {
if (select) {
g2.setStroke(getSelectedDashedStroke(getDashLength(edgeLength)));
}
else {
g2.setStroke(getDashedStroke(getDashLength(edgeLength)));
}
}
}
@Nonnull
private Color getColor(@Nonnull PrintElement printElement) {
return myColorGenerator.getColor(printElement.getColorId());
}
private static boolean isUsual(@Nonnull PrintElement printElement) {
if (!(printElement instanceof EdgePrintElement)) return true;
EdgePrintElement.LineStyle lineStyle = ((EdgePrintElement)printElement).getLineStyle();
return lineStyle == EdgePrintElement.LineStyle.SOLID;
}
@Override
public void draw(@Nonnull Graphics2D g2, @Nonnull Collection<? extends PrintElement> printElements) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
for (PrintElement printElement : printElements) {
if (!printElement.isSelected()) {
drawElement(g2, printElement, false);
}
}
List<PrintElement> selected = ContainerUtil.filter(printElements, new Condition<PrintElement>() {
@Override
public boolean value(PrintElement printElement) {
return printElement.isSelected();
}
});
for (PrintElement printElement : selected) {
drawElement(g2, printElement, true);
}
for (PrintElement printElement : selected) {
drawElement(g2, printElement, false);
}
}
protected void drawElement(@Nonnull Graphics2D g2, @Nonnull PrintElement printElement, boolean isSelected) {
if (printElement instanceof EdgePrintElement) {
if (isSelected) {
printEdge(g2, MARK_COLOR, true, (EdgePrintElement)printElement);
}
else {
printEdge(g2, getColor(printElement), false, (EdgePrintElement)printElement);
}
}
if (printElement instanceof NodePrintElement) {
int position = printElement.getPositionInCurrentRow();
if (isSelected) {
paintCircle(g2, position, MARK_COLOR, true);
}
else {
paintCircle(g2, position, getColor(printElement), false);
}
}
}
private void printEdge(@Nonnull Graphics2D g2, @Nonnull Color color, boolean isSelected, @Nonnull EdgePrintElement edgePrintElement) {
int from = edgePrintElement.getPositionInCurrentRow();
int to = edgePrintElement.getPositionInOtherRow();
boolean isUsual = isUsual(edgePrintElement);
if (edgePrintElement.getType() == EdgePrintElement.Type.DOWN) {
paintDownLine(g2, color, from, to, edgePrintElement.hasArrow(), isUsual, isSelected,
edgePrintElement instanceof TerminalEdgePrintElement);
}
else {
paintUpLine(g2, color, from, to, edgePrintElement.hasArrow(), isUsual, isSelected,
edgePrintElement instanceof TerminalEdgePrintElement);
}
}
@Nullable
@Override
public PrintElement getElementUnderCursor(@Nonnull Collection<? extends PrintElement> printElements, int x, int y) {
int nodeWidth = PaintParameters.getNodeWidth(getRowHeight());
for (PrintElement printElement : printElements) {
if (printElement instanceof NodePrintElement) {
int circleRadius = PaintParameters.getCircleRadius(getRowHeight());
if (PositionUtil.overNode(printElement.getPositionInCurrentRow(), x, y, getRowHeight(), nodeWidth, circleRadius)) {
return printElement;
}
}
}
for (PrintElement printElement : printElements) {
if (printElement instanceof EdgePrintElement) {
EdgePrintElement edgePrintElement = (EdgePrintElement)printElement;
float lineThickness = PaintParameters.getLineThickness(getRowHeight());
if (edgePrintElement.getType() == EdgePrintElement.Type.DOWN) {
if (PositionUtil
.overDownEdge(edgePrintElement.getPositionInCurrentRow(), edgePrintElement.getPositionInOtherRow(), x, y, getRowHeight(),
nodeWidth, lineThickness)) {
return printElement;
}
}
else {
if (PositionUtil
.overUpEdge(edgePrintElement.getPositionInOtherRow(), edgePrintElement.getPositionInCurrentRow(), x, y, getRowHeight(),
nodeWidth, lineThickness)) {
return printElement;
}
}
}
}
return null;
}
}
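// A small stand-alone sketch of the arrow-wing geometry used by rotate(...) above:
// the vector from the arrow tip back toward the line start is scaled to the arrow
// length and rotated by +/- acos(sqrt(ARROW_ANGLE_COS2)) around the tip. Only JDK
// math is used; the coordinates and the row height of 22 are arbitrary.
class ArrowWingSketch {
  public static void main(String[] args) {
    double cos = Math.sqrt(0.7), sin = Math.sqrt(1 - 0.7); // ARROW_ANGLE_COS2 = 0.7
    double tipX = 10, tipY = 10;  // arrow tip (startArrowX/startArrowY in the painter)
    double fromX = 10, fromY = 0; // start of the edge (x1/y1 in the painter)
    double len = 0.3 * 22;        // ARROW_LENGTH * getRowHeight()
    double dx = fromX - tipX, dy = fromY - tipY;
    double d = Math.sqrt(dx * dx + dy * dy);
    double sx = len * dx / d, sy = len * dy / d;
    // One wing is rotated one way around the tip, the other wing the opposite way.
    System.out.printf("wing1 = (%.1f, %.1f)%n", sx * cos - sy * sin + tipX, sx * sin + sy * cos + tipY);
    System.out.printf("wing2 = (%.1f, %.1f)%n", sx * cos + sy * sin + tipX, -sx * sin + sy * cos + tipY);
  }
}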
|
|
/*
* Copyright (c) 2010-2016. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.amqp.eventhandling.spring;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.ShutdownSignalException;
import org.axonframework.amqp.eventhandling.*;
import org.axonframework.common.Assert;
import org.axonframework.common.AxonConfigurationException;
import org.axonframework.common.Registration;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.messaging.SubscribableMessageSource;
import org.axonframework.messaging.unitofwork.CurrentUnitOfWork;
import org.axonframework.messaging.unitofwork.UnitOfWork;
import org.axonframework.serialization.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.core.Exchange;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
/**
* EventBusTerminal implementation that uses an AMQP 0.9 compatible Message Broker to dispatch event messages. All
* outgoing messages are sent to a configured Exchange, which defaults to "{@code Axon.EventBus}".
* <p>
 * This terminal does not dispatch Events internally, as it relies on each event processor to listen to its own AMQP Queue.
*
* @author Allard Buijze
* @since 3.0
*/
public class SpringAMQPPublisher implements InitializingBean, ApplicationContextAware {
private static final Logger logger = LoggerFactory.getLogger(SpringAMQPPublisher.class);
private static final String DEFAULT_EXCHANGE_NAME = "Axon.EventBus";
private final SubscribableMessageSource<EventMessage<?>> messageSource;
private ConnectionFactory connectionFactory;
private String exchangeName = DEFAULT_EXCHANGE_NAME;
private boolean isTransactional = false;
private boolean isDurable = true;
private AMQPMessageConverter messageConverter;
private ApplicationContext applicationContext;
private Serializer serializer;
private RoutingKeyResolver routingKeyResolver;
private boolean waitForAck;
private long publisherAckTimeout;
private Registration eventBusRegistration;
/**
 * Initialize this instance to publish messages as they are published on the given {@code messageSource}.
*
 * @param messageSource The component providing the messages to be published
*/
public SpringAMQPPublisher(SubscribableMessageSource<EventMessage<?>> messageSource) {
this.messageSource = messageSource;
}
/**
* Subscribes this publisher to the messageSource provided during initialization.
*/
public void start() {
eventBusRegistration = messageSource.subscribe(this::send);
}
/**
* Shuts down this component and unsubscribes it from its messageSource.
*/
public void shutDown() {
if (eventBusRegistration != null) {
eventBusRegistration.cancel();
}
}
/**
* Sends the given {@code events} to the configured AMQP Exchange. It takes the current Unit of Work into account
* when available. Otherwise, it simply publishes directly.
*
* @param events the events to publish on the AMQP Message Broker
*/
protected void send(List<? extends EventMessage<?>> events) {
Channel channel = connectionFactory.createConnection().createChannel(isTransactional);
try {
if (isTransactional) {
channel.txSelect();
} else if (waitForAck) {
channel.confirmSelect();
}
for (EventMessage event : events) {
AMQPMessage amqpMessage = messageConverter.createAMQPMessage(event);
doSendMessage(channel, amqpMessage);
}
if (CurrentUnitOfWork.isStarted()) {
UnitOfWork<?> unitOfWork = CurrentUnitOfWork.get();
unitOfWork.onCommit(u -> {
if ((isTransactional || waitForAck) && !channel.isOpen()) {
throw new EventPublicationFailedException(
"Unable to Commit UnitOfWork changes to AMQP: Channel is closed.",
channel.getCloseReason());
}
});
unitOfWork.afterCommit(u -> {
try {
if (isTransactional) {
channel.txCommit();
} else if (waitForAck) {
try {
channel.waitForConfirmsOrDie(publisherAckTimeout);
} catch (IOException ex) {
throw new EventPublicationFailedException(
"Failed to receive acknowledgements for all events",
ex);
} catch (TimeoutException ex) {
throw new EventPublicationFailedException(
"Timeout while waiting for publisher acknowledgements",
ex);
}
}
} catch (IOException e) {
logger.warn("Unable to commit transaction on channel.", e);
} catch (InterruptedException e) {
logger.warn("Interrupt received when waiting for message confirms.");
Thread.currentThread().interrupt();
}
tryClose(channel);
});
unitOfWork.onRollback(u -> {
try {
if (isTransactional) {
channel.txRollback();
}
} catch (IOException ex) {
logger.warn("Unable to rollback transaction on channel.", ex);
}
tryClose(channel);
});
} else if (isTransactional) {
channel.txCommit();
} else if (waitForAck) {
channel.waitForConfirmsOrDie();
}
} catch (IOException e) {
if (isTransactional) {
tryRollback(channel);
}
throw new EventPublicationFailedException("Failed to dispatch Events to the Message Broker.", e);
} catch (ShutdownSignalException e) {
throw new EventPublicationFailedException("Failed to dispatch Events to the Message Broker.", e);
} catch (InterruptedException e) {
logger.warn("Interrupt received when waiting for message confirms.");
Thread.currentThread().interrupt();
} finally {
if (!CurrentUnitOfWork.isStarted()) {
tryClose(channel);
}
}
}
private void tryClose(Channel channel) {
try {
channel.close();
} catch (IOException | TimeoutException e) {
logger.info("Unable to close channel. It might already be closed.", e);
}
}
/**
 * Does the actual publishing of the given {@code amqpMessage} on the given {@code channel}. This method can be
* overridden to change the properties used to send a message.
*
* @param channel The channel to dispatch the message on
* @param amqpMessage The AMQPMessage describing the characteristics of the message to publish
* @throws java.io.IOException when an error occurs while writing the message
*/
protected void doSendMessage(Channel channel, AMQPMessage amqpMessage)
throws IOException {
channel.basicPublish(exchangeName, amqpMessage.getRoutingKey(), amqpMessage.isMandatory(),
amqpMessage.isImmediate(), amqpMessage.getProperties(), amqpMessage.getBody());
}
private void tryRollback(Channel channel) {
try {
channel.txRollback();
} catch (IOException e) {
logger.debug("Unable to rollback. The underlying channel might already be closed.", e);
}
}
@Override
public void afterPropertiesSet() throws Exception {
if (connectionFactory == null) {
connectionFactory = applicationContext.getBean(ConnectionFactory.class);
}
if (messageConverter == null) {
if (serializer == null) {
serializer = applicationContext.getBean(Serializer.class);
}
if (routingKeyResolver == null) {
Map<String, RoutingKeyResolver> routingKeyResolverCandidates = applicationContext.getBeansOfType(
RoutingKeyResolver.class);
if (routingKeyResolverCandidates.size() > 1) {
throw new AxonConfigurationException("No MessageConverter was configured, but none can be created "
+ "using autowired properties, as more than 1 "
+ "RoutingKeyResolver is present in the "
+ "ApplicationContent");
} else if (routingKeyResolverCandidates.size() == 1) {
routingKeyResolver = routingKeyResolverCandidates.values().iterator().next();
} else {
routingKeyResolver = new PackageRoutingKeyResolver();
}
}
messageConverter = new DefaultAMQPMessageConverter(serializer, routingKeyResolver, isDurable);
}
}
/**
* Whether this Terminal should dispatch its Events in a transaction or not. Defaults to {@code false}.
* <p>
* If a delegate Terminal is configured, the transaction will be committed <em>after</em> the delegate has
* dispatched the events.
* <p>
* Transactional behavior cannot be enabled if {@link #setWaitForPublisherAck(boolean)} has been set to
* {@code true}.
*
* @param transactional whether dispatching should be transactional or not
*/
public void setTransactional(boolean transactional) {
Assert.isTrue(!waitForAck || !transactional,
() -> "Cannot set transactional behavior when 'waitForServerAck' is enabled.");
isTransactional = transactional;
}
/**
 * Enables or disables the RabbitMQ-specific publisher acknowledgements (confirms). When confirms are enabled, the
* terminal will wait until the server has acknowledged the reception (or fsync to disk on persistent messages) of
* all published messages.
* <p>
* Server ACKS cannot be enabled when transactions are enabled.
* <p>
* See <a href="http://www.rabbitmq.com/confirms.html">RabbitMQ Documentation</a> for more information about
* publisher acknowledgements.
*
 * @param waitForPublisherAck whether or not to enable server acknowledgements (confirms)
*/
public void setWaitForPublisherAck(boolean waitForPublisherAck) {
Assert.isTrue(!waitForPublisherAck || !isTransactional,
() -> "Cannot set 'waitForPublisherAck' when using transactions.");
this.waitForAck = waitForPublisherAck;
}
/**
* Sets the maximum amount of time (in milliseconds) the publisher may wait for the acknowledgement of published
 * messages. If not all messages have been acknowledged within this time, the publication will throw an
* EventPublicationFailedException.
* <p>
* This setting is only used when {@link #setWaitForPublisherAck(boolean)} is set to {@code true}.
*
* @param publisherAckTimeout The number of milliseconds to wait for confirms, or 0 to wait indefinitely.
*/
public void setPublisherAckTimeout(long publisherAckTimeout) {
this.publisherAckTimeout = publisherAckTimeout;
}
/**
* Sets the ConnectionFactory providing the Connections and Channels to send messages on. The SpringAMQPPublisher
* does not cache or reuse connections. Providing a ConnectionFactory instance that caches connections will prevent
 * new connections from being opened for each invocation of {@link #send(List)}.
* <p>
* Defaults to an autowired Connection Factory.
*
* @param connectionFactory The connection factory to set
*/
public void setConnectionFactory(ConnectionFactory connectionFactory) {
this.connectionFactory = connectionFactory;
}
/**
* Sets the Message Converter that creates AMQP Messages from Event Messages and vice versa. Setting this property
 * will ignore the "durable", "serializer" and "routingKeyResolver" properties, which just act as shorthands to
* create a DefaultAMQPMessageConverter instance.
* <p>
* Defaults to a DefaultAMQPMessageConverter.
*
* @param messageConverter The message converter to convert AMQP Messages to Event Messages and vice versa.
*/
public void setMessageConverter(AMQPMessageConverter messageConverter) {
this.messageConverter = messageConverter;
}
/**
* Whether or not messages should be marked as "durable" when sending them out. Durable messages suffer from a
* performance penalty, but will survive a reboot of the Message broker that stores them.
* <p>
* By default, messages are durable.
* <p>
* Note that this setting is ignored if a {@link
* #setMessageConverter(AMQPMessageConverter) MessageConverter} is provided.
* In that case, the message converter must add the properties to reflect the required durability setting.
*
* @param durable whether or not messages should be durable
*/
public void setDurable(boolean durable) {
isDurable = durable;
}
/**
* Sets the serializer to serialize messages with when sending them to the Exchange.
* <p>
* Defaults to an autowired serializer, which requires exactly 1 eligible serializer to be present in the
* application context.
* <p>
* This setting is ignored if a {@link
* #setMessageConverter(AMQPMessageConverter) MessageConverter} is configured.
*
* @param serializer the serializer to serialize message with
*/
public void setSerializer(Serializer serializer) {
this.serializer = serializer;
}
/**
* Sets the RoutingKeyResolver that provides the Routing Key for each message to dispatch. Defaults to a {@link
* PackageRoutingKeyResolver}, which uses the package name of the message's
* payload as a Routing Key.
* <p>
* This setting is ignored if a {@link
* #setMessageConverter(AMQPMessageConverter) MessageConverter} is configured.
*
* @param routingKeyResolver the RoutingKeyResolver to use
*/
public void setRoutingKeyResolver(RoutingKeyResolver routingKeyResolver) {
this.routingKeyResolver = routingKeyResolver;
}
/**
* Sets the name of the exchange to dispatch published messages to. Defaults to "{@code Axon.EventBus}".
*
* @param exchangeName the name of the exchange to dispatch messages to
*/
public void setExchangeName(String exchangeName) {
this.exchangeName = exchangeName;
}
/**
* Sets the name of the exchange to dispatch published messages to. Defaults to the exchange named
* "{@code Axon.EventBus}".
*
* @param exchange the exchange to dispatch messages to
*/
public void setExchange(Exchange exchange) {
this.exchangeName = exchange.getName();
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
}
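// A minimal wiring sketch for the publisher above inside a Spring @Configuration
// class. It assumes Spring's @Configuration/@Bean annotations, an Axon EventBus
// (which is a SubscribableMessageSource of EventMessages), a RabbitMQ
// ConnectionFactory and a Serializer are available in the application context;
// the bean name and the "events" exchange are illustrative only.
@Configuration
class AmqpPublisherConfiguration {
  @Bean(initMethod = "start", destroyMethod = "shutDown")
  SpringAMQPPublisher amqpPublisher(EventBus eventBus,
                                    ConnectionFactory connectionFactory,
                                    Serializer serializer) {
    SpringAMQPPublisher publisher = new SpringAMQPPublisher(eventBus);
    publisher.setConnectionFactory(connectionFactory);
    publisher.setExchangeName("events");    // overrides the default "Axon.EventBus"
    publisher.setSerializer(serializer);
    publisher.setWaitForPublisherAck(true); // publisher confirms instead of transactions
    publisher.setPublisherAckTimeout(5000);
    return publisher;
  }
}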
|
|
//CSE 373 Homework 5: The Even More Amazing Heap
// instructor-provided file
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
/**
* This is a basic testing program for your HeapPriorityQueue class.
* It will help you to test each of your methods to make sure they work properly.
* Please feel free to modify it to add your own testing code.
*/
public class TestHeapPriorityQueue1 {
/** Runs the testing program. */
public static void main(String[] args) {
System.out.println("HeapPriorityQueue Testing Program #1");
System.out.println();
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
testAddToString(pqueue);
testRemoveMin(pqueue);
testIsEmptySize(pqueue);
testClear();
pqueue = new HeapPriorityQueue<String>();
testResize();
testContains();
testRemove();
testIteratorIterable();
testComparator();
System.out.println("All tests complete.");
}
// Tests the add method.
private static void testAddToString(PriorityQueue<String> pqueue) {
System.out.println("add, toString:");
System.out.println(pqueue);
pqueue.add("four");
System.out.println(pqueue);
pqueue.add("score");
System.out.println(pqueue);
pqueue.add("and");
System.out.println(pqueue);
pqueue.add("seven");
System.out.println(pqueue);
pqueue.add("years");
System.out.println(pqueue);
pqueue.add("ago");
System.out.println(pqueue);
}
// Tests the contains method.
private static void testContains() {
System.out.println("contains:");
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
pqueue.add("four");
pqueue.add("score");
pqueue.add("and");
pqueue.add("seven");
pqueue.add("years");
pqueue.add("ago");
for (String word : "four five and AGO e seven years score test ago".split(" ")) {
System.out.println(pqueue + " contains " + word + "? " + pqueue.contains(word));
}
System.out.println();
}
// Tests the remove method.
private static void testRemove() {
System.out.println("remove(E) - removing arbitrary element:");
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
pqueue.add("four");
pqueue.add("score");
pqueue.add("and");
pqueue.add("seven");
pqueue.add("years");
pqueue.add("ago");
pqueue.add("our");
pqueue.add("fathers");
System.out.println(pqueue);
String words = "years AGO and five e seven four score test ago fathers booyah our";
for (String word : words.split(" ")) {
pqueue.remove(word);
System.out.print(pqueue + " after removing " + word);
System.out.println(" (size " + pqueue.size() + ")");
}
System.out.println();
}
// Tests the remove method to remove the minimum element.
private static void testRemoveMin(PriorityQueue<String> pqueue) {
System.out.println();
System.out.println("remove() - removing minimum element:");
System.out.println(pqueue.remove());
System.out.println(pqueue);
System.out.println(pqueue.remove());
System.out.println(pqueue);
System.out.println(pqueue.remove());
System.out.println(pqueue);
}
// Tests the isEmpty and size methods.
private static void testIsEmptySize(PriorityQueue<String> pqueue) {
System.out.println();
System.out.println("isEmpty / size:");
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
pqueue.add("seven");
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
pqueue.add("years");
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
pqueue.add("ago");
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
while (!pqueue.isEmpty()) {
System.out.println("remove() returns " + pqueue.remove());
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? "
+ pqueue.isEmpty());
}
}
// Tests the clear method.
private static void testClear() {
System.out.println();
System.out.println("clear:");
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
pqueue.add("four");
pqueue.add("score");
pqueue.add("and");
pqueue.add("seven");
pqueue.add("years");
pqueue.add("ago");
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
pqueue.clear();
System.out.println(pqueue + " (size " + pqueue.size() + "), empty? " + pqueue.isEmpty());
}
// A large test that checks whether the internal array can resize properly after many adds.
private static void testResize() {
System.out.println();
System.out.println("many elements (resize):");
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
String message = "Four score and seven years ago " +
"our fathers brought forth on this continent a new nation, " +
"conceived in liberty, and dedicated to the proposition that " +
"all men are created equal. " +
"Now we are engaged in a great civil war, testing whether that " +
"nation, or any nation, so conceived and so dedicated, can " +
"long endure.";
addAllRemoveAllHelper(pqueue, message);
}
// Tests the priority queue when using a Comparator for ordering.
private static void testComparator() {
System.out.println("Comparator (reverse ABC order):");
Comparator<String> comp = Collections.reverseOrder();
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>(15, comp);
String message = "the quick brown fox jumps over the very lazy bad dog";
addAllRemoveAllHelper(pqueue, message);
System.out.println("Comparator (order by length):");
Comparator<String> comp2 = new StringLengthComparator();
PriorityQueue<String> pqueue2 = new HeapPriorityQueue<String>(5, comp2);
String message2 = "our fathers brought forth on this continent " +
"a new nation conceived in liberty";
addAllRemoveAllHelper(pqueue2, message2);
}
// helper method that breaks a string into words, adds all the words,
// printing the priority queue each time, then removes them all until
// the priority queue is empty
private static void addAllRemoveAllHelper(PriorityQueue<String> pqueue, String message) {
String[] words = message.split("[ .,]+");
for (int i = 0; i < words.length; i++) {
String word = words[i].toLowerCase();
pqueue.add(word);
System.out.println("after adding " + word + ": " + pqueue
+ " (size " + pqueue.size() + ")");
}
System.out.println("Removing all words until empty:");
while (!pqueue.isEmpty()) {
System.out.print(pqueue.remove() + " ");
}
System.out.println();
System.out.println();
}
// Tests the iterator method and the for-each loop (Iterable).
private static void testIteratorIterable() {
System.out.println("iterator:");
PriorityQueue<String> pqueue = new HeapPriorityQueue<String>();
pqueue.add("four");
pqueue.add("score");
pqueue.add("and");
pqueue.add("seven");
pqueue.add("years");
pqueue.add("ago");
Iterator<String> itr = pqueue.iterator();
while (itr.hasNext()) {
System.out.println("iterator next() = " + itr.next());
}
System.out.println();
System.out.println("Iterable:");
for (String s : pqueue) {
System.out.println("foreach loop value = " + s);
}
System.out.println();
}
// A helper class that orders strings by length, breaking ties by ABC order.
private static class StringLengthComparator implements Comparator<String> {
// Returns > 0 if s1 is longer than s2, < 0 if shorter; if their
// lengths are the same, returns the natural ABC ordering.
public int compare(String s1, String s2) {
int l1 = s1.length();
int l2 = s2.length();
if (l1 != l2) {
return l1 - l2;
} else {
return s1.compareTo(s2);
}
}
}
}
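// The tests above exercise a course-provided PriorityQueue<E> interface together
// with the HeapPriorityQueue implementation the homework asks for. The interface
// below is only inferred from the calls made in this test program; names and
// return types in the real assignment files may differ slightly.
interface PriorityQueueSketch<E> extends Iterable<E> {
    void add(E value);          // inserts a value into the queue
    boolean contains(E value);  // reports whether the value is present
    E remove();                 // removes and returns the minimum element
    void remove(E value);       // removes one occurrence of an arbitrary element
    void clear();               // removes all elements
    boolean isEmpty();
    int size();
}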
|
|
package pb.gizmos;
import java.util.ArrayList;
import java.util.List;
import pb.board.MobileGizmo;
import pb.board.Shape;
import pb.board.SolidGizmo;
import physics.Circle;
import physics.Vect;
public class Absorber extends SolidGizmo {
/** The absorber's top-left corner. */
private final Vect origin;
/** The absorber's width. */
private final double width;
/** The absorber's height. */
private final double height;
/** The absorber's shape. */
private final Shape shape;
/** The balls absorbed by this absorber. */
private final List<Ball> absorbed;
/**
* The ball that is leaving this absorber.
*
* This is null if the most recently launched ball has already left the
* absorber.
*/
private Ball leavingBall;
/**
* Creates an absorber with given position and dimensions.
*
* @param name the absorber's name
* @param x the X coordinate of the absorber's origin (top-left corner)
* @param y the Y coordinate of the absorber's origin (top-left corner)
 * @param width the absorber's width; must be greater than or equal to 1
 * @param height the absorber's height; must be greater than or equal to 1
*/
public Absorber(String name, double x, double y, double width,
double height) {
super(name);
assert width >= 1 && height >= 1;
this.origin = new Vect(x, y);
this.width = width;
this.height = height;
Vect[] corners = new Vect[] {
new Vect(x, y),
new Vect(x + width, y),
new Vect(x + width, y + height),
new Vect(x, y + height)
};
this.shape = new Shape(corners);
this.absorbed = new ArrayList<Ball>();
this.leavingBall = null;
}
/**
* The position where a ball will be launched.
*
* When an absorber is triggered, it launches a ball. This returns the
 * ball's center when it is launched.
*
* @param radius the ball's radius
* @return the ball's center when it will be launched by the absorber
*/
public Vect ballLaunchCenter(double radius) {
return new Vect(origin.x() + width - radius,
origin.y() + height - radius);
}
/**
* The absorber's shape.
*
* @return the absorber's shape
*/
public Shape getShape() {
return shape;
}
/**
* The coordinates of the absorber's origin (top-left corner).
*
* @return the coordinates of the absorber's origin (top-left corner)
*/
public Vect getOrigin() {
return origin;
}
/**
* The absorber's width, in L units.
*
* @return the absorber's width, in L units
*/
public double getWidth() {
return width;
}
/**
 * The absorber's height, in L units.
*
* @return the absorber's height, in L units
*/
public double getHeight() {
return height;
}
/**
* The ball that is leaving this absorber.
*
* @return the last ball launched by the absorber, if the ball hasn't left
* the absorber's bounds; null if the most recently launched ball has left
* the absorber
*/
public Ball getLeavingBall() {
return leavingBall;
}
/**
* True if the given circle overlaps the absorber.
*
* @param circle the circle to be tested for overlaps
* @return true if the given circle overlaps the absorber, false if the
* shapes are completely disjoint
*/
public boolean isInside(Circle circle) {
Vect center = circle.getCenter();
double x = center.x();
double y = center.y();
double r = circle.getRadius();
if (x + r < origin.x())
return false;
if (y + r < origin.y())
return false;
if (x - r >= origin.x() + width)
return false;
if (y - r >= origin.y() + height)
return false;
return true;
}
/** The velocity of a ball launched when an absorber is triggered. */
public static final Vect LAUNCH_VELOCITY = new Vect(0, -50);
@Override
public double timeToCollision(MobileGizmo other) {
if (other instanceof Ball) {
Ball ball = (Ball)other;
if (ball == leavingBall)
return Double.MAX_VALUE;
return shape.timeUntilBallCollision(ball.getShape(),
ball.getVelocity());
}
throw new UnsupportedOperationException("Unsupported MobileGizmo");
}
@Override
public void collide(MobileGizmo other) {
if (other instanceof Ball) {
Ball ball = (Ball)other;
ball.setVelocity(Absorber.LAUNCH_VELOCITY);
ball.setCenter(ballLaunchCenter(ball.getShape().getRadius()));
absorbed.add(ball);
board().remove(ball);
trigger();
assert checkRep();
return;
}
throw new IllegalArgumentException(
"Absorbers can only collide with balls");
}
@Override
public void advanceTime(double timeStep) {
// NOTE: absorbers don't move, but we take this opportunity to observe
// the ball that is leaving the absorber
if (leavingBall == null)
return;
Circle circle = leavingBall.getShape();
if (!isInside(circle)) {
// The ball left the absorber.
leavingBall = null;
}
}
@Override
public void doAction() {
if (leavingBall != null)
return;
if (absorbed.isEmpty())
return;
Ball ball = absorbed.remove(absorbed.size() - 1);
board().add(ball);
leavingBall = ball;
}
}
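// A small sketch of the overlap test above. It assumes the physics.Circle
// constructor Circle(Vect center, double radius) and that an Absorber can be
// constructed on its own (i.e. the SolidGizmo(name) constructor does not yet
// need a board); the coordinates are arbitrary.
class AbsorberOverlapSketch {
    public static void main(String[] args) {
        Absorber absorber = new Absorber("abs", 0, 19, 20, 1); // full-width strip near the bottom
        Circle touching = new Circle(new Vect(5, 18.8), 0.25); // dips into the absorber's top edge
        Circle above = new Circle(new Vect(5, 10), 0.25);      // well clear of the absorber
        System.out.println(absorber.isInside(touching));       // true
        System.out.println(absorber.isInside(above));          // false
    }
}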
|
|
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Arrays.asList;
import static java.util.Collections.unmodifiableList;
import com.google.common.annotations.Beta;
import com.google.common.annotations.VisibleForTesting;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.annotation.Nullable;
/**
* Static utility methods pertaining to instances of {@link Throwable}.
*
* <p>See the Guava User Guide entry on <a href=
* "http://code.google.com/p/guava-libraries/wiki/ThrowablesExplained">Throwables</a>.
*
* @author Kevin Bourrillion
* @author Ben Yu
* @since 1.0
*/
public final class Throwables {
private Throwables() {}
/**
* Propagates {@code throwable} exactly as-is, if and only if it is an instance of {@code
* declaredType}. Example usage:
* <pre>
* try {
* someMethodThatCouldThrowAnything();
* } catch (IKnowWhatToDoWithThisException e) {
* handle(e);
* } catch (Throwable t) {
* Throwables.propagateIfInstanceOf(t, IOException.class);
* Throwables.propagateIfInstanceOf(t, SQLException.class);
* throw Throwables.propagate(t);
* }
* </pre>
*/
public static <X extends Throwable> void propagateIfInstanceOf(
@Nullable Throwable throwable, Class<X> declaredType) throws X {
// Check for null is needed to avoid frequent JNI calls to isInstance().
if (throwable != null && declaredType.isInstance(throwable)) {
throw declaredType.cast(throwable);
}
}
/**
* Propagates {@code throwable} exactly as-is, if and only if it is an instance of {@link
* RuntimeException} or {@link Error}. Example usage:
* <pre>
* try {
* someMethodThatCouldThrowAnything();
* } catch (IKnowWhatToDoWithThisException e) {
* handle(e);
* } catch (Throwable t) {
* Throwables.propagateIfPossible(t);
* throw new RuntimeException("unexpected", t);
* }
* </pre>
*/
public static void propagateIfPossible(@Nullable Throwable throwable) {
propagateIfInstanceOf(throwable, Error.class);
propagateIfInstanceOf(throwable, RuntimeException.class);
}
/**
* Propagates {@code throwable} exactly as-is, if and only if it is an instance of {@link
* RuntimeException}, {@link Error}, or {@code declaredType}. Example usage:
* <pre>
* try {
* someMethodThatCouldThrowAnything();
* } catch (IKnowWhatToDoWithThisException e) {
* handle(e);
* } catch (Throwable t) {
* Throwables.propagateIfPossible(t, OtherException.class);
* throw new RuntimeException("unexpected", t);
* }
* </pre>
*
* @param throwable the Throwable to possibly propagate
* @param declaredType the single checked exception type declared by the calling method
*/
public static <X extends Throwable> void propagateIfPossible(
@Nullable Throwable throwable, Class<X> declaredType) throws X {
propagateIfInstanceOf(throwable, declaredType);
propagateIfPossible(throwable);
}
/**
* Propagates {@code throwable} exactly as-is, if and only if it is an instance of {@link
* RuntimeException}, {@link Error}, {@code declaredType1}, or {@code declaredType2}. In the
* unlikely case that you have three or more declared checked exception types, you can handle them
* all by invoking these methods repeatedly. See usage example in {@link
* #propagateIfPossible(Throwable, Class)}.
*
* @param throwable the Throwable to possibly propagate
* @param declaredType1 any checked exception type declared by the calling method
* @param declaredType2 any other checked exception type declared by the calling method
*/
public static <X1 extends Throwable, X2 extends Throwable>
void propagateIfPossible(@Nullable Throwable throwable,
Class<X1> declaredType1, Class<X2> declaredType2) throws X1, X2 {
checkNotNull(declaredType2);
propagateIfInstanceOf(throwable, declaredType1);
propagateIfPossible(throwable, declaredType2);
}
/**
* Propagates {@code throwable} as-is if it is an instance of {@link RuntimeException} or {@link
* Error}, or else as a last resort, wraps it in a {@code RuntimeException} and then propagates.
* <p>
* This method always throws an exception. The {@code RuntimeException} return type is only for
 * client code to make the Java type system happy in case a return value is required by the enclosing
* method. Example usage:
* <pre>
* T doSomething() {
* try {
* return someMethodThatCouldThrowAnything();
* } catch (IKnowWhatToDoWithThisException e) {
* return handle(e);
* } catch (Throwable t) {
* throw Throwables.propagate(t);
* }
* }
* </pre>
*
* @param throwable the Throwable to propagate
* @return nothing will ever be returned; this return type is only for your convenience, as
* illustrated in the example above
*/
public static RuntimeException propagate(Throwable throwable) {
propagateIfPossible(checkNotNull(throwable));
throw new RuntimeException(throwable);
}
/**
* Returns the innermost cause of {@code throwable}. The first throwable in a
* chain provides context from when the error or exception was initially
* detected. Example usage:
* <pre>
* assertEquals("Unable to assign a customer id", Throwables.getRootCause(e).getMessage());
* </pre>
*/
public static Throwable getRootCause(Throwable throwable) {
Throwable cause;
while ((cause = throwable.getCause()) != null) {
throwable = cause;
}
return throwable;
}
/**
* Gets a {@code Throwable} cause chain as a list. The first entry in the list will be {@code
* throwable} followed by its cause hierarchy. Note that this is a snapshot of the cause chain
* and will not reflect any subsequent changes to the cause chain.
*
* <p>Here's an example of how it can be used to find specific types of exceptions in the cause
* chain:
*
* <pre>
* Iterables.filter(Throwables.getCausalChain(e), IOException.class));
* </pre>
*
* @param throwable the non-null {@code Throwable} to extract causes from
* @return an unmodifiable list containing the cause chain starting with {@code throwable}
*/
@Beta // TODO(kevinb): decide best return type
public static List<Throwable> getCausalChain(Throwable throwable) {
checkNotNull(throwable);
List<Throwable> causes = new ArrayList<Throwable>(4);
while (throwable != null) {
causes.add(throwable);
throwable = throwable.getCause();
}
return Collections.unmodifiableList(causes);
}
/**
* Returns a string containing the result of {@link Throwable#toString() toString()}, followed by
* the full, recursive stack trace of {@code throwable}. Note that you probably should not be
* parsing the resulting string; if you need programmatic access to the stack frames, you can call
* {@link Throwable#getStackTrace()}.
*/
public static String getStackTraceAsString(Throwable throwable) {
StringWriter stringWriter = new StringWriter();
throwable.printStackTrace(new PrintWriter(stringWriter));
return stringWriter.toString();
}
/**
* Returns the stack trace of {@code throwable}, possibly providing slower iteration over the full
* trace but faster iteration over parts of the trace. Here, "slower" and "faster" are defined in
* comparison to the normal way to access the stack trace, {@link Throwable#getStackTrace()
* throwable.getStackTrace()}. Note, however, that this method's special implementation is not
* available for all platforms and configurations. If that implementation is unavailable, this
* method falls back to {@code getStackTrace}. Callers that require the special implementation can
* check its availability with {@link #lazyStackTraceIsLazy()}.
*
* <p>The expected (but not guaranteed) performance of the special implementation differs from
* {@code getStackTrace} in one main way: The {@code lazyStackTrace} call itself returns quickly
* by delaying the per-stack-frame work until each element is accessed. Roughly speaking:
*
* <ul>
* <li>{@code getStackTrace} takes {@code stackSize} time to return but then negligible time to
* retrieve each element of the returned list.
* <li>{@code lazyStackTrace} takes negligible time to return but then {@code 1/stackSize} time to
* retrieve each element of the returned list (probably slightly more than {@code 1/stackSize}).
* </ul>
*
* <p>Note: The special implementation does not respect calls to {@link Throwable#setStackTrace
* throwable.setStackTrace}. Instead, it always reflects the original stack trace from the
* exception's creation.
*
* @since 19.0
*/
// TODO(cpovirk): Say something about the possibility that List access could fail at runtime?
@Beta
public static List<StackTraceElement> lazyStackTrace(Throwable throwable) {
return lazyStackTraceIsLazy()
? jlaStackTrace(throwable)
: unmodifiableList(asList(throwable.getStackTrace()));
}
/**
* Returns whether {@link #lazyStackTrace} will use the special implementation described in its
* documentation.
*
* @since 19.0
*/
@Beta
public static boolean lazyStackTraceIsLazy() {
return getStackTraceElementMethod != null && getStackTraceDepthMethod != null;
}
private static List<StackTraceElement> jlaStackTrace(final Throwable t) {
checkNotNull(t);
/*
* TODO(cpovirk): Consider optimizing iterator() to catch IOOBE instead of doing bounds checks.
*
* TODO(cpovirk): Consider the UnsignedBytes pattern if it performs faster and doesn't cause
* AOSP grief.
*/
return new AbstractList<StackTraceElement>() {
@Override
public StackTraceElement get(int n) {
return (StackTraceElement)
invokeAccessibleNonThrowingMethod(getStackTraceElementMethod, jla, t, n);
}
@Override
public int size() {
return (Integer) invokeAccessibleNonThrowingMethod(getStackTraceDepthMethod, jla, t);
}
};
}
private static Object invokeAccessibleNonThrowingMethod(
Method method, Object receiver, Object... params) {
try {
return method.invoke(receiver, params);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
throw propagate(e.getCause());
}
}
/** JavaLangAccess class name to load using reflection */
private static final String JAVA_LANG_ACCESS_CLASSNAME = "sun.misc.JavaLangAccess";
/** SharedSecrets class name to load using reflection */
@VisibleForTesting
static final String SHARED_SECRETS_CLASSNAME = "sun.misc.SharedSecrets";
/** Access to some fancy internal JVM internals. */
@Nullable
private static final Object jla = getJLA();
/**
* The "getStackTraceElementMethod" method, only available on some JDKs so we use reflection to
* find it when available. When this is null, use the slow way.
*/
@Nullable
private static final Method getStackTraceElementMethod = (jla == null) ? null : getGetMethod();
/**
* The "getStackTraceDepth" method, only available on some JDKs so we use reflection to find it
* when available. When this is null, use the slow way.
*/
@Nullable
private static final Method getStackTraceDepthMethod = (jla == null) ? null : getSizeMethod();
/**
* Returns the JavaLangAccess class that is present in all Sun JDKs. It is not whitelisted for
* AppEngine, and not present in non-Sun JDKs.
*/
@Nullable
private static Object getJLA() {
try {
/*
* We load sun.misc.* classes using reflection since Android doesn't support these classes and
* would result in compilation failure if we directly refer to these classes.
*/
Class<?> sharedSecrets = Class.forName(SHARED_SECRETS_CLASSNAME, false, null);
Method langAccess = sharedSecrets.getMethod("getJavaLangAccess");
return langAccess.invoke(null);
} catch (ThreadDeath death) {
throw death;
} catch (Throwable t) {
/*
* This is not one of AppEngine's whitelisted classes, so even in Sun JDKs, this can fail with
* a NoClassDefFoundError. Other apps might deny access to sun.misc packages.
*/
return null;
}
}
/**
* Returns the Method that can be used to resolve an individual StackTraceElement, or null if that
* method cannot be found (it is only to be found in fairly recent JDKs).
*/
@Nullable
private static Method getGetMethod() {
return getJlaMethod("getStackTraceElement", Throwable.class, int.class);
}
/**
* Returns the Method that can be used to return the size of a stack, or null if that method
* cannot be found (it is only to be found in fairly recent JDKs).
*/
@Nullable
private static Method getSizeMethod() {
return getJlaMethod("getStackTraceDepth", Throwable.class);
}
@Nullable
private static Method getJlaMethod(String name, Class<?>... parameterTypes) throws ThreadDeath {
try {
return Class.forName(JAVA_LANG_ACCESS_CLASSNAME, false, null).getMethod(name, parameterTypes);
} catch (ThreadDeath death) {
throw death;
} catch (Throwable t) {
/*
* Either the JavaLangAccess class itself is not found, or the method is not supported on the
* JVM.
*/
return null;
}
}
}
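// A self-contained sketch of the cause-chain helpers above applied to a nested
// exception; only JDK types are involved and the messages are made up.
class ThrowablesSketch {
  public static void main(String[] args) {
    Exception root = new IllegalStateException("disk full");
    Exception mid = new RuntimeException("write failed", root);
    Exception top = new RuntimeException("request failed", mid);
    System.out.println(Throwables.getRootCause(top).getMessage()); // disk full
    System.out.println(Throwables.getCausalChain(top).size());     // 3: top, mid, root
    // getStackTraceAsString includes the "Caused by:" sections of the whole chain.
    System.out.println(Throwables.getStackTraceAsString(top).contains("disk full"));
  }
}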
|
|
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.haskell;
import com.facebook.buck.cxx.CxxHeadersDir;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorDep;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkableInput;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.CommonDescriptionArg;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.HasDeclaredDeps;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.util.RichStream;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.versions.VersionPropagator;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.immutables.value.Value;
public class HaskellPrebuiltLibraryDescription
implements Description<HaskellPrebuiltLibraryDescriptionArg>,
VersionPropagator<HaskellPrebuiltLibraryDescriptionArg> {
@Override
public Class<HaskellPrebuiltLibraryDescriptionArg> getConstructorArgType() {
return HaskellPrebuiltLibraryDescriptionArg.class;
}
@Override
public BuildRule createBuildRule(
TargetGraph targetGraph,
BuildTarget buildTarget,
ProjectFilesystem projectFilesystem,
BuildRuleParams params,
BuildRuleResolver resolver,
CellPathResolver cellRoots,
final HaskellPrebuiltLibraryDescriptionArg args) {
return new PrebuiltHaskellLibrary(buildTarget, projectFilesystem, params) {
private final LoadingCache<CxxPlatform, ImmutableMap<BuildTarget, CxxPreprocessorInput>>
transitiveCxxPreprocessorInputCache =
CxxPreprocessables.getTransitiveCxxPreprocessorInputCache(this);
@Override
public Iterable<BuildRule> getCompileDeps(HaskellPlatform platform) {
return RichStream.from(args.getDeps())
.map(resolver::getRule)
.filter(HaskellCompileDep.class::isInstance)
.toImmutableList();
}
@Override
public HaskellCompileInput getCompileInput(
HaskellPlatform platform, Linker.LinkableDepType depType, boolean hsProfile) {
// Build the package.
HaskellPackage.Builder pkgBuilder =
HaskellPackage.builder()
.setInfo(
HaskellPackageInfo.of(
getBuildTarget().getShortName(), args.getVersion(), args.getId()))
.setPackageDb(args.getDb())
.addAllInterfaces(args.getImportDirs());
if (Linker.LinkableDepType.SHARED == depType) {
pkgBuilder.addAllLibraries(args.getSharedLibs().values());
} else {
pkgBuilder.addAllLibraries(args.getStaticLibs());
// If profiling is enabled, we also include their libs in the same package.
if (args.isEnableProfiling() || hsProfile) {
pkgBuilder.addAllLibraries(args.getProfiledStaticLibs());
}
}
HaskellPackage pkg = pkgBuilder.build();
return HaskellCompileInput.builder()
.addAllFlags(args.getExportedCompilerFlags())
.addPackages(pkg)
.build();
}
@Override
public Iterable<? extends NativeLinkable> getNativeLinkableDeps() {
return ImmutableList.of();
}
@Override
public Iterable<? extends NativeLinkable> getNativeLinkableExportedDeps() {
return FluentIterable.from(getDeclaredDeps()).filter(NativeLinkable.class);
}
@Override
public NativeLinkableInput getNativeLinkableInput(
CxxPlatform cxxPlatform,
Linker.LinkableDepType type,
boolean forceLinkWhole,
ImmutableSet<LanguageExtensions> languageExtensions) {
NativeLinkableInput.Builder builder = NativeLinkableInput.builder();
builder.addAllArgs(StringArg.from(args.getExportedLinkerFlags()));
if (type == Linker.LinkableDepType.SHARED) {
builder.addAllArgs(SourcePathArg.from(args.getSharedLibs().values()));
} else {
Linker linker = cxxPlatform.getLd().resolve(resolver);
ImmutableList<Arg> libArgs =
SourcePathArg.from(
args.isEnableProfiling() ? args.getProfiledStaticLibs() : args.getStaticLibs());
if (forceLinkWhole) {
libArgs =
RichStream.from(libArgs)
.flatMap(lib -> RichStream.from(linker.linkWhole(lib)))
.toImmutableList();
}
builder.addAllArgs(libArgs);
}
return builder.build();
}
@Override
public Linkage getPreferredLinkage(CxxPlatform cxxPlatform) {
return Linkage.ANY;
}
@Override
public ImmutableMap<String, SourcePath> getSharedLibraries(CxxPlatform cxxPlatform) {
return args.getSharedLibs();
}
@Override
public Iterable<CxxPreprocessorDep> getCxxPreprocessorDeps(CxxPlatform cxxPlatform) {
return FluentIterable.from(getBuildDeps()).filter(CxxPreprocessorDep.class);
}
@Override
public CxxPreprocessorInput getCxxPreprocessorInput(CxxPlatform cxxPlatform) {
CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
for (SourcePath headerDir : args.getCxxHeaderDirs()) {
builder.addIncludes(CxxHeadersDir.of(CxxPreprocessables.IncludeType.SYSTEM, headerDir));
}
return builder.build();
}
@Override
public ImmutableMap<BuildTarget, CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
CxxPlatform cxxPlatform) {
return transitiveCxxPreprocessorInputCache.getUnchecked(cxxPlatform);
}
};
}
@BuckStyleImmutable
@Value.Immutable
interface AbstractHaskellPrebuiltLibraryDescriptionArg
extends CommonDescriptionArg, HasDeclaredDeps {
String getVersion();
@Value.Default
default String getId() {
return String.format("%s-%s", getName(), getVersion());
}
SourcePath getDb();
ImmutableList<SourcePath> getImportDirs();
ImmutableList<SourcePath> getStaticLibs();
ImmutableList<SourcePath> getProfiledStaticLibs();
ImmutableMap<String, SourcePath> getSharedLibs();
ImmutableList<String> getExportedLinkerFlags();
ImmutableList<String> getExportedCompilerFlags();
@Value.NaturalOrder
ImmutableSortedSet<SourcePath> getCxxHeaderDirs();
@Value.Default
default boolean isEnableProfiling() {
return false;
}
}
}
|
|
/*
* Copyright (c) 2004-2008 QOS.ch
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.slf4j.impl;
import org.apache.commons.logging.Log;
import org.slf4j.Logger;
import org.slf4j.helpers.FormattingTuple;
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;
/**
* A wrapper over {@link org.apache.commons.logging.Log
* org.apache.commons.logging.Log} in conformance with the {@link Logger}
* interface.
*
* @author Ceki Gülcü
*/
public final class JCLLoggerAdapter extends MarkerIgnoringBase {
private static final long serialVersionUID = 4141593417490482209L;
final Log log;
// WARN: JCLLoggerAdapter constructor should have only package access so
// that only JCLLoggerFactory is able to create one.
JCLLoggerAdapter(Log log, String name) {
this.log = log;
this.name = name;
}
/**
* Delegates to the {@link Log#isTraceEnabled} method of the underlying
* {@link Log} instance.
*/
public boolean isTraceEnabled() {
return log.isTraceEnabled();
}
//
/**
* Delegates to the {@link Log#trace(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* @param msg - the message object to be logged
*/
public void trace(String msg) {
log.trace(msg);
}
/**
* Delegates to the {@link Log#trace(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level TRACE.
* </p>
*
* @param format
* the format string
* @param arg
* the argument
*/
public void trace(String format, Object arg) {
if (log.isTraceEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg);
log.trace(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#trace(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level TRACE.
* </p>
*
* @param format
* the format string
* @param arg1
* the first argument
* @param arg2
* the second argument
*/
public void trace(String format, Object arg1, Object arg2) {
if (log.isTraceEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg1, arg2);
log.trace(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#trace(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level TRACE.
* </p>
*
* @param format the format string
* @param argArray an array of arguments
*/
public void trace(String format, Object[] argArray) {
if (log.isTraceEnabled()) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
log.trace(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#trace(java.lang.Object, java.lang.Throwable)} method of
* the underlying {@link Log} instance.
*
* @param msg
* the message accompanying the exception
* @param t
* the exception (throwable) to log
*/
public void trace(String msg, Throwable t) {
log.trace(msg, t);
}
/**
* Delegates to the {@link Log#isDebugEnabled} method of the underlying
* {@link Log} instance.
*/
public boolean isDebugEnabled() {
return log.isDebugEnabled();
}
/**
* Delegates to the {@link Log#debug(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* @param msg - the message object to be logged
*/
public void debug(String msg) {
log.debug(msg);
}
/**
* Delegates to the {@link Log#debug(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level DEBUG.
* </p>
*
* @param format
* the format string
* @param arg
* the argument
*/
public void debug(String format, Object arg) {
if (log.isDebugEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg);
log.debug(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#debug(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level DEBUG.
* </p>
*
* @param format
* the format string
* @param arg1
* the first argument
* @param arg2
* the second argument
*/
public void debug(String format, Object arg1, Object arg2) {
if (log.isDebugEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg1, arg2);
log.debug(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#debug(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level DEBUG.
* </p>
*
* @param format the format string
* @param argArray an array of arguments
*/
public void debug(String format, Object[] argArray) {
if (log.isDebugEnabled()) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
log.debug(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#debug(java.lang.Object, java.lang.Throwable)} method of
* the underlying {@link Log} instance.
*
* @param msg
* the message accompanying the exception
* @param t
* the exception (throwable) to log
*/
public void debug(String msg, Throwable t) {
log.debug(msg, t);
}
/**
* Delegates to the {@link Log#isInfoEnabled} method of the underlying
* {@link Log} instance.
*/
public boolean isInfoEnabled() {
return log.isInfoEnabled();
}
/**
* Delegates to the {@link Log#info(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* @param msg - the message object to be logged
*/
public void info(String msg) {
log.info(msg);
}
/**
* Delegates to the {@link Log#info(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level INFO.
* </p>
*
* @param format
* the format string
* @param arg
* the argument
*/
public void info(String format, Object arg) {
if (log.isInfoEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg);
log.info(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#info(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level INFO.
* </p>
*
* @param format
* the format string
* @param arg1
* the first argument
* @param arg2
* the second argument
*/
public void info(String format, Object arg1, Object arg2) {
if (log.isInfoEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg1, arg2);
log.info(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#info(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level INFO.
* </p>
*
* @param format the format string
* @param argArray an array of arguments
*/
public void info(String format, Object[] argArray) {
if (log.isInfoEnabled()) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
log.info(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#info(java.lang.Object, java.lang.Throwable)} method of
* the underlying {@link Log} instance.
*
* @param msg
* the message accompanying the exception
* @param t
* the exception (throwable) to log
*/
public void info(String msg, Throwable t) {
log.info(msg, t);
}
/**
* Delegates to the {@link Log#isWarnEnabled} method of the underlying
* {@link Log} instance.
*/
public boolean isWarnEnabled() {
return log.isWarnEnabled();
}
/**
* Delegates to the {@link Log#warn(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* @param msg - the message object to be logged
*/
public void warn(String msg) {
log.warn(msg);
}
/**
* Delegates to the {@link Log#warn(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level WARN.
* </p>
*
* @param format
* the format string
* @param arg
* the argument
*/
public void warn(String format, Object arg) {
if (log.isWarnEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg);
log.warn(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#warn(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level WARN.
* </p>
*
* @param format
* the format string
* @param arg1
* the first argument
* @param arg2
* the second argument
*/
public void warn(String format, Object arg1, Object arg2) {
if (log.isWarnEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg1, arg2);
log.warn(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#warn(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level WARN.
* </p>
*
* @param format the format string
* @param argArray an array of arguments
*/
public void warn(String format, Object[] argArray) {
if (log.isWarnEnabled()) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
log.warn(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#warn(java.lang.Object, java.lang.Throwable)} method of
* the underlying {@link Log} instance.
*
* @param msg
* the message accompanying the exception
* @param t
* the exception (throwable) to log
*/
public void warn(String msg, Throwable t) {
log.warn(msg, t);
}
/**
* Delegates to the {@link Log#isErrorEnabled} method of the underlying
* {@link Log} instance.
*/
public boolean isErrorEnabled() {
return log.isErrorEnabled();
}
/**
* Delegates to the {@link Log#error(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* @param msg - the message object to be logged
*/
public void error(String msg) {
log.error(msg);
}
/**
* Delegates to the {@link Log#error(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level ERROR.
* </p>
*
* @param format
* the format string
* @param arg
* the argument
*/
public void error(String format, Object arg) {
if (log.isErrorEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg);
log.error(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#error(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level ERROR.
* </p>
*
* @param format
* the format string
* @param arg1
* the first argument
* @param arg2
* the second argument
*/
public void error(String format, Object arg1, Object arg2) {
if (log.isErrorEnabled()) {
FormattingTuple ft = MessageFormatter.format(format, arg1, arg2);
log.error(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#error(java.lang.Object)} method of the underlying
* {@link Log} instance.
*
* <p>
* However, this form avoids superfluous object creation when the logger is disabled
* for level ERROR.
* </p>
*
* @param format the format string
* @param argArray an array of arguments
*/
public void error(String format, Object[] argArray) {
if (log.isErrorEnabled()) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
log.error(ft.getMessage(), ft.getThrowable());
}
}
/**
* Delegates to the {@link Log#error(java.lang.Object, java.lang.Throwable)} method of
* the underlying {@link Log} instance.
*
* @param msg
* the message accompanying the exception
* @param t
* the exception (throwable) to log
*/
public void error(String msg, Throwable t) {
log.error(msg, t);
}
}
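// Minimal usage sketch (illustrative only, not part of the original file): with the
// slf4j-jcl binding on the classpath, LoggerFactory hands out JCLLoggerAdapter
// instances, so plain SLF4J calls are routed to commons-logging:
//
//   Logger logger = org.slf4j.LoggerFactory.getLogger("demo");
//   logger.debug("value is {}", 42); // formatted only if DEBUG is enabled in JCL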
|
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.topic.impl;
import com.hazelcast.cluster.Address;
import com.hazelcast.cluster.impl.MemberImpl;
import com.hazelcast.config.Config;
import com.hazelcast.config.TopicConfig;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.internal.metrics.DynamicMetricsProvider;
import com.hazelcast.internal.metrics.MetricDescriptor;
import com.hazelcast.internal.metrics.MetricsCollectionContext;
import com.hazelcast.internal.monitor.impl.LocalTopicStatsImpl;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.services.ManagedService;
import com.hazelcast.internal.services.RemoteService;
import com.hazelcast.internal.services.StatisticsAwareService;
import com.hazelcast.internal.util.ConstructorFunction;
import com.hazelcast.internal.util.HashUtil;
import com.hazelcast.internal.util.MapUtil;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.eventservice.EventPublishingService;
import com.hazelcast.spi.impl.eventservice.EventRegistration;
import com.hazelcast.spi.impl.eventservice.EventService;
import com.hazelcast.spi.properties.ClusterProperty;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.LocalTopicStats;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import static com.hazelcast.internal.metrics.MetricDescriptorConstants.TOPIC_PREFIX;
import static com.hazelcast.internal.metrics.impl.ProviderHelper.provide;
import static com.hazelcast.internal.util.ConcurrencyUtil.CALLER_RUNS;
import static com.hazelcast.internal.util.ConcurrencyUtil.getOrPutSynchronized;
public class TopicService implements ManagedService, RemoteService, EventPublishingService,
StatisticsAwareService<LocalTopicStats>, DynamicMetricsProvider {
public static final String SERVICE_NAME = "hz:impl:topicService";
public static final int ORDERING_LOCKS_LENGTH = 1000;
private final ConcurrentMap<String, LocalTopicStatsImpl> statsMap = new ConcurrentHashMap<>();
private final Lock[] orderingLocks = new Lock[ORDERING_LOCKS_LENGTH];
private NodeEngine nodeEngine;
private final ConstructorFunction<String, LocalTopicStatsImpl> localTopicStatsConstructorFunction =
mapName -> new LocalTopicStatsImpl();
private EventService eventService;
private final AtomicInteger counter = new AtomicInteger(0);
private Address localAddress;
@Override
public void init(NodeEngine nodeEngine, Properties properties) {
this.nodeEngine = nodeEngine;
this.localAddress = nodeEngine.getThisAddress();
for (int i = 0; i < orderingLocks.length; i++) {
orderingLocks[i] = new ReentrantLock();
}
eventService = nodeEngine.getEventService();
boolean dsMetricsEnabled = nodeEngine.getProperties().getBoolean(ClusterProperty.METRICS_DATASTRUCTURES);
if (dsMetricsEnabled) {
((NodeEngineImpl) nodeEngine).getMetricsRegistry().registerDynamicMetricsProvider(this);
}
}
// only for testing
public ConcurrentMap<String, LocalTopicStatsImpl> getStatsMap() {
return statsMap;
}
@Override
public void reset() {
statsMap.clear();
}
@Override
public void shutdown(boolean terminate) {
reset();
}
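// Striped ordering locks: a topic name is hashed to one of ORDERING_LOCKS_LENGTH
// locks, so publishes to the same topic can be serialized without a global lock.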
public Lock getOrderLock(String key) {
int index = getOrderLockIndex(key);
return orderingLocks[index];
}
private int getOrderLockIndex(String key) {
int hash = key.hashCode();
return HashUtil.hashToIndex(hash, orderingLocks.length);
}
@Override
public ITopic createDistributedObject(String name, UUID source, boolean local) {
TopicConfig topicConfig = nodeEngine.getConfig().findTopicConfig(name);
if (topicConfig.isGlobalOrderingEnabled()) {
return new TotalOrderedTopicProxy(name, nodeEngine, this);
} else {
return new TopicProxy(name, nodeEngine, this);
}
}
@Override
public void destroyDistributedObject(String objectId, boolean local) {
statsMap.remove(objectId);
nodeEngine.getEventService().deregisterAllListeners(SERVICE_NAME, objectId);
}
@Override
public void dispatchEvent(Object event, Object listener) {
TopicEvent topicEvent = (TopicEvent) event;
ClusterService clusterService = nodeEngine.getClusterService();
MemberImpl member = clusterService.getMember(topicEvent.publisherAddress);
if (member == null) {
member = new MemberImpl.Builder(topicEvent.publisherAddress)
.version(nodeEngine.getVersion())
.build();
}
Message message = new DataAwareMessage(topicEvent.name, topicEvent.data, topicEvent.publishTime, member
, nodeEngine.getSerializationService());
incrementReceivedMessages(topicEvent.name);
MessageListener messageListener = (MessageListener) listener;
messageListener.onMessage(message);
}
public LocalTopicStatsImpl getLocalTopicStats(String name) {
return getOrPutSynchronized(statsMap, name, statsMap, localTopicStatsConstructorFunction);
}
/**
* Increments the number of published messages on the ITopic
* with the name {@code topicName}.
*
* @param topicName the name of the {@link ITopic}
*/
public void incrementPublishes(String topicName) {
getLocalTopicStats(topicName).incrementPublishes();
}
/**
* Increments the number of received messages on the ITopic
* with the name {@code topicName}.
*
* @param topicName the name of the {@link ITopic}
*/
public void incrementReceivedMessages(String topicName) {
getLocalTopicStats(topicName).incrementReceives();
}
public void publishMessage(String topicName, Object payload, boolean multithreaded) {
Collection<EventRegistration> registrations = eventService.getRegistrations(SERVICE_NAME, topicName);
if (!registrations.isEmpty()) {
Data payloadData = nodeEngine.toData(payload);
TopicEvent topicEvent = new TopicEvent(topicName, payloadData, localAddress);
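// Multithreaded topics rotate the ordering key so events spread across event threads;
// otherwise the topic name pins all events of a topic to one thread, preserving order.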
int partitionId = multithreaded ? counter.incrementAndGet() : topicName.hashCode();
eventService.publishEvent(SERVICE_NAME, registrations, topicEvent, partitionId);
}
}
public UUID addLocalMessageListener(@Nonnull String name, @Nonnull MessageListener listener) {
EventRegistration registration = eventService.registerLocalListener(TopicService.SERVICE_NAME, name, listener);
if (registration == null) {
return null;
}
return registration.getId();
}
public UUID addMessageListener(@Nonnull String name, @Nonnull MessageListener listener) {
return eventService.registerListener(TopicService.SERVICE_NAME, name, listener).getId();
}
public Future<UUID> addMessageListenerAsync(@Nonnull String name, @Nonnull MessageListener listener) {
return eventService.registerListenerAsync(TopicService.SERVICE_NAME, name, listener)
.thenApplyAsync(EventRegistration::getId, CALLER_RUNS);
}
public boolean removeMessageListener(@Nonnull String name, @Nonnull UUID registrationId) {
return eventService.deregisterListener(TopicService.SERVICE_NAME, name, registrationId);
}
public Future<Boolean> removeMessageListenerAsync(@Nonnull String name, @Nonnull UUID registrationId) {
return eventService.deregisterListenerAsync(TopicService.SERVICE_NAME, name, registrationId);
}
@Override
public Map<String, LocalTopicStats> getStats() {
Map<String, LocalTopicStats> topicStats = MapUtil.createHashMap(statsMap.size());
Config config = nodeEngine.getConfig();
for (Map.Entry<String, LocalTopicStatsImpl> statEntry : statsMap.entrySet()) {
String name = statEntry.getKey();
if (config.getTopicConfig(name).isStatisticsEnabled()) {
topicStats.put(name, statEntry.getValue());
}
}
return topicStats;
}
@Override
public void provideDynamicMetrics(MetricDescriptor descriptor, MetricsCollectionContext context) {
provide(descriptor, context, TOPIC_PREFIX, getStats());
}
}
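// Minimal usage sketch (illustrative only, not part of the original file): this service
// backs the public ITopic API, which is normally reached through a HazelcastInstance:
//
//   HazelcastInstance hz = Hazelcast.newHazelcastInstance();
//   ITopic<String> topic = hz.getTopic("news");
//   UUID id = topic.addMessageListener(m -> System.out.println(m.getMessageObject()));
//   topic.publish("hello");
//   topic.removeMessageListener(id);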
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.debug.impl;
import java.io.Serializable;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.rmi.RemoteException;
import java.rmi.server.RemoteObject;
import java.rmi.server.UnicastRemoteObject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import freemarker.core.DebugBreak;
import freemarker.core.Environment;
import freemarker.core.TemplateElement;
import freemarker.core._CoreAPI;
import freemarker.debug.Breakpoint;
import freemarker.debug.DebuggerListener;
import freemarker.debug.EnvironmentSuspendedEvent;
import freemarker.template.Template;
import freemarker.template.utility.UndeclaredThrowableException;
/**
* @version $Id
*/
class RmiDebuggerService
extends
DebuggerService {
private final Map templateDebugInfos = new HashMap();
private final HashSet suspendedEnvironments = new HashSet();
private final Map listeners = new HashMap();
private final ReferenceQueue refQueue = new ReferenceQueue();
private final RmiDebuggerImpl debugger;
private DebuggerServer server;
RmiDebuggerService() {
try {
debugger = new RmiDebuggerImpl(this);
server = new DebuggerServer((Serializable) RemoteObject.toStub(debugger));
server.start();
} catch (RemoteException e) {
e.printStackTrace();
throw new UndeclaredThrowableException(e);
}
}
@Override
List getBreakpointsSpi(String templateName) {
synchronized (templateDebugInfos) {
TemplateDebugInfo tdi = findTemplateDebugInfo(templateName);
return tdi == null ? Collections.EMPTY_LIST : tdi.breakpoints;
}
}
List getBreakpointsSpi() {
List sumlist = new ArrayList();
synchronized (templateDebugInfos) {
for (Iterator iter = templateDebugInfos.values().iterator(); iter.hasNext(); ) {
sumlist.addAll(((TemplateDebugInfo) iter.next()).breakpoints);
}
}
Collections.sort(sumlist);
return sumlist;
}
// TODO See in SuppressFBWarnings
@Override
@SuppressFBWarnings(value={ "UW_UNCOND_WAIT", "WA_NOT_IN_LOOP" }, justification="Will have to be re-designed; postponed.")
boolean suspendEnvironmentSpi(Environment env, String templateName, int line)
throws RemoteException {
RmiDebuggedEnvironmentImpl denv =
(RmiDebuggedEnvironmentImpl)
RmiDebuggedEnvironmentImpl.getCachedWrapperFor(env);
synchronized (suspendedEnvironments) {
suspendedEnvironments.add(denv);
}
try {
EnvironmentSuspendedEvent breakpointEvent =
new EnvironmentSuspendedEvent(this, templateName, line, denv);
synchronized (listeners) {
for (Iterator iter = listeners.values().iterator(); iter.hasNext(); ) {
DebuggerListener listener = (DebuggerListener) iter.next();
listener.environmentSuspended(breakpointEvent);
}
}
synchronized (denv) {
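// Block this template-processing thread until the debugger client resumes or stops
// the suspended environment.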
try {
denv.wait();
} catch (InterruptedException e) {
// Intentionally ignored
}
}
return denv.isStopped();
} finally {
synchronized (suspendedEnvironments) {
suspendedEnvironments.remove(denv);
}
}
}
@Override
void registerTemplateSpi(Template template) {
String templateName = template.getName();
synchronized (templateDebugInfos) {
TemplateDebugInfo tdi = createTemplateDebugInfo(templateName);
tdi.templates.add(new TemplateReference(templateName, template, refQueue));
// Inject already defined breakpoints into the template
for (Iterator iter = tdi.breakpoints.iterator(); iter.hasNext(); ) {
Breakpoint breakpoint = (Breakpoint) iter.next();
insertDebugBreak(template, breakpoint);
}
}
}
Collection getSuspendedEnvironments() {
return (Collection) suspendedEnvironments.clone();
}
Object addDebuggerListener(DebuggerListener listener) {
Object id;
synchronized (listeners) {
id = Long.valueOf(System.currentTimeMillis());
listeners.put(id, listener);
}
return id;
}
void removeDebuggerListener(Object id) {
synchronized (listeners) {
listeners.remove(id);
}
}
void addBreakpoint(Breakpoint breakpoint) {
String templateName = breakpoint.getTemplateName();
synchronized (templateDebugInfos) {
TemplateDebugInfo tdi = createTemplateDebugInfo(templateName);
List breakpoints = tdi.breakpoints;
int pos = Collections.binarySearch(breakpoints, breakpoint);
if (pos < 0) {
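// Collections.binarySearch returns (-(insertionPoint) - 1) when the element is
// absent, so -pos - 1 recovers the insertion point and keeps the list sorted.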
// Add to the list of breakpoints
breakpoints.add(-pos - 1, breakpoint);
// Inject the breakpoint into all templates with this name
for (Iterator iter = tdi.templates.iterator(); iter.hasNext(); ) {
TemplateReference ref = (TemplateReference) iter.next();
Template t = ref.getTemplate();
if (t == null) {
iter.remove();
} else {
insertDebugBreak(t, breakpoint);
}
}
}
}
}
private static void insertDebugBreak(Template t, Breakpoint breakpoint) {
TemplateElement te = findTemplateElement(t.getRootTreeNode(), breakpoint.getLine());
if (te == null) {
return;
}
TemplateElement parent = _CoreAPI.getParentElement(te);
DebugBreak db = new DebugBreak(te);
// TODO: Ensure there always is a parent by making sure
// that the root element in the template is always a MixedContent
// Also make sure it doesn't conflict with anyone's code.
parent.setChildAt(parent.getIndex(te), db);
}
private static TemplateElement findTemplateElement(TemplateElement te, int line) {
if (te.getBeginLine() > line || te.getEndLine() < line) {
return null;
}
// Find the narrowest match
List childMatches = new ArrayList();
for (Enumeration children = te.children(); children.hasMoreElements(); ) {
TemplateElement child = (TemplateElement) children.nextElement();
TemplateElement childmatch = findTemplateElement(child, line);
if (childmatch != null) {
childMatches.add(childmatch);
}
}
//find a match that exactly matches the begin/end line
TemplateElement bestMatch = null;
for (int i = 0; i < childMatches.size(); i++) {
TemplateElement e = (TemplateElement) childMatches.get(i);
if ( bestMatch == null ) {
bestMatch = e;
}
if ( e.getBeginLine() == line && e.getEndLine() > line ) {
bestMatch = e;
}
if ( e.getBeginLine() == e.getEndLine() && e.getBeginLine() == line) {
bestMatch = e;
break;
}
}
if ( bestMatch != null) {
return bestMatch;
}
// If no child provides narrower match, return this
return te;
}
private TemplateDebugInfo findTemplateDebugInfo(String templateName) {
processRefQueue();
return (TemplateDebugInfo) templateDebugInfos.get(templateName);
}
private TemplateDebugInfo createTemplateDebugInfo(String templateName) {
TemplateDebugInfo tdi = findTemplateDebugInfo(templateName);
if (tdi == null) {
tdi = new TemplateDebugInfo();
templateDebugInfos.put(templateName, tdi);
}
return tdi;
}
void removeBreakpoint(Breakpoint breakpoint) {
String templateName = breakpoint.getTemplateName();
synchronized (templateDebugInfos) {
TemplateDebugInfo tdi = findTemplateDebugInfo(templateName);
if (tdi != null) {
List breakpoints = tdi.breakpoints;
int pos = Collections.binarySearch(breakpoints, breakpoint);
if (pos >= 0) {
breakpoints.remove(pos);
for (Iterator iter = tdi.templates.iterator(); iter.hasNext(); ) {
TemplateReference ref = (TemplateReference) iter.next();
Template t = ref.getTemplate();
if (t == null) {
iter.remove();
} else {
removeDebugBreak(t, breakpoint);
}
}
}
if (tdi.isEmpty()) {
templateDebugInfos.remove(templateName);
}
}
}
}
private void removeDebugBreak(Template t, Breakpoint breakpoint) {
TemplateElement te = findTemplateElement(t.getRootTreeNode(), breakpoint.getLine());
if (te == null) {
return;
}
DebugBreak db = null;
while (te != null) {
if (te instanceof DebugBreak) {
db = (DebugBreak) te;
break;
}
te = _CoreAPI.getParentElement(te);
}
if (db == null) {
return;
}
TemplateElement parent = _CoreAPI.getParentElement(db);
parent.setChildAt(parent.getIndex(db), _CoreAPI.getChildElement(db, 0));
}
void removeBreakpoints(String templateName) {
synchronized (templateDebugInfos) {
TemplateDebugInfo tdi = findTemplateDebugInfo(templateName);
if (tdi != null) {
removeBreakpoints(tdi);
if (tdi.isEmpty()) {
templateDebugInfos.remove(templateName);
}
}
}
}
void removeBreakpoints() {
synchronized (templateDebugInfos) {
for (Iterator iter = templateDebugInfos.values().iterator(); iter.hasNext(); ) {
TemplateDebugInfo tdi = (TemplateDebugInfo) iter.next();
removeBreakpoints(tdi);
if (tdi.isEmpty()) {
iter.remove();
}
}
}
}
private void removeBreakpoints(TemplateDebugInfo tdi) {
tdi.breakpoints.clear();
for (Iterator iter = tdi.templates.iterator(); iter.hasNext(); ) {
TemplateReference ref = (TemplateReference) iter.next();
Template t = ref.getTemplate();
if (t == null) {
iter.remove();
} else {
removeDebugBreaks(t.getRootTreeNode());
}
}
}
private void removeDebugBreaks(TemplateElement te) {
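// Recursively unwrap DebugBreak nodes, putting each wrapped child back in its place.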
int count = te.getChildCount();
for (int i = 0; i < count; ++i) {
TemplateElement child = _CoreAPI.getChildElement(te, i);
while (child instanceof DebugBreak) {
TemplateElement dbchild = _CoreAPI.getChildElement(child, 0);
te.setChildAt(i, dbchild);
child = dbchild;
}
removeDebugBreaks(child);
}
}
private static final class TemplateDebugInfo {
final List templates = new ArrayList();
final List breakpoints = new ArrayList();
boolean isEmpty() {
return templates.isEmpty() && breakpoints.isEmpty();
}
}
private static final class TemplateReference extends WeakReference {
final String templateName;
TemplateReference(String templateName, Template template, ReferenceQueue queue) {
super(template, queue);
this.templateName = templateName;
}
Template getTemplate() {
return (Template) get();
}
}
private void processRefQueue() {
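// Drain weak references to garbage-collected templates and drop a template's debug
// info entry once it has neither live instances nor breakpoints left.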
for (; ; ) {
TemplateReference ref = (TemplateReference) refQueue.poll();
if (ref == null) {
break;
}
TemplateDebugInfo tdi = findTemplateDebugInfo(ref.templateName);
if (tdi != null) {
tdi.templates.remove(ref);
if (tdi.isEmpty()) {
templateDebugInfos.remove(ref.templateName);
}
}
}
}
@Override
void shutdownSpi() {
server.stop();
try {
UnicastRemoteObject.unexportObject(this.debugger, true);
} catch (Exception e) {
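// Best-effort unexport during shutdown; failures are intentionally ignored.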
}
RmiDebuggedEnvironmentImpl.cleanup();
}
}
|
|
/*
* Copyright 2014-2021 Lukas Krejci
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.revapi.configuration;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
/**
* {@link InputStream} implementation that reads a character stream from a {@link Reader} and transforms it to a byte
* stream using a specified charset encoding. The stream is transformed using a {@link CharsetEncoder} object,
* guaranteeing that all charset encodings supported by the JRE are handled correctly. In particular for charsets such
* as UTF-16, the implementation ensures that one and only one byte order marker is produced.
*
* <p>
* Since in general it is not possible to predict the number of characters to be read from the {@link Reader} to satisfy
* a read request on the {@link ReaderInputStream}, all reads from the {@link Reader} are buffered. There is therefore
* no well defined correlation between the current position of the {@link Reader} and that of the
* {@link ReaderInputStream}. This also implies that in general there is no need to wrap the underlying {@link Reader}
* in a {@link java.io.BufferedReader}.
*
* <p>
* {@link ReaderInputStream} implements the inverse transformation of {@link java.io.InputStreamReader}; in the
* following example, reading from <tt>in2</tt> would return the same byte sequence as reading from <tt>in</tt>
* (provided that the initial byte sequence is legal with respect to the charset encoding):
*
* <pre>
* InputStream in = ...
* Charset cs = ...
* InputStreamReader reader = new InputStreamReader(in, cs);
* ReaderInputStream in2 = new ReaderInputStream(reader, cs);
* </pre>
*
* {@link ReaderInputStream} implements the same transformation as {@link java.io.OutputStreamWriter}, except that the
* control flow is reversed: both classes transform a character stream into a byte stream, but
* {@link java.io.OutputStreamWriter} pushes data to the underlying stream, while {@link ReaderInputStream} pulls it
* from the underlying stream.
*
* Note that while there are use cases where there is no alternative to using this class, very often the need to use
* this class is an indication of a flaw in the design of the code. This class is typically used in situations where an
* existing API only accepts an {@link InputStream}, but where the most natural way to produce the data is as a
* character stream, i.e. by providing a {@link Reader} instance. An example of a situation where this problem may
* appear is when implementing the {@link javax.activation.DataSource} interface from the Java Activation Framework.
*
* Given the fact that the {@link Reader} class doesn't provide any way to predict whether the next read operation will
* block or not, it is not possible to provide a meaningful implementation of the {@link InputStream#available()}
* method. A call to this method will always return 0. Also, this class doesn't support {@link InputStream#mark(int)}.
*
* Instances of {@link ReaderInputStream} are not thread safe.
*
* @see org.apache.commons.io.output.WriterOutputStream
*
* @since 2.0
*
* @deprecated remove this once {@link JSONUtil#stripComments(InputStream, Charset)} is removed.
*/
@Deprecated
final class ReaderInputStream extends InputStream {
private static final int DEFAULT_BUFFER_SIZE = 1024;
private final Reader reader;
private final CharsetEncoder encoder;
/**
* CharBuffer used as input for the decoder. It should be reasonably large as we read data from the underlying
* Reader into this buffer.
*/
private final CharBuffer encoderIn;
/**
* ByteBuffer used as output for the decoder. This buffer can be small as it is only used to transfer data from the
* decoder to the buffer provided by the caller.
*/
private final ByteBuffer encoderOut;
private CoderResult lastCoderResult;
private boolean endOfInput;
/**
* Construct a new {@link ReaderInputStream}.
*
* @param reader
* the target {@link Reader}
* @param encoder
* the charset encoder
*
* @since 2.1
*/
public ReaderInputStream(final Reader reader, final CharsetEncoder encoder) {
this(reader, encoder, DEFAULT_BUFFER_SIZE);
}
/**
* Construct a new {@link ReaderInputStream}.
*
* @param reader
* the target {@link Reader}
* @param encoder
* the charset encoder
* @param bufferSize
* the size of the input buffer in number of characters
*
* @since 2.1
*/
public ReaderInputStream(final Reader reader, final CharsetEncoder encoder, final int bufferSize) {
this.reader = reader;
this.encoder = encoder;
this.encoderIn = CharBuffer.allocate(bufferSize);
this.encoderIn.flip();
this.encoderOut = ByteBuffer.allocate(128);
this.encoderOut.flip();
}
/**
* Construct a new {@link ReaderInputStream}.
*
* @param reader
* the target {@link Reader}
* @param charset
* the charset encoding
* @param bufferSize
* the size of the input buffer in number of characters
*/
public ReaderInputStream(final Reader reader, final Charset charset, final int bufferSize) {
this(reader, charset.newEncoder().onMalformedInput(CodingErrorAction.REPLACE)
.onUnmappableCharacter(CodingErrorAction.REPLACE), bufferSize);
}
/**
* Construct a new {@link ReaderInputStream} with a default input buffer size of 1024 characters.
*
* @param reader
* the target {@link Reader}
* @param charset
* the charset encoding
*/
public ReaderInputStream(final Reader reader, final Charset charset) {
this(reader, charset, DEFAULT_BUFFER_SIZE);
}
/**
* Construct a new {@link ReaderInputStream}.
*
* @param reader
* the target {@link Reader}
* @param charsetName
* the name of the charset encoding
* @param bufferSize
* the size of the input buffer in number of characters
*/
public ReaderInputStream(final Reader reader, final String charsetName, final int bufferSize) {
this(reader, Charset.forName(charsetName), bufferSize);
}
/**
* Construct a new {@link ReaderInputStream} with a default input buffer size of 1024 characters.
*
* @param reader
* the target {@link Reader}
* @param charsetName
* the name of the charset encoding
*/
public ReaderInputStream(final Reader reader, final String charsetName) {
this(reader, charsetName, DEFAULT_BUFFER_SIZE);
}
/**
* Construct a new {@link ReaderInputStream} that uses the default character encoding with a default input buffer
* size of 1024 characters.
*
* @param reader
* the target {@link Reader}
*
* @deprecated 2.5 use {@link #ReaderInputStream(Reader, Charset)} instead
*/
@Deprecated
public ReaderInputStream(final Reader reader) {
this(reader, Charset.defaultCharset());
}
/**
* Fills the internal char buffer from the reader.
*
* @throws IOException
* If an I/O error occurs
*/
private void fillBuffer() throws IOException {
if (!endOfInput && (lastCoderResult == null || lastCoderResult.isUnderflow())) {
encoderIn.compact();
final int position = encoderIn.position();
// We don't use Reader#read(CharBuffer) here because it is more efficient
// to write directly to the underlying char array (the default implementation
// copies data to a temporary char array).
final int c = reader.read(encoderIn.array(), position, encoderIn.remaining());
if (c == -1) {
endOfInput = true;
} else {
encoderIn.position(position + c);
}
encoderIn.flip();
}
encoderOut.compact();
lastCoderResult = encoder.encode(encoderIn, encoderOut, endOfInput);
encoderOut.flip();
}
/**
* Read the specified number of bytes into an array.
*
* @param b
* the byte array to read into
* @param off
* the offset to start reading bytes into
* @param len
* the number of bytes to read
*
* @return the number of bytes read or <code>-1</code> if the end of the stream has been reached
*
* @throws IOException
* if an I/O error occurs
*/
@Override
public int read(final byte[] b, int off, int len) throws IOException {
if (b == null) {
throw new NullPointerException("Byte array must not be null");
}
if (len < 0 || off < 0 || (off + len) > b.length) {
throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len);
}
int read = 0;
if (len == 0) {
return 0; // Always return 0 if len == 0
}
while (len > 0) {
if (encoderOut.hasRemaining()) {
final int c = Math.min(encoderOut.remaining(), len);
encoderOut.get(b, off, c);
off += c;
len -= c;
read += c;
} else {
fillBuffer();
if (endOfInput && !encoderOut.hasRemaining()) {
break;
}
}
}
return read == 0 && endOfInput ? -1 : read;
}
/**
* Read the specified number of bytes into an array.
*
* @param b
* the byte array to read into
*
* @return the number of bytes read or <code>-1</code> if the end of the stream has been reached
*
* @throws IOException
* if an I/O error occurs
*/
@Override
public int read(final byte[] b) throws IOException {
return read(b, 0, b.length);
}
/**
* Read a single byte.
*
* @return either the byte read or <code>-1</code> if the end of the stream has been reached
*
* @throws IOException
* if an I/O error occurs
*/
@Override
public int read() throws IOException {
for (;;) {
if (encoderOut.hasRemaining()) {
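// Mask to 0..255: InputStream#read() must return an unsigned value or -1.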
return encoderOut.get() & 0xFF;
} else {
fillBuffer();
if (endOfInput && !encoderOut.hasRemaining()) {
return -1;
}
}
}
}
/**
* Close the stream. This method will cause the underlying {@link Reader} to be closed.
*
* @throws IOException
* if an I/O error occurs
*/
@Override
public void close() throws IOException {
reader.close();
}
}
|
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.apple;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.core.build.buildable.context.FakeBuildableContext;
import com.facebook.buck.core.build.context.FakeBuildContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.FakeSourcePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.rules.keys.TestDefaultRuleKeyFactory;
import com.facebook.buck.sandbox.NoSandboxExecutionStrategy;
import com.facebook.buck.shell.AbstractGenruleStep;
import com.facebook.buck.shell.ShellStep;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.hash.HashCode;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import org.junit.Before;
import org.junit.Test;
public class ExternallyBuiltApplePackageTest {
private String bundleLocation;
private BuildTarget buildTarget;
private ProjectFilesystem projectFilesystem;
private ActionGraphBuilder graphBuilder;
private ExternallyBuiltApplePackage.ApplePackageConfigAndPlatformInfo config;
@Before
public void setUp() {
assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
bundleLocation = "Fake/Bundle/Location";
buildTarget = BuildTargetFactory.newInstance("//foo", "package");
projectFilesystem = new FakeProjectFilesystem();
graphBuilder = new TestActionGraphBuilder();
config =
ImmutableApplePackageConfigAndPlatformInfo.of(
AppleConfig.ApplePackageConfig.of("echo $SDKROOT $OUT", "api"),
FakeAppleRuleDescriptions.DEFAULT_IPHONEOS_ARMV7_PLATFORM);
}
@Test
public void sdkrootEnvironmentVariableIsSet() {
ExternallyBuiltApplePackage rule =
new ExternallyBuiltApplePackage(
buildTarget,
projectFilesystem,
new NoSandboxExecutionStrategy(),
graphBuilder,
config,
FakeSourcePath.of(bundleLocation),
true,
Optional.empty(),
Optional.empty());
graphBuilder.addToIndex(rule);
ShellStep step =
Iterables.getOnlyElement(
Iterables.filter(
rule.getBuildSteps(
FakeBuildContext.withSourcePathResolver(graphBuilder.getSourcePathResolver()),
new FakeBuildableContext()),
AbstractGenruleStep.class));
assertThat(
step.getEnvironmentVariables(TestExecutionContext.newInstance()),
hasEntry(
"SDKROOT",
FakeAppleRuleDescriptions.DEFAULT_IPHONEOS_ARMV7_PLATFORM
.getAppleSdkPaths()
.getSdkPath()
.toString()));
}
@Test
public void outputContainsCorrectExtension() {
ExternallyBuiltApplePackage rule =
new ExternallyBuiltApplePackage(
buildTarget,
projectFilesystem,
new NoSandboxExecutionStrategy(),
graphBuilder,
config,
FakeSourcePath.of("Fake/Bundle/Location"),
true,
Optional.empty(),
Optional.empty());
graphBuilder.addToIndex(rule);
assertThat(
graphBuilder
.getSourcePathResolver()
.getRelativePath(Objects.requireNonNull(rule.getSourcePathToOutput()))
.toString(),
endsWith(".api"));
}
@Test
public void commandContainsCorrectCommand() {
ExternallyBuiltApplePackage rule =
new ExternallyBuiltApplePackage(
buildTarget,
projectFilesystem,
new NoSandboxExecutionStrategy(),
graphBuilder,
config,
FakeSourcePath.of("Fake/Bundle/Location"),
true,
Optional.empty(),
Optional.empty());
graphBuilder.addToIndex(rule);
AbstractGenruleStep step =
Iterables.getOnlyElement(
Iterables.filter(
rule.getBuildSteps(
FakeBuildContext.withSourcePathResolver(graphBuilder.getSourcePathResolver()),
new FakeBuildableContext()),
AbstractGenruleStep.class));
assertThat(
step.getScriptFileContents(TestExecutionContext.newInstance()),
is(equalTo("echo $SDKROOT $OUT")));
}
@Test
public void platformVersionAffectsRuleKey() {
Function<String, ExternallyBuiltApplePackage> packageWithVersion =
input ->
new ExternallyBuiltApplePackage(
buildTarget,
projectFilesystem,
new NoSandboxExecutionStrategy(),
graphBuilder,
config.withPlatform(config.getPlatform().withBuildVersion(Optional.of(input))),
FakeSourcePath.of("Fake/Bundle/Location"),
true,
Optional.empty(),
Optional.empty());
assertNotEquals(
newRuleKeyFactory().build(packageWithVersion.apply("real")),
newRuleKeyFactory().build(packageWithVersion.apply("fake")));
}
@Test
public void sdkVersionAffectsRuleKey() {
Function<String, ExternallyBuiltApplePackage> packageWithSdkVersion =
input ->
new ExternallyBuiltApplePackage(
buildTarget,
projectFilesystem,
new NoSandboxExecutionStrategy(),
graphBuilder,
config.withPlatform(
config
.getPlatform()
.withAppleSdk(config.getPlatform().getAppleSdk().withVersion(input))),
FakeSourcePath.of("Fake/Bundle/Location"),
true,
Optional.empty(),
Optional.empty());
assertNotEquals(
newRuleKeyFactory().build(packageWithSdkVersion.apply("real")),
newRuleKeyFactory().build(packageWithSdkVersion.apply("fake")));
}
private DefaultRuleKeyFactory newRuleKeyFactory() {
return new TestDefaultRuleKeyFactory(
new FakeFileHashCache(
ImmutableMap.of(Paths.get(bundleLocation).toAbsolutePath(), HashCode.fromInt(5))),
graphBuilder);
}
}
|
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.sql.impl.schema.map;
import com.hazelcast.sql.impl.extract.QueryPath;
import com.hazelcast.sql.impl.extract.QueryTargetDescriptor;
import com.hazelcast.sql.impl.optimizer.PlanObjectKey;
import com.hazelcast.sql.impl.schema.TableField;
import com.hazelcast.sql.impl.schema.TableStatistics;
import com.hazelcast.sql.impl.type.QueryDataType;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
public class PartitionedMapTable extends AbstractMapTable {
private final List<MapTableIndex> indexes;
private final boolean hd;
@SuppressWarnings("checkstyle:ParameterNumber")
public PartitionedMapTable(
String schemaName,
String tableName,
String mapName,
List<TableField> fields,
TableStatistics statistics,
QueryTargetDescriptor keyDescriptor,
QueryTargetDescriptor valueDescriptor,
Object keyJetMetadata,
Object valueJetMetadata,
List<MapTableIndex> indexes,
boolean hd
) {
super(
schemaName,
tableName,
mapName,
fields,
statistics,
keyDescriptor,
valueDescriptor,
keyJetMetadata,
valueJetMetadata
);
this.indexes = indexes;
this.hd = hd;
}
@Override
public PlanObjectKey getObjectKey() {
if (!isValid()) {
return null;
}
return new PartitionedMapPlanObjectKey(
getSchemaName(),
getSqlName(),
getMapName(),
getFields(),
getConflictingSchemas(),
getKeyDescriptor(),
getValueDescriptor(),
getKeyJetMetadata(),
getValueJetMetadata(),
getIndexes(),
isHd()
);
}
public List<MapTableIndex> getIndexes() {
checkException();
return indexes != null ? indexes : Collections.emptyList();
}
public boolean isHd() {
return hd;
}
public Stream<MapTableField> keyFields() {
return getFields().stream().map(field -> (MapTableField) field).filter(field -> field.getPath().isKey());
}
public Stream<MapTableField> valueFields() {
return getFields().stream().map(field -> (MapTableField) field).filter(field -> !field.getPath().isKey());
}
public QueryPath[] paths() {
return getFields().stream().map(field -> ((MapTableField) field).getPath()).toArray(QueryPath[]::new);
}
public QueryDataType[] types() {
return getFields().stream().map(TableField::getType).toArray(QueryDataType[]::new);
}
public QueryPath[] valuePaths() {
return valueFields().map(MapTableField::getPath).toArray(QueryPath[]::new);
}
public QueryDataType[] valueTypes() {
return valueFields().map(TableField::getType).toArray(QueryDataType[]::new);
}
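// Identity of this table for plan caching: two keys compare equal only when every
// piece of table metadata (fields, descriptors, indexes, HD flag, schemas) matches.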
static class PartitionedMapPlanObjectKey implements PlanObjectKey {
private final String schemaName;
private final String tableName;
private final String mapName;
private final List<TableField> fields;
private final QueryTargetDescriptor keyDescriptor;
private final QueryTargetDescriptor valueDescriptor;
private final Object keyJetMetadata;
private final Object valueJetMetadata;
private final List<MapTableIndex> indexes;
private final boolean hd;
private final Set<String> conflictingSchemas;
@SuppressWarnings("checkstyle:ParameterNumber")
PartitionedMapPlanObjectKey(
String schemaName,
String tableName,
String mapName,
List<TableField> fields,
Set<String> conflictingSchemas,
QueryTargetDescriptor keyDescriptor,
QueryTargetDescriptor valueDescriptor,
Object keyJetMetadata,
Object valueJetMetadata,
List<MapTableIndex> indexes,
boolean hd
) {
this.schemaName = schemaName;
this.tableName = tableName;
this.mapName = mapName;
this.fields = fields;
this.keyDescriptor = keyDescriptor;
this.valueDescriptor = valueDescriptor;
this.keyJetMetadata = keyJetMetadata;
this.valueJetMetadata = valueJetMetadata;
this.indexes = indexes;
this.hd = hd;
this.conflictingSchemas = conflictingSchemas;
}
@Override
@SuppressWarnings("checkstyle:cyclomaticcomplexity")
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PartitionedMapPlanObjectKey that = (PartitionedMapPlanObjectKey) o;
return hd == that.hd
&& schemaName.equals(that.schemaName)
&& tableName.equals(that.tableName)
&& mapName.equals(that.mapName)
&& fields.equals(that.fields)
&& keyDescriptor.equals(that.keyDescriptor)
&& valueDescriptor.equals(that.valueDescriptor)
&& Objects.equals(keyJetMetadata, that.keyJetMetadata)
&& Objects.equals(valueJetMetadata, that.valueJetMetadata)
&& indexes.equals(that.indexes)
&& conflictingSchemas.equals(that.conflictingSchemas);
}
@Override
public int hashCode() {
int result = schemaName.hashCode();
result = 31 * result + tableName.hashCode();
result = 31 * result + mapName.hashCode();
result = 31 * result + fields.hashCode();
result = 31 * result + keyDescriptor.hashCode();
result = 31 * result + valueDescriptor.hashCode();
result = 31 * result + Objects.hashCode(keyJetMetadata);
result = 31 * result + Objects.hashCode(valueJetMetadata);
result = 31 * result + indexes.hashCode();
result = 31 * result + (hd ? 1 : 0);
result = 31 * result + conflictingSchemas.hashCode();
return result;
}
}
}
|
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package io.bigio;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author atrimble
*/
public class MessageTest {
private static final Logger LOG = LoggerFactory.getLogger(MessageTest.class);
private static BigIO speaker;
private static final String MESSAGE = "This is a test";
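// Listeners hand received messages to this single-slot queue so tests can block on
// poll(timeout) instead of sleeping.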
private final BlockingQueue<MyMessage> queue = new ArrayBlockingQueue<>(1);
private final MyMessageListener listener = new MyMessageListener();
private final VolumeListener volumeListener = new VolumeListener();
private final DelayedMessageListener delayedListener = new DelayedMessageListener();
private static boolean failed = false;
@Before
public void init() {
speaker = BigIO.bootstrap();
}
@After
public void shutdown() {
speaker.shutdown();
}
@Test
public void testVolume() throws Exception {
failed = false;
speaker.addListener("VolumeTopic", volumeListener);
for(int i = 0; i < 10000; ++i) {
speaker.send("VolumeTopic", new MyMessage(MESSAGE + i));
}
Thread.sleep(1000l);
assertTrue(volumeListener.counter == 10000);
}
@Test
public void testMessage() throws Exception {
failed = false;
speaker.addListener("MyTopic", listener);
speaker.send("MyTopic", new MyMessage(MESSAGE + "1"));
MyMessage m = queue.poll(2000l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "1");
if(failed) {
fail();
}
speaker.send("BadTopic", new MyMessage(MESSAGE + "2"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
/*
speaker.removeAllListeners("MyTopic");
speaker.send("MyTopic", new MyMessage(MESSAGE + "3"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
*/
queue.clear();
}
@Test
public void testAllPartitions() throws Exception {
failed = false;
speaker.addListener("MyTopic", ".*", listener);
speaker.send("MyTopic", "MyPartition", new MyMessage(MESSAGE + "1"));
MyMessage m = queue.poll(2000l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "1");
if(failed) {
fail();
}
speaker.send("BadTopic", new MyMessage(MESSAGE + "2"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
/*
speaker.removeAllListeners("MyTopic");
speaker.send("MyTopic", new MyMessage(MESSAGE + "3"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
*/
queue.clear();
}
@Test
public void testSpecificPartitions() throws Exception {
failed = false;
speaker.addListener("MyTopic", "MyPartition", listener);
speaker.send("MyTopic", "MyPartition", new MyMessage(MESSAGE + "1"));
MyMessage m = queue.poll(2000l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "1");
if(failed) {
fail();
}
speaker.send("MyTopic", new MyMessage(MESSAGE + "2"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
speaker.send("BadTopic", new MyMessage(MESSAGE + "2"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
speaker.send("MyTopic", "BadPartition", new MyMessage(MESSAGE + "2"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
//speaker.removeAllListeners("MyTopic");
speaker.send("MyTopic", new MyMessage(MESSAGE + "3"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
queue.clear();
}
@Test
public void testRoundRobin() throws Exception {
failed = false;
speaker.setDeliveryType("MyTopic", DeliveryType.ROUND_ROBIN);
speaker.addListener("MyTopic", listener);
speaker.send("MyTopic", new MyMessage(MESSAGE + "4"));
MyMessage m = queue.poll(2000l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "4");
if(failed) {
fail();
}
speaker.send("BadTopic", new MyMessage(MESSAGE + "5"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
/* speaker.removeAllListeners("MyTopic");
speaker.send("MyTopic", new MyMessage(MESSAGE + "5"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
*/
queue.clear();
}
@Test
public void testRandom() throws Exception {
failed = false;
speaker.setDeliveryType("MyTopic", DeliveryType.RANDOM);
speaker.addListener("MyTopic", listener);
speaker.send("MyTopic", new MyMessage(MESSAGE + "6"));
MyMessage m = queue.poll(2000l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "6");
if(failed) {
fail();
}
speaker.send("BadTopic", new MyMessage(MESSAGE + "7"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
/*
speaker.removeAllListeners("MyTopic");
speaker.send("MyTopic", new MyMessage(MESSAGE + "7"));
m = queue.poll(500l, TimeUnit.MILLISECONDS);
assertNull(m);
*/
queue.clear();
}
@Test
public void testDelay() throws Exception {
failed = false;
speaker.addListener("DelayedTopic", delayedListener);
speaker.send("DelayedTopic", new MyMessage(MESSAGE + "8"), 2000);
MyMessage m = queue.poll(1000l, TimeUnit.MILLISECONDS);
assertNull(m);
failed = false;
m = queue.poll(2500l, TimeUnit.MILLISECONDS);
assertNotNull(m);
assertEquals(m.getMessage(), MESSAGE + "8");
if(failed) {
fail();
}
//speaker.removeAllListeners("DelayedTopic");
queue.clear();
}
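// Pushes every received message onto the shared queue so the tests can assert delivery synchronously.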
private class MyMessageListener implements MessageListener<MyMessage> {
@Override
public void receive(MyMessage message) {
LOG.info("Got a message " + message.getMessage());
boolean success = queue.offer(message);
if (!success) {
failed = true;
}
}
}
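// Counts deliveries only; intended for volume-style checks rather than content assertions.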
private class VolumeListener implements MessageListener<MyMessage> {
public int counter = 0;
@Override
public void receive(MyMessage message) {
++counter;
}
}
private class DelayedMessageListener implements MessageListener<MyMessage> {
@Override
public void receive(MyMessage message) {
LOG.info("Got a delayed message " + message.getMessage());
boolean success = queue.offer(message);
if (!success) {
failed = true;
}
}
}
private static final class MyMessage {
private String message;
public MyMessage() {
}
public MyMessage(String message) {
this.message = message;
}
public void setMessage(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
}
}
|
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2016-2021 Gerrit Grunwald.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.tilesfx.skins;
import eu.hansolo.tilesfx.Tile;
import eu.hansolo.tilesfx.fonts.Fonts;
import eu.hansolo.tilesfx.tools.Helper;
import javafx.geometry.VPos;
import javafx.scene.shape.Rectangle;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
/**
* Created by hansolo on 19.12.16.
*/
public class ClockTileSkin extends TileSkin {
private DateTimeFormatter timeFormatter;
private DateTimeFormatter dateFormatter;
private DateTimeFormatter dayOfWeekFormatter;
private Text titleText;
private Text text;
private Rectangle timeRect;
private Text timeText;
private Text dayOfWeekText;
private Text dateText;
// ******************** Constructors **************************************
public ClockTileSkin(final Tile TILE) {
super(TILE);
}
// ******************** Initialization ************************************
@Override protected void initGraphics() {
super.initGraphics();
currentValueListener = o -> {
if (tile.isRunning()) { return; } // Update time only if clock is not already running
updateTime(ZonedDateTime.ofInstant(Instant.ofEpochSecond(tile.getCurrentTime()), ZoneId.systemDefault()));
};
timeListener = o -> updateTime(tile.getTime());
timeFormatter = DateTimeFormatter.ofPattern("HH:mm", tile.getLocale());
dateFormatter = DateTimeFormatter.ofPattern("dd MMM yyyy", tile.getLocale()); // yyyy: calendar year ("YYYY" is the week-based year)
dayOfWeekFormatter = DateTimeFormatter.ofPattern("EEEE", tile.getLocale());
titleText = new Text("");
titleText.setTextOrigin(VPos.TOP);
Helper.enableNode(titleText, !tile.getTitle().isEmpty());
text = new Text(tile.getText());
text.setFill(tile.getUnitColor());
Helper.enableNode(text, tile.isTextVisible());
timeRect = new Rectangle();
timeText = new Text(timeFormatter.format(tile.getTime()));
timeText.setTextOrigin(VPos.CENTER);
dateText = new Text(dateFormatter.format(tile.getTime()));
dayOfWeekText = new Text(dayOfWeekFormatter.format(tile.getTime()));
getPane().getChildren().addAll(titleText, text, timeRect, timeText, dateText, dayOfWeekText);
}
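// Non-animated tiles update straight from the time property; animated tiles are assumed to drive updates through currentValueListener instead.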
@Override protected void registerListeners() {
super.registerListeners();
if (!tile.isAnimated()) { tile.timeProperty().addListener(timeListener); }
}
// ******************** Methods *******************************************
@Override protected void handleEvents(final String EVENT_TYPE) {
super.handleEvents(EVENT_TYPE);
if ("VISIBILITY".equals(EVENT_TYPE)) {
Helper.enableNode(titleText, !tile.getTitle().isEmpty());
Helper.enableNode(text, tile.isTextVisible());
}
}
public void updateTime(final ZonedDateTime TIME) {
timeText.setText(timeFormatter.format(TIME));
timeText.setX((width - timeText.getLayoutBounds().getWidth()) * 0.5);
timeText.setY(height * 0.35);
dayOfWeekText.setText(dayOfWeekFormatter.format(TIME));
dayOfWeekText.setX(size * 0.05);
dateText.setText(dateFormatter.format(TIME));
dateText.setX(size * 0.05);
}
@Override public void dispose() {
if (!tile.isAnimated()) { tile.timeProperty().removeListener(timeListener); }
super.dispose();
}
// ******************** Resizing ******************************************
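// Dynamic text = the frequently changing time/date/day-of-week values; static text = title and description, handled separately below.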
@Override protected void resizeDynamicText() {
double maxWidth = width - size * 0.1;
double fontSize = size * 0.3;
timeText.setFont(Fonts.latoRegular(fontSize));
timeText.setText(timeFormatter.format(tile.getTime()));
Helper.adjustTextSize(timeText, maxWidth, fontSize);
timeText.setX((width - timeText.getLayoutBounds().getWidth()) * 0.5);
timeText.setY(size * 0.35);
//maxWidth = width - size * 0.1;
fontSize = size * 0.1;
dayOfWeekText.setFont(Fonts.latoRegular(fontSize));
if (dayOfWeekText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(dayOfWeekText, maxWidth, fontSize); }
dayOfWeekText.setX(size * 0.05);
dayOfWeekText.setY(timeRect.getLayoutBounds().getMaxY() + size * 0.11);
//maxWidth = width - size * 0.1;
dateText.setFont(Fonts.latoRegular(fontSize));
if (dateText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(dateText, maxWidth, fontSize); }
dateText.setX(size * 0.05);
dateText.setY(timeRect.getLayoutBounds().getMaxY() + size * 0.235);
}
@Override protected void resizeStaticText() {
double maxWidth = size * 0.9;
double fontSize = size * textSize.factor;
boolean customFontEnabled = tile.isCustomFontEnabled();
Font customFont = tile.getCustomFont();
Font font = (customFontEnabled && customFont != null) ? Font.font(customFont.getFamily(), fontSize) : Fonts.latoRegular(fontSize);
titleText.setFont(font);
if (titleText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(titleText, maxWidth, fontSize); }
switch(tile.getTitleAlignment()) {
default :
case LEFT : titleText.relocate(size * 0.05, size * 0.05); break;
case CENTER: titleText.relocate((width - titleText.getLayoutBounds().getWidth()) * 0.5, size * 0.05); break;
case RIGHT : titleText.relocate(width - (size * 0.05) - titleText.getLayoutBounds().getWidth(), size * 0.05); break;
}
maxWidth = size * 0.9;
text.setText(tile.getText());
text.setFont(font);
if (text.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(text, maxWidth, fontSize); }
switch(tile.getTextAlignment()) {
default :
case LEFT : text.setX(size * 0.05); break;
case CENTER: text.setX((width - text.getLayoutBounds().getWidth()) * 0.5); break;
case RIGHT : text.setX(width - (size * 0.05) - text.getLayoutBounds().getWidth()); break;
}
text.setY(height - size * 0.05);
}
@Override protected void resize() {
super.resize();
timeRect.setWidth(width);
timeRect.setHeight(height * 0.4);
timeRect.setX(0);
timeRect.setY(contentBounds.getY());
}
@Override protected void redraw() {
super.redraw();
titleText.setText(tile.getTitle());
text.setText(tile.getText());
timeFormatter = DateTimeFormatter.ofPattern("HH:mm", tile.getLocale());
dateFormatter = DateTimeFormatter.ofPattern("dd MMM yyyy", tile.getLocale());
dayOfWeekFormatter = DateTimeFormatter.ofPattern("EEEE", tile.getLocale());
ZonedDateTime time = tile.getTime();
updateTime(time);
resizeStaticText();
resizeDynamicText();
titleText.setFill(tile.getTitleColor());
text.setFill(tile.getTextColor());
timeRect.setFill(tile.getBackgroundColor().darker());
timeText.setFill(tile.getTitleColor());
dateText.setFill(tile.getDateColor());
dayOfWeekText.setFill(tile.getDateColor());
}
}
|
|
package org.jasig.cas.ticket;
import org.jasig.cas.authentication.Authentication;
import org.jasig.cas.authentication.principal.Service;
import org.jasig.cas.mock.MockService;
import org.jasig.cas.ticket.support.NeverExpiresExpirationPolicy;
import org.jasig.cas.util.DefaultUniqueTicketIdGenerator;
import org.junit.Test;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
* @author Scott Battaglia
* @since 3.0.0
*/
public class TicketGrantingTicketImplTests {
private final UniqueTicketIdGenerator uniqueTicketIdGenerator = new DefaultUniqueTicketIdGenerator();
@Test
public void verifyEquals() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
assertNotNull(t);
assertFalse(t.equals(new Object()));
assertTrue(t.equals(t));
}
@Test(expected=Exception.class)
public void verifyNullAuthentication() {
new TicketGrantingTicketImpl("test", null, null, null,
new NeverExpiresExpirationPolicy());
}
@Test
public void verifyGetAuthentication() {
final Authentication authentication = org.jasig.cas.authentication.TestUtils.getAuthentication();
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
authentication, new NeverExpiresExpirationPolicy());
assertEquals(t.getAuthentication(), authentication);
assertEquals(t.getId(), t.toString());
}
@Test
public void verifyIsRootTrue() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
assertTrue(t.isRoot());
}
@Test
public void verifyIsRootFalse() {
final TicketGrantingTicketImpl t1 = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test",
org.jasig.cas.authentication.TestUtils.getService("gantor"), t1,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
assertFalse(t.isRoot());
}
@Test
public void verifyGetChainedPrincipalsWithOne() {
final Authentication authentication = org.jasig.cas.authentication.TestUtils.getAuthentication();
final List<Authentication> principals = new ArrayList<>();
principals.add(authentication);
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
authentication, new NeverExpiresExpirationPolicy());
assertEquals(principals, t.getChainedAuthentications());
}
@Test
public void verifyCheckCreationTime() {
final Authentication authentication = org.jasig.cas.authentication.TestUtils.getAuthentication();
final ZonedDateTime startTime = ZonedDateTime.now(ZoneOffset.UTC).minusNanos(100);
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
authentication, new NeverExpiresExpirationPolicy());
final ZonedDateTime finishTime = ZonedDateTime.now(ZoneOffset.UTC).plusNanos(100);
assertTrue(startTime.isBefore(t.getCreationTime()) && finishTime.isAfter(t.getCreationTime()));
}
@Test
public void verifyGetChainedPrincipalsWithTwo() {
final Authentication authentication = org.jasig.cas.authentication.TestUtils.getAuthentication();
final Authentication authentication1 = org.jasig.cas.authentication.TestUtils.getAuthentication("test1");
final List<Authentication> principals = new ArrayList<>();
principals.add(authentication);
principals.add(authentication1);
final TicketGrantingTicketImpl t1 = new TicketGrantingTicketImpl("test", null, null,
authentication1, new NeverExpiresExpirationPolicy());
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test",
org.jasig.cas.authentication.TestUtils.getService("gantor"), t1,
authentication, new NeverExpiresExpirationPolicy());
assertEquals(principals, t.getChainedAuthentications());
}
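// The first ticket granted after login should report isFromNewLogin() == true; later grants from the same TGT should not.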
@Test
public void verifyServiceTicketAsFromInitialCredentials() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
final ServiceTicket s = t.grantServiceTicket(this.uniqueTicketIdGenerator
.getNewTicketId(ServiceTicket.PREFIX), org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(), false, true);
assertTrue(s.isFromNewLogin());
}
@Test
public void verifyServiceTicketAsFromNotInitialCredentials() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
final ServiceTicket s = t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
assertFalse(s.isFromNewLogin());
}
@Test
public void verifyWebApplicationServices() {
final MockService testService = new MockService("test");
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(this.uniqueTicketIdGenerator
.getNewTicketId(ServiceTicket.PREFIX), testService,
new NeverExpiresExpirationPolicy(), false, true);
Map<String, Service> services = t.getServices();
assertEquals(1, services.size());
final String ticketId = services.keySet().iterator().next();
assertEquals(testService, services.get(ticketId));
t.removeAllServices();
services = t.getServices();
assertEquals(0, services.size());
}
@Test
public void verifyWebApplicationExpire() {
final MockService testService = new MockService("test");
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(this.uniqueTicketIdGenerator
.getNewTicketId(ServiceTicket.PREFIX), testService,
new NeverExpiresExpirationPolicy(), false, true);
assertFalse(t.isExpired());
t.markTicketExpired();
assertTrue(t.isExpired());
}
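// The final boolean passed to grantServiceTicket appears to control whether only the most recent session per service is tracked: true collapses equivalent services to one entry, false keeps both.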
@Test
public void verifyDoubleGrantSameServiceTicketKeepMostRecentSession() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSimilarServiceTicketKeepMostRecentSession() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com?test"),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com;JSESSIONID=xxx"),
new NeverExpiresExpirationPolicy(),
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSimilarServiceWithPathTicketKeepMostRecentSession() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com/webapp1"),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com/webapp1?test=true"),
new NeverExpiresExpirationPolicy(),
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSameServiceTicketKeepAll() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
false);
assertEquals(2, t.getServices().size());
}
@Test
public void verifyDoubleGrantDifferentServiceTicket() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService(),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService2(),
new NeverExpiresExpirationPolicy(),
false,
true);
assertEquals(2, t.getServices().size());
}
@Test
public void verifyDoubleGrantDifferentServiceOnPathTicket() {
final TicketGrantingTicket t = new TicketGrantingTicketImpl("test", null, null,
org.jasig.cas.authentication.TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy());
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com/webapp1"),
new NeverExpiresExpirationPolicy(),
false,
true);
t.grantServiceTicket(
this.uniqueTicketIdGenerator.getNewTicketId(ServiceTicket.PREFIX),
org.jasig.cas.services.TestUtils.getService("http://host.com/webapp2"),
new NeverExpiresExpirationPolicy(),
false,
true);
assertEquals(2, t.getServices().size());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriterException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionSupplyMessage;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetRequest;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Ignore;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL_SNAPSHOT;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
/**
*
*/
public class IgniteCacheReadFromBackupTest extends GridCommonAbstractTest {
/** */
private static final int NODES = 4;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
TestRecordingCommunicationSpi commSpi = new TestRecordingCommunicationSpi();
cfg.setCommunicationSpi(commSpi);
return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
super.beforeTestsStarted();
startGridsMultiThreaded(NODES);
}
/**
* @throws Exception If failed.
*/
@Test
public void testGetFromBackupStoreReadThroughEnabled() throws Exception {
checkGetFromBackupStoreReadThroughEnabled(cacheConfigurations());
}
/**
* @throws Exception If failed.
*/
@Ignore("https://issues.apache.org/jira/browse/IGNITE-10274")
@Test
public void testMvccGetFromBackupStoreReadThroughEnabled() throws Exception {
checkGetFromBackupStoreReadThroughEnabled(mvccCacheConfigurations());
}
/**
* @throws Exception If failed.
*/
private void checkGetFromBackupStoreReadThroughEnabled(List<CacheConfiguration<Object, Object>> cacheCfgs) throws Exception {
for (CacheConfiguration<Object, Object> ccfg : cacheCfgs) {
ccfg.setCacheStoreFactory(new TestStoreFactory());
ccfg.setReadThrough(true);
boolean near = (ccfg.getNearConfiguration() != null);
log.info("Test cache [mode=" + ccfg.getCacheMode() +
", atomicity=" + ccfg.getAtomicityMode() +
", backups=" + ccfg.getBackups() +
", near=" + near + "]");
ignite(0).createCache(ccfg);
awaitPartitionMapExchange();
try {
for (int i = 0; i < NODES; i++) {
Ignite ignite = ignite(i);
log.info("Check node: " + ignite.name());
IgniteCache<Integer, Integer> cache = ignite.cache(ccfg.getName());
TestRecordingCommunicationSpi spi = recordGetRequests(ignite, near);
Integer key = backupKey(cache);
assertNull(cache.get(key));
List<Object> msgs = spi.recordedMessages(false);
assertEquals(1, msgs.size());
}
}
finally {
ignite(0).destroyCache(ccfg.getName());
}
}
}
/**
* @throws Exception If failed.
*/
@Test
public void testGetFromBackupStoreReadThroughDisabled() throws Exception {
checkGetFromBackupStoreReadThroughDisabled(cacheConfigurations());
}
/**
* @throws Exception If failed.
*/
@Ignore("https://issues.apache.org/jira/browse/IGNITE-10274")
@Test
public void testMvccGetFromBackupStoreReadThroughDisabled() throws Exception {
checkGetFromBackupStoreReadThroughDisabled(mvccCacheConfigurations());
}
/**
* @throws Exception If failed.
*/
private void checkGetFromBackupStoreReadThroughDisabled(List<CacheConfiguration<Object, Object>> cacheCfgs) throws Exception {
for (CacheConfiguration<Object, Object> ccfg : cacheCfgs) {
ccfg.setCacheStoreFactory(new TestStoreFactory());
ccfg.setReadThrough(false);
boolean near = (ccfg.getNearConfiguration() != null);
log.info("Test cache [mode=" + ccfg.getCacheMode() +
", atomicity=" + ccfg.getAtomicityMode() +
", backups=" + ccfg.getBackups() +
", near=" + near + "]");
ignite(0).createCache(ccfg);
awaitPartitionMapExchange();
try {
checkLocalRead(NODES, ccfg);
}
finally {
ignite(0).destroyCache(ccfg.getName());
}
}
}
/**
* @throws Exception If failed.
*/
@Test
public void testGetFromPrimaryPreloadInProgress() throws Exception {
checkGetFromPrimaryPreloadInProgress(cacheConfigurations());
}
/**
* @throws Exception If failed.
*/
@Ignore("https://issues.apache.org/jira/browse/IGNITE-10274")
@Test
public void testMvccGetFromPrimaryPreloadInProgress() throws Exception {
checkGetFromPrimaryPreloadInProgress(mvccCacheConfigurations());
}
/**
* @throws Exception If failed.
*/
private void checkGetFromPrimaryPreloadInProgress(List<CacheConfiguration<Object, Object>> cacheCfgs) throws Exception {
for (final CacheConfiguration<Object, Object> ccfg : cacheCfgs) {
boolean near = (ccfg.getNearConfiguration() != null);
log.info("Test cache [mode=" + ccfg.getCacheMode() +
", atomicity=" + ccfg.getAtomicityMode() +
", backups=" + ccfg.getBackups() +
", near=" + near + "]");
ignite(0).createCache(ccfg);
awaitPartitionMapExchange();
try {
Map<Ignite, Integer> backupKeys = new HashMap<>();
Map<Ignite, Integer> nearKeys = new HashMap<>();
for (int i = 0; i < NODES; i++) {
Ignite ignite = ignite(i);
IgniteCache<Integer, Integer> cache = ignite.cache(ccfg.getName());
backupKeys.put(ignite, backupKey(cache));
if (ccfg.getCacheMode() == PARTITIONED)
nearKeys.put(ignite, nearKey(cache));
TestRecordingCommunicationSpi spi =
(TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
final int grpId = groupIdForCache(ignite, ccfg.getName());
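// Block rebalance supply messages for this cache group so the joining node stays in the preload-in-progress state.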
spi.blockMessages(new IgniteBiPredicate<ClusterNode, Message>() {
@Override public boolean apply(ClusterNode node, Message msg) {
if (!msg.getClass().equals(GridDhtPartitionSupplyMessage.class))
return false;
return ((GridDhtPartitionSupplyMessage)msg).groupId() == grpId;
}
});
}
try (Ignite newNode = startGrid(NODES)) {
IgniteCache<Integer, Integer> cache = newNode.cache(ccfg.getName());
TestRecordingCommunicationSpi newNodeSpi = recordGetRequests(newNode, near);
Integer key = backupKey(cache);
assertNull(cache.get(key));
List<Object> msgs = newNodeSpi.recordedMessages(false);
assertEquals(1, msgs.size());
for (int i = 0; i < NODES; i++) {
Ignite ignite = ignite(i);
log.info("Check node: " + ignite.name());
checkLocalRead(ignite, ccfg, backupKeys.get(ignite), nearKeys.get(ignite));
}
for (int i = 0; i < NODES; i++) {
Ignite ignite = ignite(i);
TestRecordingCommunicationSpi spi =
(TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
spi.stopBlock(true);
}
awaitPartitionMapExchange();
checkLocalRead(NODES + 1, ccfg);
}
}
finally {
ignite(0).destroyCache(ccfg.getName());
}
}
}
/**
* @throws Exception If failed.
*/
@Test
public void testNoPrimaryReadPreloadFinished() throws Exception {
checkNoPrimaryReadPreloadFinished(cacheConfigurations());
}
/**
* @throws Exception If failed.
*/
@Ignore("https://issues.apache.org/jira/browse/IGNITE-10274")
@Test
public void testMvccNoPrimaryReadPreloadFinished() throws Exception {
checkNoPrimaryReadPreloadFinished(mvccCacheConfigurations());
}
/**
* @throws Exception If failed.
*/
private void checkNoPrimaryReadPreloadFinished(List<CacheConfiguration<Object, Object>> cacheCfgs) throws Exception {
for (CacheConfiguration<Object, Object> ccfg : cacheCfgs) {
boolean near = (ccfg.getNearConfiguration() != null);
log.info("Test cache [mode=" + ccfg.getCacheMode() +
", atomicity=" + ccfg.getAtomicityMode() +
", backups=" + ccfg.getBackups() +
", near=" + near + "]");
ignite(0).createCache(ccfg);
awaitPartitionMapExchange();
try {
checkLocalRead(NODES, ccfg);
}
finally {
ignite(0).destroyCache(ccfg.getName());
}
}
}
/**
* @param nodes Number of nodes.
* @param ccfg Cache configuration.
* @throws Exception If failed.
*/
private void checkLocalRead(int nodes, CacheConfiguration<Object, Object> ccfg) throws Exception {
for (int i = 0; i < nodes; i++) {
Ignite ignite = ignite(i);
log.info("Check node: " + ignite.name());
IgniteCache<Integer, Integer> cache = ignite.cache(ccfg.getName());
List<Integer> backupKeys = backupKeys(cache, 2, 0);
Integer backupKey = backupKeys.get(0);
Integer nearKey = ccfg.getCacheMode() == PARTITIONED ? nearKey(cache) : null;
checkLocalRead(ignite, ccfg, backupKey, nearKey);
Set<Integer> keys = new HashSet<>(backupKeys);
Map<Integer, Integer> vals = cache.getAll(keys);
for (Integer key : keys)
assertNull(vals.get(key));
TestRecordingCommunicationSpi spi =
(TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
List<Object> msgs = spi.recordedMessages(false);
assertEquals(0, msgs.size());
}
}
/**
* @param ignite Node.
* @param ccfg Cache configuration.
* @param backupKey Backup key.
* @param nearKey Near key.
* @throws Exception If failed.
*/
private void checkLocalRead(Ignite ignite,
CacheConfiguration<Object, Object> ccfg,
Integer backupKey,
Integer nearKey) throws Exception {
IgniteCache<Integer, Integer> cache = ignite.cache(ccfg.getName());
TestRecordingCommunicationSpi spi = recordGetRequests(ignite, ccfg.getNearConfiguration() != null);
List<Object> msgs;
if (nearKey != null) {
assertNull(cache.get(nearKey));
msgs = spi.recordedMessages(false);
assertEquals(1, msgs.size());
}
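// A read of a key this node backs up should be served locally, so no get requests are expected on the wire.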
assertNull(cache.get(backupKey));
msgs = spi.recordedMessages(false);
assertTrue(msgs.isEmpty());
}
/**
* @param ignite Node.
* @param near Near cache flag.
* @return Communication SPI.
*/
private TestRecordingCommunicationSpi recordGetRequests(Ignite ignite, boolean near) {
TestRecordingCommunicationSpi spi =
(TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
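// Near-enabled caches issue GridNearGetRequest for reads; otherwise single-key reads go out as GridNearSingleGetRequest.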
spi.record(near ? GridNearGetRequest.class : GridNearSingleGetRequest.class);
return spi;
}
/**
* @return Cache configurations to test.
*/
private List<CacheConfiguration<Object, Object>> cacheConfigurations() {
List<CacheConfiguration<Object, Object>> ccfgs = new ArrayList<>();
ccfgs.add(cacheConfiguration(REPLICATED, ATOMIC, 0, false));
ccfgs.add(cacheConfiguration(REPLICATED, TRANSACTIONAL, 0, false));
ccfgs.add(cacheConfiguration(PARTITIONED, ATOMIC, 1, false));
ccfgs.add(cacheConfiguration(PARTITIONED, ATOMIC, 1, true));
ccfgs.add(cacheConfiguration(PARTITIONED, ATOMIC, 2, false));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 1, false));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 1, true));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 2, false));
return ccfgs;
}
/**
* @return Cache configurations to test.
*/
private List<CacheConfiguration<Object, Object>> mvccCacheConfigurations() {
List<CacheConfiguration<Object, Object>> ccfgs = new ArrayList<>();
ccfgs.add(cacheConfiguration(REPLICATED, TRANSACTIONAL_SNAPSHOT, 0, false));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL_SNAPSHOT, 1, false));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL_SNAPSHOT, 1, true));
ccfgs.add(cacheConfiguration(PARTITIONED, TRANSACTIONAL_SNAPSHOT, 2, false));
return ccfgs;
}
/**
* @param cacheMode Cache mode.
* @param atomicityMode Cache atomicity mode.
* @param backups Number of backups.
* @param nearEnabled {@code True} if near cache should be enabled.
* @return Cache configuration.
*/
private CacheConfiguration<Object, Object> cacheConfiguration(CacheMode cacheMode,
CacheAtomicityMode atomicityMode,
int backups,
boolean nearEnabled) {
CacheConfiguration<Object, Object> ccfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME);
ccfg.setCacheMode(cacheMode);
ccfg.setAtomicityMode(atomicityMode);
if (cacheMode != REPLICATED) {
ccfg.setBackups(backups);
if (nearEnabled)
ccfg.setNearConfiguration(new NearCacheConfiguration<>());
}
return ccfg;
}
/**
*
*/
private static class TestStoreFactory implements Factory<CacheStore<Object, Object>> {
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public CacheStore<Object, Object> create() {
return new CacheStoreAdapter() {
@Override public Object load(Object key) throws CacheLoaderException {
return null;
}
@Override public void write(Cache.Entry entry) throws CacheWriterException {
// No-op.
}
@Override public void delete(Object key) throws CacheWriterException {
// No-op.
}
};
}
}
}
|
|
package main;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.DateFormat;
import java.util.Date;
import office.OfficeRecords;
import officeReports.DailyReportso;
import officeReports.MonthlyGeneratoro;
import officeReports.OfficePersonalReport;
import officechart.OfficeSpecials;
import officeentry.Office_Entry;
import officereg.AddOfficeStaffs;
import changepass.ChangePassword;
import infos.About;
import infos.Help;
import gateUsers.Users;
import gatereports.DailyReports;
import gatereports.MonthlyGenerator;
import gatereports.PersonalReports;
import gatereports.SpecialReports;
import database.DBConnect;
import dialogs.Confirmation;
import dialogs.ErrorMessage;
import attendance.AllAttendance;
import register.AddNew;
import reports.ChooseOne;
import reports.VisitorsReports;
import settings.SysSettings;
import javafx.animation.Animation;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.RotateTransition;
import javafx.animation.Timeline;
import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.geometry.Rectangle2D;
import javafx.scene.Cursor;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.HBox;
import javafx.scene.layout.StackPane;
import javafx.scene.layout.VBox;
import javafx.scene.shape.Rectangle;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.scene.text.TextAlignment;
import javafx.scene.transform.Rotate;
import javafx.stage.Stage;
import javafx.util.Duration;
import login.Login;
public class MainWindow extends Application{
Stage window;
Scene scene;
private Rectangle2D boxBounds = new Rectangle2D(600,600,870,700);
private VBox bottomPane;
private VBox layall;
VBox layoffice;
private Rectangle clipRect;
private Timeline timelineUp;
private Timeline timelineDown, timelineoffice;
Label welc;
Text myclock;
Image img= new Image(MainWindow.class.getResourceAsStream("/pic/meru_logo.png"));
ImageView ivt= new ImageView(img);
ScrollPane sc = new ScrollPane();
Label label1, label2, label21, label22, label3, label4, label5, label51, label52, label53, label54, label6, label61, tpic;
Text welText, appuser, register, entries, moreinfo;
public static Label lname;
Label lallapps;
Label lreg;
Label lpower;
Label lusers, lgsearch, losearch;
Label lhelp;
Label latt;
Label lsetting;
Text freq, systems;
public static Text ladmins;
//for office
Label loreg, loexit, lologout, loview, loatt, loallapps, ldaygen, lmongen, lspege;
Text reports, registration, exito, weloText;
/*Creating Context Menus
final ContextMenu contextMenu = new ContextMenu();
MenuItem daily = new MenuItem("Daily Reports");
MenuItem monthly = new MenuItem("Monthly Reports");
MenuItem special = new MenuItem("Special Reports");
*/
@Override
public void start(Stage shelp) throws Exception {
window= new Stage();
window.getIcons().add(new Image("/pic/slogo.png"));
HBox root = new HBox();
root.autosize();
scene = new Scene(root, 820,690);
scene.getStylesheets().add(ExDetails.class.getResource("gatepass.css").toExternalForm());
window.setOnCloseRequest(e -> {
e.consume();
closeProgram();
});
window.setTitle("Main GatePass System");
window.setScene(scene);
window.setMaximized(true);
window.show();
configureBox(root);
}
private void configureBox(HBox root) {
StackPane container = new StackPane();
//container.setPrefHeight(700);
container.setPrefSize(boxBounds.getWidth(), boxBounds.getHeight());
container.setStyle("-fx-border-width:0px;-fx-border-style:solid;-fx-border-color:#999999;");
// First PANE
//ivt.setFitWidth(150);
//ivt.setFitHeight(150);
tpic= new Label();
tpic.setId("meru_logo");
tpic.setGraphic(ivt);
welc= new Label();
welc.setText("BIOMETRIC SECURITY CHECK FOR \nGATE PASS AND ACCESS TO EXAMINATIONS OFFICE \nFOR MERU UNIVERSITY");
welc.setId("meru_logo");
welc.setTextAlignment(TextAlignment.CENTER);
VBox vcont= new VBox(15);
//vcont.setAlignment(Pos.CENTER);
vcont.getChildren().addAll(welc);
vcont.setPadding(new Insets(35,10,5,10));
myclock= new Text();
myclock.setId("lview");
myclock.setFont(Font.font("Calibri", 17));
final Timeline timeline = new Timeline(new KeyFrame(Duration.seconds(1), new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
myclock.setText(DateFormat.getDateTimeInstance().format(new Date()));
}
}));
timeline.setCycleCount(Animation.INDEFINITE);
timeline.play();
HBox layclock= new HBox();
layclock.getChildren().add(myclock);
layclock.setAlignment(Pos.TOP_RIGHT);
layclock.setPadding(new Insets(0,10,10,10));
bottomPane = new VBox(10);
bottomPane.setAlignment(Pos.TOP_CENTER);
bottomPane.getChildren().addAll(layclock,tpic, vcont);
bottomPane.setPadding(new Insets(20,10,5,0));
bottomPane.setStyle("-fx-background-color: linear-gradient(#E4EAA2, #9CD672)");
// 2nd PANE
welText= new Text("Gate Access Programs");
welText.setId("welText");
appuser= new Text("Manage Users");
appuser.setId("identify_groups");
register= new Text("Register");
register.setId("identify_groups");
entries= new Text("Continuous Entry");
entries.setId("identify_groups");
moreinfo= new Text("Help");
moreinfo.setId("identify_groups");
Image us = new Image(ExDetails.class.getResourceAsStream("/pic2/User.png"));
ImageView ivus= new ImageView(us);
label1= new Label(" ",ivus);
label1.setText("\n\n\n\nUsers");
label1.setMinWidth(220);
label1.setMinHeight(120);
label1.setAlignment(Pos.CENTER_LEFT);
label1.setCursor(Cursor.HAND);
label1.setPadding(new Insets(1,1,1,5));
rotateHer(label1,ivus);
//in Insets top,right,bottom,left
label1.setOnMouseClicked(e -> {
try {
new Users().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image cp = new Image(ExDetails.class.getResourceAsStream("/pic2/changepass.png"));
ImageView ivcp= new ImageView(cp);
label2= new Label("",ivcp);
label2.setText("\n\n\n\nChange Password");
label2.setAlignment(Pos.CENTER_LEFT);
label2.setCursor(Cursor.HAND);
label2.setPadding(new Insets(1,1,1,5));
label2.setMinWidth(240);
label2.setMinHeight(120);
rotateHer(label2,ivcp);
label2.setOnMouseClicked(e -> {
try {
new ChangePassword().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error", "Program has problems");
e1.printStackTrace();
}
});
Image lo = new Image(ExDetails.class.getResourceAsStream("/pic3/logout-1-512.png"));
ImageView ivlo= new ImageView(lo);
label21= new Label("", ivlo);
label21.setText("\n\n\n\nLogout");
label21.setAlignment(Pos.CENTER_LEFT);
label21.setCursor(Cursor.HAND);
label21.setPadding(new Insets(1,1,1,3));
label21.setMinWidth(120);
label21.setMinHeight(120);
rotateHer(label21,ivlo);
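// Logout: stamp the sign-out time on the current log row, close the main window and return to the login screen.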
label21.setOnMouseClicked(e -> {
try {
String fileUpdate="UPDATE logs SET name= '" + lname.getText()
+ "', signedout ='" +myclock.getText()+"'"
+ "where signedin='" + Login.d1 + "'";
//connect to database
DBConnect.connect();
try {
DBConnect.stmt.execute(fileUpdate);
} catch (SQLException e1) {
ErrorMessage.display("Auto Logs Error", ""+e1.getMessage());
e1.printStackTrace();
}
window.close();
new Login().start(new Stage());
Login.txtname.setText(lname.getText());
} catch (Exception e1) {
ErrorMessage.display("Launch Error", "Something wrong happened when starting program\n consult Admin");
e1.printStackTrace();
}
});
Image ex = new Image(ExDetails.class.getResourceAsStream("/pic2/logout.png"));
ImageView ivex= new ImageView(ex);
label22= new Label("",ivex);
label22.setText("\n\n\n\nExit");
label22.setAlignment(Pos.CENTER_LEFT);
label22.setCursor(Cursor.HAND);
label22.setPadding(new Insets(1,1,1,5));
label22.setMinWidth(120);
label22.setMinHeight(120);
rotateHer(label22,ivex);
Image rnew = new Image(ExDetails.class.getResourceAsStream("/pic2/reg.png"));
ImageView ivrnew= new ImageView(rnew);
label3= new Label(" ", ivrnew);
label3.setText("\n\n\n\nRegister New");
label3.setAlignment(Pos.CENTER_LEFT);
label3.setCursor(Cursor.HAND);
label3.setPadding(new Insets(1,1,1,5));
label3.setMinWidth(220);
label3.setMinHeight(120);
rotateHer(label3, ivrnew);
label3.setOnMouseClicked( e -> {
try {
new AddNew().start(new Stage());
//window.close();
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image rgs = new Image(ExDetails.class.getResourceAsStream("/pic/myfinger.png"));
ImageView ivrgs= new ImageView(rgs);
lgsearch= new Label(" ", ivrgs);
lgsearch.setText("\n\n\n\nAuto Search...");
lgsearch.setAlignment(Pos.CENTER_LEFT);
lgsearch.setCursor(Cursor.HAND);
lgsearch.setPadding(new Insets(1,1,1,5));
lgsearch.setMinWidth(220);
lgsearch.setMinHeight(120);
rotateHer(lgsearch, ivrgs);
lgsearch.setOnMouseClicked( e -> {
try {
new PersonalReports().start(new Stage());
//window.close();
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image vi = new Image(ExDetails.class.getResourceAsStream("/pic2/viewr.png"));
ImageView ivvi= new ImageView(vi);
label4= new Label("", ivvi);
label4.setText("\n\n\n\nView Records");
label4.setAlignment(Pos.CENTER_LEFT);
label4.setCursor(Cursor.HAND);
label4.setPadding(new Insets(1,1,1,5));
label4.setMinWidth(241);
label4.setMinHeight(120);
rotateHer(label4, ivvi);
Image att = new Image(ExDetails.class.getResourceAsStream("/pic2/att.png"));
ImageView ivatt= new ImageView(att);
label5= new Label("", ivatt);
label5.setText("\n\n\n\nContinuous\nAttendance");
label5.setAlignment(Pos.CENTER_LEFT);
label5.setCursor(Cursor.HAND);
label5.setPadding(new Insets(1,1,1,5));
label5.setMinWidth(220);
label5.setMinHeight(120);
rotateHer(label5, ivatt);
label5.setOnMouseClicked(e -> {
try {
new AllAttendance().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image dr = new Image(ExDetails.class.getResourceAsStream("/pic2/dreport.png"));
ImageView ivdr= new ImageView(dr);
label51= new Label("",ivdr);
label51.setText("\n\n\n\nDaily Report");
label51.setAlignment(Pos.CENTER_LEFT);
label51.setCursor(Cursor.HAND);
label51.setPadding(new Insets(1,1,1,5));
label51.setMinWidth(240);
label51.setMinHeight(120);
rotateHer(label51, ivdr);
Image mr = new Image(ExDetails.class.getResourceAsStream("/pic2/mreport.png"));
ImageView ivmr= new ImageView(mr);
label52= new Label("", ivmr);
label52.setText("\n\n\n\nMonth Report");
label52.setAlignment(Pos.CENTER_LEFT);
label52.setCursor(Cursor.HAND);
label52.setPadding(new Insets(1,1,1,5));
label52.setMinWidth(240);
label52.setMinHeight(120);
rotateHer(label52, ivmr);
Image vr = new Image(ExDetails.class.getResourceAsStream("/pic2/vreport.png"));
ImageView ivvr= new ImageView(vr);
label53= new Label(" ",ivvr);
label53.setText("\n\n\n\n Visitor Reports");
label53.setAlignment(Pos.CENTER_LEFT);
label53.setCursor(Cursor.HAND);
label53.setPadding(new Insets(1,1,1,5));
label53.setMinWidth(240);
label53.setMinHeight(120);
rotateHer(label53, ivvr);
Image sr = new Image(ExDetails.class.getResourceAsStream("/pic2/sreport.png"));
ImageView ivsr= new ImageView(sr);
label54= new Label("", ivsr);
label54.setText("\n\n\n\nSpecial Reports");
label54.setAlignment(Pos.CENTER_LEFT);
label54.setCursor(Cursor.HAND);
label54.setPadding(new Insets(1,1,1,5));
label54.setMinWidth(240);
label54.setMinHeight(120);
rotateHer(label54, ivsr);
Image he = new Image(ExDetails.class.getResourceAsStream("/pic2/Help.png"));
ImageView ivhe= new ImageView(he);
label6= new Label(" ",ivhe);
label6.setText("\n\n\n\n\tHelp?");
label6.setAlignment(Pos.CENTER_LEFT);
label6.setCursor(Cursor.HAND);
label6.setPadding(new Insets(1,1,1,5));
label6.setMinWidth(220);
label6.setMinHeight(120);
rotateHer(label6, ivhe);
label6.setOnMouseClicked(e ->{
try {
new Help().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image ab = new Image(ExDetails.class.getResourceAsStream("/pic2/info.png"));
ImageView ivab= new ImageView(ab);
label61= new Label(" ",ivab);
label61.setText("\n\n\n\n\tAbout");
label61.setAlignment(Pos.CENTER_LEFT);
label61.setCursor(Cursor.HAND);
label61.setPadding(new Insets(1,1,1,5));
label61.setMinWidth(241);
label61.setMinHeight(120);
rotateHer(label61, ivab);
HBox lay= new HBox(10);
lay.setPadding(new Insets(5,10,10,0));
lay.getChildren().addAll(welText);
HBox lay1= new HBox(7);
lay1.setPadding(new Insets(10,10,10,0));
lay1.getChildren().addAll(label1, label2, label21, label22);
HBox lay2= new HBox(7);
lay2.setPadding(new Insets(10,10,10,0));
lay2.getChildren().addAll(label3, label4);
HBox lay3= new HBox(7);
lay3.getChildren().addAll(label5, label51, label52);
HBox lay31= new HBox(7);
lay31.getChildren().addAll(lgsearch, label53, label54);
VBox lay3all= new VBox(7);
lay3all.setPadding(new Insets(10,10,10,0));
lay3all.getChildren().addAll(lay3, lay31);
HBox lay4= new HBox(7);
lay4.setPadding(new Insets(10,10,10,0));
lay4.getChildren().addAll(label6, label61);
layall= new VBox(5);
layall.setId("scroll_main");
layall.setPadding(new Insets(5,10,10,15));
layall.getChildren().addAll(lay,appuser, lay1, register,lay2, entries, lay3all, moreinfo, lay4);
//office pane
Image ioreg = new Image(ExDetails.class.getResourceAsStream("/pic2/User.png"));
ImageView ivoreg= new ImageView(ioreg);
loreg= new Label(" ", ivoreg);
loreg.setText("\n\n\n\n Register");
loreg.setAlignment(Pos.CENTER_LEFT);
loreg.setCursor(Cursor.HAND);
loreg.setPadding(new Insets(1,1,1,5));
loreg.setMinWidth(240);
loreg.setMinHeight(120);
rotateHer(loreg,ivoreg);
Image ioexit = new Image(ExDetails.class.getResourceAsStream("/pic2/logout.png"));
ImageView ivoexit= new ImageView(ioexit);
loexit= new Label(" ", ivoexit);
loexit.setText("\n\n\n\n Exit");
loexit.setAlignment(Pos.CENTER_LEFT);
loexit.setCursor(Cursor.HAND);
loexit.setPadding(new Insets(1,1,1,5));
loexit.setMinWidth(240);
loexit.setMinHeight(120);
rotateHer(loexit,ivoexit);
Image ioatt = new Image(ExDetails.class.getResourceAsStream("/pic/attendance.png"));
ImageView ivoatt= new ImageView(ioatt);
loatt= new Label("", ivoatt);
loatt.setText("\n\n\n\nContinuous entry");
loatt.setAlignment(Pos.CENTER_LEFT);
loatt.setCursor(Cursor.HAND);
loatt.setPadding(new Insets(1,1,1,5));
loatt.setMinWidth(240);
loatt.setMinHeight(120);
rotateHer(loatt,ivoatt);
Image igenrep = new Image(ExDetails.class.getResourceAsStream("/pic2/dreport.png"));
ImageView ivogenrep= new ImageView(igenrep);
ldaygen= new Label(" ", ivogenrep);
ldaygen.setText("\n\n\n\n Daily Reports");
ldaygen.setAlignment(Pos.CENTER_LEFT);
ldaygen.setCursor(Cursor.HAND);
ldaygen.setPadding(new Insets(1,1,1,5));
ldaygen.setMinWidth(240);
ldaygen.setMinHeight(120);
rotateHer(ldaygen,ivogenrep);
Image isrep = new Image(ExDetails.class.getResourceAsStream("/pic2/mreport.png"));
ImageView ivsrep= new ImageView(isrep);
lmongen= new Label(" ", ivsrep);
lmongen.setText("\n\n\n\n Month\nReports");
lmongen.setAlignment(Pos.CENTER_LEFT);
lmongen.setCursor(Cursor.HAND);
lmongen.setPadding(new Insets(1,1,1,5));
lmongen.setMinWidth(120);
lmongen.setMinHeight(120);
rotateHer(lmongen,ivsrep);
Image imrep = new Image(ExDetails.class.getResourceAsStream("/pic2/sreport.png"));
ImageView ivmrep= new ImageView(imrep);
lspege= new Label(" ", ivmrep);
lspege.setText("\n\n\n\n Special Reports");
lspege.setAlignment(Pos.CENTER_LEFT);
lspege.setCursor(Cursor.HAND);
lspege.setPadding(new Insets(1,1,1,5));
lspege.setMinWidth(240);
lspege.setMinHeight(120);
rotateHer(lspege,ivmrep);
Image ros = new Image(ExDetails.class.getResourceAsStream("/pic/myfinger.png"));
ImageView ivos= new ImageView(ros);
losearch= new Label(" ", ivos);
losearch.setText("\n\n\n\nSearch User");
losearch.setAlignment(Pos.CENTER_LEFT);
losearch.setCursor(Cursor.HAND);
losearch.setPadding(new Insets(1,1,1,5));
losearch.setMinWidth(240);
losearch.setMinHeight(120);
rotateHer(losearch, ivos);
losearch.setOnMouseClicked( e -> {
try {
new OfficePersonalReport().start(new Stage());
//window.close();
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image ioview = new Image(ExDetails.class.getResourceAsStream("/pic2/viewr.png"));
ImageView ivoview= new ImageView(ioview);
loview= new Label(" ", ivoview);
loview.setText("\n\n\n\n View Reports");
loview.setAlignment(Pos.CENTER_LEFT);
loview.setCursor(Cursor.HAND);
loview.setPadding(new Insets(1,1,1,5));
loview.setMinWidth(240);
loview.setMinHeight(120);
rotateHer(loview,ivoview);
weloText= new Text("Office Utilities");
weloText.setId("welText");
reports= new Text("Reports");
registration= new Text("Registration");
exito= new Text("More (S & E)");
// loview logenreports;
HBox laywel= new HBox(10);
laywel.setPadding(new Insets(5,10,10,0));
laywel.getChildren().addAll(weloText);
HBox lay11= new HBox(7);
lay11.getChildren().addAll(loreg, loatt, ldaygen);
lay11.setPadding(new Insets(10,10,10,0));
HBox lay12= new HBox(7);
lay12.getChildren().addAll(loview, lmongen, lspege);
lay12.setPadding(new Insets(10,10,10,0));
HBox lay13= new HBox(7);
lay13.getChildren().addAll(losearch, loexit);
lay13.setPadding(new Insets(10,10,10,0));
layoffice= new VBox(7);
layoffice.setPadding(new Insets(10,10,10,20));
layoffice.getChildren().addAll(laywel, registration, lay11, reports, lay12, exito, lay13);
layoffice.setId("scroll_main");
container.getChildren().addAll(bottomPane, layall, layoffice);
setAnimation();
sc.setContent(container);
sc.setId("scroll_main");
root.setStyle("-fx-background-color: linear-gradient(#E4EAA2, #9CD672)");
root.getChildren().addAll(getActionPane(),sc);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private void setAnimation(){
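// layall (gate pane) and layoffice (office pane) share one clip rectangle; timelineDown slides the gate pane in, timelineoffice slides the office pane in, timelineUp hides both, and timelineBounce adds a small bounce when the scroll-down finishes.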
// Initially hiding the Top Pane
clipRect = new Rectangle();
clipRect.setWidth(boxBounds.getWidth());
clipRect.setHeight(0);
clipRect.translateYProperty().set(boxBounds.getWidth());
layall.setClip(clipRect);
layall.translateYProperty().set(-boxBounds.getWidth());
layoffice.translateYProperty().set(-boxBounds.getWidth());
// Animation for bouncing effect.
final Timeline timelineBounce = new Timeline();
timelineBounce.setCycleCount(2);
timelineBounce.setAutoReverse(true);
final KeyValue kv1 = new KeyValue(clipRect.heightProperty(), (boxBounds.getHeight()-15));
final KeyValue kv2 = new KeyValue(clipRect.translateYProperty(), 15);
final KeyValue kv3 = new KeyValue(layall.translateYProperty(), -15);
final KeyFrame kf1 = new KeyFrame(Duration.millis(100), kv1, kv2, kv3);
timelineBounce.getKeyFrames().add(kf1);
// Event handler to call bouncing effect after the scroll down is finished.
EventHandler onFinished = new EventHandler() {
@Override
public void handle(Event event) {
timelineBounce.play();
}
};
timelineDown = new Timeline();
timelineUp = new Timeline();
timelineoffice= new Timeline();
// Animation for scroll down.
timelineDown.setCycleCount(1);
timelineDown.setAutoReverse(true);
final KeyValue kvDwn1 = new KeyValue(clipRect.heightProperty(), boxBounds.getWidth());
final KeyValue kvDwn2 = new KeyValue(clipRect.translateYProperty(), 0);
final KeyValue kvDwn3 = new KeyValue(layall.translateYProperty(), 0);
final KeyValue kvDwn4 = new KeyValue(layoffice.translateYProperty(), -boxBounds.getHeight()-190);
final KeyFrame kfDwn = new KeyFrame(Duration.millis(1000), onFinished, kvDwn1, kvDwn2, kvDwn3, kvDwn4);
timelineDown.getKeyFrames().add(kfDwn);
// Animation for scroll up.
timelineUp.setCycleCount(1);
timelineUp.setAutoReverse(true);
final KeyValue kvUp1 = new KeyValue(clipRect.heightProperty(), 0);
final KeyValue kvUp2 = new KeyValue(clipRect.translateYProperty(), boxBounds.getHeight());
final KeyValue kvUp3 = new KeyValue(layall.translateYProperty(), -boxBounds.getHeight()-190);
final KeyValue kvUp4 = new KeyValue(layoffice.translateYProperty(), -boxBounds.getHeight()-190);
final KeyFrame kfUp = new KeyFrame(Duration.millis(1000), kvUp1, kvUp2, kvUp3, kvUp4);
timelineUp.getKeyFrames().add(kfUp);
//Animation for the scrollside
timelineoffice.setCycleCount(1);
timelineoffice.setAutoReverse(true);
final KeyValue kvside1 = new KeyValue(clipRect.heightProperty(), boxBounds.getWidth());
final KeyValue kvside2 = new KeyValue(clipRect.translateYProperty(), 0);
final KeyValue kvside3 = new KeyValue(layoffice.translateYProperty(), 0);
final KeyValue kvside4 = new KeyValue(layall.translateYProperty(), -boxBounds.getHeight());
final KeyFrame kfside = new KeyFrame(Duration.millis(1000), kvside1, kvside2, kvside3, kvside4);
timelineoffice.getKeyFrames().add(kfside);
}
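// Builds the left-hand action pane: current user info, frequently used shortcuts and the system section (help, settings, exit).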
private VBox getActionPane(){
Image pic = new Image(ExDetails.class.getResourceAsStream("/pic/anon.png"));
ImageView ivpic= new ImageView(pic);
ivpic.setFitWidth(40);
ivpic.setFitHeight(40);
lname= new Label();
lname.setText(Login.txtname.getText());
lname.setGraphic(ivpic);
lname.setMinWidth(200);
ladmins= new Text();
lname.setId("ladmins");
ResultSet rs= null;
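// Looks up the signed-in user's access level; note the username is concatenated directly into the SQL string.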
String query = "SELECT username, level from login WHERE username= '" + lname.getText() + "'";
DBConnect.connect();
try {
rs = DBConnect.stmt.executeQuery(query);
if (rs.next())
{
if (rs.getString("username").equals(lname.getText().toString())) {
ladmins.setText(rs.getString("level"));
}
}
}
catch (Exception e) {
e.printStackTrace();
}
freq= new Text("Frequently Used");
Image us = new Image(ExDetails.class.getResourceAsStream("/pic2/User.png"));
ImageView ivus= new ImageView(us);
ivus.setFitWidth(40);
ivus.setFitHeight(40);
lreg= new Label("Add New Users...");
lreg.setGraphic(ivus);
lreg.setMinWidth(200);
//lreg.setMinHeight(40);
lreg.setOnMouseClicked(e -> {
try {
new Users().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image re = new Image(ExDetails.class.getResourceAsStream("/pic2/reg.png"));
ImageView ivre= new ImageView(re);
ivre.setFitWidth(40);
ivre.setFitHeight(40);
lusers= new Label("Register...");
lusers.setGraphic(ivre);
lusers.setMinWidth(200);
lusers.setOnMouseClicked(e ->{
try {
new AddNew().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image att = new Image(ExDetails.class.getResourceAsStream("/pic2/att.png"));
ImageView ivatt= new ImageView(att);
ivatt.setFitWidth(40);
ivatt.setFitHeight(40);
latt=new Label("Continuous Entry...");
latt.setGraphic(ivatt);
latt.setMinWidth(200);
latt.setOnMouseClicked(e -> {
try {
new AllAttendance().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
Image apps = new Image(ExDetails.class.getResourceAsStream("/pic/menu.png"));
ImageView ivapp= new ImageView(apps);
ivapp.setFitWidth(40);
ivapp.setFitHeight(40);
lallapps=new Label("Gate Programs>>");
lallapps.setGraphic(ivapp);
lallapps.setMinWidth(200);
Image oapps = new Image(ExDetails.class.getResourceAsStream("/pic/offp.png"));
ImageView ivoapp= new ImageView(oapps);
ivoapp.setFitWidth(40);
ivoapp.setFitHeight(40);
loallapps=new Label("Office Utilities>>");
loallapps.setGraphic(ivoapp);
loallapps.setMinWidth(200);
Image set = new Image(ExDetails.class.getResourceAsStream("/pic2/Settings.png"));
ImageView ivset= new ImageView(set);
ivset.setFitWidth(40);
ivset.setFitHeight(40);
lsetting= new Label("System Setting...");
lsetting.setGraphic(ivset);
lsetting.setMinWidth(200);
lsetting.setOnMouseClicked(e -> {
try {
new SysSettings().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error", "Failure to launch program");
e1.printStackTrace();
}
});
Image ex = new Image(ExDetails.class.getResourceAsStream("/pic2/logout.png"));
ImageView ivex= new ImageView(ex);
ivex.setFitWidth(40);
ivex.setFitHeight(40);
lpower= new Label("Exit System");
lpower.setGraphic(ivex);
lpower.setMinWidth(200);
lpower.setOnMouseClicked(e -> {
String fileUpdate="UPDATE logs SET name= '" + lname.getText()
+ "', signedout ='" +myclock.getText()+"'"
+ "where signedin='" + Login.d1 + "'";
//connect to database
DBConnect.connect();
try {
DBConnect.stmt.execute(fileUpdate);
} catch (SQLException e1) {
ErrorMessage.display("Auto Logs Error", ""+e1.getMessage());
e1.printStackTrace();
}
window.close();
});
loreg.setOnMouseClicked(e -> {
try {
new AddOfficeStaffs().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error", "Failure to launch program");
e1.printStackTrace();
}
});
Image hel = new Image(ExDetails.class.getResourceAsStream("/pic2/Help.png"));
ImageView ivhe= new ImageView(hel);
ivhe.setFitWidth(40);
ivhe.setFitHeight(40);
lhelp= new Label("Help...");
lhelp.setGraphic(ivhe);
lhelp.setMinWidth(200);
lhelp.setOnMouseClicked(e ->{
try {
new Help().start(new Stage());
} catch (Exception e1) {
e1.printStackTrace();
}
});
systems= new Text("System");
VBox lusad= new VBox(5);
lusad.getChildren().addAll(lname, ladmins);
VBox stop=new VBox(20);
stop.setAlignment(Pos.TOP_LEFT);
stop.getChildren().addAll(lusad, freq, lreg, lusers, latt, lallapps, loallapps);
VBox sbottom=new VBox(20);
sbottom.setAlignment(Pos.BOTTOM_LEFT);
sbottom.getChildren().addAll(systems, lhelp, lsetting, lpower);
VBox side= new VBox(80);
side.setPadding(new Insets(23,20, 27, 20));
side.setAlignment(Pos.TOP_LEFT);
side.getChildren().addAll(stop, sbottom);
side.setStyle("-fx-background-color: linear-gradient(#E4E9A9, #9CD777);");
//side.setStyle("-fx-background-color:#9CD567");
lname.setId("forSide");
lallapps.setId("forSide");
loallapps.setId("forSide");
lreg.setId("forSide");
lpower.setId("forSide");
lusers.setId("forSide");
lhelp.setId("forSide");
latt.setId("forSide");
lsetting.setId("forSide");
VBox layall= new VBox();
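// Hook up the slide animations: "Gate Programs" and "Office Utilities" play their panel timelines, while label22 and loexit play timelineUp.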
lallapps.setOnMouseClicked(e -> {
timelineDown.play();
});
loallapps.setOnMouseClicked(e -> {
timelineoffice.play();
});
label22.setOnMouseClicked(e -> {
timelineUp.play();
});
loexit.setOnMouseClicked(e -> {
timelineUp.play();
});
label51.setOnMouseClicked(e -> {
try {
new DailyReports().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
label52.setOnMouseClicked(e -> {
try {
new MonthlyGenerator().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
label4.setOnMouseClicked(e -> {
try {
new ChooseOne().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
label53.setOnMouseClicked(e -> {
try {
new VisitorsReports().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
loatt.setOnMouseClicked(e -> {
try {
new Office_Entry().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
label54.setOnMouseClicked(e -> {
try {
new SpecialReports().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
loview.setOnMouseClicked(e -> {
try {
new OfficeRecords().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
lspege.setOnMouseClicked(e -> {
try {
new OfficeSpecials().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
ldaygen.setOnMouseClicked(e -> {
try {
new DailyReportso().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
lmongen.setOnMouseClicked(e -> {
try {
new MonthlyGeneratoro().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error","Application launch failure");
e1.printStackTrace();
}
});
label61.setOnMouseClicked(e -> {
try {
new About().start(new Stage());
} catch (Exception e1) {
ErrorMessage.display("Launch Error", "Program launch error");
e1.printStackTrace();
}
});
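// Role-based visibility: hide the controls that the signed-in level ("User", "Gate Admin", "Office Admin") is not permitted to use.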
if(ladmins.getText().equals("User"))
{
lreg.setVisible(false);
label1.setVisible(false);
lusers.setVisible(false);
label3.setVisible(false);
loatt.setVisible(false);
loreg.setVisible(false);
label5.setVisible(false);
losearch.setVisible(false);
lgsearch.setVisible(false);
latt.setVisible(false);
}
if(ladmins.getText().equals("Gate Admin"))
{
lreg.setVisible(false);
label1.setVisible(false);
loatt.setVisible(false);
loreg.setVisible(false);
losearch.setVisible(false);
}
if(ladmins.getText().equals("Office Admin"))
{
lreg.setVisible(false);
label1.setVisible(false);
label3.setVisible(false);
label5.setVisible(false);
lgsearch.setVisible(false);
}
layall.getChildren().add(side);
layall.setPadding(new Insets(0,0, 10, 0));
layall.setMinWidth(250);
return layall;
}
public static void main(String[] args) {
launch(args);
}
public void rotateHer(Label labelHer, ImageView iv)
{
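// Spin the label's icon around the Y axis while the mouse hovers over it; pause and reset the rotation when the mouse leaves.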
RotateTransition rotation = new RotateTransition(Duration.seconds(2), iv);
rotation.setCycleCount(Animation.INDEFINITE);
rotation.setByAngle(360);
iv.setTranslateZ(iv.getBoundsInLocal().getWidth() / 2.0);
iv.setRotationAxis(Rotate.Y_AXIS);
labelHer.setOnMouseEntered(e ->
{
rotation.play();
iv.setRotate(180);
});
labelHer.setOnMouseExited(e ->
{
rotation.pause();
iv.setRotate(0);
});
}
private void closeProgram()
{
boolean result = Confirmation.display("Exit Program", " Are you sure you want to exit the \n program? ");
if(result)
{
String fileUpdate="UPDATE logs SET name= '" + lname.getText()
+ "', signedout ='" +myclock.getText()+"'"
+ " where signedin='" + Login.d1 + "'";
//connect to database
DBConnect.connect();
try {
DBConnect.stmt.execute(fileUpdate);
} catch (SQLException e1) {
ErrorMessage.display("Auto Logs Error", ""+e1.getMessage());
e1.printStackTrace();
}
System.exit(0);
//window.close();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene;
import java.io.File;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean;
import org.apache.jackrabbit.oak.api.jmx.CheckpointMBean;
import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard;
import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
import org.apache.jackrabbit.oak.plugins.document.spi.JournalPropertyService;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexInfoService;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexPathService;
import org.apache.jackrabbit.oak.plugins.index.fulltext.PreExtractedTextProvider;
import org.apache.jackrabbit.oak.plugins.index.importer.IndexImporterProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.BufferedOakDirectory;
import org.apache.jackrabbit.oak.plugins.index.lucene.property.PropertyIndexCleaner;
import org.apache.jackrabbit.oak.plugins.index.lucene.reader.DefaultIndexReaderFactory;
import org.apache.jackrabbit.oak.plugins.index.search.ExtractedTextCache;
import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition;
import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
import org.apache.jackrabbit.oak.spi.commit.BackgroundObserver;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.mount.Mounts;
import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.util.InfoStream;
import org.apache.sling.testing.mock.osgi.MockOsgi;
import org.apache.sling.testing.mock.osgi.junit.OsgiContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.osgi.framework.ServiceReference;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public class LuceneIndexProviderServiceTest {
/*
The test case uses raw config names and does not access them via
constants in LuceneIndexProviderService, to ensure that changes
in the names are detected.
*/
@Rule
public final TemporaryFolder folder = new TemporaryFolder(new File("target"));
@Rule
public final OsgiContext context = new OsgiContext();
private LuceneIndexProviderService service = new LuceneIndexProviderService();
private Whiteboard wb;
private MountInfoProvider mip;
@Before
public void setUp(){
mip = Mounts.newBuilder().build();
context.registerService(MountInfoProvider.class, mip);
context.registerService(StatisticsProvider.class, StatisticsProvider.NOOP);
context.registerService(IndexAugmentorFactory.class, new IndexAugmentorFactory());
context.registerService(NodeStore.class, new MemoryNodeStore());
context.registerService(IndexPathService.class, mock(IndexPathService.class));
context.registerService(AsyncIndexInfoService.class, mock(AsyncIndexInfoService.class));
context.registerService(CheckpointMBean.class, mock(CheckpointMBean.class));
wb = new OsgiWhiteboard(context.bundleContext());
MockOsgi.injectServices(service, context.bundleContext());
}
@After
public void after(){
IndexDefinition.setDisableStoredIndexDefinition(false);
}
@Test
public void defaultSetup() throws Exception{
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
assertNotNull(context.getService(QueryIndexProvider.class));
assertNotNull(context.getService(Observer.class));
assertNotNull(context.getService(IndexEditorProvider.class));
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNotNull(editorProvider.getIndexCopier());
assertNotNull(editorProvider.getIndexingQueue());
IndexCopier indexCopier = service.getIndexCopier();
assertNotNull("IndexCopier should be initialized as CopyOnRead is enabled by default", indexCopier);
assertTrue(indexCopier.isPrefetchEnabled());
assertFalse(IndexDefinition.isDisableStoredIndexDefinition());
assertNotNull("CopyOnRead should be enabled by default", context.getService(CopyOnReadStatsMBean.class));
assertNotNull(context.getService(CacheStatsMBean.class));
assertTrue(context.getService(Observer.class) instanceof BackgroundObserver);
assertEquals(InfoStream.NO_OUTPUT, InfoStream.getDefault());
assertEquals(1024, BooleanQuery.getMaxClauseCount());
assertNotNull(FieldUtils.readDeclaredField(service, "documentQueue", true));
assertNotNull(context.getService(JournalPropertyService.class));
assertNotNull(context.getService(IndexImporterProvider.class));
assertNotNull(WhiteboardUtils.getServices(wb, Runnable.class, (Predicate<Runnable>)r -> r instanceof PropertyIndexCleaner));
MockOsgi.deactivate(service, context.bundleContext());
IndexTracker tracker = (IndexTracker) FieldUtils.readDeclaredField(service, "tracker", true);
assertNotNull(tracker.getAsyncIndexInfoService());
}
@Test
public void typeProperty() {
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
ServiceReference sr = context.bundleContext().getServiceReference(IndexEditorProvider.class.getName());
assertEquals(TYPE_LUCENE, sr.getProperty("type"));
}
@Test
public void disableOpenIndexAsync() {
Map<String,Object> config = getDefaultConfig();
config.put("enableOpenIndexAsync", false);
MockOsgi.activate(service, context.bundleContext(), config);
assertTrue(context.getService(Observer.class) instanceof LuceneIndexProvider);
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void enableCopyOnWrite() {
Map<String,Object> config = getDefaultConfig();
config.put("enableCopyOnWriteSupport", true);
MockOsgi.activate(service, context.bundleContext(), config);
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNotNull(editorProvider);
assertNotNull(editorProvider.getIndexCopier());
MockOsgi.deactivate(service, context.bundleContext());
}
// OAK-7357
@Test
public void disableCoRCoW() throws Exception {
// inject ds as OAK-7357 revealed ABD bean had a bug - which comes into play only with blob stores
CachingFileDataStore ds = DataStoreUtils
.createCachingFDS(folder.newFolder().getAbsolutePath(),
folder.newFolder().getAbsolutePath());
context.registerService(GarbageCollectableBlobStore.class, new DataStoreBlobStore(ds));
// re-init service and inject references
service = new LuceneIndexProviderService();
MockOsgi.injectServices(service, context.bundleContext());
Map<String,Object> config = getDefaultConfig();
config.put("enableCopyOnReadSupport", false);
config.put("enableCopyOnWriteSupport", false);
// activation should work
MockOsgi.activate(service, context.bundleContext(), config);
// get lucene index provider
LuceneIndexProvider lip = null;
for (QueryIndexProvider qip : context.getServices(QueryIndexProvider.class, null)) {
if (qip instanceof LuceneIndexProvider) {
lip = (LuceneIndexProvider)qip;
break;
}
}
assertNotNull(lip);
IndexTracker tracker = lip.getTracker();
// access reader factory with reflection and implicitly assert that it's DefaultIndexReaderFactory
Field readerFactorFld = IndexTracker.class.getDeclaredField("readerFactory");
readerFactorFld.setAccessible(true);
DefaultIndexReaderFactory readerFactory = (DefaultIndexReaderFactory)readerFactorFld.get(tracker);
Field mipFld = DefaultIndexReaderFactory.class.getDeclaredField("mountInfoProvider");
mipFld.setAccessible(true);
// OAK-7408: LIPS was using default tracker ctor and hence reader factory used default mounts
assertEquals("Reader factory not using configured MountInfoProvider", mip, mipFld.get(readerFactory));
// de-activation should work
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void enablePrefetchIndexFiles() {
Map<String,Object> config = getDefaultConfig();
config.put("prefetchIndexFiles", true);
MockOsgi.activate(service, context.bundleContext(), config);
IndexCopier indexCopier = service.getIndexCopier();
assertTrue(indexCopier.isPrefetchEnabled());
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void debugLogging() {
Map<String,Object> config = getDefaultConfig();
config.put("debug", true);
MockOsgi.activate(service, context.bundleContext(), config);
assertEquals(LoggingInfoStream.INSTANCE, InfoStream.getDefault());
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void enableExtractedTextCaching() {
Map<String,Object> config = getDefaultConfig();
config.put("extractedTextCacheSizeInMB", 11);
MockOsgi.activate(service, context.bundleContext(), config);
ExtractedTextCache textCache = service.getExtractedTextCache();
assertNotNull(textCache.getCacheStats());
assertNotNull(context.getService(CacheStatsMBean.class));
assertEquals(11 * FileUtils.ONE_MB, textCache.getCacheStats().getMaxTotalWeight());
MockOsgi.deactivate(service, context.bundleContext());
assertNull(context.getService(CacheStatsMBean.class));
}
@Test
public void preExtractedTextProvider() {
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNull(editorProvider.getExtractedTextCache().getExtractedTextProvider());
assertFalse(editorProvider.getExtractedTextCache().isAlwaysUsePreExtractedCache());
//Mock OSGi does not support components
//context.registerService(PreExtractedTextProvider.class, new DummyProvider());
service.bindExtractedTextProvider(mock(PreExtractedTextProvider.class));
assertNotNull(editorProvider.getExtractedTextCache().getExtractedTextProvider());
}
@Test
public void preExtractedProviderBindBeforeActivate() {
service.bindExtractedTextProvider(mock(PreExtractedTextProvider.class));
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNotNull(editorProvider.getExtractedTextCache().getExtractedTextProvider());
}
@Test
public void alwaysUsePreExtractedCache() {
Map<String,Object> config = getDefaultConfig();
config.put("alwaysUsePreExtractedCache", "true");
MockOsgi.activate(service, context.bundleContext(), config);
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertTrue(editorProvider.getExtractedTextCache().isAlwaysUsePreExtractedCache());
}
@Test
public void booleanQuerySize() {
Map<String,Object> config = getDefaultConfig();
config.put("booleanClauseLimit", 4000);
MockOsgi.activate(service, context.bundleContext(), config);
assertEquals(4000, BooleanQuery.getMaxClauseCount());
}
@Test
public void indexDefnStorafe() {
Map<String,Object> config = getDefaultConfig();
config.put("disableStoredIndexDefinition", true);
MockOsgi.activate(service, context.bundleContext(), config);
assertTrue(IndexDefinition.isDisableStoredIndexDefinition());
}
@Test
public void blobStoreRegistered() throws Exception{
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
LuceneIndexEditorProvider editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNull(editorProvider.getBlobStore());
/* Register a blob store */
CachingFileDataStore ds = DataStoreUtils
.createCachingFDS(folder.newFolder().getAbsolutePath(),
folder.newFolder().getAbsolutePath());
context.registerService(GarbageCollectableBlobStore.class, new DataStoreBlobStore(ds));
reactivate();
editorProvider =
(LuceneIndexEditorProvider) context.getService(IndexEditorProvider.class);
assertNotNull(editorProvider.getBlobStore());
}
@Test
public void executorPoolBehaviour() throws Exception{
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
ExecutorService executor = service.getExecutorService();
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
Callable<Object> cb1 = () -> {
latch1.await();
return null;
};
Callable<Object> cb2 = () -> {
latch2.countDown();
return null;
};
executor.submit(cb1);
executor.submit(cb2);
//Even if one task gets stuck the other task must get completed
assertTrue("Second task not executed", latch2.await(1, TimeUnit.MINUTES));
latch1.countDown();
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void singleBlobPerIndexFileConfig() {
Map<String, Object> config = getDefaultConfig();
config.put("enableSingleBlobIndexFiles", "true");
MockOsgi.activate(service, context.bundleContext(), config);
assertTrue("Enabling property must reflect in BufferedOakDirectory state",
BufferedOakDirectory.isEnableWritingSingleBlobIndexFile());
MockOsgi.deactivate(service, context.bundleContext());
config = getDefaultConfig();
config.put("enableSingleBlobIndexFiles", "false");
MockOsgi.activate(service, context.bundleContext(), config);
assertFalse("Enabling property must reflect in BufferedOakDirectory state",
BufferedOakDirectory.isEnableWritingSingleBlobIndexFile());
MockOsgi.deactivate(service, context.bundleContext());
}
@Test
public void cleanerRegistration() throws Exception{
Map<String,Object> config = getDefaultConfig();
config.put("propIndexCleanerIntervalInSecs", 142);
MockOsgi.activate(service, context.bundleContext(), config);
ServiceReference[] sr = context.bundleContext().getAllServiceReferences(Runnable.class.getName(),
"(scheduler.name="+ PropertyIndexCleaner.class.getName()+")");
assertEquals(sr.length, 1);
assertEquals(142L, sr[0].getProperty("scheduler.period"));
}
@Test
public void cleanerRegistrationDisabled() throws Exception{
Map<String,Object> config = getDefaultConfig();
config.put("propIndexCleanerIntervalInSecs", 0);
MockOsgi.activate(service, context.bundleContext(), config);
ServiceReference[] sr = context.bundleContext().getAllServiceReferences(Runnable.class.getName(),
"(scheduler.name="+ PropertyIndexCleaner.class.getName()+")");
assertNull(sr);
}
private void reactivate() {
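// Deactivate and rebuild the service so that references registered after the first activation (e.g. a blob store) are injected on the next activate.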
MockOsgi.deactivate(service, context.bundleContext());
service = new LuceneIndexProviderService();
MockOsgi.injectServices(service, context.bundleContext());
MockOsgi.activate(service, context.bundleContext(), getDefaultConfig());
}
private Map<String,Object> getDefaultConfig(){
Map<String,Object> config = new HashMap<>();
config.put("localIndexDir", folder.getRoot().getAbsolutePath());
return config;
}
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.emmet;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.EditorFactoryAdapter;
import com.intellij.openapi.editor.event.EditorFactoryEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.EditorMarkupModel;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.HintHint;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.LightweightHint;
import com.intellij.ui.components.JBPanel;
import com.intellij.util.Alarm;
import com.intellij.util.DocumentUtil;
import com.intellij.util.Producer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
public class EmmetPreviewHint extends LightweightHint implements Disposable {
private static final Key<EmmetPreviewHint> KEY = new Key<EmmetPreviewHint>("emmet.preview");
@NotNull private final Editor myParentEditor;
@NotNull private final Editor myEditor;
@NotNull private final Alarm myAlarm = new Alarm(this);
private boolean isDisposed = false;
private EmmetPreviewHint(@NotNull JBPanel panel, @NotNull Editor editor, @NotNull Editor parentEditor) {
super(panel);
myParentEditor = parentEditor;
myEditor = editor;
final Editor topLevelEditor = InjectedLanguageUtil.getTopLevelEditor(myParentEditor);
EditorFactory.getInstance().addEditorFactoryListener(new EditorFactoryAdapter() {
@Override
public void editorReleased(@NotNull EditorFactoryEvent event) {
if (event.getEditor() == myParentEditor || event.getEditor() == myEditor || event.getEditor() == topLevelEditor) {
hide(true);
}
}
}, this);
myEditor.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent event) {
if (!isDisposed && event.isWholeTextReplaced()) {
Pair<Point, Short> position = guessPosition();
HintManagerImpl.adjustEditorHintPosition(EmmetPreviewHint.this, myParentEditor, position.first, position.second);
myEditor.getScrollingModel().scrollVertically(0);
}
}
}, this);
}
public void showHint() {
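// Show the preview as an editor hint anchored near the caret, above or below depending on the space computed by guessPosition().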
myParentEditor.putUserData(KEY, this);
Pair<Point, Short> position = guessPosition();
JRootPane pane = myParentEditor.getComponent().getRootPane();
JComponent layeredPane = pane != null ? pane.getLayeredPane() : myParentEditor.getComponent();
HintHint hintHint = new HintHint(layeredPane, position.first)
.setAwtTooltip(true)
.setContentActive(true)
.setExplicitClose(true)
.setShowImmediately(true)
.setPreferredPosition(position.second == HintManager.ABOVE ? Balloon.Position.above : Balloon.Position.below)
.setTextBg(myParentEditor.getColorsScheme().getDefaultBackground())
.setBorderInsets(new Insets(1, 1, 1, 1));
int hintFlags = HintManager.HIDE_BY_OTHER_HINT | HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING;
HintManagerImpl.getInstanceImpl().showEditorHint(this, myParentEditor, position.first, hintFlags, 0, false, hintHint);
}
public void updateText(@NotNull final Producer<String> contentProducer) {
myAlarm.cancelAllRequests();
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
if (!isDisposed) {
final String newText = contentProducer.produce();
if (StringUtil.isEmpty(newText)) {
hide();
}
else if (!myEditor.getDocument().getText().equals(newText)) {
DocumentUtil.writeInRunUndoTransparentAction(new Runnable() {
@Override
public void run() {
myEditor.getDocument().setText(newText);
}
});
}
}
}
}, 100);
}
@TestOnly
@NotNull
public String getContent() {
return myEditor.getDocument().getText();
}
@Nullable
public static EmmetPreviewHint getExistingHint(@NotNull Editor parentEditor) {
EmmetPreviewHint emmetPreviewHint = KEY.get(parentEditor);
if (emmetPreviewHint != null) {
if (!emmetPreviewHint.isDisposed) {
return emmetPreviewHint;
}
emmetPreviewHint.hide();
}
return null;
}
@NotNull
public static EmmetPreviewHint createHint(@NotNull final EditorEx parentEditor,
@NotNull String templateText,
@NotNull FileType fileType) {
EditorFactory editorFactory = EditorFactory.getInstance();
Document document = editorFactory.createDocument(templateText);
final EditorEx previewEditor = (EditorEx)editorFactory.createEditor(document, parentEditor.getProject(), fileType, true);
MarkupModelEx model = previewEditor.getMarkupModel();
if (model instanceof EditorMarkupModel) {
((EditorMarkupModel)model).setErrorStripeVisible(true);
}
final EditorSettings settings = previewEditor.getSettings();
settings.setLineNumbersShown(false);
settings.setAdditionalLinesCount(1);
settings.setAdditionalColumnsCount(1);
settings.setRightMarginShown(false);
settings.setFoldingOutlineShown(false);
settings.setLineMarkerAreaShown(false);
settings.setIndentGuidesShown(false);
settings.setVirtualSpace(false);
settings.setWheelFontChangeEnabled(false);
settings.setAdditionalPageAtBottom(false);
settings.setCaretRowShown(false);
previewEditor.setCaretEnabled(false);
previewEditor.setBorder(IdeBorderFactory.createEmptyBorder());
JBPanel panel = new JBPanel(new BorderLayout()) {
@NotNull
@Override
public Dimension getPreferredSize() {
Dimension size = super.getPreferredSize();
Dimension parentEditorSize = parentEditor.getScrollPane().getSize();
int maxWidth = (int)parentEditorSize.getWidth() / 3;
int maxHeight = (int)parentEditorSize.getHeight() / 2;
final int width = settings.isUseSoftWraps() ? maxWidth : Math.min((int)size.getWidth(), maxWidth);
final int height = Math.min((int)size.getHeight(), maxHeight);
return new Dimension(width, height);
}
@NotNull
@Override
public Insets getInsets() {
return new Insets(1, 2, 0, 0);
}
};
panel.setBackground(previewEditor.getBackgroundColor());
panel.add(previewEditor.getComponent(), BorderLayout.CENTER);
return new EmmetPreviewHint(panel, previewEditor, parentEditor);
}
@Override
public boolean vetoesHiding() {
return true;
}
@Override
public void hide(boolean ok) {
super.hide(ok);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
Disposer.dispose(EmmetPreviewHint.this);
}
});
}
@Override
public void dispose() {
isDisposed = true;
myAlarm.cancelAllRequests();
EmmetPreviewHint existingBalloon = myParentEditor.getUserData(KEY);
if (existingBalloon == this) {
myParentEditor.putUserData(KEY, null);
}
if (!myEditor.isDisposed()) {
EditorFactory.getInstance().releaseEditor(myEditor);
}
}
@NotNull
private Pair<Point, Short> guessPosition() {
JRootPane rootPane = myParentEditor.getContentComponent().getRootPane();
JComponent layeredPane = rootPane != null ? rootPane.getLayeredPane() : myParentEditor.getComponent();
LogicalPosition logicalPosition = myParentEditor.getCaretModel().getLogicalPosition();
LogicalPosition pos = new LogicalPosition(logicalPosition.line, logicalPosition.column);
Point p1 = HintManagerImpl.getHintPosition(this, myParentEditor, pos, HintManager.UNDER);
Point p2 = HintManagerImpl.getHintPosition(this, myParentEditor, pos, HintManager.ABOVE);
boolean p1Ok = p1.y + getComponent().getPreferredSize().height < layeredPane.getHeight();
boolean p2Ok = p2.y >= 0;
if (p1Ok) return new Pair<Point, Short>(p1, HintManager.UNDER);
if (p2Ok) return new Pair<Point, Short>(p2, HintManager.ABOVE);
int underSpace = layeredPane.getHeight() - p1.y;
int aboveSpace = p2.y;
return aboveSpace > underSpace
? new Pair<Point, Short>(new Point(p2.x, 0), HintManager.UNDER)
: new Pair<Point, Short>(p1, HintManager.ABOVE);
}
}
|
|
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam;
import com.google.inject.ImplementedBy;
import com.netflix.priam.defaultimpl.PriamConfiguration;
import java.util.List;
/**
* Interface for Priam's configuration
*/
@ImplementedBy(PriamConfiguration.class)
public interface IConfiguration
{
public void intialize();
/**
* @return Path to the home dir of Cassandra
*/
public String getCassHome();
public String getYamlLocation();
/**
* @return Path to Cassandra startup script
*/
public String getCassStartupScript();
/**
* @return Path to Cassandra stop script
*/
public String getCassStopScript();
/**
* Eg: 'my_backup' will result in all files stored under this dir/prefix
*
* @return Prefix that will be added to remote backup location
*/
public String getBackupLocation();
/**
* @return Get Backup retention in days
*/
public int getBackupRetentionDays();
/**
* @return Get list of racs to backup. Backup all racs if empty
*/
public List<String> getBackupRacs();
/**
* Bucket name in case of AWS
*
* @return Bucket name used for backups
*/
public String getBackupPrefix();
/**
* Location containing backup files. Typically bucket name followed by path
* to the cluster's backup
*/
public String getRestorePrefix();
/**
* @param prefix
* Set the current restore prefix
*/
public void setRestorePrefix(String prefix);
/**
* @return List of keyspaces to restore. If none, all keyspaces are
* restored.
*/
public List<String> getRestoreKeySpaces();
/**
* @return Location of the local data dir
*/
public String getDataFileLocation();
/**
* @return Location of local cache
*/
public String getCacheLocation();
/**
* @return Location of local commit log dir
*/
public String getCommitLogLocation();
/**
* @return Remote commit log location for backups
*/
public String getBackupCommitLogLocation();
/**
* @return Preferred data part size for multi part uploads
*/
public long getBackupChunkSize();
/**
* @return true if commit log backup is enabled
*/
public boolean isCommitLogBackup();
/**
* @return Cassandra's JMX port
*/
public int getJmxPort();
/**
* Cassandra storage/cluster communication port
*/
public int getStoragePort();
public int getSSLStoragePort();
/**
* @return Cassandra's thrift port
*/
public int getThriftPort();
/**
* @return Port for CQL binary transport.
*/
public int getNativeTransportPort();
/**
* @return Snitch to be used in cassandra.yaml
*/
public String getSnitch();
/**
* @return Cluster name
*/
public String getAppName();
/**
* @return RAC (or zone for AWS)
*/
public String getRac();
/**
* @return List of all RAC used for the cluster
*/
public List<String> getRacs();
/**
* @return Local hostname
*/
public String getHostname();
/**
* @return Get instance name (for AWS)
*/
public String getInstanceName();
/**
* @return Max heap size to be used for Cassandra
*/
public String getHeapSize();
/**
* @return New heap size for Cassandra
*/
public String getHeapNewSize();
/**
* @return Backup hour for snapshot backups (0 - 23)
*/
public int getBackupHour();
/**
* Specifies the start and end time used for restoring data (yyyyMMddHHmm
* format) Eg: 201201132030,201201142030
*
* @return Snapshot to be searched and restored
*/
public String getRestoreSnapshot();
/**
* @return Get the Data Center name (or region for AWS)
*/
public String getDC();
/**
* @param region
* Set the current data center
*/
public void setDC(String region);
/**
* @return true if it is a multi regional cluster
*/
public boolean isMultiDC();
/**
* @return Number of backup threads for uploading
*/
public int getMaxBackupUploadThreads();
/**
* @return Number of download threads
*/
public int getMaxBackupDownloadThreads();
/**
* @return true if restore should search for nearest token if current token
* is not found
*/
public boolean isRestoreClosestToken();
/**
* Amazon specific setting to query ASG Membership
*/
public String getASGName();
/**
* Get the security group associated with nodes in this cluster
*/
public String getACLGroupName();
/**
* @return true if incremental backups are enabled
*/
boolean isIncrBackup();
/**
* @return Get host IP
*/
public String getHostIP();
/**
* @return Bytes per second to throttle for backups
*/
public int getUploadThrottle();
/**
* @return true if Priam should use the local config file for tokens and seeds
*/
boolean isLocalBootstrapEnabled();
/**
* @return In memory compaction limit
*/
public int getInMemoryCompactionLimit();
/**
* @return Compaction throughput
*/
public int getCompactionThroughput();
/**
* @return max_hint_window_in_ms
*/
public int getMaxHintWindowInMS();
/**
* @return hinted_handoff_throttle_in_kb
*/
public int getHintedHandoffThrottleKb();
/**
* @return max_hints_delivery_threads
*/
public int getMaxHintThreads();
/**
* @return Size of Cassandra max direct memory
*/
public String getMaxDirectMemory();
/**
* @return Bootstrap cluster name (depends on another cass cluster)
*/
public String getBootClusterName();
/**
* @return Get the name of seed provider
*/
public String getSeedProviderName();
/**
* @return Get Memtable throughput settings
*/
public int getMemtableTotalSpaceMB();
/**
* @return stream_throughput_outbound_megabits_per_sec in yaml
*/
public int getStreamingThroughputMB();
/**
* @return multithreaded_compaction in yaml
*/
public boolean getMultithreadedCompaction();
/**
* Get the partitioner for this Cassandra cluster/node.
*
* @return the fully-qualified name of the partitioner class
*/
public String getPartitioner();
/**
* Support for c* 1.1 global key cache size
*/
public String getKeyCacheSizeInMB();
/**
* Support for limiting the total number of keys in c* 1.1 global key cache.
*/
public String getKeyCacheKeysToSave();
/**
* Support for c* 1.1 global row cache size
*/
public String getRowCacheSizeInMB();
/**
* Support for limiting the total number of rows in c* 1.1 global row cache.
*/
public String getRowCacheKeysToSave();
/**
* @return C* Process Name
*/
public String getCassProcessName();
/**
* Defaults to 'allow all'.
*/
public String getAuthenticator();
/**
* Defaults to 'allow all'.
*/
public String getAuthorizer();
/**
* This can be used during cluster migration.
* When on Target Cluster, keyspace name is different
* than the original one.
* @return New Keyspace Name on Target Cluster
*/
public String getTargetKSName();
/**
* This can be used during cluster migration.
* When on Target Cluster, Column Family name is different
* than the original one.
* @return New Column Family Name on Target Cluster
*/
public String getTargetCFName();
/**
* @return true/false, if Cassandra needs to be started manually
*/
public boolean doesCassandraStartManually();
/**
* @return possible values: all, dc, none
*/
public String getInternodeCompression();
public boolean isBackingUpCommitLogs();
public String getCommitLogBackupArchiveCmd();
public String getCommitLogBackupRestoreCmd();
public String getCommitLogBackupRestoreFromDirs();
public String getCommitLogBackupRestorePointInTime();
public int maxCommitLogsRestore();
/**
* @return true/false, if Cassandra is running in a VPC environment
*/
public boolean isVpcRing();
public void setRestoreKeySpaces(List<String> keyspaces);
public boolean isClientSslEnabled();
public String getInternodeEncryption();
public boolean isDynamicSnitchEnabled();
public boolean isThriftEnabled();
public boolean isNativeTransportEnabled();
public String getS3EndPoint();
public int getConcurrentReadsCnt();
public int getConcurrentWritesCnt();
public int getConcurrentCompactorsCnt();
public String getRpcServerType();
public int getRpcMaxThreads();
public int getIndexInterval();
public String getExtraConfigParams();
public String getCassYamlVal(String priamKey);
public boolean getAutoBoostrap();
}
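// A minimal usage sketch (not part of Priam): a hypothetical consumer that reads a few of the
// backup-related settings from an injected IConfiguration. The BackupPlanner name and the
// description format below are illustrative assumptions, not Priam code.
class BackupPlanner
{
    private final IConfiguration config;

    BackupPlanner(IConfiguration config)
    {
        this.config = config;
    }

    // e.g. "daily snapshot at hour 3, uploaded to my-bucket/my_backup, retained 30 days"
    String describeBackupPolicy()
    {
        return "daily snapshot at hour " + config.getBackupHour()
                + ", uploaded to " + config.getBackupPrefix() + "/" + config.getBackupLocation()
                + ", retained " + config.getBackupRetentionDays() + " days";
    }
}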
|
|
/*******************************************************************************
* Copyright (c) 2015 Jeff Martin.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public
* License v3.0 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* Contributors:
* Jeff Martin - initial API and implementation
******************************************************************************/
package cuchaz.enigma.gui;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import javax.swing.JTree;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import cuchaz.enigma.mapping.ClassEntry;
public class ClassSelector extends JTree {
private static final long serialVersionUID = -7632046902384775977L;
public interface ClassSelectionListener {
void onSelectClass(ClassEntry classEntry);
}
public static Comparator<ClassEntry> ObfuscatedClassEntryComparator;
public static Comparator<ClassEntry> DeobfuscatedClassEntryComparator;
static {
ObfuscatedClassEntryComparator = new Comparator<ClassEntry>() {
@Override
public int compare(ClassEntry a, ClassEntry b) {
String aname = a.getName();
String bname = b.getName();
if (aname.length() != bname.length()) {
return aname.length() - bname.length();
}
return aname.compareTo(bname);
}
};
DeobfuscatedClassEntryComparator = new Comparator<ClassEntry>() {
@Override
public int compare(ClassEntry a, ClassEntry b) {
if (a instanceof ScoredClassEntry && b instanceof ScoredClassEntry) {
return Float.compare(
((ScoredClassEntry)b).getScore(),
((ScoredClassEntry)a).getScore()
);
}
return a.getName().compareTo(b.getName());
}
};
}
private ClassSelectionListener m_listener;
private Comparator<ClassEntry> m_comparator;
public ClassSelector(Comparator<ClassEntry> comparator) {
m_comparator = comparator;
// configure the tree control
setRootVisible(false);
setShowsRootHandles(false);
setModel(null);
// hook events
addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent event) {
if (m_listener != null && event.getClickCount() == 2) {
// get the selected node
TreePath path = getSelectionPath();
if (path != null && path.getLastPathComponent() instanceof ClassSelectorClassNode) {
ClassSelectorClassNode node = (ClassSelectorClassNode)path.getLastPathComponent();
m_listener.onSelectClass(node.getClassEntry());
}
}
}
});
// init defaults
m_listener = null;
}
public void setListener(ClassSelectionListener val) {
m_listener = val;
}
public void setClasses(Collection<ClassEntry> classEntries) {
if (classEntries == null) {
setModel(null);
return;
}
// build the package names
Map<String,ClassSelectorPackageNode> packages = Maps.newHashMap();
for (ClassEntry classEntry : classEntries) {
packages.put(classEntry.getPackageName(), null);
}
// sort the packages
List<String> sortedPackageNames = Lists.newArrayList(packages.keySet());
Collections.sort(sortedPackageNames, new Comparator<String>() {
@Override
public int compare(String a, String b) {
// I can never keep this rule straight when writing these damn things...
// a < b => -1, a == b => 0, a > b => +1
String[] aparts = a.split("/");
String[] bparts = b.split("/");
for (int i = 0; true; i++) {
if (i >= aparts.length && i >= bparts.length) {
return 0;
} else if (i >= aparts.length) {
return -1;
} else if (i >= bparts.length) {
return 1;
}
int result = aparts[i].compareTo(bparts[i]);
if (result != 0) {
return result;
}
}
}
});
// create the root node and the package nodes
DefaultMutableTreeNode root = new DefaultMutableTreeNode();
for (String packageName : sortedPackageNames) {
ClassSelectorPackageNode node = new ClassSelectorPackageNode(packageName);
packages.put(packageName, node);
root.add(node);
}
// put the classes into packages
Multimap<String,ClassEntry> packagedClassEntries = ArrayListMultimap.create();
for (ClassEntry classEntry : classEntries) {
packagedClassEntries.put(classEntry.getPackageName(), classEntry);
}
// build the class nodes
for (String packageName : packagedClassEntries.keySet()) {
// sort the class entries
List<ClassEntry> classEntriesInPackage = Lists.newArrayList(packagedClassEntries.get(packageName));
Collections.sort(classEntriesInPackage, m_comparator);
// create the nodes in order
for (ClassEntry classEntry : classEntriesInPackage) {
ClassSelectorPackageNode node = packages.get(packageName);
node.add(new ClassSelectorClassNode(classEntry));
}
}
// finally, update the tree control
setModel(new DefaultTreeModel(root));
}
public ClassEntry getSelectedClass() {
if (!isSelectionEmpty()) {
Object selectedNode = getSelectionPath().getLastPathComponent();
if (selectedNode instanceof ClassSelectorClassNode) {
ClassSelectorClassNode classNode = (ClassSelectorClassNode)selectedNode;
return classNode.getClassEntry();
}
}
return null;
}
public String getSelectedPackage() {
if (!isSelectionEmpty()) {
Object selectedNode = getSelectionPath().getLastPathComponent();
if (selectedNode instanceof ClassSelectorPackageNode) {
ClassSelectorPackageNode packageNode = (ClassSelectorPackageNode)selectedNode;
return packageNode.getPackageName();
} else if (selectedNode instanceof ClassSelectorClassNode) {
ClassSelectorClassNode classNode = (ClassSelectorClassNode)selectedNode;
return classNode.getClassEntry().getPackageName();
}
}
return null;
}
public Iterable<ClassSelectorPackageNode> packageNodes() {
List<ClassSelectorPackageNode> nodes = Lists.newArrayList();
DefaultMutableTreeNode root = (DefaultMutableTreeNode)getModel().getRoot();
Enumeration<?> children = root.children();
while (children.hasMoreElements()) {
ClassSelectorPackageNode packageNode = (ClassSelectorPackageNode)children.nextElement();
nodes.add(packageNode);
}
return nodes;
}
public Iterable<ClassSelectorClassNode> classNodes(ClassSelectorPackageNode packageNode) {
List<ClassSelectorClassNode> nodes = Lists.newArrayList();
Enumeration<?> children = packageNode.children();
while (children.hasMoreElements()) {
ClassSelectorClassNode classNode = (ClassSelectorClassNode)children.nextElement();
nodes.add(classNode);
}
return nodes;
}
public void expandPackage(String packageName) {
if (packageName == null) {
return;
}
for (ClassSelectorPackageNode packageNode : packageNodes()) {
if (packageNode.getPackageName().equals(packageName)) {
expandPath(new TreePath(new Object[] {getModel().getRoot(), packageNode}));
return;
}
}
}
public void expandAll() {
for (ClassSelectorPackageNode packageNode : packageNodes()) {
expandPath(new TreePath(new Object[] {getModel().getRoot(), packageNode}));
}
}
public ClassEntry getFirstClass() {
for (ClassSelectorPackageNode packageNode : packageNodes()) {
for (ClassSelectorClassNode classNode : classNodes(packageNode)) {
return classNode.getClassEntry();
}
}
return null;
}
public ClassSelectorPackageNode getPackageNode(ClassEntry entry) {
for (ClassSelectorPackageNode packageNode : packageNodes()) {
if (packageNode.getPackageName().equals(entry.getPackageName())) {
return packageNode;
}
}
return null;
}
public ClassEntry getNextClass(ClassEntry entry) {
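// Walk the packages in display order; once the given entry is found, return the class node that follows it (which may live in the next package).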
boolean foundIt = false;
for (ClassSelectorPackageNode packageNode : packageNodes()) {
if (!foundIt) {
// skip to the package with our target in it
if (packageNode.getPackageName().equals(entry.getPackageName())) {
for (ClassSelectorClassNode classNode : classNodes(packageNode)) {
if (!foundIt) {
if (classNode.getClassEntry().equals(entry)) {
foundIt = true;
}
} else {
// return the next class
return classNode.getClassEntry();
}
}
}
} else {
// return the next class
for (ClassSelectorClassNode classNode : classNodes(packageNode)) {
return classNode.getClassEntry();
}
}
}
return null;
}
public void setSelectionClass(ClassEntry classEntry) {
expandPackage(classEntry.getPackageName());
for (ClassSelectorPackageNode packageNode : packageNodes()) {
for (ClassSelectorClassNode classNode : classNodes(packageNode)) {
if (classNode.getClassEntry().equals(classEntry)) {
setSelectionPath(new TreePath(new Object[] {getModel().getRoot(), packageNode, classNode}));
}
}
}
}
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.local;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.testFramework.VfsTestUtil;
import com.intellij.testFramework.fixtures.BareTestFixtureTestCase;
import com.intellij.testFramework.rules.TempDirectory;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.*;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import static com.intellij.openapi.util.io.IoTestUtil.*;
import static com.intellij.testFramework.PlatformTestUtil.assertPathsEqual;
import static org.junit.Assert.*;
public class SymlinkHandlingTest extends BareTestFixtureTestCase {
@Rule public TempDirectory tempDir = new TempDirectory();
@Before
public void setUp() {
assumeSymLinkCreationIsSupported();
}
@After
public void tearDown() {
VirtualFile root = refreshAndFind(tempDir.getRoot());
// purge VFS to avoid persisting these specific file names through to the next launch
VfsTestUtil.deleteFile(root);
}
@Test
public void testMissingLink() {
File missingFile = new File(tempDir.getRoot(), "missing_file");
assertTrue(missingFile.getPath(), !missingFile.exists() || missingFile.delete());
File missingLinkFile = createSymLink(missingFile.getPath(), tempDir.getRoot() + "/missing_link", false);
VirtualFile missingLinkVFile = refreshAndFind(missingLinkFile);
assertNotNull(missingLinkVFile);
assertBrokenLink(missingLinkVFile);
assertVisitedPaths(missingLinkVFile.getPath());
}
@Test
public void testSelfLink() {
String target = new File(tempDir.getRoot(), "self_link").getPath();
File selfLinkFile = createSymLink(target, target, false);
VirtualFile selfLinkVFile = refreshAndFind(selfLinkFile);
assertNotNull(selfLinkVFile);
assertBrokenLink(selfLinkVFile);
assertVisitedPaths(selfLinkVFile.getPath());
}
@Test
public void testDotLink() {
File dotLinkFile = createSymLink(".", tempDir.getRoot() + "/dot_link");
VirtualFile dotLinkVFile = refreshAndFind(dotLinkFile);
assertNotNull(dotLinkVFile);
assertTrue(dotLinkVFile.is(VFileProperty.SYMLINK));
assertTrue(dotLinkVFile.isDirectory());
assertPathsEqual(tempDir.getRoot().getPath(), dotLinkVFile.getCanonicalPath());
assertVisitedPaths(dotLinkVFile.getPath());
}
@Test
public void testCircularLink() {
File upDir = tempDir.newDirectory("sub");
File upLinkFile = createSymLink(upDir.getPath(), upDir.getPath() + "/up_link");
VirtualFile upLinkVFile = refreshAndFind(upLinkFile);
assertNotNull(upLinkVFile);
assertTrue(upLinkVFile.is(VFileProperty.SYMLINK));
assertTrue(upLinkVFile.isDirectory());
assertPathsEqual(upDir.getPath(), upLinkVFile.getCanonicalPath());
assertVisitedPaths(upDir.getPath(), upLinkVFile.getPath());
File repeatedLinksFile = new File(upDir.getPath() + StringUtil.repeat(File.separator + upLinkFile.getName(), 4));
assertTrue(repeatedLinksFile.getPath(), repeatedLinksFile.isDirectory());
VirtualFile repeatedLinksVFile = refreshAndFind(repeatedLinksFile);
assertNotNull(repeatedLinksFile.getPath(), repeatedLinksVFile);
assertTrue(repeatedLinksVFile.is(VFileProperty.SYMLINK));
assertTrue(repeatedLinksVFile.isDirectory());
assertPathsEqual(upDir.getPath(), repeatedLinksVFile.getCanonicalPath());
assertEquals(upLinkVFile.getCanonicalFile(), repeatedLinksVFile.getCanonicalFile());
}
@Test
public void testMutualRecursiveLinks() {
File circularDir1 = tempDir.newDirectory("dir1");
File circularDir2 = tempDir.newDirectory("dir2");
File circularLink1 = createSymLink(circularDir2.getPath(), circularDir1 + "/link1");
File circularLink2 = createSymLink(circularDir1.getPath(), circularDir2 + "/link2");
VirtualFile circularLink1VFile = refreshAndFind(circularLink1);
VirtualFile circularLink2VFile = refreshAndFind(circularLink2);
assertNotNull(circularLink1VFile);
assertNotNull(circularLink2VFile);
assertVisitedPaths(circularDir1.getPath(), circularLink1.getPath(), circularLink1.getPath() + "/" + circularLink2.getName(),
circularDir2.getPath(), circularLink2.getPath(), circularLink2.getPath() + "/" + circularLink1.getName());
}
@Test
public void testDuplicateLinks() {
File targetDir = tempDir.newDirectory("target");
File link1 = createSymLink(targetDir.getPath(), tempDir.getRoot() + "/link1");
File link2 = createSymLink(targetDir.getPath(), tempDir.getRoot() + "/link2");
assertVisitedPaths(targetDir.getPath(), link1.getPath(), link2.getPath());
}
@Test
public void testSidewaysRecursiveLink() {
File a = tempDir.newDirectory("a");
File b = createTestDir(a, "b");
File link1 = createSymLink(SystemInfo.isWindows ? a.getPath() : "../../" + a.getName(), b.getPath() + "/link1");
File project = tempDir.newDirectory("project");
File c = createTestDir(project, "c");
File d = createTestDir(c, "d");
File link2 = createSymLink(SystemInfo.isWindows ? a.getPath() : "../../../" + a.getName(), d.getPath() + "/link2");
assertVisitedPaths(project,
c.getPath(), d.getPath(), link2.getPath(), link2.getPath() + "/" + b.getName(),
link2.getPath() + "/" + b.getName() + "/" + link1.getName());
}
@Test
public void testVisitAllNonRecursiveLinks() {
File target = tempDir.newDirectory("target");
File child = createTestDir(target, "child");
File link1 = createSymLink(target.getPath(), tempDir.getRoot() + "/link1");
File link2 = createSymLink(target.getPath(), tempDir.getRoot() + "/link2");
assertVisitedPaths(target.getPath(), child.getPath(),
link1.getPath(), link1.getPath() + "/child",
link2.getPath(), link2.getPath() + "/child");
}
@Test
public void testTargetIsWritable() {
File targetFile = tempDir.newFile("target.txt");
File linkFile = createSymLink(targetFile.getPath(), tempDir.getRoot() + "/link");
VirtualFile linkVFile = refreshAndFind(linkFile);
assertTrue("link=" + linkFile + ", vLink=" + linkVFile, linkVFile != null && !linkVFile.isDirectory() &&
linkVFile.is(VFileProperty.SYMLINK));
setWritableAndCheck(targetFile, true);
refresh(tempDir.getRoot());
assertTrue(linkVFile.getPath(), linkVFile.isWritable());
setWritableAndCheck(targetFile, false);
refresh(tempDir.getRoot());
assertFalse(linkVFile.getPath(), linkVFile.isWritable());
File targetDir = tempDir.newDirectory("target");
File linkDir = createSymLink(targetDir.getPath(), tempDir.getRoot() + "/linkDir");
VirtualFile linkVDir = refreshAndFind(linkDir);
assertTrue("link=" + linkDir + ", vLink=" + linkVDir, linkVDir != null && linkVDir.isDirectory() && linkVDir.is(VFileProperty.SYMLINK));
if (!SystemInfo.isWindows) {
setWritableAndCheck(targetDir, true);
refresh(tempDir.getRoot());
assertTrue(linkVDir.getPath(), linkVDir.isWritable());
setWritableAndCheck(targetDir, false);
refresh(tempDir.getRoot());
assertFalse(linkVDir.getPath(), linkVDir.isWritable());
}
else {
assertEquals(linkVDir.getPath(), targetDir.canWrite(), linkVDir.isWritable());
}
}
private static void setWritableAndCheck(File file, boolean writable) {
assertTrue(file.getPath(), file.setWritable(writable, false));
assertEquals(file.getPath(), writable, file.canWrite());
}
@Test
public void testLinkDeleteIsSafe() throws Exception {
File targetFile = tempDir.newFile("target");
File linkFile = createSymLink(targetFile.getPath(), tempDir.getRoot() + "/link");
VirtualFile linkVFile = refreshAndFind(linkFile);
assertTrue("link=" + linkFile + ", vLink=" + linkVFile,
linkVFile != null && !linkVFile.isDirectory() && linkVFile.is(VFileProperty.SYMLINK));
WriteAction.runAndWait(() -> linkVFile.delete(this));
assertFalse(linkVFile.toString(), linkVFile.isValid());
assertFalse(linkFile.exists());
assertTrue(targetFile.exists());
File targetDir = tempDir.newDirectory("targetDir");
File childFile = new File(targetDir, "child.txt");
assertTrue(childFile.getPath(), childFile.exists() || childFile.createNewFile());
File linkDir = createSymLink(targetDir.getPath(), tempDir.getRoot() + "/linkDir");
VirtualFile linkVDir = refreshAndFind(linkDir);
assertTrue("link=" + linkDir + ", vLink=" + linkVDir,
linkVDir != null && linkVDir.isDirectory() && linkVDir.is(VFileProperty.SYMLINK) && linkVDir.getChildren().length == 1);
WriteAction.runAndWait(() -> linkVDir.delete(this));
assertFalse(linkVDir.toString(), linkVDir.isValid());
assertFalse(linkDir.exists());
assertTrue(targetDir.exists());
assertTrue(childFile.exists());
}
@Test
public void testTransGenderRefresh() throws Exception {
File targetFile = tempDir.newFile("target");
File targetDir = tempDir.newDirectory("targetDir");
// file link
File link = createSymLink(targetFile.getPath(), tempDir.getRoot() + "/link");
VirtualFile vFile1 = refreshAndFind(link);
assertTrue("link=" + link + ", vLink=" + vFile1,
vFile1 != null && !vFile1.isDirectory() && vFile1.is(VFileProperty.SYMLINK));
// file link => dir
assertTrue(link.getPath(), link.delete() && link.mkdir() && link.isDirectory());
VirtualFile vFile2 = refreshAndFind(link);
assertTrue("link=" + link + ", vLink=" + vFile2,
!vFile1.isValid() && vFile2 != null && vFile2.isDirectory() && !vFile2.is(VFileProperty.SYMLINK));
// dir => dir link
assertTrue(link.getPath(), link.delete());
link = createSymLink(targetDir.getPath(), tempDir.getRoot() + "/link");
refresh(tempDir.getRoot());
vFile1 = refreshAndFind(link);
assertTrue("link=" + link + ", vLink=" + vFile1,
!vFile2.isValid() && vFile1 != null && vFile1.isDirectory() && vFile1.is(VFileProperty.SYMLINK));
// dir link => file
assertTrue(link.getPath(), link.delete() && link.createNewFile() && link.isFile());
refresh(tempDir.getRoot());
vFile2 = refreshAndFind(link);
assertTrue("link=" + link + ", vLink=" + vFile1,
!vFile1.isValid() && vFile2 != null && !vFile2.isDirectory() && !vFile2.is(VFileProperty.SYMLINK));
// file => file link
assertTrue(link.getPath(), link.delete());
link = createSymLink(targetFile.getPath(), tempDir.getRoot() + "/link");
refresh(tempDir.getRoot());
vFile1 = refreshAndFind(link);
assertTrue("link=" + link + ", vLink=" + vFile1,
!vFile2.isValid() && vFile1 != null && !vFile1.isDirectory() && vFile1.is(VFileProperty.SYMLINK));
}
@Test
public void testDirLinkSwitch() throws Exception {
Path target1Child = tempDir.newFile("target1/child1.txt", "text".getBytes(StandardCharsets.UTF_8)).toPath();
Path target2Child = tempDir.newFile("target2/child1.txt", "longer text".getBytes(StandardCharsets.UTF_8)).toPath();
tempDir.newFile("target2/child2.txt");
Path target1 = target1Child.getParent(), target2 = target2Child.getParent();
Path link = tempDir.getRoot().toPath().resolve("link");
createSymbolicLink(link, target1);
VirtualFile vLink = refreshAndFind(link.toFile());
assertTrue("link=" + link + ", vLink=" + vLink, vLink != null && vLink.isDirectory() && vLink.is(VFileProperty.SYMLINK));
vLink.setCharset(StandardCharsets.UTF_8);
assertEquals(1, vLink.getChildren().length);
assertPathsEqual(target1.toString(), vLink.getCanonicalPath());
assertEquals(Files.readString(target1Child), VfsUtilCore.loadText(vLink.findChild("child1.txt")));
Files.delete(link);
createSymbolicLink(link, target2);
refresh(tempDir.getRoot());
assertTrue("vLink=" + vLink, vLink.isValid());
assertEquals(2, vLink.getChildren().length);
assertEquals(Files.readString(target2Child), VfsUtilCore.loadText(vLink.findChild("child1.txt")));
assertPathsEqual(target2.toString(), vLink.getCanonicalPath());
}
@Test
public void testFileLinkSwitch() throws Exception {
Path target1 = tempDir.newFile("target1.txt", "text".getBytes(StandardCharsets.UTF_8)).toPath();
Path target2 = tempDir.newFile("target2.txt", "longer text".getBytes(StandardCharsets.UTF_8)).toPath();
Path link = tempDir.getRoot().toPath().resolve("link");
createSymbolicLink(link, target1);
VirtualFile vLink = refreshAndFind(link.toFile());
assertTrue("link=" + link + ", vLink=" + vLink, vLink != null && !vLink.isDirectory() && vLink.is(VFileProperty.SYMLINK));
vLink.setCharset(StandardCharsets.UTF_8);
assertEquals(Files.readString(target1), VfsUtilCore.loadText(vLink));
assertPathsEqual(target1.toString(), vLink.getCanonicalPath());
Files.delete(link);
createSymbolicLink(link, target2);
refresh(tempDir.getRoot());
assertTrue("vLink=" + vLink, vLink.isValid());
assertEquals(Files.readString(target2), VfsUtilCore.loadText(vLink));
assertPathsEqual(target2.toString(), vLink.getCanonicalPath());
}
@Test
public void testTraversePathBehindLink() {
File topDir = tempDir.newDirectory("top");
File subDir1 = createTestDir(topDir, "sub1");
File link = createSymLink(subDir1.getPath(), tempDir.getRoot() + "/link");
VirtualFile vLink = refreshAndFind(link);
assertNotNull(link.getPath(), vLink);
File subDir2 = createTestDir(topDir, "sub2");
File subChild = createTestFile(subDir2, "subChild.txt");
VirtualFile vSubChild = refreshAndFind(subChild);
assertNotNull(subChild.getPath(), vSubChild);
String relPath = "../" + subDir2.getName() + "/" + subChild.getName();
VirtualFile vSubChildRel = vLink.findFileByRelativePath(relPath);
assertEquals(vSubChild, vSubChildRel);
vSubChildRel = LocalFileSystem.getInstance().findFileByPath(vLink.getPath() + "/" + relPath);
assertEquals(vSubChild, vSubChildRel);
}
@Test
public void testCircularSymlinksMustBeDetected() {
File top = tempDir.newDirectory("top");
File sub1 = createTestDir(top, "sub1");
File link = createSymLink(top.getPath(), sub1.getPath() + "/link");
VirtualFile vLink = refreshAndFind(link);
assertNotNull(link.getPath(), vLink);
String path = sub1.getPath() + StringUtil.repeat("/" + link.getName() + "/" + sub1.getName(), 10);
VirtualFile f = LocalFileSystem.getInstance().findFileByPath(path);
assertNotNull(f);
while (!VfsUtilCore.pathEqualsTo(f, sub1.getPath())) {
if (f.getName().equals(link.getName())) {
assertTrue(f.getPath(),f.is(VFileProperty.SYMLINK));
assertTrue(f.getPath(), f.isRecursiveOrCircularSymlink());
}
else {
assertEquals(f.getPath(), sub1.getName(), f.getName());
assertFalse(f.getPath(),f.is(VFileProperty.SYMLINK));
assertFalse(f.isRecursiveOrCircularSymlink());
}
f = f.getParent();
}
}
@Test
public void testCircularSymlinksMustBeDetectedEvenForAsideLinks() {
File top = tempDir.newDirectory("top");
File sub1 = createTestDir(top, "s1");
File ss1 = createTestDir(sub1, "ss1");
File link1 = createSymLink(sub1.getPath(), ss1.getPath() + "/l1");
File sub2 = createTestDir(top, "s2");
File ss2 = createTestDir(sub2, "ss2");
File link2 = createSymLink(sub1.getPath(), ss2.getPath() + "/l2");
VirtualFile vl1 = refreshAndFind(link1);
assertNotNull(link1.getPath(), vl1);
VirtualFile vl2 = refreshAndFind(link2);
assertNotNull(link2.getPath(), vl2);
String path = link2.getPath() + "/" + ss1.getName() + "/" + link1.getName() + "/" + ss1.getName() + "/" + link1.getName();
VirtualFile f = LocalFileSystem.getInstance().findFileByPath(path);
assertNotNull(f);
assertEquals(link1.getName(), f.getName());
assertTrue(f.getPath(), f.is(VFileProperty.SYMLINK));
assertTrue(f.getPath(), f.isRecursiveOrCircularSymlink());
f = f.getParent();
assertEquals(ss1.getName(), f.getName());
assertFalse(f.getPath(), f.is(VFileProperty.SYMLINK));
assertFalse(f.getPath(), f.isRecursiveOrCircularSymlink());
f = f.getParent();
assertEquals(link1.getName(), f.getName());
assertTrue(f.getPath(), f.is(VFileProperty.SYMLINK));
assertTrue(f.getPath(), f.isRecursiveOrCircularSymlink());
}
//<editor-fold desc="Helpers.">
private @Nullable static VirtualFile refreshAndFind(File ioFile) {
return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(ioFile);
}
private static void refresh(File root) {
VirtualFile tempDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(root);
assertNotNull(root.getPath(), tempDir);
tempDir.getChildren();
tempDir.refresh(false, true);
VfsUtilCore.visitChildrenRecursively(tempDir, new VirtualFileVisitor<Void>() { });
}
private static void assertBrokenLink(@NotNull VirtualFile link) {
assertTrue(link.is(VFileProperty.SYMLINK));
assertEquals(0, link.getLength());
assertNull(link.getCanonicalPath(), link.getCanonicalPath());
}
private void assertVisitedPaths(String... expected) {
assertVisitedPaths(tempDir.getRoot(), expected);
}
private static void assertVisitedPaths(File from, String... expected) {
VirtualFile vDir = refreshAndFind(from);
assertNotNull(vDir);
Set<String> expectedSet = StreamEx.of(expected).map(FileUtil::toSystemIndependentName).append(vDir.getPath()).toSet();
Set<String> actualSet = new HashSet<>();
VfsUtilCore.visitChildrenRecursively(vDir, new VirtualFileVisitor<Void>() {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
if (!actualSet.add(file.getPath())) {
throw new AssertionError(file + " already visited");
}
return true;
}
});
List<String> exp = new ArrayList<>(expectedSet);
Collections.sort(exp);
List<String> act = new ArrayList<>(actualSet);
Collections.sort(act);
assertEquals(StringUtil.join(exp, "\n"), StringUtil.join(act, "\n"));
}
//</editor-fold>
}
|
|
package hex.tree;
import hex.Model;
import hex.ModelCategory;
import hex.genmodel.algos.tree.SharedTreeGraph;
import hex.genmodel.algos.tree.SharedTreeNode;
import hex.genmodel.algos.tree.SharedTreeSubgraph;
import hex.genmodel.algos.tree.SharedTreeGraphConverter;
import hex.schemas.TreeV3;
import water.Keyed;
import water.MemoryManager;
import water.api.Handler;
import java.util.*;
/**
* Handling requests for various model trees
*/
public class TreeHandler extends Handler {
private static final int NO_CHILD = -1;
public TreeV3 getTree(final int version, final TreeV3 args) {
if (args.tree_number < 0) {
throw new IllegalArgumentException("Invalid tree number: " + args.tree_number + ". Tree number must be >= 0.");
}
final Keyed possibleModel = args.model.key().get();
if (possibleModel == null) throw new IllegalArgumentException("Given model does not exist: " + args.model.key().toString());
else if (!(possibleModel instanceof SharedTreeModel) && !(possibleModel instanceof SharedTreeGraphConverter)) {
throw new IllegalArgumentException("Given model is not tree-based.");
}
final SharedTreeSubgraph sharedTreeSubgraph;
if (possibleModel instanceof SharedTreeGraphConverter) {
final SharedTreeGraphConverter treeBackedModel = (SharedTreeGraphConverter) possibleModel;
final SharedTreeGraph sharedTreeGraph = treeBackedModel.convert(args.tree_number, args.tree_class);
assert sharedTreeGraph.subgraphArray.size() == 1;
sharedTreeSubgraph = sharedTreeGraph.subgraphArray.get(0);
if (! ((Model)possibleModel)._output.isClassifier()) {
args.tree_class = null; // The class may not be provided by the user, but it should always be filled in correctly on output. NULL for regression.
}
} else {
final SharedTreeModel model = (SharedTreeModel) possibleModel;
final SharedTreeModel.SharedTreeOutput sharedTreeOutput = (SharedTreeModel.SharedTreeOutput) model._output;
final int treeClass = getResponseLevelIndex(args.tree_class, sharedTreeOutput);
sharedTreeSubgraph = model.getSharedTreeSubgraph(args.tree_number, treeClass);
// The class may not be provided by the user, but it should always be filled in correctly on output. NULL for regression.
args.tree_class = sharedTreeOutput.isClassifier() ? sharedTreeOutput.classNames()[treeClass] : null;
}
final TreeProperties treeProperties = convertSharedTreeSubgraph(sharedTreeSubgraph);
args.left_children = treeProperties._leftChildren;
args.right_children = treeProperties._rightChildren;
args.descriptions = treeProperties._descriptions;
args.root_node_id = sharedTreeSubgraph.rootNode.getNodeNumber();
args.thresholds = treeProperties._thresholds;
args.features = treeProperties._features;
args.nas = treeProperties._nas;
args.levels = treeProperties.levels;
args.predictions = treeProperties._predictions;
return args;
}
private static int getResponseLevelIndex(final String categorical, final SharedTreeModel.SharedTreeOutput sharedTreeOutput) {
final String trimmedCategorical = categorical != null ? categorical.trim() : ""; // Trim the categorical once - input from the user
if (! sharedTreeOutput.isClassifier()) {
if (!trimmedCategorical.isEmpty())
throw new IllegalArgumentException("There are no tree classes for " + sharedTreeOutput.getModelCategory() + ".");
return 0; // There is only one tree for non-classification models
}
final String[] responseColumnDomain = sharedTreeOutput._domains[sharedTreeOutput.responseIdx()];
if (sharedTreeOutput.getModelCategory() == ModelCategory.Binomial) {
if (!trimmedCategorical.isEmpty() && !trimmedCategorical.equals(responseColumnDomain[0])) {
throw new IllegalArgumentException("For binomial, only one tree class has been built per each iteration: " + responseColumnDomain[0]);
} else {
return 0;
}
} else {
for (int i = 0; i < responseColumnDomain.length; i++) {
// The user is expected to enter the categorical level name exactly; the comparison is case-sensitive
if (trimmedCategorical.equals(responseColumnDomain[i]))
return i;
}
throw new IllegalArgumentException("There is no such tree class. Given categorical level does not exist in response column: " + trimmedCategorical);
}
}
/**
* Converts H2O-3's internal representation of a tree in a form of {@link SharedTreeSubgraph} to a format
* expected by H2O clients.
*
* @param sharedTreeSubgraph An instance of {@link SharedTreeSubgraph} to convert
* @return An instance of {@link TreeProperties}; some attributes may be left empty where not applicable. Never null.
*/
static TreeProperties convertSharedTreeSubgraph(final SharedTreeSubgraph sharedTreeSubgraph) {
Objects.requireNonNull(sharedTreeSubgraph);
final TreeProperties treeprops = new TreeProperties();
treeprops._leftChildren = MemoryManager.malloc4(sharedTreeSubgraph.nodesArray.size());
treeprops._rightChildren = MemoryManager.malloc4(sharedTreeSubgraph.nodesArray.size());
treeprops._descriptions = new String[sharedTreeSubgraph.nodesArray.size()];
treeprops._thresholds = MemoryManager.malloc4f(sharedTreeSubgraph.nodesArray.size());
treeprops._features = new String[sharedTreeSubgraph.nodesArray.size()];
treeprops._nas = new String[sharedTreeSubgraph.nodesArray.size()];
treeprops._predictions = MemoryManager.malloc4f(sharedTreeSubgraph.nodesArray.size());
// Set the root node's children; there is no guarantee the root node will be number 0
treeprops._rightChildren[0] = sharedTreeSubgraph.rootNode.getRightChild() != null ? sharedTreeSubgraph.rootNode.getRightChild().getNodeNumber() : NO_CHILD;
treeprops._leftChildren[0] = sharedTreeSubgraph.rootNode.getLeftChild() != null ? sharedTreeSubgraph.rootNode.getLeftChild().getNodeNumber() : NO_CHILD;
treeprops._thresholds[0] = sharedTreeSubgraph.rootNode.getSplitValue();
treeprops._features[0] = sharedTreeSubgraph.rootNode.getColName();
treeprops._nas[0] = getNaDirection(sharedTreeSubgraph.rootNode);
treeprops.levels = new int[sharedTreeSubgraph.nodesArray.size()][];
List<SharedTreeNode> nodesToTraverse = new ArrayList<>();
nodesToTraverse.add(sharedTreeSubgraph.rootNode);
append(treeprops._rightChildren, treeprops._leftChildren,
treeprops._descriptions, treeprops._thresholds, treeprops._features, treeprops._nas,
treeprops.levels, treeprops._predictions, nodesToTraverse, -1, false);
return treeprops;
}
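// Illustration (not part of the original handler; node numbering is hypothetical): for a three-node
// tree whose root (node 0) splits on column "age" at 30.5 and whose children (nodes 1 and 2) are
// terminal, the parallel arrays built above would look roughly like this, with NO_CHILD marking leaves
// and array index i holding the i-th node in the breadth-first order used by append() below:
//
//   _leftChildren  = {1, NO_CHILD, NO_CHILD}   // node numbers of left children
//   _rightChildren = {2, NO_CHILD, NO_CHILD}   // node numbers of right children
//   _features      = {"age", null, null}       // split column; null on terminal nodes
//   _thresholds    = {30.5f, NaN, NaN}         // numeric split value; NaN where there is no split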
private static void append(final int[] rightChildren, final int[] leftChildren, final String[] nodesDescriptions,
final float[] thresholds, final String[] splitColumns, final String[] naHandlings,
final int[][] levels, final float[] predictions,
final List<SharedTreeNode> nodesToTraverse, int pointer, boolean visitedRoot) {
if(nodesToTraverse.isEmpty()) return;
List<SharedTreeNode> discoveredNodes = new ArrayList<>();
for (SharedTreeNode node : nodesToTraverse) {
pointer++;
final SharedTreeNode leftChild = node.getLeftChild();
final SharedTreeNode rightChild = node.getRightChild();
if(visitedRoot){
fillNodeDescriptions(node, nodesDescriptions, thresholds, splitColumns, levels, predictions,
naHandlings, pointer);
} else {
StringBuilder rootDescriptionBuilder = new StringBuilder();
rootDescriptionBuilder.append("Root node has id ");
rootDescriptionBuilder.append(node.getNodeNumber());
rootDescriptionBuilder.append(" and splits on column '");
rootDescriptionBuilder.append(node.getColName());
rootDescriptionBuilder.append("'. ");
fillNodeSplitTowardsChildren(rootDescriptionBuilder, node);
nodesDescriptions[pointer] = rootDescriptionBuilder.toString();
visitedRoot = true;
}
if (leftChild != null) {
discoveredNodes.add(leftChild);
leftChildren[pointer] = leftChild.getNodeNumber();
} else {
leftChildren[pointer] = NO_CHILD;
}
if (rightChild != null) {
discoveredNodes.add(rightChild);
rightChildren[pointer] = rightChild.getNodeNumber();
} else {
rightChildren[pointer] = NO_CHILD;
}
}
append(rightChildren, leftChildren, nodesDescriptions, thresholds, splitColumns, naHandlings, levels, predictions,
discoveredNodes, pointer, true);
}
private static void fillNodeDescriptions(final SharedTreeNode node, final String[] nodeDescriptions,
final float[] thresholds, final String[] splitColumns, final int[][] levels,
final float[] predictions, final String[] naHandlings, final int pointer) {
final StringBuilder nodeDescriptionBuilder = new StringBuilder();
int[] nodeLevels = node.getParent().isBitset() ? extractNodeLevels(node) : null;
nodeDescriptionBuilder.append("Node has id ");
nodeDescriptionBuilder.append(node.getNodeNumber());
if (node.getColName() != null) {
nodeDescriptionBuilder.append(" and splits on column '");
nodeDescriptionBuilder.append(node.getColName());
nodeDescriptionBuilder.append("'. ");
} else {
nodeDescriptionBuilder.append(" and is a terminal node. ");
}
fillNodeSplitTowardsChildren(nodeDescriptionBuilder, node);
if (!Float.isNaN(node.getParent().getSplitValue())) {
nodeDescriptionBuilder.append(" Parent node split threshold is ");
nodeDescriptionBuilder.append(node.getParent().getSplitValue());
nodeDescriptionBuilder.append(".");
} else if (node.getParent().isBitset()) {
nodeLevels = extractNodeLevels(node);
assert nodeLevels != null;
nodeDescriptionBuilder.append(" Parent node split on column [");
nodeDescriptionBuilder.append(node.getParent().getColName());
nodeDescriptionBuilder.append("]. Inherited categorical levels from parent split: ");
for (int nodeLevelsindex = 0; nodeLevelsindex < nodeLevels.length; nodeLevelsindex++) {
nodeDescriptionBuilder.append(node.getParent().getDomainValues()[nodeLevels[nodeLevelsindex]]);
if (nodeLevelsindex != nodeLevels.length - 1) nodeDescriptionBuilder.append(",");
}
} else {
nodeDescriptionBuilder.append("NA only");
}
nodeDescriptions[pointer] = nodeDescriptionBuilder.toString();
splitColumns[pointer] = node.getColName();
naHandlings[pointer] = getNaDirection(node);
levels[pointer] = nodeLevels;
predictions[pointer] = node.getPredValue();
thresholds[pointer] = node.getSplitValue();
}
private static void fillNodeSplitTowardsChildren(final StringBuilder nodeDescriptionBuilder, final SharedTreeNode node){
if (!Float.isNaN(node.getSplitValue())) {
nodeDescriptionBuilder.append("Split threshold is ");
if (node.getLeftChild() != null) {
nodeDescriptionBuilder.append(" < ");
nodeDescriptionBuilder.append(node.getSplitValue());
nodeDescriptionBuilder.append(" to the left node (");
nodeDescriptionBuilder.append(node.getLeftChild().getNodeNumber());
nodeDescriptionBuilder.append(")");
}
if (node.getRightChild() != null) {
if (node.getLeftChild() != null) nodeDescriptionBuilder.append(", ");
nodeDescriptionBuilder.append(" >= ");
nodeDescriptionBuilder.append(node.getSplitValue());
nodeDescriptionBuilder.append(" to the right node (");
nodeDescriptionBuilder.append(node.getRightChild().getNodeNumber());
nodeDescriptionBuilder.append(")");
}
nodeDescriptionBuilder.append(".");
} else if (node.isBitset()) {
fillNodeCategoricalSplitDescription(nodeDescriptionBuilder, node);
}
}
private static int[] extractNodeLevels(final SharedTreeNode node) {
final BitSet childInclusiveLevels = node.getInclusiveLevels();
final int cardinality = childInclusiveLevels.cardinality();
if (cardinality > 0) {
int[] nodeLevels = MemoryManager.malloc4(cardinality);
int bitsignCounter = 0;
for (int i = childInclusiveLevels.nextSetBit(0); i >= 0; i = childInclusiveLevels.nextSetBit(i + 1)) {
nodeLevels[bitsignCounter] = i;
bitsignCounter++;
}
return nodeLevels;
}
return null;
}
private static void fillNodeCategoricalSplitDescription(final StringBuilder nodeDescriptionBuilder, final SharedTreeNode node) {
final SharedTreeNode leftChild = node.getLeftChild();
final SharedTreeNode rightChild = node.getRightChild();
final int[] leftChildLevels = leftChild != null ? extractNodeLevels(leftChild) : null;
final int[] rightChildLevels = rightChild != null ? extractNodeLevels(rightChild) : null;
if (leftChild != null && leftChildLevels != null) {
nodeDescriptionBuilder.append(" Left child node (");
nodeDescriptionBuilder.append(leftChild.getNodeNumber());
nodeDescriptionBuilder.append(") inherits categorical levels: ");
for (int nodeLevelsindex = 0; nodeLevelsindex < leftChildLevels.length; nodeLevelsindex++) {
nodeDescriptionBuilder.append(node.getDomainValues()[leftChildLevels[nodeLevelsindex]]);
if (nodeLevelsindex != leftChildLevels.length - 1) nodeDescriptionBuilder.append(",");
}
}
if (rightChild != null && rightChildLevels != null) {
nodeDescriptionBuilder.append(". Right child node (");
nodeDescriptionBuilder.append(rightChild.getNodeNumber());
nodeDescriptionBuilder.append(") inherits categorical levels: ");
for (int nodeLevelsindex = 0; nodeLevelsindex < rightChildLevels.length; nodeLevelsindex++) {
nodeDescriptionBuilder.append(node.getDomainValues()[rightChildLevels[nodeLevelsindex]]);
if (nodeLevelsindex != rightChildLevels.length - 1) nodeDescriptionBuilder.append(",");
}
}
nodeDescriptionBuilder.append(". ");
}
private static String getNaDirection(final SharedTreeNode node) {
final boolean leftNa = node.getLeftChild() != null && node.getLeftChild().isInclusiveNa();
final boolean rightNa = node.getRightChild() != null && node.getRightChild().isInclusiveNa();
assert !(leftNa && rightNa);
if (leftNa) {
return "LEFT";
} else if (rightNa) {
return "RIGHT";
}
return null; // No direction
}
public static class TreeProperties {
public int[] _leftChildren;
public int[] _rightChildren;
public String[] _descriptions; // General node description, typically containing the serialized threshold or the inherited categorical (domain) levels
public float[] _thresholds;
public String[] _features;
public int[][] levels; // Categorical levels; indices into the list of categorical values that already exists within the model on the client.
public String[] _nas;
public float[] _predictions; // Prediction values on terminal nodes
}
}
|
|
package com.charlesmadere.hummingbird.models;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import com.charlesmadere.hummingbird.misc.MiscUtils;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.Iterator;
public class Feed implements Hydratable {
@Nullable
@SerializedName("story")
private AbsStory mStory;
@Nullable
@SerializedName("anime")
private ArrayList<Anime> mAnime;
@Nullable
@SerializedName("notifications")
private ArrayList<AbsNotification> mNotifications;
@Nullable
@SerializedName("stories")
private ArrayList<AbsStory> mStories;
@Nullable
@SerializedName("substories")
private ArrayList<AbsSubstory> mSubstories;
@Nullable
@SerializedName("library_entries")
private ArrayList<AnimeLibraryEntry> mAnimeLibraryEntries;
@Nullable
@SerializedName("reviews")
private ArrayList<AnimeReview> mAnimeReviews;
@Nullable
@SerializedName("groups")
private ArrayList<Group> mGroups;
@Nullable
@SerializedName("group_members")
private ArrayList<GroupMember> mGroupMembers;
@Nullable
@SerializedName("manga")
private ArrayList<Manga> mManga;
@Nullable
@SerializedName("manga_library_entries")
private ArrayList<MangaLibraryEntry> mMangaLibraryEntries;
@Nullable
@SerializedName("users")
private ArrayList<User> mUsers;
@Nullable
@SerializedName("meta")
private Metadata mMetadata;
public void addAnime(@Nullable final ArrayList<Anime> anime) {
if (anime == null || anime.isEmpty()) {
return;
}
if (hasAnime()) {
MiscUtils.exclusiveAdd(mAnime, anime);
} else {
mAnime = anime;
}
}
@Nullable
public ArrayList<Anime> getAnime() {
return mAnime;
}
@Nullable
public ArrayList<String> getAnimeIdsNeededForAnimeReviews() {
if (!hasAnimeReviews()) {
return null;
}
final ArrayList<String> animeIds = new ArrayList<>();
// noinspection ConstantConditions
for (final AnimeReview animeReview : mAnimeReviews) {
if (animeReview.getAnime() == null) {
final String animeId = animeReview.getAnimeId();
if (!animeIds.contains(animeId)) {
animeIds.add(animeId);
}
}
}
if (animeIds.isEmpty()) {
return null;
} else {
return animeIds;
}
}
@Nullable
public ArrayList<AnimeLibraryEntry> getAnimeLibraryEntries() {
return mAnimeLibraryEntries;
}
public int getAnimeLibraryEntriesSize() {
return mAnimeLibraryEntries == null ? 0 : mAnimeLibraryEntries.size();
}
@Nullable
public ArrayList<AnimeReview> getAnimeReviews() {
return mAnimeReviews;
}
public int getAnimeReviewsSize() {
return mAnimeReviews == null ? 0 : mAnimeReviews.size();
}
public int getCursor() {
if (mMetadata == null || mMetadata.mCursor == null) {
return 1;
} else {
return mMetadata.mCursor;
}
}
@Nullable
public ArrayList<GroupMember> getGroupMembers() {
return mGroupMembers;
}
public int getGroupMembersSize() {
return mGroupMembers == null ? 0 : mGroupMembers.size();
}
@Nullable
public ArrayList<Group> getGroups() {
return mGroups;
}
public int getGroupsSize() {
return mGroups == null ? 0 : mGroups.size();
}
@Nullable
public ArrayList<Manga> getManga() {
return mManga;
}
@Nullable
public ArrayList<MangaLibraryEntry> getMangaLibraryEntries() {
return mMangaLibraryEntries;
}
public int getMangaLibraryEntriesSize() {
return mMangaLibraryEntries == null ? 0 : mMangaLibraryEntries.size();
}
@Nullable
public ArrayList<AbsNotification> getNotifications() {
return mNotifications;
}
public int getNotificationsSize() {
return mNotifications == null ? 0 : mNotifications.size();
}
@Nullable
public ArrayList<AbsStory> getStories() {
return mStories;
}
public int getStoriesSize() {
return mStories == null ? 0 : mStories.size();
}
@Nullable
public AbsStory getStory() {
return mStory;
}
@Nullable
public ArrayList<AbsSubstory> getSubstories() {
return mSubstories;
}
@Nullable
public ArrayList<AbsSubstory> getSubstories(final AbsSubstory.Type type) {
if (!hasSubstories()) {
return null;
}
// noinspection ConstantConditions
final ArrayList<AbsSubstory> substories = new ArrayList<>(mSubstories.size());
for (final AbsSubstory substory : mSubstories) {
if (substory.getType() == type) {
substories.add(substory);
}
}
if (substories.isEmpty()) {
return null;
} else {
substories.trimToSize();
return substories;
}
}
public int getSubstoriesSize() {
return mSubstories == null ? 0 : mSubstories.size();
}
@Nullable
public ArrayList<User> getUsers() {
return mUsers;
}
public int getUsersSize() {
return mUsers == null ? 0 : mUsers.size();
}
public boolean hasAnime() {
return mAnime != null && !mAnime.isEmpty();
}
public boolean hasAnimeLibraryEntries() {
return mAnimeLibraryEntries != null && !mAnimeLibraryEntries.isEmpty();
}
public boolean hasAnimeReviews() {
return mAnimeReviews != null && !mAnimeReviews.isEmpty();
}
public boolean hasCursor() {
return mMetadata != null && mMetadata.mCursor != null;
}
public boolean hasGroupMembers() {
return mGroupMembers != null && !mGroupMembers.isEmpty();
}
public boolean hasGroups() {
return mGroups != null && !mGroups.isEmpty();
}
public boolean hasManga() {
return mManga != null && !mManga.isEmpty();
}
public boolean hasMangaLibraryEntries() {
return mMangaLibraryEntries != null && !mMangaLibraryEntries.isEmpty();
}
public boolean hasNotifications() {
return mNotifications != null && !mNotifications.isEmpty();
}
public boolean hasStories() {
return mStories != null && !mStories.isEmpty();
}
public boolean hasStory() {
return mStory != null;
}
public boolean hasSubstories() {
return mSubstories != null && !mSubstories.isEmpty();
}
public boolean hasUsers() {
return mUsers != null && !mUsers.isEmpty();
}
@Override
@WorkerThread
public void hydrate() {
if (hasAnimeLibraryEntries()) {
// noinspection ConstantConditions
for (final AnimeLibraryEntry ale : mAnimeLibraryEntries) {
ale.hydrate(this);
}
}
if (hasAnimeReviews()) {
// noinspection ConstantConditions
final Iterator<AnimeReview> iterator = mAnimeReviews.iterator();
do {
final AnimeReview animeReview = iterator.next();
if (!animeReview.hydrate(this)) {
iterator.remove();
}
} while (iterator.hasNext());
}
if (hasGroupMembers()) {
// noinspection ConstantConditions
for (final GroupMember groupMember : mGroupMembers) {
groupMember.hydrate(this);
}
}
if (hasGroups()) {
// noinspection ConstantConditions
for (final Group group : mGroups) {
group.hydrate();
}
}
if (hasMangaLibraryEntries()) {
// noinspection ConstantConditions
for (final MangaLibraryEntry mle : mMangaLibraryEntries) {
mle.hydrate(this);
}
}
if (hasNotifications()) {
// noinspection ConstantConditions
for (final AbsNotification notification : mNotifications) {
notification.hydrate(this);
}
}
if (hasSubstories()) {
// noinspection ConstantConditions
for (final AbsSubstory substory : mSubstories) {
substory.hydrate(this);
}
}
if (hasStories()) {
// noinspection ConstantConditions
for (final AbsStory story : mStories) {
story.hydrate(this);
}
}
if (hasStory()) {
// noinspection ConstantConditions
mStory.hydrate(this);
}
if (hasUsers()) {
// noinspection ConstantConditions
for (final User user : mUsers) {
user.hydrate();
}
}
}
public void merge(@Nullable final Feed feed) {
if (feed == null) {
return;
}
addAnime(feed.getAnime());
if (feed.hasAnimeLibraryEntries()) {
if (hasAnimeLibraryEntries()) {
MiscUtils.exclusiveAdd(mAnimeLibraryEntries, feed.getAnimeLibraryEntries());
} else {
mAnimeLibraryEntries = feed.getAnimeLibraryEntries();
}
}
if (feed.hasAnimeReviews()) {
if (hasAnimeReviews()) {
MiscUtils.exclusiveAdd(mAnimeReviews, feed.getAnimeReviews());
} else {
mAnimeReviews = feed.getAnimeReviews();
}
}
if (feed.hasGroupMembers()) {
if (hasGroupMembers()) {
MiscUtils.exclusiveAdd(mGroupMembers, feed.getGroupMembers());
} else {
mGroupMembers = feed.getGroupMembers();
}
}
if (feed.hasGroups()) {
if (hasGroups()) {
MiscUtils.exclusiveAdd(mGroups, feed.getGroups());
} else {
mGroups = feed.getGroups();
}
}
if (feed.hasManga()) {
if (hasManga()) {
MiscUtils.exclusiveAdd(mManga, feed.getManga());
} else {
mManga = feed.getManga();
}
}
if (feed.hasMangaLibraryEntries()) {
if (hasMangaLibraryEntries()) {
MiscUtils.exclusiveAdd(mMangaLibraryEntries, feed.getMangaLibraryEntries());
} else {
mMangaLibraryEntries = feed.getMangaLibraryEntries();
}
}
if (feed.hasNotifications()) {
if (hasNotifications()) {
MiscUtils.exclusiveAdd(mNotifications, feed.getNotifications());
} else {
mNotifications = feed.getNotifications();
}
}
if (feed.hasStories()) {
if (hasStories()) {
MiscUtils.exclusiveAdd(mStories, feed.getStories());
} else {
mStories = feed.getStories();
}
}
if (feed.hasSubstories()) {
if (hasSubstories()) {
MiscUtils.exclusiveAdd(mSubstories, feed.getSubstories());
} else {
mSubstories = feed.getSubstories();
}
}
if (feed.hasUsers()) {
if (hasUsers()) {
MiscUtils.exclusiveAdd(mUsers, feed.getUsers());
} else {
mUsers = feed.getUsers();
}
}
mMetadata = feed.mMetadata;
}
public static class Metadata implements Parcelable {
@Nullable
@SerializedName("cursor")
private Integer mCursor;
@Override
public String toString() {
return mCursor == null ? null : String.valueOf(mCursor);
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(final Parcel dest, final int flags) {
dest.writeValue(mCursor);
}
public static final Creator<Metadata> CREATOR = new Creator<Metadata>() {
@Override
public Metadata createFromParcel(final Parcel source) {
final Metadata m = new Metadata();
m.mCursor = (Integer) source.readValue(Integer.class.getClassLoader());
return m;
}
@Override
public Metadata[] newArray(final int size) {
return new Metadata[size];
}
};
}
}
|
|
/*
* Copyright 2012 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.wallet;
import org.bitcoinj.core.*;
import org.bitcoinj.core.TransactionConfidence.ConfidenceType;
import org.bitcoinj.crypto.KeyCrypter;
import org.bitcoinj.crypto.KeyCrypterScrypt;
import org.bitcoinj.script.Script;
import org.bitcoinj.signers.LocalTransactionSigner;
import org.bitcoinj.signers.TransactionSigner;
import org.bitcoinj.utils.ExchangeRate;
import org.bitcoinj.utils.Fiat;
import org.bitcoinj.wallet.Protos.Wallet.EncryptionType;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.TextFormat;
import com.google.protobuf.WireFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Serialize and de-serialize a wallet to a byte stream containing a
* <a href="https://developers.google.com/protocol-buffers/docs/overview">protocol buffer</a>. Protocol buffers are
* a data interchange format developed by Google with an efficient binary representation, a type safe specification
* language and compilers that generate code to work with those data structures for many languages. Protocol buffers
* can have their format evolved over time: conceptually they represent data using (tag, length, value) tuples. The
* format is defined by the <tt>wallet.proto</tt> file in the bitcoinj source distribution.<p>
*
* This class is used through its static methods. The most common operations are writeWallet and readWallet, which do
* the obvious operations on Output/InputStreams. You can use a {@link java.io.ByteArrayInputStream} and equivalent
* {@link java.io.ByteArrayOutputStream} if you'd like byte arrays instead. The protocol buffer can also be manipulated
* in its object form if you'd like to modify the flattened data structure before serialization to binary.<p>
*
* You can extend the wallet format with additional fields specific to your application if you want, but make sure
* to either put the extra data in the provided extension areas, or select tag numbers that are unlikely to be used
* by anyone else.<p>
*
* @author Miron Cuperman
* @author Andreas Schildbach
*/
public class WalletProtobufSerializer {
private static final Logger log = LoggerFactory.getLogger(WalletProtobufSerializer.class);
/** Current version used for serializing wallets. A version higher than this is considered from the future. */
public static final int CURRENT_WALLET_VERSION = Protos.Wallet.getDefaultInstance().getVersion();
// 512 MB
private static final int WALLET_SIZE_LIMIT = 512 * 1024 * 1024;
// Used for de-serialization
protected Map<ByteString, Transaction> txMap;
private boolean requireMandatoryExtensions = true;
private boolean requireAllExtensionsKnown = false;
private int walletWriteBufferSize = CodedOutputStream.DEFAULT_BUFFER_SIZE;
public interface WalletFactory {
Wallet create(NetworkParameters params, KeyChainGroup keyChainGroup);
}
private final WalletFactory factory;
private KeyChainFactory keyChainFactory;
public WalletProtobufSerializer() {
this(new WalletFactory() {
@Override
public Wallet create(NetworkParameters params, KeyChainGroup keyChainGroup) {
return new Wallet(params, keyChainGroup);
}
});
}
public WalletProtobufSerializer(WalletFactory factory) {
txMap = new HashMap<>();
this.factory = factory;
this.keyChainFactory = new DefaultKeyChainFactory();
}
public void setKeyChainFactory(KeyChainFactory keyChainFactory) {
this.keyChainFactory = keyChainFactory;
}
/**
* If this property is set to false, then unknown mandatory extensions will be ignored instead of causing load
* errors. You should only use this if you know exactly what you are doing, as the extension data will NOT be
* round-tripped, possibly resulting in a corrupted wallet if you save it back out again.
*/
public void setRequireMandatoryExtensions(boolean value) {
requireMandatoryExtensions = value;
}
/**
* If this property is set to true, the wallet will fail to load if any extensions found in it are unknown.
*/
public void setRequireAllExtensionsKnown(boolean value) {
requireAllExtensionsKnown = value;
}
/**
* Changes the buffer size used when writing the wallet to an output stream. The default is {@link com.google.protobuf.CodedOutputStream#DEFAULT_BUFFER_SIZE}.
* @param walletWriteBufferSize buffer size in bytes
*/
public void setWalletWriteBufferSize(int walletWriteBufferSize) {
this.walletWriteBufferSize = walletWriteBufferSize;
}
/**
* Formats the given wallet (transactions and keys) to the given output stream in protocol buffer format.<p>
*
* Equivalent to <tt>walletToProto(wallet).writeTo(output);</tt>
*/
public void writeWallet(Wallet wallet, OutputStream output) throws IOException {
Protos.Wallet walletProto = walletToProto(wallet);
final CodedOutputStream codedOutput = CodedOutputStream.newInstance(output, this.walletWriteBufferSize);
walletProto.writeTo(codedOutput);
codedOutput.flush();
}
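// Usage sketch (illustrative only, not part of the serializer): round-tripping a wallet through an
// in-memory byte array using the methods defined in this class, as suggested by the class javadoc.
// The "wallet" variable is a placeholder for an existing org.bitcoinj.wallet.Wallet instance, and
// java.io.ByteArrayInputStream / ByteArrayOutputStream would need to be imported.
//
//   WalletProtobufSerializer serializer = new WalletProtobufSerializer();
//   ByteArrayOutputStream out = new ByteArrayOutputStream();
//   serializer.writeWallet(wallet, out);                          // wallet -> protobuf bytes
//   Wallet copy = serializer.readWallet(
//       new ByteArrayInputStream(out.toByteArray()));             // protobuf bytes -> wallet
//
// (IOException / UnreadableWalletException handling omitted for brevity.)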
/**
* Returns the given wallet formatted as text. The text format is that used by protocol buffers and although it
* can also be parsed using {@link TextFormat#merge(CharSequence, com.google.protobuf.Message.Builder)},
* it is designed more for debugging than storage. It is not well specified and wallets are largely binary data
* structures anyway, consisting as they do of keys (large random numbers) and {@link Transaction}s which also
* mostly contain keys and hashes.
*/
public String walletToText(Wallet wallet) {
Protos.Wallet walletProto = walletToProto(wallet);
return TextFormat.printToString(walletProto);
}
/**
* Converts the given wallet to the object representation of the protocol buffers. This can be modified, or
* additional data fields set, before serialization takes place.
*/
public Protos.Wallet walletToProto(Wallet wallet) {
Protos.Wallet.Builder walletBuilder = Protos.Wallet.newBuilder();
walletBuilder.setNetworkIdentifier(wallet.getNetworkParameters().getId());
if (wallet.getDescription() != null) {
walletBuilder.setDescription(wallet.getDescription());
}
for (WalletTransaction wtx : wallet.getWalletTransactions()) {
Protos.Transaction txProto = makeTxProto(wtx);
walletBuilder.addTransaction(txProto);
}
walletBuilder.addAllKey(wallet.serializeKeyChainGroupToProtobuf());
for (Script script : wallet.getWatchedScripts()) {
Protos.Script protoScript =
Protos.Script.newBuilder()
.setProgram(ByteString.copyFrom(script.getProgram()))
.setCreationTimestamp(script.getCreationTimeSeconds() * 1000)
.build();
walletBuilder.addWatchedScript(protoScript);
}
// Populate the lastSeenBlockHash field.
Sha256Hash lastSeenBlockHash = wallet.getLastBlockSeenHash();
if (lastSeenBlockHash != null) {
walletBuilder.setLastSeenBlockHash(hashToByteString(lastSeenBlockHash));
walletBuilder.setLastSeenBlockHeight(wallet.getLastBlockSeenHeight());
}
if (wallet.getLastBlockSeenTimeSecs() > 0)
walletBuilder.setLastSeenBlockTimeSecs(wallet.getLastBlockSeenTimeSecs());
// Populate the scrypt parameters.
KeyCrypter keyCrypter = wallet.getKeyCrypter();
if (keyCrypter == null) {
// The wallet is unencrypted.
walletBuilder.setEncryptionType(EncryptionType.UNENCRYPTED);
} else {
// The wallet is encrypted.
walletBuilder.setEncryptionType(keyCrypter.getUnderstoodEncryptionType());
if (keyCrypter instanceof KeyCrypterScrypt) {
KeyCrypterScrypt keyCrypterScrypt = (KeyCrypterScrypt) keyCrypter;
walletBuilder.setEncryptionParameters(keyCrypterScrypt.getScryptParameters());
} else {
// Some other form of encryption has been specified that we do not know how to persist.
throw new RuntimeException("The wallet has encryption of type '" + keyCrypter.getUnderstoodEncryptionType() + "' but this WalletProtobufSerializer does not know how to persist this.");
}
}
if (wallet.getKeyRotationTime() != null) {
long timeSecs = wallet.getKeyRotationTime().getTime() / 1000;
walletBuilder.setKeyRotationTime(timeSecs);
}
populateExtensions(wallet, walletBuilder);
for (Map.Entry<String, ByteString> entry : wallet.getTags().entrySet()) {
Protos.Tag.Builder tag = Protos.Tag.newBuilder().setTag(entry.getKey()).setData(entry.getValue());
walletBuilder.addTags(tag);
}
for (TransactionSigner signer : wallet.getTransactionSigners()) {
// do not serialize LocalTransactionSigner as it's being added implicitly
if (signer instanceof LocalTransactionSigner)
continue;
Protos.TransactionSigner.Builder protoSigner = Protos.TransactionSigner.newBuilder();
protoSigner.setClassName(signer.getClass().getName());
protoSigner.setData(ByteString.copyFrom(signer.serialize()));
walletBuilder.addTransactionSigners(protoSigner);
}
// Populate the wallet version.
walletBuilder.setVersion(wallet.getVersion());
return walletBuilder.build();
}
private static void populateExtensions(Wallet wallet, Protos.Wallet.Builder walletBuilder) {
for (WalletExtension extension : wallet.getExtensions().values()) {
Protos.Extension.Builder proto = Protos.Extension.newBuilder();
proto.setId(extension.getWalletExtensionID());
proto.setMandatory(extension.isWalletExtensionMandatory());
proto.setData(ByteString.copyFrom(extension.serializeWalletExtension()));
walletBuilder.addExtension(proto);
}
}
private static Protos.Transaction makeTxProto(WalletTransaction wtx) {
Transaction tx = wtx.getTransaction();
Protos.Transaction.Builder txBuilder = Protos.Transaction.newBuilder();
txBuilder.setPool(getProtoPool(wtx))
.setHash(hashToByteString(tx.getHash()))
.setVersion((int) tx.getVersion());
if (tx.getUpdateTime() != null) {
txBuilder.setUpdatedAt(tx.getUpdateTime().getTime());
}
if (tx.getLockTime() > 0) {
txBuilder.setLockTime((int)tx.getLockTime());
}
// Handle inputs.
for (TransactionInput input : tx.getInputs()) {
Protos.TransactionInput.Builder inputBuilder = Protos.TransactionInput.newBuilder()
.setScriptBytes(ByteString.copyFrom(input.getScriptBytes()))
.setTransactionOutPointHash(hashToByteString(input.getOutpoint().getHash()))
.setTransactionOutPointIndex((int) input.getOutpoint().getIndex());
if (input.hasSequence())
inputBuilder.setSequence((int) input.getSequenceNumber());
if (input.getValue() != null)
inputBuilder.setValue(input.getValue().value);
txBuilder.addTransactionInput(inputBuilder);
}
// Handle outputs.
for (TransactionOutput output : tx.getOutputs()) {
Protos.TransactionOutput.Builder outputBuilder = Protos.TransactionOutput.newBuilder()
.setScriptBytes(ByteString.copyFrom(output.getScriptBytes()))
.setValue(output.getValue().value);
final TransactionInput spentBy = output.getSpentBy();
if (spentBy != null) {
Sha256Hash spendingHash = spentBy.getParentTransaction().getHash();
int spentByTransactionIndex = spentBy.getParentTransaction().getInputs().indexOf(spentBy);
outputBuilder.setSpentByTransactionHash(hashToByteString(spendingHash))
.setSpentByTransactionIndex(spentByTransactionIndex);
}
txBuilder.addTransactionOutput(outputBuilder);
}
// Handle which blocks tx was seen in.
final Map<Sha256Hash, Integer> appearsInHashes = tx.getAppearsInHashes();
if (appearsInHashes != null) {
for (Map.Entry<Sha256Hash, Integer> entry : appearsInHashes.entrySet()) {
txBuilder.addBlockHash(hashToByteString(entry.getKey()));
txBuilder.addBlockRelativityOffsets(entry.getValue());
}
}
if (tx.hasConfidence()) {
TransactionConfidence confidence = tx.getConfidence();
Protos.TransactionConfidence.Builder confidenceBuilder = Protos.TransactionConfidence.newBuilder();
writeConfidence(txBuilder, confidence, confidenceBuilder);
}
Protos.Transaction.Purpose purpose;
switch (tx.getPurpose()) {
case UNKNOWN: purpose = Protos.Transaction.Purpose.UNKNOWN; break;
case USER_PAYMENT: purpose = Protos.Transaction.Purpose.USER_PAYMENT; break;
case KEY_ROTATION: purpose = Protos.Transaction.Purpose.KEY_ROTATION; break;
case ASSURANCE_CONTRACT_CLAIM: purpose = Protos.Transaction.Purpose.ASSURANCE_CONTRACT_CLAIM; break;
case ASSURANCE_CONTRACT_PLEDGE: purpose = Protos.Transaction.Purpose.ASSURANCE_CONTRACT_PLEDGE; break;
case ASSURANCE_CONTRACT_STUB: purpose = Protos.Transaction.Purpose.ASSURANCE_CONTRACT_STUB; break;
case RAISE_FEE: purpose = Protos.Transaction.Purpose.RAISE_FEE; break;
default:
throw new RuntimeException("New tx purpose serialization not implemented.");
}
txBuilder.setPurpose(purpose);
ExchangeRate exchangeRate = tx.getExchangeRate();
if (exchangeRate != null) {
Protos.ExchangeRate.Builder exchangeRateBuilder = Protos.ExchangeRate.newBuilder()
.setCoinValue(exchangeRate.coin.value).setFiatValue(exchangeRate.fiat.value)
.setFiatCurrencyCode(exchangeRate.fiat.currencyCode);
txBuilder.setExchangeRate(exchangeRateBuilder);
}
if (tx.getMemo() != null)
txBuilder.setMemo(tx.getMemo());
return txBuilder.build();
}
private static Protos.Transaction.Pool getProtoPool(WalletTransaction wtx) {
switch (wtx.getPool()) {
case UNSPENT: return Protos.Transaction.Pool.UNSPENT;
case SPENT: return Protos.Transaction.Pool.SPENT;
case DEAD: return Protos.Transaction.Pool.DEAD;
case PENDING: return Protos.Transaction.Pool.PENDING;
default:
throw new RuntimeException("Unreachable");
}
}
private static void writeConfidence(Protos.Transaction.Builder txBuilder,
TransactionConfidence confidence,
Protos.TransactionConfidence.Builder confidenceBuilder) {
synchronized (confidence) {
confidenceBuilder.setType(Protos.TransactionConfidence.Type.valueOf(confidence.getConfidenceType().getValue()));
if (confidence.getConfidenceType() == ConfidenceType.BUILDING) {
confidenceBuilder.setAppearedAtHeight(confidence.getAppearedAtChainHeight());
confidenceBuilder.setDepth(confidence.getDepthInBlocks());
}
if (confidence.getConfidenceType() == ConfidenceType.DEAD) {
// Copy in the overriding transaction, if available.
// (A dead coinbase transaction has no overriding transaction).
if (confidence.getOverridingTransaction() != null) {
Sha256Hash overridingHash = confidence.getOverridingTransaction().getHash();
confidenceBuilder.setOverridingTransaction(hashToByteString(overridingHash));
}
}
TransactionConfidence.Source source = confidence.getSource();
switch (source) {
case SELF: confidenceBuilder.setSource(Protos.TransactionConfidence.Source.SOURCE_SELF); break;
case NETWORK: confidenceBuilder.setSource(Protos.TransactionConfidence.Source.SOURCE_NETWORK); break;
case UNKNOWN:
// Fall through.
default:
confidenceBuilder.setSource(Protos.TransactionConfidence.Source.SOURCE_UNKNOWN); break;
}
}
for (PeerAddress address : confidence.getBroadcastBy()) {
Protos.PeerAddress proto = Protos.PeerAddress.newBuilder()
.setIpAddress(ByteString.copyFrom(address.getAddr().getAddress()))
.setPort(address.getPort())
.setServices(address.getServices().longValue())
.build();
confidenceBuilder.addBroadcastBy(proto);
}
Date lastBroadcastedAt = confidence.getLastBroadcastedAt();
if (lastBroadcastedAt != null)
confidenceBuilder.setLastBroadcastedAt(lastBroadcastedAt.getTime());
txBuilder.setConfidence(confidenceBuilder);
}
public static ByteString hashToByteString(Sha256Hash hash) {
return ByteString.copyFrom(hash.getBytes());
}
public static Sha256Hash byteStringToHash(ByteString bs) {
return Sha256Hash.wrap(bs.toByteArray());
}
/**
* <p>Loads wallet data from the given input stream and returns a new Wallet object. This is primarily
* useful when you wish to pre-register extension objects. Note that if loading fails, an
* {@link UnreadableWalletException} is thrown and the partially read data should be discarded.</p>
*
* <p>A wallet can be unreadable for various reasons, such as inability to open the file, corrupt data, internally
* inconsistent data, a wallet extension marked as mandatory that cannot be handled and so on. You should always
* handle {@link UnreadableWalletException} and communicate failure to the user in an appropriate manner.</p>
*
* @throws UnreadableWalletException thrown in various error conditions (see description).
*/
public Wallet readWallet(InputStream input, @Nullable WalletExtension... walletExtensions) throws UnreadableWalletException {
return readWallet(input, false, walletExtensions);
}
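// Illustrative error handling (an assumption about the calling application, not part of this class):
// as the javadoc above notes, UnreadableWalletException should always be handled and reported to the
// user rather than allowed to crash the caller. "walletFile" is a placeholder, and
// java.io.FileInputStream would need to be imported.
//
//   try (InputStream in = new FileInputStream(walletFile)) {
//       Wallet wallet = new WalletProtobufSerializer().readWallet(in);
//   } catch (UnreadableWalletException e) {
//       // corrupt data, wrong network, future version, unknown mandatory extension, ...
//   } catch (IOException e) {
//       // could not open or read the file
//   }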
/**
* <p>Loads wallet data from the given input stream and returns a new Wallet object. This is primarily
* useful when you wish to pre-register extension objects. Note that if loading fails the wallet being
* built may be in an indeterminate state and should be thrown away. Do not simply call this method
* again on the same data with {@code forceReset} set to {@code true}; it won't work.</p>
*
* <p>If {@code forceReset} is {@code true}, then no transactions are loaded from the wallet, and it is configured
* to replay transactions from the blockchain (as if the wallet had been loaded and {@link Wallet#reset()}
* had been called immediately thereafter).</p>
*
* <p>A wallet can be unreadable for various reasons, such as inability to open the file, corrupt data, internally
* inconsistent data, a wallet extension marked as mandatory that cannot be handled and so on. You should always
* handle {@link UnreadableWalletException} and communicate failure to the user in an appropriate manner.</p>
*
* @throws UnreadableWalletException thrown in various error conditions (see description).
*/
public Wallet readWallet(InputStream input, boolean forceReset, @Nullable WalletExtension[] extensions) throws UnreadableWalletException {
try {
Protos.Wallet walletProto = parseToProto(input);
final String paramsID = walletProto.getNetworkIdentifier();
NetworkParameters params = NetworkParameters.fromID(paramsID);
if (params == null)
throw new UnreadableWalletException("Unknown network parameters ID " + paramsID);
return readWallet(params, extensions, walletProto, forceReset);
} catch (IOException e) {
throw new UnreadableWalletException("Could not parse input stream to protobuf", e);
} catch (IllegalStateException e) {
throw new UnreadableWalletException("Could not parse input stream to protobuf", e);
} catch (IllegalArgumentException e) {
throw new UnreadableWalletException("Could not parse input stream to protobuf", e);
}
}
/**
* <p>Loads wallet data from the given protocol buffer and returns a new Wallet object. This is primarily
* useful when you wish to pre-register extension objects. Note that if loading fails the wallet being
* built may be in an indeterminate state and should be thrown away.</p>
*
* <p>A wallet can be unreadable for various reasons, such as inability to open the file, corrupt data, internally
* inconsistent data, a wallet extension marked as mandatory that cannot be handled and so on. You should always
* handle {@link UnreadableWalletException} and communicate failure to the user in an appropriate manner.</p>
*
* @throws UnreadableWalletException thrown in various error conditions (see description).
*/
public Wallet readWallet(NetworkParameters params, @Nullable WalletExtension[] extensions,
Protos.Wallet walletProto) throws UnreadableWalletException {
return readWallet(params, extensions, walletProto, false);
}
/**
* <p>Loads wallet data from the given protocol buffer and returns a new Wallet object. This is primarily
* useful when you wish to pre-register extension objects. Note that if loading fails the wallet being
* built may be in an indeterminate state and should be thrown away. Do not simply call this method
* again on the same protocol buffer with {@code forceReset} set to {@code true}; it won't work.</p>
*
* <p>If {@code forceReset} is {@code true}, then no transactions are loaded from the wallet, and it is configured
* to replay transactions from the blockchain (as if the wallet had been loaded and {@link Wallet#reset()}
* had been called immediately thereafter).</p>
*
* <p>A wallet can be unreadable for various reasons, such as inability to open the file, corrupt data, internally
* inconsistent data, a wallet extension marked as mandatory that cannot be handled and so on. You should always
* handle {@link UnreadableWalletException} and communicate failure to the user in an appropriate manner.</p>
*
* @throws UnreadableWalletException thrown in various error conditions (see description).
*/
public Wallet readWallet(NetworkParameters params, @Nullable WalletExtension[] extensions,
Protos.Wallet walletProto, boolean forceReset) throws UnreadableWalletException {
if (walletProto.getVersion() > CURRENT_WALLET_VERSION)
throw new UnreadableWalletException.FutureVersion();
if (!walletProto.getNetworkIdentifier().equals(params.getId()))
throw new UnreadableWalletException.WrongNetwork();
// Read the scrypt parameters that specify how encryption and decryption is performed.
KeyChainGroup keyChainGroup;
if (walletProto.hasEncryptionParameters()) {
Protos.ScryptParameters encryptionParameters = walletProto.getEncryptionParameters();
final KeyCrypterScrypt keyCrypter = new KeyCrypterScrypt(encryptionParameters);
keyChainGroup = KeyChainGroup.fromProtobufEncrypted(params, walletProto.getKeyList(), keyCrypter, keyChainFactory);
} else {
keyChainGroup = KeyChainGroup.fromProtobufUnencrypted(params, walletProto.getKeyList(), keyChainFactory);
}
Wallet wallet = factory.create(params, keyChainGroup);
List<Script> scripts = Lists.newArrayList();
for (Protos.Script protoScript : walletProto.getWatchedScriptList()) {
try {
Script script =
new Script(protoScript.getProgram().toByteArray(),
protoScript.getCreationTimestamp() / 1000);
scripts.add(script);
} catch (ScriptException e) {
throw new UnreadableWalletException("Unparseable script in wallet");
}
}
wallet.addWatchedScripts(scripts);
if (walletProto.hasDescription()) {
wallet.setDescription(walletProto.getDescription());
}
if (forceReset) {
// Should mirror Wallet.reset()
wallet.setLastBlockSeenHash(null);
wallet.setLastBlockSeenHeight(-1);
wallet.setLastBlockSeenTimeSecs(0);
} else {
// Read all transactions and insert into the txMap.
for (Protos.Transaction txProto : walletProto.getTransactionList()) {
readTransaction(txProto, wallet.getParams());
}
// Update transaction outputs to point to inputs that spend them
for (Protos.Transaction txProto : walletProto.getTransactionList()) {
WalletTransaction wtx = connectTransactionOutputs(params, txProto);
wallet.addWalletTransaction(wtx);
}
// Update the lastBlockSeenHash.
if (!walletProto.hasLastSeenBlockHash()) {
wallet.setLastBlockSeenHash(null);
} else {
wallet.setLastBlockSeenHash(byteStringToHash(walletProto.getLastSeenBlockHash()));
}
if (!walletProto.hasLastSeenBlockHeight()) {
wallet.setLastBlockSeenHeight(-1);
} else {
wallet.setLastBlockSeenHeight(walletProto.getLastSeenBlockHeight());
}
// Will default to zero if not present.
wallet.setLastBlockSeenTimeSecs(walletProto.getLastSeenBlockTimeSecs());
if (walletProto.hasKeyRotationTime()) {
wallet.setKeyRotationTime(new Date(walletProto.getKeyRotationTime() * 1000));
}
}
loadExtensions(wallet, extensions != null ? extensions : new WalletExtension[0], walletProto);
for (Protos.Tag tag : walletProto.getTagsList()) {
wallet.setTag(tag.getTag(), tag.getData());
}
for (Protos.TransactionSigner signerProto : walletProto.getTransactionSignersList()) {
try {
Class signerClass = Class.forName(signerProto.getClassName());
TransactionSigner signer = (TransactionSigner)signerClass.newInstance();
signer.deserialize(signerProto.getData().toByteArray());
wallet.addTransactionSigner(signer);
} catch (Exception e) {
throw new UnreadableWalletException("Unable to deserialize TransactionSigner instance: " +
signerProto.getClassName(), e);
}
}
if (walletProto.hasVersion()) {
wallet.setVersion(walletProto.getVersion());
}
// Make sure the object can be re-used to read another wallet without corruption.
txMap.clear();
return wallet;
}
private void loadExtensions(Wallet wallet, WalletExtension[] extensionsList, Protos.Wallet walletProto) throws UnreadableWalletException {
final Map<String, WalletExtension> extensions = new HashMap<>();
for (WalletExtension e : extensionsList)
extensions.put(e.getWalletExtensionID(), e);
// The Wallet object, if subclassed, might have added some extensions to itself already. In that case, don't
// expect them to be passed in, just fetch them here and don't re-add.
extensions.putAll(wallet.getExtensions());
for (Protos.Extension extProto : walletProto.getExtensionList()) {
String id = extProto.getId();
WalletExtension extension = extensions.get(id);
if (extension == null) {
if (extProto.getMandatory()) {
if (requireMandatoryExtensions)
throw new UnreadableWalletException("Unknown mandatory extension in wallet: " + id);
else
log.error("Unknown extension in wallet {}, ignoring", id);
} else if (requireAllExtensionsKnown) {
throw new UnreadableWalletException("Unknown extension in wallet: " + id);
}
} else {
log.info("Loading wallet extension {}", id);
try {
wallet.deserializeExtension(extension, extProto.getData().toByteArray());
} catch (Exception e) {
if (extProto.getMandatory() && requireMandatoryExtensions) {
log.error("Error whilst reading mandatory extension {}, failing to read wallet", id);
throw new UnreadableWalletException("Could not parse mandatory extension in wallet: " + id);
} else if (requireAllExtensionsKnown) {
log.error("Error whilst reading extension {}, failing to read wallet", id);
throw new UnreadableWalletException("Could not parse extension in wallet: " + id);
} else {
log.warn("Error whilst reading extension {}, ignoring extension", id, e);
}
}
}
}
}
/**
* Returns the loaded protocol buffer from the given byte stream. You normally want
* {@link Wallet#loadFromFile(java.io.File, WalletExtension...)} instead - this method is designed for low level
* work involving the wallet file format itself.
*/
public static Protos.Wallet parseToProto(InputStream input) throws IOException {
CodedInputStream codedInput = CodedInputStream.newInstance(input);
codedInput.setSizeLimit(WALLET_SIZE_LIMIT);
return Protos.Wallet.parseFrom(codedInput);
}
private void readTransaction(Protos.Transaction txProto, NetworkParameters params) throws UnreadableWalletException {
Transaction tx = new Transaction(params);
tx.setVersion(txProto.getVersion());
if (txProto.hasUpdatedAt()) {
tx.setUpdateTime(new Date(txProto.getUpdatedAt()));
}
for (Protos.TransactionOutput outputProto : txProto.getTransactionOutputList()) {
Coin value = Coin.valueOf(outputProto.getValue());
byte[] scriptBytes = outputProto.getScriptBytes().toByteArray();
TransactionOutput output = new TransactionOutput(params, tx, value, scriptBytes);
tx.addOutput(output);
}
for (Protos.TransactionInput inputProto : txProto.getTransactionInputList()) {
byte[] scriptBytes = inputProto.getScriptBytes().toByteArray();
TransactionOutPoint outpoint = new TransactionOutPoint(params,
inputProto.getTransactionOutPointIndex() & 0xFFFFFFFFL,
byteStringToHash(inputProto.getTransactionOutPointHash())
);
Coin value = inputProto.hasValue() ? Coin.valueOf(inputProto.getValue()) : null;
TransactionInput input = new TransactionInput(params, tx, scriptBytes, outpoint, value);
if (inputProto.hasSequence())
input.setSequenceNumber(0xffffffffL & inputProto.getSequence());
tx.addInput(input);
}
for (int i = 0; i < txProto.getBlockHashCount(); i++) {
ByteString blockHash = txProto.getBlockHash(i);
int relativityOffset = 0;
if (txProto.getBlockRelativityOffsetsCount() > 0)
relativityOffset = txProto.getBlockRelativityOffsets(i);
tx.addBlockAppearance(byteStringToHash(blockHash), relativityOffset);
}
if (txProto.hasLockTime()) {
tx.setLockTime(0xffffffffL & txProto.getLockTime());
}
if (txProto.hasPurpose()) {
switch (txProto.getPurpose()) {
case UNKNOWN: tx.setPurpose(Transaction.Purpose.UNKNOWN); break;
case USER_PAYMENT: tx.setPurpose(Transaction.Purpose.USER_PAYMENT); break;
case KEY_ROTATION: tx.setPurpose(Transaction.Purpose.KEY_ROTATION); break;
case ASSURANCE_CONTRACT_CLAIM: tx.setPurpose(Transaction.Purpose.ASSURANCE_CONTRACT_CLAIM); break;
case ASSURANCE_CONTRACT_PLEDGE: tx.setPurpose(Transaction.Purpose.ASSURANCE_CONTRACT_PLEDGE); break;
case ASSURANCE_CONTRACT_STUB: tx.setPurpose(Transaction.Purpose.ASSURANCE_CONTRACT_STUB); break;
case RAISE_FEE: tx.setPurpose(Transaction.Purpose.RAISE_FEE); break;
default: throw new RuntimeException("New purpose serialization not implemented");
}
} else {
// Old wallet: assume a user payment as that's the only reason a new tx would have been created back then.
tx.setPurpose(Transaction.Purpose.USER_PAYMENT);
}
if (txProto.hasExchangeRate()) {
Protos.ExchangeRate exchangeRateProto = txProto.getExchangeRate();
tx.setExchangeRate(new ExchangeRate(Coin.valueOf(exchangeRateProto.getCoinValue()), Fiat.valueOf(
exchangeRateProto.getFiatCurrencyCode(), exchangeRateProto.getFiatValue())));
}
if (txProto.hasMemo())
tx.setMemo(txProto.getMemo());
// Transaction should now be complete.
Sha256Hash protoHash = byteStringToHash(txProto.getHash());
if (!tx.getHash().equals(protoHash))
throw new UnreadableWalletException(String.format(Locale.US, "Transaction did not deserialize completely: %s vs %s", tx.getHash(), protoHash));
if (txMap.containsKey(txProto.getHash()))
throw new UnreadableWalletException("Wallet contained duplicate transaction " + byteStringToHash(txProto.getHash()));
txMap.put(txProto.getHash(), tx);
}
private WalletTransaction connectTransactionOutputs(final NetworkParameters params,
final org.bitcoinj.wallet.Protos.Transaction txProto) throws UnreadableWalletException {
Transaction tx = txMap.get(txProto.getHash());
final WalletTransaction.Pool pool;
switch (txProto.getPool()) {
case DEAD: pool = WalletTransaction.Pool.DEAD; break;
case PENDING: pool = WalletTransaction.Pool.PENDING; break;
case SPENT: pool = WalletTransaction.Pool.SPENT; break;
case UNSPENT: pool = WalletTransaction.Pool.UNSPENT; break;
// Upgrade old wallets: inactive pool has been merged with the pending pool.
// Remove this some time after 0.9 is old and everyone has upgraded.
// There should not be any spent outputs in this tx as old wallets would not allow them to be spent
// in this state.
case INACTIVE:
case PENDING_INACTIVE:
pool = WalletTransaction.Pool.PENDING;
break;
default:
throw new UnreadableWalletException("Unknown transaction pool: " + txProto.getPool());
}
for (int i = 0 ; i < tx.getOutputs().size() ; i++) {
TransactionOutput output = tx.getOutputs().get(i);
final Protos.TransactionOutput transactionOutput = txProto.getTransactionOutput(i);
if (transactionOutput.hasSpentByTransactionHash()) {
final ByteString spentByTransactionHash = transactionOutput.getSpentByTransactionHash();
Transaction spendingTx = txMap.get(spentByTransactionHash);
if (spendingTx == null) {
throw new UnreadableWalletException(String.format(Locale.US, "Could not connect %s to %s",
tx.getHashAsString(), byteStringToHash(spentByTransactionHash)));
}
final int spendingIndex = transactionOutput.getSpentByTransactionIndex();
TransactionInput input = checkNotNull(spendingTx.getInput(spendingIndex));
input.connect(output);
}
}
if (txProto.hasConfidence()) {
Protos.TransactionConfidence confidenceProto = txProto.getConfidence();
TransactionConfidence confidence = tx.getConfidence();
readConfidence(params, tx, confidenceProto, confidence);
}
return new WalletTransaction(pool, tx);
}
private void readConfidence(final NetworkParameters params, final Transaction tx,
final Protos.TransactionConfidence confidenceProto,
final TransactionConfidence confidence) throws UnreadableWalletException {
// We are lenient here because tx confidence is not an essential part of the wallet.
// If the tx has an unknown type of confidence, ignore.
if (!confidenceProto.hasType()) {
log.warn("Unknown confidence type for tx {}", tx.getHashAsString());
return;
}
ConfidenceType confidenceType;
switch (confidenceProto.getType()) {
case BUILDING: confidenceType = ConfidenceType.BUILDING; break;
case DEAD: confidenceType = ConfidenceType.DEAD; break;
// These two are equivalent (must be able to read old wallets).
case NOT_IN_BEST_CHAIN: confidenceType = ConfidenceType.PENDING; break;
case PENDING: confidenceType = ConfidenceType.PENDING; break;
case IN_CONFLICT: confidenceType = ConfidenceType.IN_CONFLICT; break;
case UNKNOWN:
// Fall through.
default:
confidenceType = ConfidenceType.UNKNOWN; break;
}
confidence.setConfidenceType(confidenceType);
if (confidenceProto.hasAppearedAtHeight()) {
if (confidence.getConfidenceType() != ConfidenceType.BUILDING) {
log.warn("Have appearedAtHeight but not BUILDING for tx {}", tx.getHashAsString());
return;
}
confidence.setAppearedAtChainHeight(confidenceProto.getAppearedAtHeight());
}
if (confidenceProto.hasDepth()) {
if (confidence.getConfidenceType() != ConfidenceType.BUILDING) {
log.warn("Have depth but not BUILDING for tx {}", tx.getHashAsString());
return;
}
confidence.setDepthInBlocks(confidenceProto.getDepth());
}
if (confidenceProto.hasOverridingTransaction()) {
if (confidence.getConfidenceType() != ConfidenceType.DEAD) {
log.warn("Have overridingTransaction but not OVERRIDDEN for tx {}", tx.getHashAsString());
return;
}
Transaction overridingTransaction =
txMap.get(confidenceProto.getOverridingTransaction());
if (overridingTransaction == null) {
log.warn("Have overridingTransaction that is not in wallet for tx {}", tx.getHashAsString());
return;
}
confidence.setOverridingTransaction(overridingTransaction);
}
for (Protos.PeerAddress proto : confidenceProto.getBroadcastByList()) {
InetAddress ip;
try {
ip = InetAddress.getByAddress(proto.getIpAddress().toByteArray());
} catch (UnknownHostException e) {
throw new UnreadableWalletException("Peer IP address does not have the right length", e);
}
int port = proto.getPort();
int protocolVersion = params.getProtocolVersionNum(NetworkParameters.ProtocolVersion.CURRENT);
BigInteger services = BigInteger.valueOf(proto.getServices());
PeerAddress address = new PeerAddress(params, ip, port, protocolVersion, services);
confidence.markBroadcastBy(address);
}
if (confidenceProto.hasLastBroadcastedAt())
confidence.setLastBroadcastedAt(new Date(confidenceProto.getLastBroadcastedAt()));
switch (confidenceProto.getSource()) {
case SOURCE_SELF: confidence.setSource(TransactionConfidence.Source.SELF); break;
case SOURCE_NETWORK: confidence.setSource(TransactionConfidence.Source.NETWORK); break;
case SOURCE_UNKNOWN:
// Fall through.
default: confidence.setSource(TransactionConfidence.Source.UNKNOWN); break;
}
}
/**
* Cheap test to see if input stream is a wallet. This checks for a magic value at the beginning of the stream.
*
* @param is
* input stream to test
* @return true if input stream is a wallet
*/
public static boolean isWallet(InputStream is) {
try {
final CodedInputStream cis = CodedInputStream.newInstance(is);
final int tag = cis.readTag();
final int field = WireFormat.getTagFieldNumber(tag);
if (field != 1) // network_identifier
return false;
final String network = cis.readString();
return NetworkParameters.fromID(network) != null;
} catch (IOException x) {
return false;
}
}
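// Hedged usage sketch, not part of the original class: isWallet() consumes bytes from the stream,
// so a caller probing a file would typically open a fresh stream for the check and another one for
// the real load. The helper name and File argument are illustrative assumptions.
private static boolean fileLooksLikeWallet(java.io.File file) {
try (InputStream probe = new java.io.FileInputStream(file)) {
return isWallet(probe);
} catch (IOException e) {
return false;
}
}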
}
|
|
package de.djuelg.neuronizer.storage;
import android.support.annotation.NonNull;
import com.fernandocejas.arrow.optional.Optional;
import java.util.ArrayList;
import java.util.List;
import de.djuelg.neuronizer.domain.model.preview.TodoList;
import de.djuelg.neuronizer.domain.model.todolist.TodoListHeader;
import de.djuelg.neuronizer.domain.model.todolist.TodoListItem;
import de.djuelg.neuronizer.domain.model.todolist.TodoListSection;
import de.djuelg.neuronizer.domain.repository.TodoListRepository;
import de.djuelg.neuronizer.storage.converter.RealmConverter;
import de.djuelg.neuronizer.storage.converter.TodoListDAOConverter;
import de.djuelg.neuronizer.storage.converter.TodoListHeaderDAOConverter;
import de.djuelg.neuronizer.storage.converter.TodoListItemDAOConverter;
import de.djuelg.neuronizer.storage.model.TodoListDAO;
import de.djuelg.neuronizer.storage.model.TodoListHeaderDAO;
import de.djuelg.neuronizer.storage.model.TodoListItemDAO;
import io.realm.Realm;
import io.realm.RealmConfiguration;
import io.realm.RealmResults;
import static de.djuelg.neuronizer.storage.RepositoryManager.createConfiguration;
/**
* Created by dmilicic on 1/29/16.
*/
public class TodoListRepositoryImpl implements TodoListRepository {
private final RealmConfiguration configuration;
public TodoListRepositoryImpl(String realmName) {
this.configuration = createConfiguration(realmName);
}
// RealmConfiguration injectable for testing
TodoListRepositoryImpl(RealmConfiguration configuration) {
this.configuration = configuration;
}
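// Hedged sketch of the testing hook above (comment only; the in-memory Builder call is an
// assumption about the Realm API version in use, not something this class depends on):
//
//   RealmConfiguration testConfig = new RealmConfiguration.Builder().inMemory().name("test.realm").build();
//   TodoListRepository repository = new TodoListRepositoryImpl(testConfig);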
@Override
public List<TodoList> getAll() {
Realm realm = Realm.getInstance(configuration);
RealmResults<TodoListDAO> todoListDAOs = realm.where(TodoListDAO.class).findAll();
List<TodoList> todoLists = new ArrayList<>(todoListDAOs.size());
for (TodoListDAO dao : todoListDAOs) {
todoLists.add(RealmConverter.convert(dao));
}
realm.close();
return todoLists;
}
@Override
public Optional<TodoList> getTodoListById(String uuid) {
Realm realm = Realm.getInstance(configuration);
Optional<TodoListDAO> todoListDAO = Optional.fromNullable(realm.where(TodoListDAO.class).equalTo("uuid", uuid).findFirst());
Optional<TodoList> todoList = todoListDAO.transform(new TodoListDAOConverter());
realm.close();
return todoList;
}
@Override
public Optional<TodoListHeader> getHeaderById(String uuid) {
Realm realm = Realm.getInstance(configuration);
Optional<TodoListHeaderDAO> headerDAO = Optional.fromNullable(realm.where(TodoListHeaderDAO.class).equalTo("uuid", uuid).findFirst());
Optional<TodoListHeader> header = headerDAO.transform(new TodoListHeaderDAOConverter());
realm.close();
return header;
}
@Override
public Optional<TodoListItem> getItemById(String uuid) {
Realm realm = Realm.getInstance(configuration);
Optional<TodoListItemDAO> itemDAO = Optional.fromNullable(realm.where(TodoListItemDAO.class).equalTo("uuid", uuid).findFirst());
Optional<TodoListItem> item = itemDAO.transform(new TodoListItemDAOConverter());
realm.close();
return item;
}
@Override
public List<TodoListSection> getSectionsOfTodoListId(String uuid) {
Realm realm = Realm.getInstance(configuration);
RealmResults<TodoListHeaderDAO> headerDAOs = realm.where(TodoListHeaderDAO.class).equalTo("parentTodoListUuid", uuid).findAll();
List<TodoListSection> sections = new ArrayList<>(headerDAOs.size());
for (TodoListHeaderDAO dao : headerDAOs) {
sections.add(constructSection(realm, dao));
}
realm.close();
return sections;
}
@Override
public List<TodoListHeader> getHeadersOfTodoListId(String uuid) {
Realm realm = Realm.getInstance(configuration);
RealmResults<TodoListHeaderDAO> headerDAOs = realm.where(TodoListHeaderDAO.class).equalTo("parentTodoListUuid", uuid).findAll();
List<TodoListHeader> headers = new ArrayList<>(headerDAOs.size());
for (TodoListHeaderDAO dao : headerDAOs) {
headers.add(RealmConverter.convert(dao));
}
realm.close();
return headers;
}
@Override
public int getHeaderCountOfTodoList(String uuid) {
Realm realm = Realm.getInstance(configuration);
int size = (int) realm.where(TodoListHeaderDAO.class).equalTo("parentTodoListUuid", uuid).count();
realm.close();
return size;
}
@Override
public int getSubItemCountOfHeader(String uuid) {
Realm realm = Realm.getInstance(configuration);
int size = (int) realm.where(TodoListItemDAO.class).equalTo("parentHeaderUuid", uuid).count();
realm.close();
return size;
}
private TodoListSection constructSection(Realm realm, TodoListHeaderDAO headerDAO) {
RealmResults<TodoListItemDAO> itemDAOs = realm.where(TodoListItemDAO.class)
.equalTo("parentTodoListUuid", headerDAO.getParentTodoListUuid())
.equalTo("parentHeaderUuid", headerDAO.getUuid()).findAll();
List<TodoListItem> items = new ArrayList<>(itemDAOs.size());
TodoListHeader header = RealmConverter.convert(headerDAO);
for (TodoListItemDAO dao : itemDAOs) {
items.add(RealmConverter.convert(dao));
}
return new TodoListSection(header, items);
}
@Override
public boolean insert(TodoList todoList) {
Realm realm = Realm.getInstance(configuration);
final TodoListDAO dao = RealmConverter.convert(todoList);
realm.beginTransaction();
try {
realm.copyToRealm(dao);
realm.commitTransaction();
} catch (Throwable throwable) {
realm.cancelTransaction();
realm.close();
return false;
}
realm.close();
return true;
}
@Override
public boolean insert(TodoListHeader header) {
Realm realm = Realm.getInstance(configuration);
final TodoListHeaderDAO dao = RealmConverter.convert(header);
realm.beginTransaction();
try {
realm.copyToRealm(dao);
realm.commitTransaction();
} catch (Throwable throwable) {
realm.cancelTransaction();
realm.close();
return false;
}
realm.close();
return true;
}
@Override
public boolean insert(TodoListItem item) {
Realm realm = Realm.getInstance(configuration);
final TodoListItemDAO dao = RealmConverter.convert(item);
realm.beginTransaction();
try {
realm.copyToRealm(dao);
realm.commitTransaction();
} catch (Throwable throwable) {
realm.cancelTransaction();
realm.close();
return false;
}
realm.close();
return true;
}
@Override
public void delete(final TodoList deletedTodoList) {
Realm realm = Realm.getInstance(configuration);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
realm.where(TodoListItemDAO.class).equalTo("parentTodoListUuid", deletedTodoList.getUuid()).findAll().deleteAllFromRealm();
realm.where(TodoListHeaderDAO.class).equalTo("parentTodoListUuid", deletedTodoList.getUuid()).findAll().deleteAllFromRealm();
TodoListDAO dao = realm.where(TodoListDAO.class).equalTo("uuid", deletedTodoList.getUuid()).findFirst();
if (dao != null) dao.deleteFromRealm();
}
});
realm.close();
}
@Override
public void delete(final TodoListHeader deletedHeader) {
Realm realm = Realm.getInstance(configuration);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
realm.where(TodoListItemDAO.class).equalTo("parentHeaderUuid", deletedHeader.getUuid()).findAll().deleteAllFromRealm();
TodoListHeaderDAO dao = realm.where(TodoListHeaderDAO.class).equalTo("uuid", deletedHeader.getUuid()).findFirst();
if (dao != null) dao.deleteFromRealm();
}
});
realm.close();
}
@Override
public void delete(final TodoListItem deletedItem) {
Realm realm = Realm.getInstance(configuration);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
TodoListItemDAO dao = realm.where(TodoListItemDAO.class).equalTo("uuid", deletedItem.getUuid()).findFirst();
if (dao != null) dao.deleteFromRealm();
}
});
realm.close();
}
@Override
public void update(TodoList updatedTodoList) {
Realm realm = Realm.getInstance(configuration);
final TodoListDAO todoListDAO = RealmConverter.convert(updatedTodoList);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
realm.copyToRealmOrUpdate(todoListDAO);
}
});
realm.close();
}
@Override
public void update(TodoListHeader updatedHeader) {
Realm realm = Realm.getInstance(configuration);
final TodoListHeaderDAO dao = RealmConverter.convert(updatedHeader);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
realm.copyToRealmOrUpdate(dao);
}
});
realm.close();
}
@Override
public void update(TodoListItem updatedItem) {
Realm realm = Realm.getInstance(configuration);
final TodoListItemDAO dao = RealmConverter.convert(updatedItem);
realm.executeTransaction(new Realm.Transaction() {
@Override
public void execute(@NonNull Realm realm) {
realm.copyToRealmOrUpdate(dao);
}
});
realm.close();
}
}
|
|
package com.sam_chordas.android.stockhawk.ui;
import android.app.LoaderManager;
import android.content.Context;
import android.content.CursorLoader;
import android.content.Intent;
import android.content.Loader;
import android.database.Cursor;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.text.InputType;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import com.afollestad.materialdialogs.MaterialDialog;
import com.google.android.gms.gcm.GcmNetworkManager;
import com.google.android.gms.gcm.PeriodicTask;
import com.google.android.gms.gcm.Task;
import com.melnykov.fab.FloatingActionButton;
import com.sam_chordas.android.stockhawk.R;
import com.sam_chordas.android.stockhawk.data.QuoteColumns;
import com.sam_chordas.android.stockhawk.data.QuoteProvider;
import com.sam_chordas.android.stockhawk.rest.QuoteCursorAdapter;
import com.sam_chordas.android.stockhawk.rest.RecyclerViewItemClickListener;
import com.sam_chordas.android.stockhawk.rest.Utils;
import com.sam_chordas.android.stockhawk.service.StockIntentService;
import com.sam_chordas.android.stockhawk.service.StockTaskService;
import com.sam_chordas.android.stockhawk.touch_helper.SimpleItemTouchHelperCallback;
public class MyStocksActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor>{
/**
* Fragment managing the behaviors, interactions and presentation of the navigation drawer.
*/
/**
* Used to store the last screen title. For use in {@link #restoreActionBar()}.
*/
private CharSequence mTitle;
private Intent mServiceIntent;
private ItemTouchHelper mItemTouchHelper;
private static final int CURSOR_LOADER_ID = 0;
private QuoteCursorAdapter mCursorAdapter;
private Context mContext;
private Cursor mCursor;
boolean isConnected;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mContext = this;
ConnectivityManager cm =
(ConnectivityManager) mContext.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
isConnected = activeNetwork != null &&
activeNetwork.isConnectedOrConnecting();
setContentView(R.layout.activity_my_stocks);
// The intent service is for executing immediate pulls from the Yahoo API.
// GCMTaskService can only schedule tasks; it cannot execute them immediately.
mServiceIntent = new Intent(this, StockIntentService.class);
if (savedInstanceState == null){
// Run the initialize task service so that some stocks appear upon an empty database
mServiceIntent.putExtra("tag", "init");
if (isConnected){
startService(mServiceIntent);
} else{
networkToast();
}
}
RecyclerView recyclerView = (RecyclerView) findViewById(R.id.recycler_view);
recyclerView.setLayoutManager(new LinearLayoutManager(this));
getLoaderManager().initLoader(CURSOR_LOADER_ID, null, this);
mCursorAdapter = new QuoteCursorAdapter(this, null);
recyclerView.addOnItemTouchListener(new RecyclerViewItemClickListener(this,
new RecyclerViewItemClickListener.OnItemClickListener() {
@Override public void onItemClick(View v, int position) {
Intent intent = StockDetailActivity.getStartActivityIntent(MyStocksActivity.this, (String) v.getTag());
startActivity(intent);
}
}));
recyclerView.setAdapter(mCursorAdapter);
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
fab.attachToRecyclerView(recyclerView);
fab.setOnClickListener(new View.OnClickListener() {
@Override public void onClick(View v) {
if (isConnected){
new MaterialDialog.Builder(mContext).title(R.string.symbol_search)
.content(R.string.content_test)
.inputType(InputType.TYPE_CLASS_TEXT)
.input(R.string.input_hint, R.string.input_prefill, new MaterialDialog.InputCallback() {
@Override public void onInput(MaterialDialog dialog, CharSequence input) {
// On FAB click, receive user input. Make sure the stock doesn't already exist
// in the DB and proceed accordingly
Cursor c = getContentResolver().query(QuoteProvider.Quotes.CONTENT_URI,
new String[] { QuoteColumns.SYMBOL }, QuoteColumns.SYMBOL + "= ?",
new String[] { input.toString() }, null);
// The cursor is only needed for an existence check; close it before branching to avoid leaking it.
boolean alreadySaved = c != null && c.getCount() != 0;
if (c != null) {
c.close();
}
if (alreadySaved) {
Toast toast =
Toast.makeText(MyStocksActivity.this, R.string.stock_is_saved,
Toast.LENGTH_LONG);
toast.setGravity(Gravity.CENTER, 0, 0);
toast.show();
return;
} else {
// Add the stock to DB
mServiceIntent.putExtra("tag", "add");
mServiceIntent.putExtra("symbol", input.toString());
startService(mServiceIntent);
}
}
})
.show();
} else {
networkToast();
}
}
});
ItemTouchHelper.Callback callback = new SimpleItemTouchHelperCallback(mCursorAdapter);
mItemTouchHelper = new ItemTouchHelper(callback);
mItemTouchHelper.attachToRecyclerView(recyclerView);
mTitle = getTitle();
if (isConnected){
long period = 3600L;
long flex = 10L;
String periodicTag = "periodic";
// create a periodic task to pull stocks once every hour after the app has been opened. This
// is so Widget data stays up to date.
PeriodicTask periodicTask = new PeriodicTask.Builder()
.setService(StockTaskService.class)
.setPeriod(period)
.setFlex(flex)
.setTag(periodicTag)
.setRequiredNetwork(Task.NETWORK_STATE_CONNECTED)
.setRequiresCharging(false)
.build();
// Schedule task with tag "periodic". This ensures that only the stocks present in the DB
// are updated.
GcmNetworkManager.getInstance(this).schedule(periodicTask);
}
}
@Override
public void onResume() {
super.onResume();
getLoaderManager().restartLoader(CURSOR_LOADER_ID, null, this);
}
public void networkToast(){
Toast.makeText(mContext, getString(R.string.network_toast), Toast.LENGTH_SHORT).show();
}
public void restoreActionBar() {
ActionBar actionBar = getSupportActionBar();
if (actionBar == null) return; // the theme may not provide an action bar; nothing to restore
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setTitle(mTitle);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.my_stocks, menu);
restoreActionBar();
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
if (id == R.id.action_change_units){
// this is for changing stock changes from percent value to dollar value
Utils.showPercent = !Utils.showPercent;
this.getContentResolver().notifyChange(QuoteProvider.Quotes.CONTENT_URI, null);
}
return super.onOptionsItemSelected(item);
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args){
// This narrows the return to only the stocks that are most current.
return new CursorLoader(this, QuoteProvider.Quotes.CONTENT_URI,
new String[]{ QuoteColumns._ID, QuoteColumns.SYMBOL, QuoteColumns.BIDPRICE,
QuoteColumns.PERCENT_CHANGE, QuoteColumns.CHANGE, QuoteColumns.ISUP},
QuoteColumns.ISCURRENT + " = ?",
new String[]{"1"},
null);
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data){
mCursorAdapter.swapCursor(data);
mCursor = data;
}
@Override
public void onLoaderReset(Loader<Cursor> loader){
mCursorAdapter.swapCursor(null);
}
}
|
|
package org.apache.solr.core;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
public class TestCoreDiscovery extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore();
}
private final File solrHomeDirectory = createTempDir().toFile();
private void setMeUp(String alternateCoreDir) throws Exception {
System.setProperty("solr.solr.home", solrHomeDirectory.getAbsolutePath());
String xmlStr = SOLR_XML;
if (alternateCoreDir != null) {
xmlStr = xmlStr.replace("<solr>", "<solr> <str name=\"coreRootDirectory\">" + alternateCoreDir + "</str> ");
}
File tmpFile = new File(solrHomeDirectory, ConfigSolr.SOLR_XML_FILE);
FileUtils.write(tmpFile, xmlStr, IOUtils.UTF_8);
}
private void setMeUp() throws Exception {
setMeUp(null);
}
private Properties makeCorePropFile(String name, boolean isLazy, boolean loadOnStartup, String... extraProps) {
Properties props = new Properties();
props.put(CoreDescriptor.CORE_NAME, name);
props.put(CoreDescriptor.CORE_SCHEMA, "schema-tiny.xml");
props.put(CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml");
props.put(CoreDescriptor.CORE_TRANSIENT, Boolean.toString(isLazy));
props.put(CoreDescriptor.CORE_LOADONSTARTUP, Boolean.toString(loadOnStartup));
props.put(CoreDescriptor.CORE_DATADIR, "${core.dataDir:stuffandnonsense}");
props.put(CoreDescriptor.CORE_INSTDIR, "totallybogus"); // For testing that this property is ignored if present.
for (String extra : extraProps) {
String[] parts = extra.split("=");
props.put(parts[0], parts[1]);
}
return props;
}
private void addCoreWithProps(Properties stockProps, File propFile) throws Exception {
if (!propFile.getParentFile().exists()) propFile.getParentFile().mkdirs();
Writer out = new OutputStreamWriter(new FileOutputStream(propFile), StandardCharsets.UTF_8);
try {
stockProps.store(out, null);
} finally {
out.close();
}
addConfFiles(new File(propFile.getParent(), "conf"));
}
private void addCoreWithProps(String name, Properties stockProps) throws Exception {
File propFile = new File(new File(solrHomeDirectory, name), CorePropertiesLocator.PROPERTIES_FILENAME);
File parent = propFile.getParentFile();
assertTrue("Failed to mkdirs for " + parent.getAbsolutePath(), parent.mkdirs());
addCoreWithProps(stockProps, propFile);
}
private void addConfFiles(File confDir) throws Exception {
String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
assertTrue("Failed to mkdirs for " + confDir.getAbsolutePath(), confDir.mkdirs());
FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(confDir, "schema-tiny.xml"));
FileUtils.copyFile(new File(top, "solrconfig-minimal.xml"), new File(confDir, "solrconfig-minimal.xml"));
FileUtils.copyFile(new File(top, "solrconfig.snippet.randomindexconfig.xml"), new File(confDir, "solrconfig.snippet.randomindexconfig.xml"));
}
private CoreContainer init() throws Exception {
final CoreContainer cores = new CoreContainer();
cores.load();
return cores;
}
@After
public void after() throws Exception {
}
// Test the basic setup: create some dirs with core.properties files in them while solr.xml has discoverCores
// set, and ensure that we find all the cores and can load them.
@Test
public void testDiscovery() throws Exception {
setMeUp();
// name, isLazy, loadOnStartup
addCoreWithProps("core1", makeCorePropFile("core1", false, true, "dataDir=core1"));
addCoreWithProps("core2", makeCorePropFile("core2", false, false, "dataDir=core2"));
// I suspect what we're adding in here is a "configset" rather than a schema or solrconfig.
//
addCoreWithProps("lazy1", makeCorePropFile("lazy1", true, false, "dataDir=lazy1"));
CoreContainer cc = init();
try {
assertEquals(ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME,
cc.getDefaultCoreName());
TestLazyCores.checkInCores(cc, "core1");
TestLazyCores.checkNotInCores(cc, "lazy1", "core2", "collection1");
// force loading of core2 and lazy1 by getting them from the CoreContainer
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2");
SolrCore lazy1 = cc.getCore("lazy1")) {
// Let's assert we did the right thing for implicit properties too.
CoreDescriptor desc = core1.getCoreDescriptor();
assertEquals("core1", desc.getName());
// This is too long and ugly to put in. Besides, it varies.
assertNotNull(desc.getInstanceDir());
// Prove we're ignoring this even though it's set in the properties file
assertFalse("InstanceDir should be ignored", desc.getInstanceDir().contains("totallybogus"));
assertEquals("core1", desc.getDataDir());
assertEquals("solrconfig-minimal.xml", desc.getConfigName());
assertEquals("schema-tiny.xml", desc.getSchemaName());
TestLazyCores.checkInCores(cc, "core1", "core2", "lazy1");
}
} finally {
cc.shutdown();
}
}
@Test
public void testDuplicateNames() throws Exception {
setMeUp();
// name, isLazy, loadOnStartup
addCoreWithProps("core1", makeCorePropFile("core1", false, true));
addCoreWithProps("core2", makeCorePropFile("core2", false, false, "name=core1"));
CoreContainer cc = null;
try {
cc = init();
fail("Should have thrown exception in testDuplicateNames");
} catch (SolrException se) {
String message = se.getMessage();
assertTrue("Wrong exception thrown on duplicate core names",
message.indexOf("Found multiple cores with the name [core1]") != -1);
assertTrue(File.separator + "core1 should have been mentioned in the message: " + message,
message.indexOf(File.separator + "core1") != -1);
assertTrue(File.separator + "core2 should have been mentioned in the message:" + message,
message.indexOf(File.separator + "core2") != -1);
} finally {
if (cc != null) {
cc.shutdown();
}
}
}
@Test
public void testAlternateCoreDir() throws Exception {
File alt = createTempDir().toFile();
setMeUp(alt.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true, "dataDir=core1"),
new File(alt, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
addCoreWithProps(makeCorePropFile("core2", false, false, "dataDir=core2"),
new File(alt, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
assertNotNull(core1);
assertNotNull(core2);
} finally {
cc.shutdown();
}
}
@Test
public void testNoCoreDir() throws Exception {
File noCoreDir = createTempDir().toFile();
setMeUp(noCoreDir.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true),
new File(noCoreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
addCoreWithProps(makeCorePropFile("core2", false, false),
new File(noCoreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
assertNotNull(core1);
assertNotNull(core2);
} finally {
cc.shutdown();
}
}
@Test
public void testCoreDirCantRead() throws Exception {
File coreDir = solrHomeDirectory;
setMeUp(coreDir.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true),
new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
// Ensure that another core is opened successfully
addCoreWithProps(makeCorePropFile("core2", false, false, "dataDir=core2"),
new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
File toSet = new File(coreDir, "core1");
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
assertNull(core1);
assertNotNull(core2);
} finally {
cc.shutdown();
}
// So things can be cleaned up by the framework!
toSet.setReadable(true, false);
}
@Test
public void testNonCoreDirCantRead() throws Exception {
File coreDir = solrHomeDirectory;
setMeUp(coreDir.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true),
new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
addCoreWithProps(makeCorePropFile("core2", false, false, "dataDir=core2"),
new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
File toSet = new File(solrHomeDirectory, "cantReadDir");
assertTrue("Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", toSet.mkdirs());
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
assertNotNull(core1); // Should be able to open the perfectly valid core1 despite a non-readable directory
assertNotNull(core2);
} finally {
cc.shutdown();
}
// So things can be cleaned up by the framework!
toSet.setReadable(true, false);
}
@Test
public void testFileCantRead() throws Exception {
File coreDir = solrHomeDirectory;
setMeUp(coreDir.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true),
new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
File toSet = new File(solrHomeDirectory, "cantReadFile");
assertTrue("Should have been able to make file '" + toSet.getAbsolutePath() + "' ", toSet.createNewFile());
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1")) {
assertNotNull(core1); // Should still be able to create core despite r/o file.
} finally {
cc.shutdown();
}
// So things can be cleaned up by the framework!
toSet.setReadable(true, false);
}
@Test
public void testSolrHomeDoesntExist() throws Exception {
File homeDir = solrHomeDirectory;
Files.delete(homeDir.toPath());
CoreContainer cc = null;
try {
cc = init();
} catch (SolrException ex) {
assertTrue("Core init doesn't report if solr home directory doesn't exist " + ex.getMessage(),
0 <= ex.getMessage().indexOf("solr.xml does not exist"));
} finally {
if (cc != null) {
cc.shutdown();
}
}
}
@Test
public void testSolrHomeNotReadable() throws Exception {
File homeDir = solrHomeDirectory;
setMeUp(homeDir.getAbsolutePath());
addCoreWithProps(makeCorePropFile("core1", false, true),
new File(homeDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
assumeTrue("Cannot make " + homeDir + " non-readable. Test aborted.", homeDir.setReadable(false, false));
CoreContainer cc = null;
try {
cc = init();
} catch (Exception ex) {
String message = ex.getMessage();
assertTrue("Should have reported that the solr home directory is not readable: " + message,
0 < message.indexOf("doesn't have read permissions"));
} finally {
if (cc != null) {
cc.shutdown();
}
}
// So things can be cleaned up by the framework!
homeDir.setReadable(true, false);
}
// For testing whether finding a solr.xml overrides looking at solr.properties
private final static String SOLR_XML = "<solr> " +
"<int name=\"transientCacheSize\">2</int> " +
"<solrcloud> " +
"<str name=\"hostContext\">solrprop</str> " +
"<int name=\"zkClientTimeout\">20</int> " +
"<str name=\"host\">222.333.444.555</str> " +
"<int name=\"hostPort\">6000</int> " +
"</solrcloud> " +
"</solr>";
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.cmdline;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.containers.FileCollectionFactory;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.api.BuildType;
import org.jetbrains.jps.api.CanceledStatus;
import org.jetbrains.jps.api.GlobalOptions;
import org.jetbrains.jps.builders.*;
import org.jetbrains.jps.builders.impl.BuildDataPathsImpl;
import org.jetbrains.jps.builders.impl.BuildRootIndexImpl;
import org.jetbrains.jps.builders.impl.BuildTargetIndexImpl;
import org.jetbrains.jps.builders.impl.BuildTargetRegistryImpl;
import org.jetbrains.jps.builders.java.JavaModuleBuildTargetType;
import org.jetbrains.jps.builders.java.dependencyView.Callbacks;
import org.jetbrains.jps.builders.logging.BuildLoggingManager;
import org.jetbrains.jps.builders.storage.BuildDataPaths;
import org.jetbrains.jps.incremental.*;
import org.jetbrains.jps.incremental.fs.BuildFSState;
import org.jetbrains.jps.incremental.messages.BuildMessage;
import org.jetbrains.jps.incremental.messages.CompilerMessage;
import org.jetbrains.jps.incremental.relativizer.PathRelativizerService;
import org.jetbrains.jps.incremental.storage.BuildDataManager;
import org.jetbrains.jps.incremental.storage.BuildTargetsState;
import org.jetbrains.jps.incremental.storage.ProjectStamps;
import org.jetbrains.jps.incremental.storage.StampsStorage;
import org.jetbrains.jps.indices.ModuleExcludeIndex;
import org.jetbrains.jps.indices.impl.IgnoredFileIndexImpl;
import org.jetbrains.jps.indices.impl.ModuleExcludeIndexImpl;
import org.jetbrains.jps.model.JpsModel;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static org.jetbrains.jps.api.CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.TargetTypeBuildScope;
public final class BuildRunner {
private static final Logger LOG = Logger.getInstance(BuildRunner.class);
public static final boolean PARALLEL_BUILD_ENABLED = Boolean.parseBoolean(System.getProperty(GlobalOptions.COMPILE_PARALLEL_OPTION, "false"));
public static final boolean PARALLEL_BUILD_AUTOMAKE_ENABLED = PARALLEL_BUILD_ENABLED && Boolean.parseBoolean(System.getProperty(GlobalOptions.ALLOW_PARALLEL_AUTOMAKE_OPTION, "true"));
private final JpsModelLoader myModelLoader;
private List<String> myFilePaths = Collections.emptyList();
private Map<String, String> myBuilderParams = Collections.emptyMap();
private boolean myForceCleanCaches;
public BuildRunner(JpsModelLoader modelLoader) {
myModelLoader = modelLoader;
}
public void setFilePaths(List<String> filePaths) {
myFilePaths = filePaths != null? filePaths : Collections.emptyList();
}
public void setBuilderParams(Map<String, String> builderParams) {
myBuilderParams = builderParams != null? builderParams : Collections.emptyMap();
}
public ProjectDescriptor load(MessageHandler msgHandler, File dataStorageRoot, BuildFSState fsState) throws IOException {
final JpsModel jpsModel = myModelLoader.loadModel();
BuildDataPaths dataPaths = new BuildDataPathsImpl(dataStorageRoot);
BuildTargetRegistryImpl targetRegistry = new BuildTargetRegistryImpl(jpsModel);
ModuleExcludeIndex index = new ModuleExcludeIndexImpl(jpsModel);
IgnoredFileIndexImpl ignoredFileIndex = new IgnoredFileIndexImpl(jpsModel);
BuildRootIndexImpl buildRootIndex = new BuildRootIndexImpl(targetRegistry, jpsModel, index, dataPaths, ignoredFileIndex);
BuildTargetIndexImpl targetIndex = new BuildTargetIndexImpl(targetRegistry, buildRootIndex);
BuildTargetsState targetsState = new BuildTargetsState(dataPaths, jpsModel, buildRootIndex);
PathRelativizerService relativizer = new PathRelativizerService(jpsModel.getProject());
ProjectStamps projectStamps = null;
BuildDataManager dataManager = null;
try {
projectStamps = new ProjectStamps(dataStorageRoot, targetsState, relativizer);
dataManager = new BuildDataManager(dataPaths, targetsState, relativizer);
if (dataManager.versionDiffers()) {
myForceCleanCaches = true;
msgHandler.processMessage(new CompilerMessage(getRootCompilerName(), BuildMessage.Kind.INFO,
JpsBuildBundle.message("build.message.dependency.data.format.has.changed.project.rebuild.required")));
}
}
catch (Exception e) {
// second try
LOG.info(e);
if (projectStamps != null) {
projectStamps.close();
}
if (dataManager != null) {
dataManager.close();
}
myForceCleanCaches = true;
FileUtilRt.delete(dataStorageRoot);
targetsState = new BuildTargetsState(dataPaths, jpsModel, buildRootIndex);
projectStamps = new ProjectStamps(dataStorageRoot, targetsState, relativizer);
dataManager = new BuildDataManager(dataPaths, targetsState, relativizer);
// second attempt succeeded
msgHandler.processMessage(new CompilerMessage(getRootCompilerName(), BuildMessage.Kind.INFO,
JpsBuildBundle.message("build.message.project.rebuild.forced.0", e.getMessage())));
}
return new ProjectDescriptor(jpsModel, fsState, projectStamps, dataManager, BuildLoggingManager.DEFAULT, index, targetsState,
targetIndex, buildRootIndex, ignoredFileIndex);
}
@NotNull
public static @Nls String getRootCompilerName() {
return JpsBuildBundle.message("builder.name.root");
}
public void setForceCleanCaches(boolean forceCleanCaches) {
myForceCleanCaches = forceCleanCaches;
}
public void runBuild(ProjectDescriptor pd,
CanceledStatus cs,
@Nullable Callbacks.ConstantAffectionResolver constantSearch,
MessageHandler msgHandler,
BuildType buildType,
List<TargetTypeBuildScope> scopes, final boolean includeDependenciesToScope) throws Exception {
for (int attempt = 0; attempt < 2 && !cs.isCanceled(); attempt++) {
final boolean forceClean = myForceCleanCaches && myFilePaths.isEmpty();
final CompileScope compileScope = createCompilationScope(pd, scopes, myFilePaths, forceClean, includeDependenciesToScope);
final IncProjectBuilder builder = new IncProjectBuilder(pd, BuilderRegistry.getInstance(), myBuilderParams, cs, Utils.IS_TEST_MODE);
builder.addMessageHandler(msgHandler);
try {
switch (buildType) {
case BUILD:
builder.build(compileScope, forceClean);
break;
case CLEAN:
//todo[nik]
// new ProjectBuilder(new GantBinding(), project).clean();
break;
case UP_TO_DATE_CHECK:
builder.checkUpToDate(compileScope);
break;
}
break; // break attempts loop
}
catch (RebuildRequestedException e) {
if (attempt == 0) {
LOG.info(e);
myForceCleanCaches = true;
}
else {
throw e;
}
}
}
}
private static CompileScope createCompilationScope(ProjectDescriptor pd, List<TargetTypeBuildScope> scopes, Collection<String> paths,
final boolean forceClean, final boolean includeDependenciesToScope) throws Exception {
Set<BuildTargetType<?>> targetTypes = new HashSet<>();
Set<BuildTargetType<?>> targetTypesToForceBuild = new HashSet<>();
Set<BuildTarget<?>> targets = new HashSet<>();
Map<BuildTarget<?>, Set<File>> files;
final TargetTypeRegistry typeRegistry = TargetTypeRegistry.getInstance();
for (TargetTypeBuildScope scope : scopes) {
final BuildTargetType<?> targetType = typeRegistry.getTargetType(scope.getTypeId());
if (targetType == null) {
LOG.info("Unknown target type: " + scope.getTypeId());
continue;
}
if (scope.getForceBuild() || forceClean) {
targetTypesToForceBuild.add(targetType);
}
if (scope.getAllTargets()) {
targetTypes.add(targetType);
}
else {
BuildTargetLoader<?> loader = targetType.createLoader(pd.getModel());
for (String targetId : scope.getTargetIdList()) {
BuildTarget<?> target = loader.createTarget(targetId);
if (target != null) {
targets.add(target);
}
else {
LOG.info("Unknown " + targetType + " target id: " + targetId);
}
}
}
}
if (includeDependenciesToScope) {
includeDependenciesToScope(targetTypes, targets, targetTypesToForceBuild, pd);
}
final StampsStorage<? extends StampsStorage.Stamp> stampsStorage = pd.getProjectStamps().getStampStorage();
if (!paths.isEmpty()) {
boolean forceBuildAllModuleBasedTargets = false;
for (BuildTargetType<?> type : targetTypesToForceBuild) {
if (type instanceof JavaModuleBuildTargetType) {
forceBuildAllModuleBasedTargets = true;
break;
}
}
files = new HashMap<>();
for (String path : paths) {
final File file = new File(path);
final Collection<BuildRootDescriptor> descriptors = pd.getBuildRootIndex().findAllParentDescriptors(file, null);
for (BuildRootDescriptor descriptor : descriptors) {
Set<File> fileSet = files.get(descriptor.getTarget());
if (fileSet == null) {
fileSet = FileCollectionFactory.createCanonicalFileSet();
files.put(descriptor.getTarget(), fileSet);
}
final boolean added = fileSet.add(file);
if (added) {
final BuildTargetType<?> targetType = descriptor.getTarget().getTargetType();
if (targetTypesToForceBuild.contains(targetType) || (forceBuildAllModuleBasedTargets && targetType instanceof ModuleBasedBuildTargetType)) {
pd.fsState.markDirty(null, file, descriptor, stampsStorage, false);
}
}
}
}
}
else {
files = Collections.emptyMap();
}
return new CompileScopeImpl(targetTypes, targetTypesToForceBuild, targets, files);
}
private static void includeDependenciesToScope(Set<? extends BuildTargetType<?>> targetTypes, Set<BuildTarget<?>> targets,
Set<? super BuildTargetType<?>> targetTypesToForceBuild, ProjectDescriptor descriptor) {
//todo[nik] get rid of CompileContext parameter for BuildTargetIndex.getDependencies() and use it here
TargetOutputIndex dummyIndex = new TargetOutputIndex() {
@Override
public Collection<BuildTarget<?>> getTargetsByOutputFile(@NotNull File file) {
return Collections.emptyList();
}
};
List<BuildTarget<?>> current = new ArrayList<>(targets);
while (!current.isEmpty()) {
List<BuildTarget<?>> next = new ArrayList<>();
for (BuildTarget<?> target : current) {
for (BuildTarget<?> depTarget : target.computeDependencies(descriptor.getBuildTargetIndex(), dummyIndex)) {
if (!targets.contains(depTarget) && !targetTypes.contains(depTarget.getTargetType())) {
next.add(depTarget);
if (targetTypesToForceBuild.contains(target.getTargetType())) {
targetTypesToForceBuild.add(depTarget.getTargetType());
}
}
}
}
targets.addAll(next);
current = next;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.persistence.impl.journal;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ActiveMQExceptionType;
import org.apache.activemq.artemis.core.io.IOCallback;
import org.apache.activemq.artemis.core.journal.impl.SimpleWaitIOCallback;
import org.apache.activemq.artemis.core.persistence.OperationContext;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.utils.ExecutorFactory;
/**
 * Each instance of OperationContextImpl is associated with an executor (usually an ordered Executor).
 *
 * Tasks are held until the pending operations complete, and are then executed in their natural order as soon as the
 * operations return from replication and storage.
 *
 * If there are no pending IO operations, the tasks are executed on the caller's thread without any context switch.
 *
 * So, if you are doing operations that are not dependent on IO (e.g. NonPersistentMessages) you won't incur any context switch.
 */
public class OperationContextImpl implements OperationContext {
private static final ThreadLocal<OperationContext> threadLocalContext = new ThreadLocal<>();
public static void clearContext() {
OperationContextImpl.threadLocalContext.set(null);
}
public static final OperationContext getContext() {
return getContext(null);
}
public static OperationContext getContext(final ExecutorFactory executorFactory) {
OperationContext token = OperationContextImpl.threadLocalContext.get();
if (token == null) {
if (executorFactory == null) {
return null;
} else {
token = new OperationContextImpl(executorFactory.getExecutor());
OperationContextImpl.threadLocalContext.set(token);
}
}
return token;
}
public static void setContext(final OperationContext context) {
OperationContextImpl.threadLocalContext.set(context);
}
private List<TaskHolder> tasks;
private List<TaskHolder> storeOnlyTasks;
private long minimalStore = Long.MAX_VALUE;
private long minimalReplicated = Long.MAX_VALUE;
private long minimalPage = Long.MAX_VALUE;
private final AtomicLong storeLineUp = new AtomicLong(0);
private final AtomicLong replicationLineUp = new AtomicLong(0);
private final AtomicLong pageLineUp = new AtomicLong(0);
private long stored = 0;
private long replicated = 0;
private long paged = 0;
private int errorCode = -1;
private String errorMessage = null;
private final Executor executor;
private final AtomicInteger executorsPending = new AtomicInteger(0);
public OperationContextImpl(final Executor executor) {
super();
this.executor = executor;
}
@Override
public void pageSyncLineUp() {
pageLineUp.incrementAndGet();
}
@Override
public synchronized void pageSyncDone() {
paged++;
checkTasks();
}
@Override
public void storeLineUp() {
storeLineUp.incrementAndGet();
}
@Override
public void replicationLineUp() {
replicationLineUp.incrementAndGet();
}
@Override
public synchronized void replicationDone() {
replicated++;
checkTasks();
}
@Override
public void executeOnCompletion(IOCallback runnable) {
executeOnCompletion(runnable, false);
}
@Override
public void executeOnCompletion(final IOCallback completion, final boolean storeOnly) {
if (errorCode != -1) {
completion.onError(errorCode, errorMessage);
return;
}
boolean executeNow = false;
synchronized (this) {
final int UNDEFINED = Integer.MIN_VALUE;
int storeLined = UNDEFINED;
int pageLined = UNDEFINED;
int replicationLined = UNDEFINED;
if (storeOnly) {
if (storeOnlyTasks == null) {
storeOnlyTasks = new LinkedList<>();
}
} else {
if (tasks == null) {
tasks = new LinkedList<>();
minimalReplicated = (replicationLined = replicationLineUp.intValue());
minimalStore = (storeLined = storeLineUp.intValue());
minimalPage = (pageLined = pageLineUp.intValue());
}
}
// On the branches below, each of these values is used
if (replicationLined == UNDEFINED) {
replicationLined = replicationLineUp.intValue();
storeLined = storeLineUp.intValue();
pageLined = pageLineUp.intValue();
}
// In this case, we can just execute the completion directly
if (replicationLined == replicated && storeLined == stored && pageLined == paged) {
// We want to avoid the executor if everything is complete...
// However, we can't run the callback inline if there are executions pending;
// we need to use the executor in that case.
if (executorsPending.get() == 0) {
// No need to use an executor or a context switch here:
// there are no actions pending, hence we can just execute the task directly on the same thread
executeNow = true;
} else {
execute(completion);
}
} else {
if (storeOnly) {
storeOnlyTasks.add(new TaskHolder(completion, storeLined, replicationLined, pageLined));
} else {
tasks.add(new TaskHolder(completion, storeLined, replicationLined, pageLined));
}
}
}
if (executeNow) {
// Executing outside of any locks
completion.done();
}
}
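// Hedged usage sketch of the lineUp/done pairing this method relies on (comment only; "context"
// stands for an instance of this class, while "journal", "record" and "task" are hypothetical
// collaborators, not members of this class):
//
//   context.storeLineUp();                    // announce one in-flight store operation
//   journal.appendRecord(record);             // the storage layer calls context.done() once synced
//   context.executeOnCompletion(task);        // runs inline if nothing is pending, otherwise queued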
@Override
public synchronized void done() {
stored++;
checkTasks();
}
private void checkTasks() {
if (storeOnlyTasks != null) {
Iterator<TaskHolder> iter = storeOnlyTasks.iterator();
while (iter.hasNext()) {
TaskHolder holder = iter.next();
if (stored >= holder.storeLined) {
// If set, we use an executor to avoid the server being single threaded
execute(holder.task);
iter.remove();
}
}
}
if (stored >= minimalStore && replicated >= minimalReplicated && paged >= minimalPage) {
Iterator<TaskHolder> iter = tasks.iterator();
while (iter.hasNext()) {
TaskHolder holder = iter.next();
if (stored >= holder.storeLined && replicated >= holder.replicationLined && paged >= holder.pageLined) {
// If set, we use an executor to avoid the server being single threaded
execute(holder.task);
iter.remove();
} else {
// End of list here. No other task will be completed after this
break;
}
}
}
}
/**
* @param task
*/
private void execute(final IOCallback task) {
executorsPending.incrementAndGet();
try {
executor.execute(new Runnable() {
@Override
public void run() {
try {
// If any IO is done inside the callback, it needs to be done on a new context
OperationContextImpl.clearContext();
task.done();
} finally {
executorsPending.decrementAndGet();
}
}
});
} catch (Throwable e) {
ActiveMQServerLogger.LOGGER.errorExecutingAIOCallback(e);
executorsPending.decrementAndGet();
task.onError(ActiveMQExceptionType.INTERNAL_ERROR.getCode(), "It wasn't possible to complete IO operation - " + e.getMessage());
}
}
/*
* (non-Javadoc)
* @see org.apache.activemq.artemis.core.replication.ReplicationToken#complete()
*/
public void complete() {
}
@Override
public synchronized void onError(final int errorCode, final String errorMessage) {
this.errorCode = errorCode;
this.errorMessage = errorMessage;
if (tasks != null) {
Iterator<TaskHolder> iter = tasks.iterator();
while (iter.hasNext()) {
TaskHolder holder = iter.next();
holder.task.onError(errorCode, errorMessage);
iter.remove();
}
}
}
static final class TaskHolder {
@Override
public String toString() {
return "TaskHolder [storeLined=" + storeLined +
", replicationLined=" +
replicationLined +
", pageLined=" +
pageLined +
", task=" +
task +
"]";
}
final int storeLined;
final int replicationLined;
final int pageLined;
final IOCallback task;
TaskHolder(final IOCallback task, int storeLined, int replicationLined, int pageLined) {
this.storeLined = storeLined;
this.replicationLined = replicationLined;
this.pageLined = pageLined;
this.task = task;
}
}
@Override
public void waitCompletion() throws Exception {
waitCompletion(0);
}
@Override
public boolean waitCompletion(final long timeout) throws InterruptedException, ActiveMQException {
SimpleWaitIOCallback waitCallback = new SimpleWaitIOCallback();
executeOnCompletion(waitCallback);
complete();
if (timeout == 0) {
waitCallback.waitCompletion();
return true;
} else {
return waitCallback.waitCompletion(timeout);
}
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
if (tasks != null) {
for (TaskHolder hold : tasks) {
buffer.append("Task = " + hold + "\n");
}
}
return "OperationContextImpl [" + hashCode() + "] [minimalStore=" + minimalStore +
", storeLineUp=" +
storeLineUp +
", stored=" +
stored +
", minimalReplicated=" +
minimalReplicated +
", replicationLineUp=" +
replicationLineUp +
", replicated=" +
replicated +
", paged=" +
paged +
", minimalPage=" +
minimalPage +
", pageLineUp=" +
pageLineUp +
", errorCode=" +
errorCode +
", errorMessage=" +
errorMessage +
", executorsPending=" +
executorsPending +
", executor=" + this.executor +
"]" + buffer.toString();
}
}
|
|
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.discovery.shared.transport;
import javax.ws.rs.core.HttpHeaders;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.EurekaInstanceConfig;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.shared.Applications;
import com.netflix.discovery.shared.resolver.ClosableResolver;
import com.netflix.discovery.shared.resolver.ClusterResolver;
import com.netflix.discovery.shared.resolver.DefaultEndpoint;
import com.netflix.discovery.shared.resolver.EurekaEndpoint;
import com.netflix.discovery.shared.resolver.StaticClusterResolver;
import com.netflix.discovery.shared.resolver.aws.ApplicationsResolver;
import com.netflix.discovery.shared.resolver.aws.AwsEndpoint;
import com.netflix.discovery.shared.resolver.aws.EurekaHttpResolver;
import com.netflix.discovery.shared.resolver.aws.TestEurekaHttpResolver;
import com.netflix.discovery.shared.transport.jersey.Jersey1TransportClientFactories;
import com.netflix.discovery.shared.transport.jersey.TransportClientFactories;
import com.netflix.discovery.util.EurekaEntityComparators;
import com.netflix.discovery.util.InstanceInfoGenerator;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientRequest;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.filter.ClientFilter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static com.netflix.discovery.shared.transport.EurekaHttpResponse.anEurekaHttpResponse;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* @author Tomasz Bak
*/
public class EurekaHttpClientsTest {
private static final InstanceInfo MY_INSTANCE = InstanceInfoGenerator.newBuilder(1, "myApp").build().first();
private final EurekaInstanceConfig instanceConfig = mock(EurekaInstanceConfig.class);
private final ApplicationInfoManager applicationInfoManager = new ApplicationInfoManager(instanceConfig, MY_INSTANCE);
private final EurekaHttpClient writeRequestHandler = mock(EurekaHttpClient.class);
private final EurekaHttpClient readRequestHandler = mock(EurekaHttpClient.class);
private EurekaClientConfig clientConfig;
private EurekaTransportConfig transportConfig;
private SimpleEurekaHttpServer writeServer;
private SimpleEurekaHttpServer readServer;
private ClusterResolver<EurekaEndpoint> clusterResolver;
private EurekaHttpClientFactory clientFactory;
private String readServerURI;
private final InstanceInfoGenerator instanceGen = InstanceInfoGenerator.newBuilder(2, 1).build();
@Before
public void setUp() throws IOException {
clientConfig = mock(EurekaClientConfig.class);
transportConfig = mock(EurekaTransportConfig.class);
when(clientConfig.getEurekaServerTotalConnectionsPerHost()).thenReturn(10);
when(clientConfig.getEurekaServerTotalConnections()).thenReturn(10);
when(transportConfig.getSessionedClientReconnectIntervalSeconds()).thenReturn(10);
writeServer = new SimpleEurekaHttpServer(writeRequestHandler);
clusterResolver = new StaticClusterResolver<EurekaEndpoint>("regionA", new DefaultEndpoint("localhost", writeServer.getServerPort(), false, "/v2/"));
readServer = new SimpleEurekaHttpServer(readRequestHandler);
readServerURI = "http://localhost:" + readServer.getServerPort();
clientFactory = EurekaHttpClients.canonicalClientFactory(
"test",
transportConfig,
clusterResolver,
new Jersey1TransportClientFactories().newTransportClientFactory(
clientConfig,
Collections.<ClientFilter>emptyList(),
applicationInfoManager.getInfo()
));
}
@After
public void tearDown() throws Exception {
if (writeServer != null) {
writeServer.shutdown();
}
if (readServer != null) {
readServer.shutdown();
}
if (clientFactory != null) {
clientFactory.shutdown();
}
}
@Test
public void testCanonicalClient() throws Exception {
Applications apps = instanceGen.toApplications();
when(writeRequestHandler.getApplications()).thenReturn(
anEurekaHttpResponse(302, Applications.class).headers("Location", readServerURI + "/v2/apps").build()
);
when(readRequestHandler.getApplications()).thenReturn(
anEurekaHttpResponse(200, apps).headers(HttpHeaders.CONTENT_TYPE, "application/json").build()
);
EurekaHttpClient eurekaHttpClient = clientFactory.newClient();
EurekaHttpResponse<Applications> result = eurekaHttpClient.getApplications();
assertThat(result.getStatusCode(), is(equalTo(200)));
assertThat(EurekaEntityComparators.equal(result.getEntity(), apps), is(true));
}
@Test
public void testCompositeBootstrapResolver() throws Exception {
Applications applications = InstanceInfoGenerator.newBuilder(5, "eurekaWrite", "someOther").build().toApplications();
Applications applications2 = InstanceInfoGenerator.newBuilder(2, "eurekaWrite", "someOther").build().toApplications();
String vipAddress = applications.getRegisteredApplications("eurekaWrite").getInstances().get(0).getVIPAddress();
// setup client config to use fixed root ips for testing
when(clientConfig.shouldUseDnsForFetchingServiceUrls()).thenReturn(false);
when(clientConfig.getEurekaServerServiceUrls(anyString())).thenReturn(Arrays.asList("http://foo:0")); // can use anything here
when(clientConfig.getRegion()).thenReturn("us-east-1");
when(transportConfig.getWriteClusterVip()).thenReturn(vipAddress);
when(transportConfig.getAsyncExecutorThreadPoolSize()).thenReturn(4);
when(transportConfig.getAsyncResolverRefreshIntervalMs()).thenReturn(400);
when(transportConfig.getAsyncResolverWarmUpTimeoutMs()).thenReturn(400);
ApplicationsResolver.ApplicationsSource applicationsSource = mock(ApplicationsResolver.ApplicationsSource.class);
when(applicationsSource.getApplications(anyInt(), eq(TimeUnit.SECONDS)))
.thenReturn(null) // first time
.thenReturn(applications) // second time
.thenReturn(null); // subsequent times
EurekaHttpClient mockHttpClient = mock(EurekaHttpClient.class);
when(mockHttpClient.getVip(eq(vipAddress)))
.thenReturn(anEurekaHttpResponse(200, applications).build())
.thenReturn(anEurekaHttpResponse(200, applications2).build()); // contains diff number of servers
TransportClientFactory transportClientFactory = mock(TransportClientFactory.class);
when(transportClientFactory.newClient(any(EurekaEndpoint.class))).thenReturn(mockHttpClient);
ClosableResolver<AwsEndpoint> resolver = null;
try {
resolver = EurekaHttpClients.compositeBootstrapResolver(
clientConfig,
transportConfig,
transportClientFactory,
applicationInfoManager.getInfo(),
applicationsSource
);
List endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size()));
// wait for the second cycle that hits the app source
verify(applicationsSource, timeout(3000).times(2)).getApplications(anyInt(), eq(TimeUnit.SECONDS));
endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size()));
// wait for the third cycle that triggers the mock http client (which is the third resolver cycle)
// for the third cycle we have mocked the application resolver to return null data so should fall back
// to calling the remote resolver again (which should return applications2)
verify(mockHttpClient, timeout(3000).times(3)).getVip(anyString());
endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(applications2.getInstancesByVirtualHostName(vipAddress).size()));
} finally {
if (resolver != null) {
resolver.shutdown();
}
}
}
@Test
public void testCanonicalResolver() throws Exception {
when(clientConfig.getEurekaServerURLContext()).thenReturn("context");
when(clientConfig.getRegion()).thenReturn("region");
when(transportConfig.getAsyncExecutorThreadPoolSize()).thenReturn(3);
when(transportConfig.getAsyncResolverRefreshIntervalMs()).thenReturn(400);
when(transportConfig.getAsyncResolverWarmUpTimeoutMs()).thenReturn(400);
Applications applications = InstanceInfoGenerator.newBuilder(5, "eurekaRead", "someOther").build().toApplications();
String vipAddress = applications.getRegisteredApplications("eurekaRead").getInstances().get(0).getVIPAddress();
ApplicationsResolver.ApplicationsSource applicationsSource = mock(ApplicationsResolver.ApplicationsSource.class);
when(applicationsSource.getApplications(anyInt(), eq(TimeUnit.SECONDS)))
.thenReturn(null) // first time
.thenReturn(applications); // subsequent times
EurekaHttpClientFactory remoteResolverClientFactory = mock(EurekaHttpClientFactory.class);
EurekaHttpClient httpClient = mock(EurekaHttpClient.class);
when(remoteResolverClientFactory.newClient()).thenReturn(httpClient);
when(httpClient.getVip(vipAddress)).thenReturn(EurekaHttpResponse.anEurekaHttpResponse(200, applications).build());
EurekaHttpResolver remoteResolver = spy(new TestEurekaHttpResolver(clientConfig, transportConfig, remoteResolverClientFactory, vipAddress));
when(transportConfig.getReadClusterVip()).thenReturn(vipAddress);
ApplicationsResolver localResolver = spy(new ApplicationsResolver(
clientConfig, transportConfig, applicationsSource, transportConfig.getReadClusterVip()));
ClosableResolver resolver = null;
try {
resolver = EurekaHttpClients.compositeQueryResolver(
remoteResolver,
localResolver,
clientConfig,
transportConfig,
applicationInfoManager.getInfo()
);
List endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size()));
verify(remoteResolver, times(1)).getClusterEndpoints();
verify(localResolver, times(1)).getClusterEndpoints();
// wait for the second cycle that hits the app source
verify(applicationsSource, timeout(3000).times(2)).getApplications(anyInt(), eq(TimeUnit.SECONDS));
endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size()));
verify(remoteResolver, times(1)).getClusterEndpoints();
verify(localResolver, times(2)).getClusterEndpoints();
} finally {
if (resolver != null) {
resolver.shutdown();
}
}
}
@Test
public void testAddingAdditionalFilters() throws Exception {
TestFilter testFilter = new TestFilter();
Collection<ClientFilter> additionalFilters = Arrays.<ClientFilter>asList(testFilter);
TransportClientFactory transportClientFactory = new Jersey1TransportClientFactories().newTransportClientFactory(
clientConfig,
additionalFilters,
MY_INSTANCE
);
EurekaHttpClient client = transportClientFactory.newClient(clusterResolver.getClusterEndpoints().get(0));
client.getApplication("foo");
assertThat(testFilter.await(30, TimeUnit.SECONDS), is(true));
}
private static class TestFilter extends ClientFilter {
private final CountDownLatch latch = new CountDownLatch(1);
@Override
public ClientResponse handle(ClientRequest cr) throws ClientHandlerException {
latch.countDown();
return mock(ClientResponse.class);
}
public boolean await(long timeout, TimeUnit unit) throws Exception {
return latch.await(timeout, unit);
}
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workmail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* The details of a mailbox export job, including the user or resource ID associated with the mailbox and the S3 bucket
* that the mailbox contents are exported to.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workmail-2017-10-01/MailboxExportJob" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class MailboxExportJob implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The identifier of the mailbox export job.
* </p>
*/
private String jobId;
/**
* <p>
* The identifier of the user or resource associated with the mailbox.
* </p>
*/
private String entityId;
/**
* <p>
* The mailbox export job description.
* </p>
*/
private String description;
/**
* <p>
* The name of the S3 bucket.
* </p>
*/
private String s3BucketName;
/**
* <p>
* The path to the S3 bucket and file that the mailbox export job exports to.
* </p>
*/
private String s3Path;
/**
* <p>
* The estimated progress of the mailbox export job, in percentage points.
* </p>
*/
private Integer estimatedProgress;
/**
* <p>
* The state of the mailbox export job.
* </p>
*/
private String state;
/**
* <p>
* The mailbox export job start timestamp.
* </p>
*/
private java.util.Date startTime;
/**
* <p>
* The mailbox export job end timestamp.
* </p>
*/
private java.util.Date endTime;
/**
* <p>
* The identifier of the mailbox export job.
* </p>
*
* @param jobId
* The identifier of the mailbox export job.
*/
public void setJobId(String jobId) {
this.jobId = jobId;
}
/**
* <p>
* The identifier of the mailbox export job.
* </p>
*
* @return The identifier of the mailbox export job.
*/
public String getJobId() {
return this.jobId;
}
/**
* <p>
* The identifier of the mailbox export job.
* </p>
*
* @param jobId
* The identifier of the mailbox export job.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withJobId(String jobId) {
setJobId(jobId);
return this;
}
/**
* <p>
* The identifier of the user or resource associated with the mailbox.
* </p>
*
* @param entityId
* The identifier of the user or resource associated with the mailbox.
*/
public void setEntityId(String entityId) {
this.entityId = entityId;
}
/**
* <p>
* The identifier of the user or resource associated with the mailbox.
* </p>
*
* @return The identifier of the user or resource associated with the mailbox.
*/
public String getEntityId() {
return this.entityId;
}
/**
* <p>
* The identifier of the user or resource associated with the mailbox.
* </p>
*
* @param entityId
* The identifier of the user or resource associated with the mailbox.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withEntityId(String entityId) {
setEntityId(entityId);
return this;
}
/**
* <p>
* The mailbox export job description.
* </p>
*
* @param description
* The mailbox export job description.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* The mailbox export job description.
* </p>
*
* @return The mailbox export job description.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* The mailbox export job description.
* </p>
*
* @param description
* The mailbox export job description.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The name of the S3 bucket.
* </p>
*
* @param s3BucketName
* The name of the S3 bucket.
*/
public void setS3BucketName(String s3BucketName) {
this.s3BucketName = s3BucketName;
}
/**
* <p>
* The name of the S3 bucket.
* </p>
*
* @return The name of the S3 bucket.
*/
public String getS3BucketName() {
return this.s3BucketName;
}
/**
* <p>
* The name of the S3 bucket.
* </p>
*
* @param s3BucketName
* The name of the S3 bucket.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withS3BucketName(String s3BucketName) {
setS3BucketName(s3BucketName);
return this;
}
/**
* <p>
* The path to the S3 bucket and file that the mailbox export job exports to.
* </p>
*
* @param s3Path
* The path to the S3 bucket and file that the mailbox export job exports to.
*/
public void setS3Path(String s3Path) {
this.s3Path = s3Path;
}
/**
* <p>
* The path to the S3 bucket and file that the mailbox export job exports to.
* </p>
*
* @return The path to the S3 bucket and file that the mailbox export job exports to.
*/
public String getS3Path() {
return this.s3Path;
}
/**
* <p>
* The path to the S3 bucket and file that the mailbox export job exports to.
* </p>
*
* @param s3Path
* The path to the S3 bucket and file that the mailbox export job exports to.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withS3Path(String s3Path) {
setS3Path(s3Path);
return this;
}
/**
* <p>
* The estimated progress of the mailbox export job, in percentage points.
* </p>
*
* @param estimatedProgress
* The estimated progress of the mailbox export job, in percentage points.
*/
public void setEstimatedProgress(Integer estimatedProgress) {
this.estimatedProgress = estimatedProgress;
}
/**
* <p>
* The estimated progress of the mailbox export job, in percentage points.
* </p>
*
* @return The estimated progress of the mailbox export job, in percentage points.
*/
public Integer getEstimatedProgress() {
return this.estimatedProgress;
}
/**
* <p>
* The estimated progress of the mailbox export job, in percentage points.
* </p>
*
* @param estimatedProgress
* The estimated progress of the mailbox export job, in percentage points.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withEstimatedProgress(Integer estimatedProgress) {
setEstimatedProgress(estimatedProgress);
return this;
}
/**
* <p>
* The state of the mailbox export job.
* </p>
*
* @param state
* The state of the mailbox export job.
* @see MailboxExportJobState
*/
public void setState(String state) {
this.state = state;
}
/**
* <p>
* The state of the mailbox export job.
* </p>
*
* @return The state of the mailbox export job.
* @see MailboxExportJobState
*/
public String getState() {
return this.state;
}
/**
* <p>
* The state of the mailbox export job.
* </p>
*
* @param state
* The state of the mailbox export job.
* @return Returns a reference to this object so that method calls can be chained together.
* @see MailboxExportJobState
*/
public MailboxExportJob withState(String state) {
setState(state);
return this;
}
/**
* <p>
* The state of the mailbox export job.
* </p>
*
* @param state
* The state of the mailbox export job.
* @return Returns a reference to this object so that method calls can be chained together.
* @see MailboxExportJobState
*/
public MailboxExportJob withState(MailboxExportJobState state) {
this.state = state.toString();
return this;
}
/**
* <p>
* The mailbox export job start timestamp.
* </p>
*
* @param startTime
* The mailbox export job start timestamp.
*/
public void setStartTime(java.util.Date startTime) {
this.startTime = startTime;
}
/**
* <p>
* The mailbox export job start timestamp.
* </p>
*
* @return The mailbox export job start timestamp.
*/
public java.util.Date getStartTime() {
return this.startTime;
}
/**
* <p>
* The mailbox export job start timestamp.
* </p>
*
* @param startTime
* The mailbox export job start timestamp.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withStartTime(java.util.Date startTime) {
setStartTime(startTime);
return this;
}
/**
* <p>
* The mailbox export job end timestamp.
* </p>
*
* @param endTime
* The mailbox export job end timestamp.
*/
public void setEndTime(java.util.Date endTime) {
this.endTime = endTime;
}
/**
* <p>
* The mailbox export job end timestamp.
* </p>
*
* @return The mailbox export job end timestamp.
*/
public java.util.Date getEndTime() {
return this.endTime;
}
/**
* <p>
* The mailbox export job end timestamp.
* </p>
*
* @param endTime
* The mailbox export job end timestamp.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailboxExportJob withEndTime(java.util.Date endTime) {
setEndTime(endTime);
return this;
}
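/*
 * Illustrative sketch (not part of the generated SDK source): since every with* mutator returns
 * "this", a job snapshot can be populated fluently. The values below are hypothetical.
 *
 *   MailboxExportJob job = new MailboxExportJob()
 *       .withJobId("mailboxExportJobId")
 *       .withEntityId("S-1-1-11-1111111111-2222222222-3333333333-3333")
 *       .withS3BucketName("mailbox-export-bucket")
 *       .withS3Path("exports/mailboxExportJobId.zip")
 *       .withState("RUNNING")
 *       .withEstimatedProgress(42);
 */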
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getJobId() != null)
sb.append("JobId: ").append(getJobId()).append(",");
if (getEntityId() != null)
sb.append("EntityId: ").append(getEntityId()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getS3BucketName() != null)
sb.append("S3BucketName: ").append(getS3BucketName()).append(",");
if (getS3Path() != null)
sb.append("S3Path: ").append(getS3Path()).append(",");
if (getEstimatedProgress() != null)
sb.append("EstimatedProgress: ").append(getEstimatedProgress()).append(",");
if (getState() != null)
sb.append("State: ").append(getState()).append(",");
if (getStartTime() != null)
sb.append("StartTime: ").append(getStartTime()).append(",");
if (getEndTime() != null)
sb.append("EndTime: ").append(getEndTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof MailboxExportJob == false)
return false;
MailboxExportJob other = (MailboxExportJob) obj;
if (other.getJobId() == null ^ this.getJobId() == null)
return false;
if (other.getJobId() != null && other.getJobId().equals(this.getJobId()) == false)
return false;
if (other.getEntityId() == null ^ this.getEntityId() == null)
return false;
if (other.getEntityId() != null && other.getEntityId().equals(this.getEntityId()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getS3BucketName() == null ^ this.getS3BucketName() == null)
return false;
if (other.getS3BucketName() != null && other.getS3BucketName().equals(this.getS3BucketName()) == false)
return false;
if (other.getS3Path() == null ^ this.getS3Path() == null)
return false;
if (other.getS3Path() != null && other.getS3Path().equals(this.getS3Path()) == false)
return false;
if (other.getEstimatedProgress() == null ^ this.getEstimatedProgress() == null)
return false;
if (other.getEstimatedProgress() != null && other.getEstimatedProgress().equals(this.getEstimatedProgress()) == false)
return false;
if (other.getState() == null ^ this.getState() == null)
return false;
if (other.getState() != null && other.getState().equals(this.getState()) == false)
return false;
if (other.getStartTime() == null ^ this.getStartTime() == null)
return false;
if (other.getStartTime() != null && other.getStartTime().equals(this.getStartTime()) == false)
return false;
if (other.getEndTime() == null ^ this.getEndTime() == null)
return false;
if (other.getEndTime() != null && other.getEndTime().equals(this.getEndTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getJobId() == null) ? 0 : getJobId().hashCode());
hashCode = prime * hashCode + ((getEntityId() == null) ? 0 : getEntityId().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getS3BucketName() == null) ? 0 : getS3BucketName().hashCode());
hashCode = prime * hashCode + ((getS3Path() == null) ? 0 : getS3Path().hashCode());
hashCode = prime * hashCode + ((getEstimatedProgress() == null) ? 0 : getEstimatedProgress().hashCode());
hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
hashCode = prime * hashCode + ((getStartTime() == null) ? 0 : getStartTime().hashCode());
hashCode = prime * hashCode + ((getEndTime() == null) ? 0 : getEndTime().hashCode());
return hashCode;
}
@Override
public MailboxExportJob clone() {
try {
return (MailboxExportJob) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.workmail.model.transform.MailboxExportJobMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
|
|
package apollo.dataadapter.chado.jdbc;
import java.util.ArrayList;
import java.util.List;
import java.sql.Connection;
import java.sql.SQLException;
import org.apache.log4j.*;
import org.bdgp.util.ProgressEvent;
import apollo.config.Config;
import apollo.datamodel.SequenceI;
import apollo.datamodel.StrandedFeatureSet;
import apollo.datamodel.FeatureSet;
import apollo.dataadapter.DataInput;
import apollo.dataadapter.chado.ChadoAdapter;
import apollo.dataadapter.chado.ChadoTransaction;
import apollo.dataadapter.chado.SeqType;
abstract class AbstractChadoInstance implements ChadoInstance {
// -----------------------------------------------------------------------
// Class/static variables
// -----------------------------------------------------------------------
protected final static Logger logger = LogManager.getLogger(AbstractChadoInstance.class);
private final static Double Z = new Double(0.0);
private final static Double D100 = new Double(100.0);
// -----------------------------------------------------------------------
// Instance variables
// -----------------------------------------------------------------------
private JdbcChadoAdapter jdbcChadoAdapter;
private String logDirectory = null;
private ChadoProgram[] genePredictionPrograms;
private ChadoProgram[] searchHitPrograms;
private ChadoProgram[] oneLevelResultPrograms;
private List oneLevelAnnotTypes;
private List threeLevelAnnotTypes;
// JDBCTransactionWriter
private String writebackTemplateFile;
private List chadoTransactionMacros; // TODO: add 'writeback' to make name consistent?
// PureJDBCTransactionWriter
private boolean pureJDBCWriteMode = false;
private boolean pureJDBCCopyOnWrite = false;
private boolean pureJDBCNoCommit = false;
private boolean pureJDBCUseCDS = true;
// cv names
private String featureCVName = null;
private String relationshipCVName = null;
private String propertyTypeCVName = null;
private String seqDescriptionCVName = null;
// cvterm names (defaults appear below)
private String polypeptideType = "polypeptide";
private String seqDescriptionTerm = "description";
private String partOfCvTerm = "part_of";
private String transProtRelationCvTerm = "derives_from";
private String transGeneRelationCvTerm = "derives_from";
private String exonTransRelationCvTerm = "part_of";
private String cdsTransRelationCvTerm = "derives_from";
private String polypeptideCdsRelationCvTerm = "derives_from";
private String polypeptideTransRelationCvTerm = null;
private String transcriptCvTerm = "transcript";
private String syntenyRelationshipType = "paralogous_to";
private String featureOwnerPropertyTerm = "owner";
private String featureCreateDatePropertyTerm = "date";
private String commentPropertyTerm = "comment";
private String synonymPropertyTerm = "synonym";
// boolean flags
private boolean searchHitsHaveFeatLocs = false;
private boolean cacheAnalysisTable = true;
private boolean retrieveAnnotations = true;
private boolean copyGeneModelsIntoResultTier = false;
private boolean queryFeatureIdWithUniquename = true;
private boolean queryFeatureIdWithName = true;
private String topLevelFeatType;
private String id;
/** A list of seq types allowing the user to choose from */
private List seqTypes;
private StrandedFeatureSet results;
private StrandedFeatureSet annots;
private FeatureLocImplementation topFeatLoc;
private boolean useSynonyms;
protected AbstractChadoInstance() {}
protected AbstractChadoInstance(JdbcChadoAdapter jdbcChadoAdapter) {
this.jdbcChadoAdapter = jdbcChadoAdapter;
}
public ChadoInstance cloneInstance() {
try {
// do we need to deep clone anything - probably not
return (ChadoInstance)this.clone();
} catch (CloneNotSupportedException e) {
return null; // shouldn't happen
}
}
public void setId(String id) { this.id = id; }
// rename this getJdbcChadoAdapter - confusing!
public void setChadoAdapter(JdbcChadoAdapter jdbcAdapter) {
this.jdbcChadoAdapter = jdbcAdapter;
}
public JdbcChadoAdapter getChadoAdapter() {
return this.jdbcChadoAdapter;
}
protected Connection getConnection() {
return getChadoAdapter().getConnection();
}
public void setWritebackTemplateFile(String templateFile) {
writebackTemplateFile = templateFile;
}
public String getWritebackTemplateFile() { return writebackTemplateFile; }
public void setPureJDBCWriteMode(boolean writeMode) {
pureJDBCWriteMode = writeMode;
}
public boolean getPureJDBCWriteMode() {
return pureJDBCWriteMode;
}
public void setPureJDBCCopyOnWrite(boolean copyOnWrite) {
pureJDBCCopyOnWrite = copyOnWrite;
}
public boolean getPureJDBCCopyOnWrite() {
return pureJDBCCopyOnWrite;
}
public void setPureJDBCNoCommit(boolean noCommit) {
pureJDBCNoCommit = noCommit;
}
public boolean getPureJDBCNoCommit() {
return pureJDBCNoCommit;
}
public void setPureJDBCUseCDS(boolean useCDS) {
this.pureJDBCUseCDS = useCDS;
}
public boolean getPureJDBCUseCDS() {
return pureJDBCUseCDS;
}
public void setLogDirectory(String path) {
this.logDirectory = path;
}
public String getLogDirectory() {
return this.logDirectory;
}
public void setGenePredictionPrograms(ChadoProgram[] genePredictionPrograms) {
this.genePredictionPrograms = genePredictionPrograms;
}
public boolean getUseSynonyms()
{
return useSynonyms;
}
public void setUseSynonyms(boolean useSynonyms)
{
this.useSynonyms = useSynonyms;
}
/**
* Return an array of all the gene prediction programs. This comes from the XML configuration.
* @return an array of ChadoProgram
*/
public ChadoProgram[] getGenePredictionPrograms() {
return genePredictionPrograms;
}
public String getTranscriptName(FeatureSet tfs, String program) {
return program+":"+tfs.getStart()+"-"+tfs.getEnd();
}
public void setSearchHitPrograms(ChadoProgram[] searchHitPrograms) {
this.searchHitPrograms = searchHitPrograms;
}
/** Return an array of all the search hit programs. This comes from
the XML configuration. */
public ChadoProgram[] getSearchHitPrograms() {
return searchHitPrograms;
}
public void setOneLevelResultPrograms(ChadoProgram[] oneLevelResults) {
oneLevelResultPrograms = oneLevelResults;
}
public ChadoProgram[] getOneLevelResultPrograms() {
return oneLevelResultPrograms;
}
/**
* Get the list of annotation types that are stored in the Chado db as one-level features.
* @return the list of one-level annotation types (never null; empty if not configured)
*/
public List getOneLevelAnnotTypes() {
if (oneLevelAnnotTypes == null)
oneLevelAnnotTypes = new ArrayList(0);
return oneLevelAnnotTypes;
}
public void setOneLevelAnnotTypes(List features) {
oneLevelAnnotTypes = new ArrayList(features);
}
/**
* Get the list of annotation types that are stored in the chado db as three-level features
* (gene-namedFeature-exon).
* @return the list of three-level annotation types (never null; empty if not configured)
*/
public List getThreeLevelAnnotTypes() {
if (threeLevelAnnotTypes == null)
threeLevelAnnotTypes = new ArrayList(0);
return threeLevelAnnotTypes;
}
public void setThreeLevelAnnotTypes(List features) {
threeLevelAnnotTypes = new ArrayList(features);
}
/** Returns true if search hits have feature locs. Hits are the feature sets
that hold the leaf/HSPs. The leaves always have feat locs, but the featSet/hits
don't always have feat locs, which makes it impractical to retrieve out-of-range
leaves/HSPs. Currently rice has hit feat locs; fly & tigr don't. */
public boolean searchHitsHaveFeatLocs() {
return searchHitsHaveFeatLocs;
}
public void setSearchHitsHaveFeatLocs(boolean haveFeatLocs) {
this.searchHitsHaveFeatLocs = haveFeatLocs;
}
protected Long getRelationshipCVTermId(String name) {
return getChadoAdapter().getRelationshipCVTermId(name);
}
public void setRelationshipCVName(String relCV) {
relationshipCVName = relCV;
}
public String getRelationshipCVName() {
return relationshipCVName;
}
/** return string for part of relationship (tigr part_of, fb partof) */
public String getPartOfCvTerm() { return partOfCvTerm; }
public void setPartOfCvTerm(String term) {
partOfCvTerm = term;
}
public String getTransProtRelationTerm() { return transProtRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setTransProtRelationTerm(String term) { transProtRelationCvTerm = term; }
public String getTransGeneRelationTerm() { return transGeneRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setTransGeneRelationTerm(String term) { transGeneRelationCvTerm = term; }
public String getExonTransRelationTerm() { return exonTransRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setExonTransRelationTerm(String term) { exonTransRelationCvTerm = term; }
public String getCdsTransRelationTerm() { return cdsTransRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setCdsTransRelationTerm(String term) { cdsTransRelationCvTerm = term; }
public String getPolypeptideCdsRelationTerm() { return polypeptideCdsRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setPolypeptideCdsRelationTerm(String term) { polypeptideCdsRelationCvTerm = term; }
public String getPolypeptideTransRelationTerm() { return polypeptideTransRelationCvTerm; }
/** Need to set (from chado-adapter.xml) if diff from default */
public void setPolypeptideTransRelationTerm(String term) { polypeptideTransRelationCvTerm = term; }
class ProducedByException extends RelationshipCVException {
ProducedByException(String m) { super(m); }
}
protected Long getProducedByCVTermId() throws ProducedByException {
Long id = getRelationshipCVTermId(getTransProtRelationTerm());
if (id == null) {
String m = "produced by cv term '"+getTransProtRelationTerm()+"' not found in "+
"relationship cv "+getRelationshipCVName();
ProducedByException e = new ProducedByException(m);
logger.error(m, e);
throw e;
}
return id;
}
public void setFeatureCVName(String featCV) {
featureCVName = featCV;
}
public String getFeatureCVName() { return featureCVName; }
/** Old SO/SOFA uses "protein", new uses "polypeptide" (default) */
public void setPolypeptideType(String polypeptideType) {
this.polypeptideType = polypeptideType;
}
public String getPolypeptideType() {
return polypeptideType;
}
public void setSyntenyRelationshipType(String syntenyRelationshipType) {
this.syntenyRelationshipType = syntenyRelationshipType;
}
public String getSyntenyRelationshipType() {
return syntenyRelationshipType;
}
public void setFeatureOwnerPropertyTerm(String ownerTerm) {
this.featureOwnerPropertyTerm = ownerTerm;
}
public String getFeatureOwnerPropertyTerm() {
return this.featureOwnerPropertyTerm;
}
public void setFeatureCreateDatePropertyTerm(String dateTerm) {
this.featureCreateDatePropertyTerm = dateTerm;
}
public String getFeatureCreateDatePropertyTerm() {
return this.featureCreateDatePropertyTerm;
}
public void setCommentPropertyTerm(String commentTerm) {
this.commentPropertyTerm = commentTerm;
}
public String getCommentPropertyTerm() {
return this.commentPropertyTerm;
}
public String getSynonymPropertyTerm()
{
return synonymPropertyTerm;
}
public void setSynonymPropertyTerm(String synonymPropertyTerm)
{
this.synonymPropertyTerm = synonymPropertyTerm;
}
public String getSeqDescriptionCVName() {
// use propertyTypeCVName by default for backward compatibility
return (this.seqDescriptionCVName == null) ? propertyTypeCVName : seqDescriptionCVName;
}
public void setSeqDescriptionCVName(String cvName) {
this.seqDescriptionCVName = cvName;
}
public String getSeqDescriptionTerm() {
return this.seqDescriptionTerm;
}
public void setSeqDescriptionTerm(String term) {
this.seqDescriptionTerm = term;
}
protected Long getPolypeptideCVTermId() {
return getFeatureCVTermId(getPolypeptideType());
}
public Long getTranscriptCvTermId() {
return getFeatureCVTermId(transcriptCvTerm);
}
public String getTranscriptTerm() {
return transcriptCvTerm;
}
/** calls JdbcChadoAdapter -> cvterms should have its own class */
protected Long getFeatureCVTermId(String name) {
return getChadoAdapter().getFeatureCVTermId(name);
}
public void setPropertyTypeCVName(String cv) {
propertyTypeCVName = cv;
}
public String getPropertyTypeCVName() { return propertyTypeCVName; }
/**
* Returns type string as program:sourcename; override if this is
* not what your database requires.
*/
public String getAnalysisType(long analysisId, String program, String programversion, String sourcename) {
if (sourcename != null && sourcename.length() > 0) {
return program + ":" + sourcename;
}
return program;
}
public void setCacheAnalysisTable(boolean cache) {
this.cacheAnalysisTable = cache;
}
public boolean cacheAnalysisTable() {
return this.cacheAnalysisTable;
}
public void setRetrieveAnnotations(boolean retrieveAnnotations) {
this.retrieveAnnotations = retrieveAnnotations;
}
public void setCopyGeneModelsIntoResultTier(boolean copyModels) {
this.copyGeneModelsIntoResultTier = copyModels;
}
public boolean getCopyGeneModelsIntoResultTier() {
return this.copyGeneModelsIntoResultTier;
}
public void setQueryFeatureIdWithUniquename(boolean newVal) {
this.queryFeatureIdWithUniquename = newVal;
}
public boolean getQueryFeatureIdWithUniquename() {
return this.queryFeatureIdWithUniquename;
}
public void setQueryFeatureIdWithName(boolean newVal) {
this.queryFeatureIdWithName = newVal;
}
public boolean getQueryFeatureIdWithName() {
return this.queryFeatureIdWithName;
}
protected void getAnnotations(Connection conn,SequenceI refSeq, StrandedFeatureSet sfs,
FeatureLocImplementation featLocImp, boolean getFeatProps,
boolean getSynonyms, boolean getDbXRefs,
ChadoAdapter adapter) {
if (!retrieveAnnotations)
return;
getChadoAdapter().addOneLevelAnnotations(conn, refSeq, sfs, featLocImp, getFeatProps, getSynonyms, getDbXRefs);
adapter.fireProgressEvent(new ProgressEvent(this,D100, "One Level Annotations retrieved"));
adapter.fireProgressEvent(new ProgressEvent(this, Z, "Retrieving gene models"));
getChadoAdapter().addAnnotationModels(conn, refSeq, sfs, featLocImp, getFeatProps,
getSynonyms,getDbXRefs,getThreeLevelAnnotTypes());
// Continue to fetch gene models: one level AnnotatedFeatures
adapter.fireProgressEvent(new ProgressEvent(this, new Double(75.0), "Gene models retrieved"));
}
/** This is the beginning of trying to make chado-adapter.xml and
transactionXMLTemplate non-redundant, so you don't have to configure the same
things twice. A bunch of the preamble macros can now be derived from the
chado-adapter.xml config and don't need to be in the template. */
public List getChadoTransMacros() {
if (chadoTransactionMacros != null)
return chadoTransactionMacros;
chadoTransactionMacros = new ArrayList(4);
// Feature CV
ChadoTransaction chadoTrans = makeCvTrans(getFeatureCVName());
chadoTransactionMacros.add(chadoTrans);
// Relationship CV
chadoTrans = makeCvTrans(getRelationshipCVName());
chadoTransactionMacros.add(chadoTrans);
// Lookup protein/polypep
chadoTrans = makeCvTermTrans(getPolypeptideType(),getFeatureCVName());
chadoTransactionMacros.add(chadoTrans);
chadoTrans = makeCvTermTrans(getTransProtRelationTerm(),getRelationshipCVName());
chadoTransactionMacros.add(chadoTrans);
// more to come...
return chadoTransactionMacros;
}
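/* Illustrative trace (an assumption, using hypothetical cv names): with featureCVName = "sequence",
relationshipCVName = "relationship", and the defaults polypeptideType = "polypeptide" and
transProtRelationCvTerm = "derives_from", getChadoTransMacros() yields four LOOKUP transactions,
roughly:
  LOOKUP cv      id="sequence"       name="sequence"
  LOOKUP cv      id="relationship"   name="relationship"
  LOOKUP cvterm  id="polypeptide"    cv_id="sequence"      name="polypeptide"
  LOOKUP cvterm  id="derives_from"   cv_id="relationship"  name="derives_from"
*/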
private ChadoTransaction makeCvTrans(String cv) {
ChadoTransaction chadoTrans = new ChadoTransaction();
chadoTrans.setOperation(ChadoTransaction.LOOKUP);
chadoTrans.setTableName("cv");
chadoTrans.setID(cv);
chadoTrans.addProperty("name",cv);
return chadoTrans;
}
private ChadoTransaction makeCvTermTrans(String id, String cv) {
ChadoTransaction chadoTrans = new ChadoTransaction();
chadoTrans.setOperation(ChadoTransaction.LOOKUP);
chadoTrans.setTableName("cvterm");
chadoTrans.setID(id);
chadoTrans.addProperty("cv_id",cv);
chadoTrans.addProperty("name",id); // id & name are same
return chadoTrans;
}
public void setSeqTypeList(List seqTypes) {
this.seqTypes = seqTypes;
}
public int getSeqTypesSize() {
if (seqTypes == null)
return 0;
return seqTypes.size();
}
public SeqType getSeqType(int i) { return (SeqType)seqTypes.get(i); }
/** Assumes there's only one location seq type - returns the first one it finds */
public SeqType getLocationSeqType() {
for (int i=0; i<getSeqTypesSize(); i++) {
SeqType seqType = getSeqType(i);
if (seqType.hasStartAndEnd())
return seqType;
}
return null; // shouldn't happen
}
/** The top level feature type is needed for saving back to chado, and also for querying
locations from the command line (where the type isn't given). This is configured
in the chado-adapter cfg file under <seqTypes> <type> with <isTopLevel>true.
If the top level is not configured, this tries to glean it from the seq types: if there are
any seqTypes with start & end it uses that; if there are no locations it just looks for
'chromosome' in the type name; and if all that fails it just sets the type to "chromosome". */
public String getTopLevelFeatType() {
if (topLevelFeatType != null)
return topLevelFeatType;
// check first if explicitly set with <isTopLevel> in chado config
for (int i=0; i<getSeqTypesSize(); i++) {
SeqType seqType = getSeqType(i);
if (seqType.isTopLevelFeatType()) {
topLevelFeatType = seqType.getName();
return topLevelFeatType;
}
}
// not explicitly set; usually the seqType with start & end is the top level, and
// usually there is only one of these (if there's more than one, log a message)
for (int i=0; i<getSeqTypesSize(); i++) {
SeqType seqType = getSeqType(i);
if (seqType.hasStartAndEnd()) {
// not set - 1st location - hopefully there is only 1 location
if (topLevelFeatType == null) {
topLevelFeatType = seqType.getName();
}
// there are 2 locations! yikes
else {
String m = "2 locations specified in seq types for instance "+id;
if (hasChromosomeInString(seqType.getName()))
topLevelFeatType = seqType.getName();
if (hasChromosomeInString(topLevelFeatType)) {
m += "using "+topLevelFeatType+" for top level feat because it has "
+"'chromosome' in it";
}
else {// chromosome not in either type
m += "Arbitrarily setting top level feat to "+topLevelFeatType+
". Set top level feat type explicitly with <isTopLevel>";
}
logger.info(m);
}
}
}
if (topLevelFeatType != null) // got it with location
return topLevelFeatType;
// isTopLevel not explicitly set nor is there a location - look for chrom
for (int i=0; i<getSeqTypesSize(); i++) {
SeqType seqType = getSeqType(i);
if (hasChromosomeInString(seqType.getName())) {
topLevelFeatType = seqType.getName();
logger.info("setting top level feat type to "+topLevelFeatType+
" because it had 'chromosome' in it. Set explicitly "+
"with <isTopLevel>true in <sequenceTypes> <type>");
return topLevelFeatType;
}
}
// everything failed - just set to chromosome and hope it works
// let's hope it doesn't get to this point - it shouldn't
topLevelFeatType = "chromosome";
logger.info("unable to discern top level feat type from config. Just setting "
+"it to 'chromosome' and hoping to get lucky. Top level "
+"seq type needs to be configured.");
return topLevelFeatType;
}
private boolean hasChromosomeInString(String type) {
return type.indexOf("chromosome") != -1;
}
/** If the DataInput's type is configured as a location, then change the dataInput to be
a location - is there a better way to do this? */
public void checkForLocation(DataInput dataInput) {
if (dataInput.getSoType() == null && dataInput.isRegion())
dataInput.setSoType(getLocationSeqType().getName());
if (isLocation(dataInput.getSoType()) && !dataInput.isRegion()) {
// it is a location - make it so, this causes loc in string to be parsed
// which is the desired effect
dataInput.makeDataTypeRegion(); // a little silly
}
}
private boolean isLocation(String type) {
SeqType st = getSeqType(type);
if (st == null)
return false;
return st.hasStartAndEnd();
}
public SeqType getSeqType(String name) {
for (int i=0; i<getSeqTypesSize(); i++) {
if (getSeqType(i).getName().equals(name))
return getSeqType(i);
}
return null;
}
public boolean typeHasStartAndEnd(String typeName) {
SeqType st = getSeqType(typeName);
if (st == null) return false;
return st.hasStartAndEnd();
}
public StrandedFeatureSet getResultStrandedFeatSet() {
if (results == null)
results = new StrandedFeatureSet();
return results;
}
public StrandedFeatureSet getAnnotStrandedFeatSet() {
if (annots == null)
annots = new StrandedFeatureSet();
return annots;
}
protected void clear() {
annots = null;
results = null;
}
void setTopFeatLoc(FeatureLocImplementation topFeatLoc) {
this.topFeatLoc = topFeatLoc;
}
public FeatureLocImplementation getTopFeatLoc() {
return topFeatLoc;
}
}
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportActionProxy;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiFunction;
/**
* An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through
* transport.
*/
public class SearchTransportService {
public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]";
public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]";
public static final String CLEAR_SCROLL_CONTEXTS_ACTION_NAME = "indices:data/read/search[clear_scroll_contexts]";
public static final String DFS_ACTION_NAME = "indices:data/read/search[phase/dfs]";
public static final String QUERY_ACTION_NAME = "indices:data/read/search[phase/query]";
public static final String QUERY_ID_ACTION_NAME = "indices:data/read/search[phase/query/id]";
public static final String QUERY_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query/scroll]";
public static final String QUERY_FETCH_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query+fetch/scroll]";
public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]";
public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]";
public static final String QUERY_CAN_MATCH_NAME = "indices:data/read/search[can_match]";
private final TransportService transportService;
private final NodeClient client;
private final BiFunction<Transport.Connection, SearchActionListener, ActionListener> responseWrapper;
private final Map<String, Long> clientConnections = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
public SearchTransportService(TransportService transportService, NodeClient client,
BiFunction<Transport.Connection, SearchActionListener, ActionListener> responseWrapper) {
this.transportService = transportService;
this.client = client;
this.responseWrapper = responseWrapper;
}
public void sendFreeContext(Transport.Connection connection, final ShardSearchContextId contextId, OriginalIndices originalIndices) {
transportService.sendRequest(connection, FREE_CONTEXT_ACTION_NAME, new SearchFreeContextRequest(originalIndices, contextId),
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(new ActionListener<SearchFreeContextResponse>() {
@Override
public void onResponse(SearchFreeContextResponse response) {
// no need to respond if it was freed or not
}
@Override
public void onFailure(Exception e) {
}
}, SearchFreeContextResponse::new));
}
public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId,
ActionListener<SearchFreeContextResponse> listener) {
transportService.sendRequest(connection, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId),
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new));
}
public void sendCanMatch(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final
ActionListener<SearchService.CanMatchResponse> listener) {
transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task,
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchService.CanMatchResponse::new));
}
public void sendClearAllScrollContexts(Transport.Connection connection, final ActionListener<TransportResponse> listener) {
transportService.sendRequest(connection, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, TransportRequest.Empty.INSTANCE,
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, (in) -> TransportResponse.Empty.INSTANCE));
}
public void sendExecuteDfs(Transport.Connection connection, final ShardSearchRequest request, SearchTask task,
final SearchActionListener<DfsSearchResult> listener) {
transportService.sendChildRequest(connection, DFS_ACTION_NAME, request, task,
new ConnectionCountingHandler<>(listener, DfsSearchResult::new, clientConnections, connection.getNode().getId()));
}
public void sendExecuteQuery(Transport.Connection connection, final ShardSearchRequest request, SearchTask task,
final SearchActionListener<SearchPhaseResult> listener) {
// we optimize this and expect a QueryFetchSearchResult if we only have a single shard in the search request
// this used to be the QUERY_AND_FETCH which doesn't exist anymore.
final boolean fetchDocuments = request.numberOfShards() == 1;
Writeable.Reader<SearchPhaseResult> reader = fetchDocuments ? QueryFetchSearchResult::new : QuerySearchResult::new;
final ActionListener handler = responseWrapper.apply(connection, listener);
transportService.sendChildRequest(connection, QUERY_ACTION_NAME, request, task,
new ConnectionCountingHandler<>(handler, reader, clientConnections, connection.getNode().getId()));
}
public void sendExecuteQuery(Transport.Connection connection, final QuerySearchRequest request, SearchTask task,
final SearchActionListener<QuerySearchResult> listener) {
transportService.sendChildRequest(connection, QUERY_ID_ACTION_NAME, request, task,
new ConnectionCountingHandler<>(listener, QuerySearchResult::new, clientConnections, connection.getNode().getId()));
}
public void sendExecuteScrollQuery(Transport.Connection connection, final InternalScrollSearchRequest request, SearchTask task,
final SearchActionListener<ScrollQuerySearchResult> listener) {
transportService.sendChildRequest(connection, QUERY_SCROLL_ACTION_NAME, request, task,
new ConnectionCountingHandler<>(listener, ScrollQuerySearchResult::new, clientConnections, connection.getNode().getId()));
}
public void sendExecuteScrollFetch(Transport.Connection connection, final InternalScrollSearchRequest request, SearchTask task,
final SearchActionListener<ScrollQueryFetchSearchResult> listener) {
transportService.sendChildRequest(connection, QUERY_FETCH_SCROLL_ACTION_NAME, request, task,
new ConnectionCountingHandler<>(listener, ScrollQueryFetchSearchResult::new, clientConnections,
connection.getNode().getId()));
}
public void sendExecuteFetch(Transport.Connection connection, final ShardFetchSearchRequest request, SearchTask task,
final SearchActionListener<FetchSearchResult> listener) {
sendExecuteFetch(connection, FETCH_ID_ACTION_NAME, request, task, listener);
}
public void sendExecuteFetchScroll(Transport.Connection connection, final ShardFetchRequest request, SearchTask task,
final SearchActionListener<FetchSearchResult> listener) {
sendExecuteFetch(connection, FETCH_ID_SCROLL_ACTION_NAME, request, task, listener);
}
private void sendExecuteFetch(Transport.Connection connection, String action, final ShardFetchRequest request, SearchTask task,
final SearchActionListener<FetchSearchResult> listener) {
transportService.sendChildRequest(connection, action, request, task,
new ConnectionCountingHandler<>(listener, FetchSearchResult::new, clientConnections, connection.getNode().getId()));
}
/**
* Used by {@link TransportSearchAction} to send the expand queries (field collapsing).
*/
void sendExecuteMultiSearch(final MultiSearchRequest request, SearchTask task,
final ActionListener<MultiSearchResponse> listener) {
final Transport.Connection connection = transportService.getConnection(transportService.getLocalNode());
transportService.sendChildRequest(connection, MultiSearchAction.NAME, request, task,
new ConnectionCountingHandler<>(listener, MultiSearchResponse::new, clientConnections, connection.getNode().getId()));
}
public RemoteClusterService getRemoteClusterService() {
return transportService.getRemoteClusterService();
}
/**
* Return a map of nodeId to pending number of search requests.
* This is a snapshot of the current pending search and not a live map.
*/
public Map<String, Long> getPendingSearchRequests() {
return new HashMap<>(clientConnections);
}
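/*
 * Illustrative sketch (an assumption, not part of this class): the snapshot returned by
 * getPendingSearchRequests() can feed simple per-node monitoring. "searchTransportService" and
 * "logger" are hypothetical caller-side names.
 *
 *   for (Map.Entry<String, Long> entry : searchTransportService.getPendingSearchRequests().entrySet()) {
 *       logger.info("node [{}] has [{}] pending search requests", entry.getKey(), entry.getValue());
 *   }
 */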
static class ScrollFreeContextRequest extends TransportRequest {
private ShardSearchContextId contextId;
ScrollFreeContextRequest(ShardSearchContextId contextId) {
this.contextId = Objects.requireNonNull(contextId);
}
ScrollFreeContextRequest(StreamInput in) throws IOException {
super(in);
contextId = new ShardSearchContextId(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
contextId.writeTo(out);
}
public ShardSearchContextId id() {
return this.contextId;
}
}
static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest {
private OriginalIndices originalIndices;
SearchFreeContextRequest(OriginalIndices originalIndices, ShardSearchContextId id) {
super(id);
this.originalIndices = originalIndices;
}
SearchFreeContextRequest(StreamInput in) throws IOException {
super(in);
originalIndices = OriginalIndices.readOriginalIndices(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
OriginalIndices.writeOriginalIndices(originalIndices, out);
}
@Override
public String[] indices() {
if (originalIndices == null) {
return null;
}
return originalIndices.indices();
}
@Override
public IndicesOptions indicesOptions() {
if (originalIndices == null) {
return null;
}
return originalIndices.indicesOptions();
}
}
public static class SearchFreeContextResponse extends TransportResponse {
private boolean freed;
SearchFreeContextResponse(StreamInput in) throws IOException {
freed = in.readBoolean();
}
SearchFreeContextResponse(boolean freed) {
this.freed = freed;
}
public boolean isFreed() {
return freed;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(freed);
}
}
public static void registerRequestHandler(TransportService transportService, SearchService searchService) {
transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ScrollFreeContextRequest::new,
(request, channel, task) -> {
boolean freed = searchService.freeReaderContext(request.id());
channel.sendResponse(new SearchFreeContextResponse(freed));
});
TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, false, SearchFreeContextResponse::new);
transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new,
(request, channel, task) -> {
boolean freed = searchService.freeReaderContext(request.id());
channel.sendResponse(new SearchFreeContextResponse(freed));
});
TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, false, SearchFreeContextResponse::new);
transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, ThreadPool.Names.SAME,
TransportRequest.Empty::new,
(request, channel, task) -> {
searchService.freeAllScrollContexts();
channel.sendResponse(TransportResponse.Empty.INSTANCE);
});
TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, false,
(in) -> TransportResponse.Empty.INSTANCE);
transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new,
(request, channel, task) ->
searchService.executeDfsPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, DFS_ACTION_NAME, request))
);
TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, true, DfsSearchResult::new);
transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new,
(request, channel, task) ->
searchService.executeQueryPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, QUERY_ACTION_NAME, request))
);
TransportActionProxy.registerProxyActionWithDynamicResponseType(transportService, QUERY_ACTION_NAME, true,
(request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? QueryFetchSearchResult::new : QuerySearchResult::new);
transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, ThreadPool.Names.SAME, QuerySearchRequest::new,
(request, channel, task) -> {
searchService.executeQueryPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, QUERY_ID_ACTION_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, true, QuerySearchResult::new);
transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, InternalScrollSearchRequest::new,
(request, channel, task) -> {
searchService.executeQueryPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, QUERY_SCROLL_ACTION_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, true, ScrollQuerySearchResult::new);
transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, InternalScrollSearchRequest::new,
(request, channel, task) -> {
searchService.executeFetchPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, QUERY_FETCH_SCROLL_ACTION_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, true, ScrollQueryFetchSearchResult::new);
transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ShardFetchRequest::new,
(request, channel, task) -> {
searchService.executeFetchPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, FETCH_ID_SCROLL_ACTION_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, true, FetchSearchResult::new);
transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SAME, true, true, ShardFetchSearchRequest::new,
(request, channel, task) -> {
searchService.executeFetchPhase(request, (SearchShardTask) task,
new ChannelActionListener<>(channel, FETCH_ID_ACTION_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, true, FetchSearchResult::new);
// this is cheap, it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread
transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new,
(request, channel, task) -> {
searchService.canMatch(request, new ChannelActionListener<>(channel, QUERY_CAN_MATCH_NAME, request));
});
TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, true, SearchService.CanMatchResponse::new);
}
/**
* Returns a connection to the given node on the provided cluster. If the cluster alias is <code>null</code> the node will be resolved
* against the local cluster.
* @param clusterAlias the cluster alias the node should be resolved against
* @param node the node to resolve
* @return a connection to the given node belonging to the cluster with the provided alias.
*/
public Transport.Connection getConnection(@Nullable String clusterAlias, DiscoveryNode node) {
if (clusterAlias == null) {
return transportService.getConnection(node);
} else {
return transportService.getRemoteClusterService().getConnection(node, clusterAlias);
}
}
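/*
 * Illustrative sketch (not part of the original class): resolving connections for local versus
 * remote shards. "remoteAlias" and the node variables are hypothetical.
 *
 *   Transport.Connection local  = searchTransportService.getConnection(null, localNode);
 *   Transport.Connection remote = searchTransportService.getConnection("remoteAlias", remoteNode);
 *
 * A null alias resolves through the local TransportService; a non-null alias is delegated to the
 * RemoteClusterService configured for that alias.
 */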
final class ConnectionCountingHandler<Response extends TransportResponse> extends ActionListenerResponseHandler<Response> {
private final Map<String, Long> clientConnections;
private final String nodeId;
ConnectionCountingHandler(final ActionListener<? super Response> listener, final Writeable.Reader<Response> responseReader,
final Map<String, Long> clientConnections, final String nodeId) {
super(listener, responseReader);
this.clientConnections = clientConnections;
this.nodeId = nodeId;
// Increment the number of connections for this node by one
clientConnections.compute(nodeId, (id, conns) -> conns == null ? 1 : conns + 1);
}
@Override
public void handleResponse(Response response) {
super.handleResponse(response);
// Decrement the number of connections or remove it entirely if there are no more connections
// We need to remove the entry here so we don't leak when nodes go away forever
assert assertNodePresent();
clientConnections.computeIfPresent(nodeId, (id, conns) -> conns.longValue() == 1 ? null : conns - 1);
}
@Override
public void handleException(TransportException e) {
super.handleException(e);
// Decrement the number of connections or remove it entirely if there are no more connections
// We need to remove the entry here so we don't leak when nodes go away forever
assert assertNodePresent();
clientConnections.computeIfPresent(nodeId, (id, conns) -> conns.longValue() == 1 ? null : conns - 1);
}
private boolean assertNodePresent() {
clientConnections.compute(nodeId, (id, conns) -> {
assert conns != null : "number of connections for " + id + " is null, but should be an integer";
assert conns >= 1 : "number of connections for " + id + " should be >= 1 but was " + conns;
return conns;
});
// Always return true; the real checks are the asserts above. Returning a boolean just
// lets the caller wrap this call in an assert so it is skipped when assertions are disabled
return true;
}
}
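/*
 * Minimal standalone sketch (assumed example, not part of this class) of the counting pattern
 * used by ConnectionCountingHandler:
 *
 *   Map<String, Long> counts = new ConcurrentHashMap<>();
 *   counts.compute("node-1", (id, n) -> n == null ? 1 : n + 1);                     // request sent
 *   counts.computeIfPresent("node-1", (id, n) -> n.longValue() == 1 ? null : n - 1); // response or exception
 *
 * Returning null from computeIfPresent removes the entry, which is what keeps the map from
 * accumulating ids of nodes that have left the cluster for good.
 */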
public void cancelSearchTask(SearchTask task, String reason) {
CancelTasksRequest req = new CancelTasksRequest()
.setTaskId(new TaskId(client.getLocalNodeId(), task.getId()))
.setReason("Fatal failure during search: " + reason);
// force the origin to execute the cancellation as a system user
new OriginSettingClient(client, GetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.wrap(() -> {}));
}
public NamedWriteableRegistry getNamedWriteableRegistry() {
return client.getNamedWriteableRegistry();
}
}
|
|
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.metric.v2;
import java.util.List;
import java.util.Map;
import org.deidentifier.arx.metric.InformationLoss;
import org.deidentifier.arx.metric.Metric;
import org.deidentifier.arx.metric.Metric.AggregateFunction;
/**
* This internal class provides access to version 2 of all metrics. Users of the API should use
* <code>org.deidentifier.arx.metric.Metric</code> for creating instances of metrics for information loss.
*
* @author Fabian Prasser
*/
public class __MetricV2 {
/**
* Creates a new instance of the AECS metric.
*
* @return
*/
public static Metric<ILSingleDimensional> createAECSMetric() {
return new MetricSDAECS();
}
/**
* Creates a new instance of the AECS metric.
*
* @param rowCount
* @return
*/
public static Metric<ILSingleDimensional> createAECSMetric(double rowCount) {
return new MetricSDAECS(rowCount);
}
/**
* Creates an instance of the ambiguity metric.
*
* @return
*/
public static Metric<ILSingleDimensional> createAmbiguityMetric() {
return new MetricSDNMAmbiguity();
}
/**
* Creates an instance of the discernability metric.
*
* @return
*/
public static Metric<ILSingleDimensional> createDiscernabilityMetric() {
return createDiscernabilityMetric(false);
}
/**
* Creates an instance of the discernability metric. The monotonic variant is DM*.
*
* @param monotonic If set to true, the monotonic variant (DM*) will be created
* @return
*/
public static Metric<ILSingleDimensional> createDiscernabilityMetric(boolean monotonic) {
return createDiscernabilityMetric(monotonic, 0);
}
/**
* Creates an instance of the discernability metric. The monotonic variant is DM*.
*
* @param monotonic If set to true, the monotonic variant (DM*) will be created
* @param numTuples Pre-initialization
* @return
*/
public static Metric<ILSingleDimensional> createDiscernabilityMetric(boolean monotonic, double numTuples) {
if (monotonic) {
MetricSDDiscernability result = new MetricSDDiscernability();
result.setNumTuples(numTuples);
return result;
} else {
MetricSDNMDiscernability result = new MetricSDNMDiscernability();
result.setNumTuples(numTuples);
return result;
}
}
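/*
 * Illustrative sketch (hypothetical variable names): choosing between the two discernability
 * variants created above.
 *
 *   Metric<ILSingleDimensional> dm     = __MetricV2.createDiscernabilityMetric(false); // non-monotonic DM
 *   Metric<ILSingleDimensional> dmStar = __MetricV2.createDiscernabilityMetric(true);  // monotonic DM*
 *
 * As the class comment notes, application code would normally obtain these through the public
 * org.deidentifier.arx.metric.Metric facade rather than calling __MetricV2 directly.
 */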
/**
* Creates an instance of the non-monotonic non-uniform entropy metric. The default aggregate function,
* which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyMetric() {
return createEntropyMetric(false, AggregateFunction.SUM);
}
/**
* Creates an instance of the non-uniform entropy metric. The default aggregate function,
* which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic) {
return createEntropyMetric(monotonic, AggregateFunction.SUM);
}
/**
* Creates an instance of the non-uniform entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
* @param function The aggregate function to be used for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic, AggregateFunction function) {
if (monotonic) {
return new MetricMDNUEntropy(function);
} else {
return new MetricMDNUNMEntropy(function);
}
}
/**
* Creates an instance of the non-uniform entropy metric. The default aggregate function,
* which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
* @param cache
* @param cardinalities
* @param hierarchies
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic, double[][] cache, int[][][] cardinalities, int[][][] hierarchies) {
MetricMDNUEntropyPrecomputed result = (MetricMDNUEntropyPrecomputed)createEntropyMetric(monotonic, AggregateFunction.SUM);
result.initialize(cache, cardinalities, hierarchies);
return result;
}
/**
* Creates an instance of the non-uniform entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to be used for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyOriginalMetric(AggregateFunction function) {
return new MetricMDNUNMEntropyOriginal(function);
}
/**
* Creates an instance of the non-uniform entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to be used for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createEntropyOriginalNormalizedMetric(AggregateFunction function) {
return new MetricMDNUNMEntropyOriginalNormalized(function);
}
/**
* Creates an instance of the height metric. The default aggregate function, which is the sum-function,
* will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createHeightMetric() {
return new MetricMDHeight();
}
/**
* Creates an instance of the height metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to use for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createHeightMetric(AggregateFunction function) {
return new MetricMDHeight(function);
}
/**
* Creates an instance of the height metric. The default aggregate function, which is the sum-function,
* will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param minHeight
* @param maxHeight
* @return
*/
public static Metric<AbstractILMultiDimensional> createHeightMetric(int minHeight, int maxHeight) {
MetricMDHeight result = new MetricMDHeight();
result.initialize(minHeight, maxHeight);
return result;
}
/**
* Helper method. Normally, there should be no need to call this
* @param value
* @return
*/
public static InformationLoss<?> createILMultiDimensionalArithmeticMean(double value) {
return new ILMultiDimensionalArithmeticMean(value);
}
/**
* Helper method. Normally, there should be no need to call this
* @param value
* @return
*/
public static InformationLoss<?> createILMultiDimensionalSum(double value) {
return new ILMultiDimensionalSum(value);
}
/**
* Helper method. Normally, there should be no need to call this
* @param value
* @return
*/
public static InformationLoss<?> createILSingleDimensional(double value) {
return new ILSingleDimensional(value);
}
/**
* Creates an instance of the KL Divergence metric.
*
* @return
*/
public static Metric<ILSingleDimensional> createKLDivergenceMetric() {
return new MetricSDNMKLDivergence();
}
/**
* Creates an instance of the loss metric which treats generalization and suppression equally.
* The default aggregate function, which is the rank function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createLossMetric() {
return new MetricMDNMLoss();
}
/**
* Creates an instance of the loss metric which treats generalization and suppression equally.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createLossMetric(AggregateFunction function) {
return new MetricMDNMLoss(function);
}
/**
* Creates an instance of the loss metric with factors for weighting generalization and suppression.
* The default aggregate function, which is the rank function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param gsFactor A factor [0,1] weighting generalization and suppression.
* The default value is 0.5, which means that generalization
* and suppression will be treated equally. A factor of 0
* will favor suppression, and a factor of 1 will favor
* generalization. The values in between can be used for
* balancing both methods.
* @return
*/
public static Metric<AbstractILMultiDimensional> createLossMetric(double gsFactor) {
return new MetricMDNMLoss(gsFactor, AggregateFunction.RANK);
}
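/*
 * Illustrative sketch (hypothetical values): the gsFactor documented above shifts the weighting
 * between generalization and suppression.
 *
 *   Metric<AbstractILMultiDimensional> balanced  = __MetricV2.createLossMetric(0.5); // default, equal weighting
 *   Metric<AbstractILMultiDimensional> favorSupp = __MetricV2.createLossMetric(0.0); // favors suppression
 *   Metric<AbstractILMultiDimensional> favorGen  = __MetricV2.createLossMetric(1.0); // favors generalization
 */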
/**
* Creates an instance of the loss metric with factors for weighting generalization and suppression.
* This metric will respect attribute weights defined in the configuration.
*
* @param gsFactor A factor [0,1] weighting generalization and suppression.
* The default value is 0.5, which means that generalization
* and suppression will be treated equally. A factor of 0
* will favor suppression, and a factor of 1 will favor
* generalization. The values in between can be used for
* balancing both methods.
*
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createLossMetric(double gsFactor, AggregateFunction function) {
return new MetricMDNMLoss(gsFactor, function);
}
/**
* Creates an instance of the normalized entropy metric.
* The default aggregate function, which is the sum function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createNormalizedEntropyMetric() {
return new MetricMDNUNMNormalizedEntropy();
}
/**
* Creates an instance of the normalized entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createNormalizedEntropyMetric(AggregateFunction function) {
return new MetricMDNUNMNormalizedEntropy(function);
}
/**
* Creates an instance of the non-monotonic precision metric.
* The default aggregate function, which is the arithmetic mean, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecisionMetric() {
return createPrecisionMetric(false, AggregateFunction.ARITHMETIC_MEAN);
}
/**
* Creates an instance of the non-monotonic precision metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param function The aggregate function to use for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecisionMetric(AggregateFunction function) {
return createPrecisionMetric(false, function);
}
/**
* Creates an instance of the precision metric.
* The default aggregate function, which is the arithmetic mean, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic) {
return createPrecisionMetric(monotonic, AggregateFunction.ARITHMETIC_MEAN);
}
/**
* Creates an instance of the precision metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
* @param function
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic, AggregateFunction function) {
if (monotonic) {
return new MetricMDPrecision(function);
} else {
return new MetricMDNMPrecision(function);
}
}
/**
* Creates an instance of the precision metric.
* The default aggregate function, which is the arithmetic mean, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param monotonic If set to true, the monotonic variant of the metric will be created
* @param heights
* @param cells
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic, int[] heights, double cells) {
MetricMDNMPrecision result = (MetricMDNMPrecision)createPrecisionMetric(monotonic, AggregateFunction.ARITHMETIC_MEAN);
result.initialize(heights, cells);
return result;
}
/**
* Creates a potentially precomputed instance of the non-monotonic non-uniform entropy metric. The default aggregate function,
* which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold) {
return createPrecomputedEntropyMetric(threshold, false, AggregateFunction.SUM);
}
/**
* Creates a potentially precomputed instance of the non-uniform entropy metric. The default aggregate function,
* which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @param monotonic If set to true, the monotonic variant of the metric will be created
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold, boolean monotonic) {
return createPrecomputedEntropyMetric(threshold, monotonic, AggregateFunction.SUM);
}
/**
* Creates a potentially precomputed instance of the non-uniform entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @param monotonic If set to true, the monotonic variant of the metric will be created
* @param function The aggregate function to be used for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold, boolean monotonic, AggregateFunction function) {
if (monotonic) {
return new MetricMDNUEntropyPotentiallyPrecomputed(threshold, function);
} else {
return new MetricMDNUNMEntropyPotentiallyPrecomputed(threshold, function);
}
}
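/*
 * Illustrative sketch (hypothetical threshold): with a threshold of 0.1 the precomputed variant
 * is only used when, for every quasi-identifier, the number of distinct values is at most 10%
 * of the number of rows; otherwise the on-the-fly variant is used.
 *
 *   Metric<AbstractILMultiDimensional> metric =
 *       __MetricV2.createPrecomputedEntropyMetric(0.1d, false, AggregateFunction.SUM);
 */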
/**
* Creates a potentially precomputed instance of the loss metric which treats generalization
* and suppression equally.
* The default aggregate function, which is the rank function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold) {
return new MetricMDNMLossPotentiallyPrecomputed(threshold);
}
/**
* Creates a potentially precomputed instance of the loss metric which treats generalization and suppression equally.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, AggregateFunction function) {
return new MetricMDNMLossPotentiallyPrecomputed(threshold, function);
}
/**
* Creates a potentially precomputed instance of the loss metric with factors for weighting generalization and suppression.
* The default aggregate function, which is the rank function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @param gsFactor A factor [0,1] weighting generalization and suppression.
* The default value is 0.5, which means that generalization
* and suppression will be treated equally. A factor of 0
* will favor suppression, and a factor of 1 will favor
* generalization. The values in between can be used for
* balancing both methods.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, double gsFactor) {
return new MetricMDNMLossPotentiallyPrecomputed(threshold, gsFactor, AggregateFunction.RANK);
}
/**
* Creates a potentially precomputed instance of the loss metric with factors for weighting generalization and suppression.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
* @param gsFactor A factor [0,1] weighting generalization and suppression.
* The default value is 0.5, which means that generalization
* and suppression will be treated equally. A factor of 0
* will favor suppression, and a factor of 1 will favor
* generalization. The values in between can be used for
* balancing both methods.
*
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, double gsFactor, AggregateFunction function) {
return new MetricMDNMLossPotentiallyPrecomputed(threshold, gsFactor, function);
}
/**
* Creates a potentially precomputed instance of the normalized entropy metric.
* The default aggregate function, which is the sum function, will be used.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedNormalizedEntropyMetric(double threshold) {
return new MetricMDNUNMNormalizedEntropyPotentiallyPrecomputed(threshold);
}
/**
* Creates a potentially precomputed instance of the normalized entropy metric.
* This metric will respect attribute weights defined in the configuration.
*
* @param threshold The precomputed variant of the metric will be used if
* #distinctValues / #rows <= threshold for all quasi-identifiers.
*
* @param function The aggregate function to use for comparing results
* @return
*/
public static Metric<AbstractILMultiDimensional> createPrecomputedNormalizedEntropyMetric(double threshold, AggregateFunction function) {
return new MetricMDNUNMNormalizedEntropyPotentiallyPrecomputed(threshold, function);
}
/**
* Creates an instance of a metric with statically defined information loss.
* The default aggregate function, which is the sum-function, will be used for comparing results.
* This metric will respect attribute weights defined in the configuration.
*
* @param loss User defined information loss per attribute
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createStaticMetric(Map<String, List<Double>> loss) {
return new MetricMDStatic(loss);
}
/**
* Creates an instance of a metric with statically defined information loss.
* This metric will respect attribute weights defined in the configuration.
*
* @param loss User defined information loss per attribute
* @param function The aggregate function to use for comparing results
*
* @return
*/
public static Metric<AbstractILMultiDimensional> createStaticMetric(Map<String, List<Double>> loss, AggregateFunction function) {
return new MetricMDStatic(function, loss);
}
}
|
|
package com.codepath.apps.restclienttemplate.models;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import android.util.Log;
import android.widget.ImageView;
import android.widget.TextView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import com.codepath.apps.restclienttemplate.MyDatabase;
import com.codepath.apps.restclienttemplate.R;
import com.codepath.apps.restclienttemplate.TwitterClient;
import com.raizlabs.android.dbflow.annotation.Column;
import com.raizlabs.android.dbflow.annotation.ForeignKey;
import com.raizlabs.android.dbflow.annotation.PrimaryKey;
import com.raizlabs.android.dbflow.annotation.Table;
import com.raizlabs.android.dbflow.sql.language.Select;
import com.raizlabs.android.dbflow.structure.BaseModel;
import butterknife.Bind;
/**
* Created by gretel on 9/25/17.
*/
@Table(database = MyDatabase.class)
public class Tweet extends BaseModel implements Parcelable{
//list out the attributes
@PrimaryKey
@Column
Long id_tweet;
@Column
private String body;
@Column
private String createdAt;
@Column
@ForeignKey(saveForeignKeyModel = true)
private User user;
@Nullable
private Entity entity;
@Column
private int retweetCount;
@Column
private int favoriteCount;
@Column
private boolean isRetweeted;
@Column
private boolean isFavorited;
public Tweet() {
}
public static Long lastTweetId;
public Long getId() {
return id_tweet;
}
public void setId(Long id) {
this.id_tweet = id;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public String getBody() {
return body;
}
public void setBody(String body) {
this.body = body;
}
public String getCreatedAt() {
return TwitterClient.getTimeAgo(createdAt);
}
public void setCreatedAt(String createdAt) {
this.createdAt = createdAt;
}
public int getRetweetCount() {
return retweetCount;
}
public void setRetweetCount(int retweetCount) {
this.retweetCount = retweetCount;
}
public Boolean getRetweeted() {
return isRetweeted;
}
public void setRetweeted(Boolean retweeted) {
isRetweeted = retweeted;
}
public Boolean getFavorited() {
return isFavorited;
}
public void setFavorited(Boolean favorited) {
isFavorited = favorited;
}
public boolean isRetweeted() {
return isRetweeted;
}
public Boolean isFavorited() {
return isFavorited;
}
@Nullable
public Entity getEntity() {
return entity;
}
public void setEntity(@Nullable Entity entity) {
this.entity = entity;
}
public int getFavoriteCount() {
return favoriteCount;
}
public void setFavoriteCount(int favoriteCount) {
this.favoriteCount = favoriteCount;
}
public static Tweet fromJson(JSONObject json) {
Tweet tweet = new Tweet();
try {
// tweet.id_tweet = json.getLong("id_str");
tweet.id_tweet = json.getLong("id");
tweet.body = json.getString("text");
tweet.createdAt = json.getString("created_at");
tweet.retweetCount = json.getInt("retweet_count");
tweet.user = User.fromJson(json.getJSONObject("user"));
JSONObject entities = json.optJSONObject("entities");
tweet.entity = entities == null ? null : Entity.fromJSON(entities);
tweet.favoriteCount = json.getInt("favorite_count");
tweet.isRetweeted = json.getBoolean("retweeted");
tweet.isFavorited = json.getBoolean("favorited");
if(tweet.getBody().startsWith("RT")){
tweet.isFavorited = false;
tweet.favoriteCount = 0;
}
} catch (JSONException e) {
e.printStackTrace();
}
return tweet;
}
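/*
 * Illustrative sketch (hypothetical input): parsing a single status object as returned by the
 * Twitter REST API and persisting it through DBFlow so the timeline survives restarts.
 *
 *   JSONObject json = ...; // one element of the home-timeline JSON array
 *   Tweet tweet = Tweet.fromJson(json);
 *   tweet.save(); // BaseModel persistence
 */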
public static ArrayList<Tweet> fromJson(JSONArray json) {
ArrayList<Tweet> tweets = new ArrayList<Tweet>(json.length());
for (int i = 0; i < json.length(); i++) {
try {
Tweet tweet = fromJson(json.getJSONObject(i));
if(tweet != null) {
tweets.add(tweet);
}
} catch (JSONException e) {
e.printStackTrace();
continue;
}
}
return tweets;
}
protected Tweet(Parcel in) {
id_tweet = in.readLong();
body = in.readString();
createdAt = in.readString();
retweetCount = in.readInt();
user = (User) in.readValue(User.class.getClassLoader());
entity = (Entity) in.readValue(Entity.class.getClassLoader());
favoriteCount = in.readInt();
isRetweeted = in.readByte() != 0;
isFavorited = in.readByte() != 0;
}
public static Tweet byId(long id){
return new Select().from(Tweet.class).where(Tweet_Table.id_tweet.eq(id)).querySingle();
}
public static List<Tweet> recentItems(){
return new Select().from(Tweet.class).orderBy(Tweet_Table.id_tweet, false).limit(300).queryList();
}
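/*
 * Illustrative sketch: the two DBFlow queries above back an offline cache. recentItems() loads
 * the newest 300 rows (ordered by descending id) for display when the network is unavailable,
 * while byId(...) fetches a single persisted tweet.
 *
 *   List<Tweet> cached = Tweet.recentItems();
 *   Tweet newest = cached.isEmpty() ? null : Tweet.byId(cached.get(0).getId());
 */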
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(id_tweet);
dest.writeString(body);
dest.writeString(createdAt);
dest.writeInt(retweetCount);
dest.writeValue(user);
dest.writeValue(entity);
dest.writeInt(favoriteCount);
dest.writeByte((byte) (isRetweeted ? 1 : 0));
dest.writeByte((byte) (isFavorited ? 1 : 0));
}
@SuppressWarnings("unused")
public static final Parcelable.Creator<Tweet> CREATOR = new Parcelable.Creator<Tweet>() {
@Override
public Tweet createFromParcel(Parcel in) {
return new Tweet(in);
}
@Override
public Tweet[] newArray(int size) {
return new Tweet[size];
}
};
}
|
|
package com.github.teozfrank.duelme.util;
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2014 teozfrank
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import com.github.teozfrank.duelme.main.DuelMe;
import com.github.teozfrank.duelme.threads.DuelStartThread;
import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.block.Sign;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.potion.PotionEffect;
import java.util.*;
public class DuelManager {
private DuelMe plugin;
private List<DuelRequest> duelRequests;
/**
* list to hold the current spectating player uuids
*/
private List<UUID> spectatingPlayerUUIDs;
/**
* list of queued players
*/
private List<UUID> queuedPlayerUUIDs;
/**
* list to hold the frozen player uuids (before a duel starts)
*/
private List<UUID> frozenPlayerUUIDs;
/**
* list of dead players
*/
private List<UUID> deadPlayers;
/**
* list to hold arena objects
*/
private List<DuelArena> duelArenas;
private HashMap<UUID, PlayerData> playerData;
private MessageManager mm;
public DuelManager(DuelMe plugin) {
this.plugin = plugin;
this.duelRequests = new ArrayList<DuelRequest>();
this.spectatingPlayerUUIDs = new ArrayList<UUID>();
this.frozenPlayerUUIDs = new ArrayList<UUID>();
this.duelArenas = new ArrayList<DuelArena>();
this.playerData = new HashMap<UUID, PlayerData>();
this.deadPlayers = new ArrayList<UUID>();
this.mm = plugin.getMessageManager();
this.queuedPlayerUUIDs = new ArrayList<UUID>();
}
/**
* gets a list of the arena objects
*
* @return list of arenas
*/
public List<DuelArena> getDuelArenas() {
return duelArenas;
}
/**
* add a duel arena
*
* @param da the duel arena
*/
public void addDuelArena(DuelArena da) {
this.duelArenas.add(da);
}
/**
* get a duel arena by name
*
* @param duelArenaName the duel arena name
* @return the duel arena, or null if it does not exist
*/
public DuelArena getDuelArenaByName(String duelArenaName) {
for (DuelArena da : duelArenas) {
if (da.getName().equalsIgnoreCase(duelArenaName)) {
return da;
}
}
return null;
}
/**
* if a player is in a duel
*
* @param playerUUIDIn the players UUID
* @return true if is in a duel, false if not
*/
public boolean isInDuel(UUID playerUUIDIn) {
for (DuelArena a : this.getDuelArenas()) {
for(UUID duelPlayerUUID: a.getPlayers()) {
if(playerUUIDIn.equals(duelPlayerUUID)) {
return true;
}
}
}
return false;
}
/**
* get the arena name that a player is in
*
* @param playerUUIDIn the player's UUID
* @return the arena name that the player is in,
* returns null if the player is not in an arena
*/
public String getPlayersArenaName(UUID playerUUIDIn) {
for (DuelArena a : this.getDuelArenas()) {
if (a.getPlayers().contains(playerUUIDIn)) {
return a.getName();
}
}
return null;
}
/**
* add a dead player to the list of dead players
* @param uuid the uuid of the dead player
*/
public void addDeadPlayer(UUID uuid) {
if (!this.deadPlayers.contains(uuid)) {
this.deadPlayers.add(uuid);
}
}
/**
* is a player a dead player
* @param uuid the uuid of the player
* @return true if dead, false if not
*/
public boolean isDeadPlayer(UUID uuid) {
if (getDeadPlayers().contains(uuid)) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("UUID " + uuid + " is in dead player list");
}
return true;
}
return false;
}
public List<UUID> getDeadPlayers() {
return this.deadPlayers;
}
/**
* remove a dead player from the list of dead players
* @param uuid the uuid of the dead player to remove
*/
public void removeDeadPlayer(UUID uuid) {
this.deadPlayers.remove(uuid);
}
/**
* gets the arena of two players
*
* @param player1UUID the first player
* @param player2UUID the second player
* @return the arena that the players are in
* , null if both players are not in the same arena.
*/
public DuelArena getPlayersArena(UUID player1UUID, UUID player2UUID) {
for (DuelArena a : this.getDuelArenas()) {
List<UUID> players = a.getPlayers();
if (players.contains(player1UUID) && players.contains(player2UUID)) {
return a;
}
}
return null;
}
/**
* get a list of the frozen players
*
* @return list of frozen players
*/
public List<UUID> getFrozenPlayerUUIDs() {
return this.frozenPlayerUUIDs;
}
/**
* add a frozen player to stop them from moving
*
* @param playerUUID the player's UUID
*/
public void addFrozenPlayer(UUID playerUUID) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("frozen player added: " + playerUUID);
}
this.frozenPlayerUUIDs.add(playerUUID);
}
/**
* add a frozen players to stop them from moving
*
* @param senderUUID the duel sender
* @param targetUUID the duel target
*/
public void addFrozenPlayer(UUID senderUUID, UUID targetUUID) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("frozen sender added: " + senderUUID);
SendConsoleMessage.debug("frozen target added: " + targetUUID);
}
this.frozenPlayerUUIDs.add(senderUUID);
this.frozenPlayerUUIDs.add(targetUUID);
}
public HashMap<UUID, PlayerData> getPlayerData() {
return playerData;
}
public void setPlayerData(HashMap<UUID, PlayerData> playerData) {
this.playerData = playerData;
}
/**
* remove a frozen player allowing them to move
*
* @param playerUUIDIn the player's UUID
*/
public void removeFrozenPlayer(UUID playerUUIDIn) {
this.frozenPlayerUUIDs.remove(playerUUIDIn);
}
/**
* gets the arena of a player
*
* @param playerUUID the players UUID
* @return the arena of the player, null if the player
* is not in a arena
*/
public DuelArena getPlayersArenaByUUID(UUID playerUUID) {
for (DuelArena a : this.getDuelArenas()) {
List<UUID> players = a.getPlayers();
if (players.contains(playerUUID)) {
return a;
}
}
return null;
}
/**
* check if a player has sent a duel request to a player before
* @param sender the sender
* @param target the target player
* @return true if the player has sent a duel request to the given player, false if not
*/
public boolean hasSentRequest(UUID sender, UUID target) {
for(DuelRequest duelRequest: duelRequests) {
if(duelRequest.getDuelSender().equals(sender) && duelRequest.getDuelTarget().equals(target)) {
return true;
}
}
return false;
}
public DuelRequest getDuelRequest(UUID sender, UUID target) {
for(DuelRequest duelRequest: duelRequests) {
if(duelRequest.getDuelSender().equals(sender) && duelRequest.getDuelTarget().equals(target)) {
return duelRequest;
}
}
return null;
}
/**
* handle normal duel requests
*
* @param duelSender the sender of the request
* @param duelTargetIn the name of the target player
* @param arenaIn the requested arena name, or null to use any free arena
*/
public void sendDuelRequest(Player duelSender, String duelTargetIn, String arenaIn) {
FileManager fm = plugin.getFileManager();
String duelSenderName = duelSender.getName();
UUID duelSenderUUID = duelSender.getUniqueId();
Player duelTarget = Bukkit.getPlayer(duelTargetIn);
if(arenaIn != null) {
DuelArena arena = this.getDuelArenaByName(arenaIn);
if(arena == null) {
Util.sendMsg(duelSender, ChatColor.RED + "Sorry but that duel arena name you specified doesn't exist.");
return;
}
if(arena.getDuelState() != DuelState.WAITING) {
Util.sendMsg(duelSender, ChatColor.RED + "Sorry but that duel arena isn't available right now, please try another one.");
return;
}
}
if (duelTarget != null) {
UUID duelTargetUUID = duelTarget.getUniqueId();
if (isInDuel(duelTargetUUID)) {
String playerAlreadyInDuel = mm.getPlayerAlreadyInDuelMessage();
playerAlreadyInDuel = playerAlreadyInDuel.replaceAll("%target%", duelTargetIn);
Util.sendMsg(duelSender, playerAlreadyInDuel);
return;
}
if (hasSentRequest(duelSenderUUID, duelTargetUUID)) {
String requestAlreadySent = mm.getDuelRequestAlreadySentMessage();
requestAlreadySent = requestAlreadySent.replaceAll("%target%", duelTargetIn);
Util.sendMsg(duelSender, requestAlreadySent);
return;
}
String duelTargetName = duelTarget.getName();
if (duelSenderName.equals(duelTargetName)) {
Util.sendMsg(duelSender, mm.getCannotDuelSelfMessage());
return;
}
String duelRequestSentMessage = mm.getDuelRequestSentMessage();
duelRequestSentMessage = duelRequestSentMessage.replaceAll("%target%", duelTargetName);
Util.sendMsg(duelSender, duelRequestSentMessage);
if (fm.isGUIMenuEnabled()) {
SendConsoleMessage.error("This feature is broken and is disabled until it has been fixed! Sent a normal non gui request!");
//plugin.getAcceptMenu().openNormalDuelAccept(duelSender, duelTarget);
String duelRequestReceived = mm.getDuelRequestReceivedMessage();
duelRequestReceived = duelRequestReceived.replaceAll("%sender%", duelSenderName);
Util.sendMsg(duelTarget, ChatColor.translateAlternateColorCodes('&', duelRequestReceived));
} else {
String duelRequestReceived = mm.getDuelRequestReceivedMessage();
duelRequestReceived = duelRequestReceived.replaceAll("%sender%", duelSenderName);
Util.sendMsg(duelTarget, ChatColor.translateAlternateColorCodes('&', duelRequestReceived));
}
this.duelRequests.add(new DuelRequest(duelSenderUUID, duelTargetUUID, arenaIn, System.currentTimeMillis()));
} else {
String targetNotOnline = mm.getTargetNotOnlineMessage();
targetNotOnline = targetNotOnline.replaceAll("%target%", duelTargetIn);
Util.sendMsg(duelSender, targetNotOnline);
}
}
public void removeDuelRequest(DuelRequest duelRequest) {
this.duelRequests.remove(duelRequest);
}
/**
* handles accepting the request with the specified player to accept the duel request
*
* @param acceptor the player that is accepting the request
* @param senderIn the name of the player whose request is being accepted
*/
public void acceptRequest(Player acceptor, String senderIn) {
UUID acceptorUUID = acceptor.getUniqueId();
Player sender = Bukkit.getPlayer(senderIn);
if (sender == null) {
String targetNotOnline = mm.getTargetNotOnlineMessage();
targetNotOnline = targetNotOnline.replaceAll("%target%", senderIn);
Util.sendMsg(acceptor, targetNotOnline);
return;
}
UUID senderUUID = sender.getUniqueId();
if (hasSentRequest(senderUUID, acceptorUUID)) {
DuelRequest duelRequest = getDuelRequest(senderUUID, acceptorUUID);
this.startDuel(acceptor, sender, duelRequest.getDuelArena());
this.removeDuelRequest(duelRequest);
return;
} else {
Util.sendMsg(acceptor, ChatColor.RED +
"You do not have any duel requests from " + ChatColor.AQUA + senderIn + ".");
}
}
public boolean isArenaFree(DuelArena duelArena) {
return duelArena.getDuelState() == DuelState.WAITING;
}
/**
* attempt to start the duel with the two players
*
* @param acceptor the player that accepted the request
* @param sender the player that sent the request
* @param arena the requested arena name, or null to pick any free arena
* @return true if the duel was started, false otherwise
*/
public boolean startDuel(Player acceptor, Player sender, String arena) {
String acceptorName = acceptor.getName();//the duel acceptor name
String senderName = sender.getName();//the duel request sender name
DuelArena duelArena = null;
final UUID acceptorUUID = acceptor.getUniqueId();
final UUID senderUUID = sender.getUniqueId();
List<DuelArena> arenas = this.getDuelArenas();//list of arenas
FileManager fm = plugin.getFileManager();//file manager instance
ItemManager im = plugin.getItemManager();//item manager instance
if (arenas.size() <= 0) {//if there are no arenas stop the duel
Util.sendMsg(sender, mm.getNoDuelArenasMessage());
Util.sendMsg(acceptor, mm.getNoDuelArenasMessage());
return false;
}
if(arena != null) {
duelArena = getDuelArenaByName(arena);
if(duelArena == null) {
Util.sendMsg(acceptor, ChatColor.RED + "The duel arena you requested to duel in does not exist!");
return false;
}
if(!isArenaFree(duelArena)) {
Util.sendMsg(acceptor, ChatColor.RED + "The duel arena you requested to duel in is not free!");
return false;
}
}
if(duelArena == null) {
duelArena = this.getFreeArena();
}
if (duelArena == null) {
Util.sendMsg(acceptor, ChatColor.YELLOW + "There are no free duel arenas, please try again later!");
Util.sendMsg(sender, ChatColor.YELLOW + "There are no free duel arenas, please try again later!");
return false;
}
duelArena.setDuelState(DuelState.STARTING);//set the duel state to starting
this.updateDuelStatusSign(duelArena);
if (fm.isDuelStartAnnouncementEnabled()) {
String duelStartBroadcast = mm.getDuelStartMessage();
duelStartBroadcast = duelStartBroadcast.replaceAll("%sender%", senderName);
duelStartBroadcast = duelStartBroadcast.replaceAll("%acceptor%", acceptorName);
Util.broadcastMessage(duelStartBroadcast);
}
duelArena.addPlayerUUID(acceptorUUID);//add the players to the arena
duelArena.addPlayerUUID(senderUUID);
Location spawnpoint1 = duelArena.getSpawnpoint1();
Location spawnpoint2 = duelArena.getSpawnpoint2();
surroundLocation(spawnpoint1, Material.valueOf(fm.getDuelSurroundMaterial()));
surroundLocation(spawnpoint2, Material.valueOf(fm.getDuelSurroundMaterial()));
this.storePlayerData(acceptor);
this.storePlayerData(sender);
if (duelArena.getSpawnpoint1() != null && duelArena.getSpawnpoint2() != null) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Spawnpoints for arena set teleporting players to locations.");
}
removePotionEffects(acceptor);//remove players active potion effects
removePotionEffects(sender);
acceptor.teleport(duelArena.getSpawnpoint1());//teleport the players to set spawn location in the duel arena
sender.teleport(duelArena.getSpawnpoint2());
} else {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Spawnpoints for arena not set falling back to random spawn locations.");
}
acceptor.teleport(this.generateRandomLocation(duelArena));//teleport the players to a random location in the duel arena
sender.teleport(this.generateRandomLocation(duelArena));
}
if (fm.isUsingSeperateInventories()) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Storing inventories enabled, giving duel items.");
}
im.givePlayerDuelItems(acceptor);
im.givePlayerDuelItems(sender);
}
new DuelStartThread(plugin, sender, acceptor, duelArena).runTaskTimer(plugin, 20L, 20L);
return true;
}
/**
* Generates a random point between two other points.
*
* @param arg0 Point 1.
* @param arg1 Point 2.
* @return A random point.
*/
private double randomGenRange(double arg0, double arg1) {
double range = (arg0 < arg1) ? arg1 - arg0 : arg0 - arg1;
if (range < 1)
return Math.floor(arg0) + 0.5d;
double min = (arg0 < arg1) ? arg0 : arg1;
return Math.floor(min + (Math.random() * range)) + 0.5d;
}
/**
* Generates a random location in a duelarena
*
* @param a The arena.
* @return Random location.
*/
private Location generateRandomLocation(DuelArena a) {
double x, y, z;
World w = a.getPos1().getWorld();
x = randomGenRange(a.getPos1().getX(), a.getPos2().getX());
y = randomGenRange(a.getPos1().getY(), a.getPos2().getY());
z = randomGenRange(a.getPos1().getZ(), a.getPos2().getZ());
return new Location(w, x, y + 0.5, z);
}
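/*
 * Worked example (hypothetical coordinates): randomGenRange() picks a block-centred coordinate
 * between the two corner values, so for corners x=10 and x=20 the result is one of
 * 10.5, 11.5, ..., 19.5. generateRandomLocation() applies this to x, y and z and then lifts the
 * point by half a block so players do not spawn inside the floor.
 */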
/**
* remove a duel arena
*
* @param daIn the duel arena
*/
public void removeDuelArena(DuelArena daIn) {
for (DuelArena da : this.getDuelArenas()) {
if (da == daIn) {
this.duelArenas.remove(daIn);
return;
}
}
}
/**
* get a players data by UUID
*
* @param playerUUIDIn the players UUID
* @return the player data
*/
public PlayerData getPlayerDataByUUID(UUID playerUUIDIn) {
return playerData.get(playerUUIDIn);
}
public void addPlayerData(UUID uuidIn, PlayerData playerData) {
this.getPlayerData().put(uuidIn, playerData);
}
public void removePlayerDataByUUID(UUID playerUUIDIn) {
this.getPlayerData().remove(playerUUIDIn);
}
/**
* Method to store a players data
*
* @param player the player to store data of
*/
public void storePlayerData(Player player) {
FileManager fm = plugin.getFileManager();
UUID playerUUID = player.getUniqueId();
ItemStack[] arm = player.getInventory().getArmorContents();
ItemStack[] inv = player.getInventory().getContents();
Location loc = player.getLocation();
Float saturation = player.getSaturation();
int foodLevel = player.getFoodLevel();
int expLevel = player.getLevel();
double health = player.getHealth();
GameMode gameMode = player.getGameMode();
boolean allowedFlight = player.getAllowFlight();
if(allowedFlight) {
player.setAllowFlight(false);
}
if (plugin.isDebugEnabled()) {
SendConsoleMessage.info("Player location for player: " + player.getName() + ":" + loc);
}
if (player.getGameMode() != GameMode.SURVIVAL) {
player.setGameMode(GameMode.SURVIVAL);
}
this.addPlayerData(playerUUID, new PlayerData(arm, inv, loc, saturation, foodLevel, expLevel, health, gameMode, allowedFlight));
if (fm.isUsingSeperateInventories()) {
player.getInventory().clear();
}
}
/**
* attempt restore a players data with a player object
*
* @param player the player to restore the data to
* @return true if successful, false if not
*/
public boolean restorePlayerData(Player player) {
UUID playerUUID = player.getUniqueId();
PlayerData playerData = this.getPlayerDataByUUID(playerUUID);
try {
ItemStack[] arm = playerData.getArmour();
ItemStack[] inv = playerData.getInventory();
Location loc = playerData.getLocaton();
Float saturation = playerData.getSaturation();
int foodLevel = playerData.getFoodLevel();
int expLevel = playerData.getEXPLevel();
double health = playerData.getHealth();
GameMode gameMode = playerData.getGameMode();
boolean allowedFlight = playerData.getAllowedFight();
if (!isDeadPlayer(playerUUID)) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Player is not dead, Teleporting: " + player.getName() + " to location:" + loc);
}
player.teleport(loc);
}
if (plugin.isUsingSeperatedInventories()) {
player.getInventory().clear();// clear their inventory completely
player.getInventory().setContents(inv);
player.getInventory().setArmorContents(arm);
}
player.setGameMode(gameMode);
player.setAllowFlight(allowedFlight);
player.setSaturation(saturation);
player.setFoodLevel(foodLevel);
player.setLevel(expLevel);
player.setHealth(health);
removePotionEffects(player);
this.removePlayerDataByUUID(playerUUID);
return true;
} catch (Exception e) {
Util.sendMsg(player, ChatColor.RED + "There was an error restoring your player data!");
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug(e.getMessage());
}
return false;
}
}
/**
* end a duel by passing in a player.
* this would be used for if a player dies,
* leaves the game or leaves a duel by command
*
* @param player the losing player
*/
public void endDuel(Player player) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("End duel by player.");
}
ItemManager im = plugin.getItemManager();
UUID playerUUID = player.getUniqueId();
DuelArena arena = this.getPlayersArenaByUUID(playerUUID);
arena.removePlayer(playerUUID);
if (!player.isDead()) {
this.restorePlayerData(player);
}
if (arena.getPlayers().size() == 1) {
im.rewardPlayer(arena, player.getName());
}
}
public void removePotionEffects(Player player) {
for (PotionEffect p : player.getActivePotionEffects()) {
player.removePotionEffect(p.getType());
}
}
/**
* end a duel by duelarena
* player is rewarded only if there is one left
* otherwise both players get nothing
*
* @param arena the arena to be ended
*/
public void endDuel(DuelArena arena) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("End duel by duel arena.");
}
ItemManager im = plugin.getItemManager();
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Playercount: " + arena.getPlayers().size());
}
if (arena.getPlayers().size() == 1) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("One player remains, rewarding.");
}
im.rewardPlayer(arena, "");
return;
}
for (UUID playerUUID : arena.getPlayers()) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Player UUID: " + playerUUID.toString());
}
Player playerOut = Bukkit.getPlayer(playerUUID);
if (playerOut != null) {
this.restorePlayerData(playerOut);
Util.sendMsg(playerOut, mm.getDuelForcefullyCancelledMessage());
}
}
this.resetArena(arena);
}
/**
* reset a duel arena to initial state
* @param arena the duel arena
*/
public void resetArena(DuelArena arena) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("resetting arena.");
}
arena.getPlayers().clear();
arena.setDuelState(DuelState.WAITING);
this.updateDuelStatusSign(arena);
}
/**
* update the state of a status sign if there is one for that arena
* @param arena the arena
*/
public void updateDuelStatusSign(DuelArena arena) {
FileManager fm = plugin.getFileManager();
Location location;
Block block;
try {
location = fm.getArenaStatusSignLocation(arena.getName());
block = location.getBlock();
} catch (NullPointerException e) {
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("No sign set for arena " + arena.getName());
}
return;
}
if (!block.getType().equals(Material.WALL_SIGN)) {
return;
}
try {
Sign sign = (Sign) block.getState();
sign.setLine(2, arena.getDuelState().toString());
sign.setLine(3, arena.getPlayers().size() + "/2");
sign.update();
if (plugin.isDebugEnabled()) {
SendConsoleMessage.debug("Update duel sign");
}
} catch (Exception e) {
SendConsoleMessage.debug(e.getMessage());
}
}
/**
* loop through the list of arenas and add the free ones to a new list
* then return a random free arena
* @return a random free duel arena, null if none is available
*/
public DuelArena getFreeArena() {
Random random = new Random();
List<DuelArena> freeDuelArenas = new ArrayList<DuelArena>();
for (DuelArena duelArena : getDuelArenas()) {
if (duelArena.getDuelState().equals(DuelState.WAITING) && duelArena.getPlayers().size() == 0) {//if the duel arena state is waiting for players and there are no players in the arena.
if(getDuelArenas().size() > 1) {
freeDuelArenas.add(duelArena);
} else {
return duelArena;
}
}
}
if(freeDuelArenas.isEmpty()) {
return null;//no free duel arenas
}
return freeDuelArenas.get(random.nextInt(freeDuelArenas.size()));
}
/**
* returns a list of locations based on the inputs given
* credit for the original source goes to the Bukkit forums, here:
* https://bukkit.org/threads/creating-a-3x3-square-dissapearing-after-time.140159/
* @param loc the location to surround
* @param r radius
* @param h height
* @param hollow true if hollow, false if not
* @param sphere true if sphere, false if not
* @param plus_y vertical offset added to each block location
* @return a list of the locations surrounding the given location
*/
public List<Location> surround(Location loc, Integer r, Integer h, Boolean hollow, Boolean sphere, int plus_y) {
List<Location> circleblocks = new ArrayList<Location>();
int cx = loc.getBlockX();
int cy = loc.getBlockY();
int cz = loc.getBlockZ();
for (int x = cx - r; x <= cx + r; x++) {
    for (int z = cz - r; z <= cz + r; z++) {
        for (int y = (sphere ? cy - r : cy); y < (sphere ? cy + r : cy + h); y++) {
            double dist = (cx - x) * (cx - x) + (cz - z) * (cz - z) + (sphere ? (cy - y) * (cy - y) : 0);
            if (dist < r * r && !(hollow && dist < (r - 1) * (r - 1))) {
                Location l = new Location(loc.getWorld(), x, y + plus_y, z);
                if (l.getBlockY() != cy) {
                    circleblocks.add(l);
                }
            }
        }
    }
}
return circleblocks;
}
/**
* surround a specific location with a given material
* @param location the location
* @param material the material to set it to
*/
public void surroundLocation(Location location, Material material) {
final List<Location> circs = surround(location, 2, 2, true, true, 1);
for (Location loc : circs) {
loc.getBlock().setType(material);
}
}
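// Hypothetical sketch (not in the original file): surroundLocation() can be paired with a
// delayed second call so a temporary cage of blocks disappears again. The scheduler call is
// the standard Bukkit API; the 100-tick delay and the `spawnPoint` variable are illustrative.
//
//     surroundLocation(spawnPoint, Material.GLASS);
//     Bukkit.getScheduler().runTaskLater(plugin,
//             () -> surroundLocation(spawnPoint, Material.AIR), 100L);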
public List<DuelRequest> getDuelRequests() {
return duelRequests;
}
/**
* Get a list of queued players
* @return a list of queued players
*/
public List<UUID> getQueuedPlayerUUIDs() {
return queuedPlayerUUIDs;
}
/**
* Add a player to the queue
* @param playersUUID the player's UUID
*/
public void addQueuedPlayer(UUID playersUUID) {
this.queuedPlayerUUIDs.add(playersUUID);
}
/**
* remove a player from the queue
* @param playersUUID the player's UUID
*/
public void removeQueuedPlayer(UUID playersUUID) {
this.queuedPlayerUUIDs.remove(playersUUID);
}
/**
* Remove a queued player by index
* @param index the index to remove from
*/
public void removeQueuedPlayerByIndex(int index) {
this.queuedPlayerUUIDs.remove(index);
}
/**
* Check if a player is currently in the queue.
* @param playersUUID the player's UUID
* @return true if the player is queued, false if not
*/
public boolean isQueued(UUID playersUUID) {
return this.queuedPlayerUUIDs.contains(playersUUID);
}
/**
* Set the queued players uuid list
* @param queuedPlayerUUIDs a list of queued players
*/
public void setQueuedPlayerUUIDs(List<UUID> queuedPlayerUUIDs) {
this.queuedPlayerUUIDs = queuedPlayerUUIDs;
}
/**
* get the size of the current duel queue
* @return the size of the current duel queue
*/
public int getQueuedPlayersSize() {
    return this.queuedPlayerUUIDs.size();
}
}
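// Hypothetical usage sketch, not part of the original sources: assuming the manager class above is
// exposed by the plugin as `duelManager`, a matchmaking tick might pair two queued players once a
// free arena exists. The names `duelManager`, `first` and `second` are illustrative only.
//
//     if (duelManager.getQueuedPlayersSize() >= 2) {
//         DuelArena arena = duelManager.getFreeArena();
//         if (arena != null) {
//             UUID first = duelManager.getQueuedPlayerUUIDs().get(0);
//             UUID second = duelManager.getQueuedPlayerUUIDs().get(1);
//             duelManager.removeQueuedPlayer(first);
//             duelManager.removeQueuedPlayer(second);
//             // hand both players over to whatever start-duel routine the plugin provides
//         }
//     }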
|
|
/*
* Created on 29-apr-2005
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package it.finsiel.siged.util.ldap;
import it.finsiel.siged.constant.ReturnValues;
import it.finsiel.siged.exception.AuthenticationException;
import it.finsiel.siged.exception.DataException;
import it.finsiel.siged.mvc.vo.IdentityVO;
import it.finsiel.siged.mvc.vo.protocollo.DestinatarioVO;
import it.finsiel.siged.util.FileUtil;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.log4j.Logger;
import com.novell.ldap.LDAPAttribute;
import com.novell.ldap.LDAPAttributeSet;
import com.novell.ldap.LDAPConnection;
import com.novell.ldap.LDAPEntry;
import com.novell.ldap.LDAPException;
import com.novell.ldap.LDAPSearchConstraints;
import com.novell.ldap.LDAPSearchResults;
import com.novell.ldap.LDAPUrl;
/**
* @author Almaviva sud
*
*/
public class LdapUtil {
static Logger logger = Logger.getLogger(LdapUtil.class.getName());
public static boolean autenticaUtente(String host, int port, String dn,
String passwd) throws AuthenticationException {
boolean auth = false;
try {
LDAPConnection conn = new LDAPConnection();
conn.connect(host, port);
try {
conn.bind(LDAPConnection.LDAP_V3, dn, passwd.getBytes("UTF8"));
auth = conn.isBound();
} catch (UnsupportedEncodingException u) {
throw new LDAPException("UTF8 Invalid Encoding",
LDAPException.LOCAL_ERROR, (String) null, u);
}
try {
conn.disconnect();
} catch (LDAPException e1) {
logger.debug("", e1);
}
} catch (LDAPException e) {
throw new AuthenticationException(
"Errore nella connessione al server LDAP.\n"
+ e.getLocalizedMessage());
}
return auth;
}
public static ArrayList cercaAmministrazione(String host, int port,
String searchBase, String searchFilter, int searchScope,
int maxResult) throws DataException {
ArrayList res = new ArrayList();
try {
LDAPConnection conn = new LDAPConnection();
conn.connect(host, port);
LDAPSearchConstraints constraints = new LDAPSearchConstraints();
constraints.setMaxResults(maxResult);
LDAPSearchResults searchResults = conn.search(searchBase,
searchScope, searchFilter, null, false, constraints);
logger.info(searchBase + " . " + searchFilter);
while (searchResults.hasMore()) {
LDAPEntry nextEntry = null;
try {
nextEntry = searchResults.next();
} catch (LDAPException e) {
logger.error("", e);
continue;
}
IdentityVO rec = new IdentityVO();
LDAPAttributeSet attributeSet = nextEntry.getAttributeSet();
LDAPAttribute cur = attributeSet.getAttribute("o");
rec.setCodice(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("provincia");
rec.setName(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("description");
rec.setDescription(cur != null ? cur.getStringValue() : "");
res.add(rec);
}
conn.disconnect();
} catch (Exception e) {
logger.error("", e);
throw new DataException(e.getMessage());
}
return res;
}
public static ArrayList listaAOO(String host, int port, String searchBase,
String searchFilter, int searchScope, int maxResult)
throws DataException {
ArrayList res = new ArrayList();
try {
LDAPConnection conn = new LDAPConnection();
conn.connect(host, port);
LDAPSearchConstraints constraints = new LDAPSearchConstraints();
constraints.setMaxResults(maxResult);
LDAPSearchResults searchResults = conn.search(searchBase,
searchScope, searchFilter, null, false, constraints);
while (searchResults.hasMore()) {
LDAPEntry nextEntry = null;
try {
nextEntry = searchResults.next();
} catch (LDAPException e) {
logger.error("", e);
continue;
}
DestinatarioVO vo = new DestinatarioVO();
LDAPAttributeSet attributeSet = nextEntry.getAttributeSet();
LDAPAttribute cur = attributeSet.getAttribute("cognomeResp");
vo.setDestinatario(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("nomeResp");
vo.setDestinatario(cur != null ? (vo.getDestinatario() + " " + cur.getStringValue()) : vo.getDestinatario());
vo.setCodice(nextEntry.getDN());
cur = attributeSet.getAttribute("l");
vo.setCitta(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("mail");
vo.setEmail(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("street");
vo.setIndirizzo(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("description");
vo.setIntestazione(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("postalCode");
vo.setCodicePostale(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("provincia");
vo.setProvinciaId(cur != null ? cur.getStringValue() : "");
res.add(vo);
}
conn.disconnect();
} catch (Exception e) {
logger.error("", e);
throw new DataException(e.getMessage());
}
return res;
}
public static DestinatarioVO getAOO(String host, int port, String dn)
throws DataException {
DestinatarioVO vo = new DestinatarioVO();
try {
LDAPConnection conn = new LDAPConnection();
conn.connect(host, port);
try {
LDAPEntry nextEntry = conn.read(dn);
LDAPAttributeSet attributeSet = nextEntry.getAttributeSet();
LDAPAttribute cur = attributeSet.getAttribute("cognomeResp");
vo.setDestinatario(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("nomeResp");
vo.setDestinatario(cur != null ? (vo.getDestinatario() + " " + cur.getStringValue()) : vo.getDestinatario());
cur = attributeSet.getAttribute("aoo");
vo.setCodice(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("l");
vo.setCitta(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("mail");
vo.setEmail(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("street");
vo.setIndirizzo(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("description");
vo.setIntestazione(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("postalCode");
vo.setCodicePostale(cur != null ? cur.getStringValue() : "");
cur = attributeSet.getAttribute("provincia");
vo.setProvinciaId(cur != null ? cur.getStringValue() : "");
logger.info(vo.getCodice());
} catch (LDAPException e) {
logger.error("", e);
vo.setReturnValue(ReturnValues.NOT_FOUND);
}
conn.disconnect();
} catch (Exception e) {
logger.error("", e);
throw new DataException(e.getMessage());
}
return vo;
}
public static byte[] downloadCRLfromURL(String url) throws DataException {
byte[] crlBytes = new byte[0];
try {
LDAPUrl ldapUrl = new LDAPUrl(LDAPUrl.decode(url));
LDAPEntry entry = LDAPConnection.read(ldapUrl);
LDAPAttributeSet attributeSet = entry.getAttributeSet();
Iterator allAttributes = attributeSet.iterator();
while (allAttributes.hasNext()) {
LDAPAttribute attribute = (LDAPAttribute) allAttributes.next();
String attributeName = attribute.getName();
logger.info(" " + attributeName);
if (attributeName.toLowerCase().indexOf(
"certificaterevocationlist") >= 0) {
logger.info("Saving CRL...");
byte[] content = attribute.getByteValue();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ByteArrayInputStream bais = new ByteArrayInputStream(
content);
FileUtil.writeFile(bais, baos);
crlBytes = baos.toByteArray();
baos.close();
bais.close();
break;
} else {
logger.info("Attributo sconosciuto=" + attributeName);
}
}
} catch (MalformedURLException e) {
throw new DataException("URL non valido.\n" + e.getMessage());
} catch (LDAPException e) {
throw new DataException("Errore durante il download dei dati.\n"
+ e.getMessage());
} catch (IOException e) {
throw new DataException("Errore durante il download dei dati.\n"
+ e.getMessage());
}
return crlBytes;
}
public static String getDN(String name, String baseDN) {
return "uid=" + name + "," + baseDN;
}
}
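// Hypothetical usage sketch, not part of the original sources: the host, port, base DN and
// credentials below are placeholders. The CRL parsing uses the standard
// java.security.cert.CertificateFactory API; LdapUtil itself only returns the raw bytes.
//
//     String dn = LdapUtil.getDN("mrossi", "ou=people,o=example");
//     boolean ok = LdapUtil.autenticaUtente("ldap.example.org", 389, dn, "secret");
//
//     byte[] crlBytes = LdapUtil.downloadCRLfromURL(
//             "ldap://ldap.example.org/cn=crl,o=example?certificateRevocationList");
//     java.security.cert.X509CRL crl = (java.security.cert.X509CRL) java.security.cert.CertificateFactory
//             .getInstance("X.509").generateCRL(new java.io.ByteArrayInputStream(crlBytes));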
|
|
package com.github.tilastokeskus.matertis.core;
import com.github.tilastokeskus.matertis.core.command.*;
import com.github.tilastokeskus.matertis.core.command.Command;
import java.awt.event.KeyEvent;
import java.util.HashMap;
import java.util.Map;
/**
* Works as a collection of game commands bound to different identifiers.
* Registered commands may be set, unset and executed with the respective id of
* the command. The commands are meant to be bound to keyCodes defined in the
* {@link KeyEvent} class, but do not have to be.
*
* @author tilastokeskus
*/
public class CommandHandler {
/**
* A null command identifier - an identifier for a command that does
* nothing or does not exist. It should never be bound to any
* command.
*/
public static final int COMMAND_NONE = -1;
/**
* An identifier for a command that moves a tetromino left by one.
*/
public static final int COMMAND_LEFT = 1;
/**
* An identifier for a command that moves a tetromino right by one.
*/
public static final int COMMAND_RIGHT = 2;
/**
* An identifier for a command that moves a tetromino down by one.
*/
public static final int COMMAND_DOWN = 3;
/**
* An identifier for a command that rotates a tetromino.
*/
public static final int COMMAND_ROTATE = 4;
/**
* An identifier for a command that drops a tetromino down completely.
*/
public static final int COMMAND_DROP = 5;
/**
* An identifier for a command that pauses the game.
*/
public static final int COMMAND_PAUSE = 6;
/**
* An identifier for a command that restarts the game.
*/
public static final int COMMAND_RESTART = 7;
protected final Map<Integer, Command> commands;
protected final Map<Integer, Integer> bindings;
protected final GameHandler gameHandler;
/**
* Constructs a command handler with default commands and mappings.
* The default mappings are as follows:
* <ul>
* <li>KeyEvent.VK_LEFT - move tetromino left.</li>
* <li>KeyEvent.VK_RIGHT - move tetromino right.</li>
* <li>KeyEvent.VK_DOWN - move tetromino down.</li>
* <li>KeyEvent.VK_UP - rotate tetromino.</li>
* <li>KeyEvent.VK_SPACE - drop tetromino.</li>
* <li>KeyEvent.VK_P - pause game.</li>
* <li>KeyEvent.VK_R - restart game.</li>
* </ul>
*
* @param gameHandler Game handler to initialize commands with.
*/
public CommandHandler(GameHandler gameHandler) {
this.gameHandler = gameHandler;
this.commands = this.getDefaultCommands();
this.bindings = this.getDefaultBindings();
}
private Map<Integer, Command> getDefaultCommands() {
Map<Integer, Command> map = new HashMap<>();
map.put(COMMAND_LEFT, new MoveCommand(gameHandler, Direction.LEFT));
map.put(COMMAND_RIGHT, new MoveCommand(gameHandler, Direction.RIGHT));
map.put(COMMAND_DOWN, new MoveCommand(gameHandler, Direction.DOWN));
map.put(COMMAND_ROTATE, new RotateCommand(gameHandler));
map.put(COMMAND_DROP, new DropCommand(gameHandler));
map.put(COMMAND_PAUSE, new PauseCommand(gameHandler));
map.put(COMMAND_RESTART, new RestartCommand(gameHandler));
return map;
}
private Map<Integer, Integer> getDefaultBindings() {
Map<Integer, Integer> map = new HashMap<>();
map.put(COMMAND_LEFT, KeyEvent.VK_LEFT);
map.put(COMMAND_RIGHT, KeyEvent.VK_RIGHT);
map.put(COMMAND_DOWN, KeyEvent.VK_DOWN);
map.put(COMMAND_ROTATE, KeyEvent.VK_UP);
map.put(COMMAND_DROP, KeyEvent.VK_SPACE);
map.put(COMMAND_PAUSE, KeyEvent.VK_P);
map.put(COMMAND_RESTART, KeyEvent.VK_R);
return map;
}
/**
* Changes the binding of some command identifier.
*
* @param commandID identifier of the command whose binding should be
* changed.
* @param newBinding the binding that the defined command should be bound
* to.
*/
public void rebindCommand(int commandID, int newBinding) {
if (this.bindings.containsKey(commandID)) {
this.bindings.put(commandID, newBinding);
}
}
/**
* Retrieves the command associated with the given command identifier.
*
* @param commandID identifier of the command that should be retrieved.
* @return command that was associated with the given identifier,
* or null if there was no command associated with the id.
*/
public Command getCommand(int commandID) {
return this.commands.get(commandID);
}
/**
* Retrieves identifier that is currently bound to the given command
* identifier.
*
* @param commandID identifier of the command whose binding should be
* retrieved.
* @return identifier that is currently bound to the given command
* identifier, or {@link #COMMAND_NONE} if nothing is bound
* to it.
*/
public int getBinding(int commandID) {
int binding = COMMAND_NONE;
if (this.bindings.containsKey(commandID)) {
binding = this.bindings.get(commandID);
}
return binding;
}
/**
* Retrieves the command associated with the given binding.
*
* @param binding the current binding to some command.
* @return command that was associated with the given binding, or
* null if there was no command bound to the given id.
*/
public Command getBoundCommand(int binding) {
Command command = null;
for (Integer commandID : this.bindings.keySet()) {
if (this.bindings.get(commandID) == binding) {
command = this.commands.get(commandID);
break;
}
}
return command;
}
/**
* Executes a command with the given id, for example, {@link #COMMAND_LEFT}.
*
* @param commandID identifier of the command to be executed.
* @return true if the register contained a command with the
* given command identifier, otherwise false.
* @see Command
*/
public boolean executeCommand(int commandID) {
boolean wasExecuted = false;
if (this.commands.containsKey(commandID)) {
Command command = this.getCommand(commandID);
command.execute();
wasExecuted = true;
}
return wasExecuted;
}
/**
* Executes a command bound to some identifier, for example,
* {@link KeyEvent#VK_LEFT}.
*
* @param binding identifier of the current binding to some command.
* @return true if the register contained a command with the given
* identifier, otherwise false.
* @see Command
*/
public boolean executeBoundCommand(int binding) {
boolean wasExecuted = false;
Command command = this.getBoundCommand(binding);
if (command != null) {
command.execute();
wasExecuted = true;
}
return wasExecuted;
}
}
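// Hypothetical usage sketch, not part of the original sources: a Swing component could forward its
// key presses to the handler and rebind the pause command. `gameHandler` and `component` are
// assumed to exist elsewhere in the application.
//
//     CommandHandler commandHandler = new CommandHandler(gameHandler);
//     commandHandler.rebindCommand(CommandHandler.COMMAND_PAUSE, KeyEvent.VK_ESCAPE);
//
//     component.addKeyListener(new java.awt.event.KeyAdapter() {
//         @Override
//         public void keyPressed(KeyEvent e) {
//             commandHandler.executeBoundCommand(e.getKeyCode());
//         }
//     });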
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.variable.service.impl.persistence.entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType;
import org.flowable.engine.common.impl.context.Context;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.engine.common.impl.javax.el.ELContext;
import org.flowable.engine.common.impl.persistence.entity.AbstractEntity;
import org.flowable.variable.api.delegate.VariableScope;
import org.flowable.variable.api.persistence.entity.VariableInstance;
import org.flowable.variable.api.types.VariableType;
import org.flowable.variable.api.types.VariableTypes;
import org.flowable.variable.service.VariableServiceConfiguration;
import org.flowable.variable.service.event.impl.FlowableVariableEventBuilder;
import org.flowable.variable.service.impl.util.CommandContextUtil;
/**
* @author Tom Baeyens
* @author Joram Barrez
* @author Tijs Rademakers
* @author Saeid Mirzaei
*/
public abstract class VariableScopeImpl extends AbstractEntity implements Serializable, VariableScope {
private static final long serialVersionUID = 1L;
// The cache used when fetching all variables
protected Map<String, VariableInstanceEntity> variableInstances; // needs to be null, the logic depends on it for checking if vars were already fetched
// The cache is used when fetching/setting specific variables
protected Map<String, VariableInstanceEntity> usedVariablesCache = new HashMap<>();
protected Map<String, VariableInstance> transientVariabes;
protected ELContext cachedElContext;
protected abstract Collection<VariableInstanceEntity> loadVariableInstances();
protected abstract VariableScopeImpl getParentVariableScope();
protected abstract void initializeVariableInstanceBackPointer(VariableInstanceEntity variableInstance);
protected void ensureVariableInstancesInitialized() {
if (variableInstances == null) {
variableInstances = new HashMap<>();
CommandContext commandContext = Context.getCommandContext();
if (commandContext == null) {
throw new FlowableException("lazy loading outside command context");
}
Collection<VariableInstanceEntity> variableInstancesList = loadVariableInstances();
for (VariableInstanceEntity variableInstance : variableInstancesList) {
variableInstances.put(variableInstance.getName(), variableInstance);
}
}
}
/**
* Only to be used when creating a new entity, to avoid an extra call to the database.
*/
public void internalSetVariableInstances(Map<String, VariableInstanceEntity> variableInstances) {
this.variableInstances = variableInstances;
}
@Override
public Map<String, Object> getVariables() {
return collectVariables(new HashMap<String, Object>());
}
@Override
public Map<String, VariableInstance> getVariableInstances() {
return collectVariableInstances(new HashMap<String, VariableInstance>());
}
@Override
public Map<String, Object> getVariables(Collection<String> variableNames) {
return getVariables(variableNames, true);
}
@Override
public Map<String, VariableInstance> getVariableInstances(Collection<String> variableNames) {
return getVariableInstances(variableNames, true);
}
@Override
public Map<String, Object> getVariables(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, Object> requestedVariables = new HashMap<>();
Set<String> variableNamesToFetch = new HashSet<>(variableNames);
// Transient variables 'shadow' any existing variables.
// The values in the fetch-cache will be more recent, so they can override any existing ones
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables) {
// getVariables() will go up the execution hierarchy, no need to do
// it here also, the cached values will already be applied too
Map<String, Object> allVariables = getVariables();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
return requestedVariables;
} else {
// Go up if needed
VariableScope parent = getParentVariableScope();
if (parent != null) {
requestedVariables.putAll(parent.getVariables(variableNamesToFetch, fetchAllVariables));
}
// Fetch variables on this scope
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable.getValue());
}
return requestedVariables;
}
}
@Override
public Map<String, VariableInstance> getVariableInstances(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, VariableInstance> requestedVariables = new HashMap<>();
Set<String> variableNamesToFetch = new HashSet<>(variableNames);
// The values in the fetch-cache will be more recent, so they can override any existing ones
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName));
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName));
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables) {
// getVariables() will go up the execution hierarchy, no need to do it here
// also, the cached values will already be applied too
Map<String, VariableInstance> allVariables = getVariableInstances();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
return requestedVariables;
} else {
// Go up if needed
VariableScope parent = getParentVariableScope();
if (parent != null) {
requestedVariables.putAll(parent.getVariableInstances(variableNamesToFetch, fetchAllVariables));
}
// Fetch variables on this scope
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable);
}
return requestedVariables;
}
}
protected Map<String, Object> collectVariables(HashMap<String, Object> variables) {
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variables.putAll(parentScope.collectVariables(variables));
}
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance.getValue());
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName).getValue());
}
if (transientVariabes != null) {
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
}
return variables;
}
protected Map<String, VariableInstance> collectVariableInstances(HashMap<String, VariableInstance> variables) {
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variables.putAll(parentScope.collectVariableInstances(variables));
}
for (VariableInstance variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance);
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName));
}
if (transientVariabes != null) {
variables.putAll(transientVariabes);
}
return variables;
}
@Override
public Object getVariable(String variableName) {
return getVariable(variableName, true);
}
@Override
public VariableInstance getVariableInstance(String variableName) {
return getVariableInstance(variableName, true);
}
/**
* The same operation as {@link VariableScopeImpl#getVariable(String)}, but with an extra parameter to indicate whether or not all variables need to be fetched.
*
* Note that the default way (because of backwards compatibility) is to fetch all the variables when doing a get/set of variables. So this means 'true' is the default value for this method, and in
* fact it will simply delegate to {@link #getVariable(String)}. This can also be the most performant, if you're doing a lot of variable gets in the same transaction (eg in service tasks).
*
* In case 'false' is used, only the specific variable will be fetched.
*/
@Override
public Object getVariable(String variableName, boolean fetchAllVariables) {
Object value = null;
VariableInstance variable = getVariableInstance(variableName, fetchAllVariables);
if (variable != null) {
value = variable.getValue();
}
return value;
}
@Override
public VariableInstance getVariableInstance(String variableName, boolean fetchAllVariables) {
// Transient variable
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return transientVariabes.get(variableName);
}
// Check the local single-fetch cache
if (usedVariablesCache.containsKey(variableName)) {
return usedVariablesCache.get(variableName);
}
if (fetchAllVariables) {
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.get(variableName);
if (variableInstance != null) {
return variableInstance;
}
// Go up the hierarchy
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.getVariableInstance(variableName, true);
}
return null;
} else {
if (variableInstances != null && variableInstances.containsKey(variableName)) {
return variableInstances.get(variableName);
}
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
usedVariablesCache.put(variableName, variable);
return variable;
}
// Go up the hierarchy
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.getVariableInstance(variableName, false);
}
return null;
}
}
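// Illustrative sketch (not part of the original file): with fetchAllVariables == true the whole
// variable map of this scope is loaded once and then served from variableInstances; with false
// only the single variable is queried and remembered in usedVariablesCache. `execution` stands
// for any concrete VariableScope implementation handed out by the engine.
//
//     Object orderId = execution.getVariable("orderId");        // may trigger a bulk fetch
//     Object total = execution.getVariable("total", false);     // single lookup, cached locally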
protected abstract VariableInstanceEntity getSpecificVariable(String variableName);
@Override
public Object getVariableLocal(String variableName) {
return getVariableLocal(variableName, true);
}
@Override
public VariableInstance getVariableInstanceLocal(String variableName) {
return getVariableInstanceLocal(variableName, true);
}
@Override
public Object getVariableLocal(String variableName, boolean fetchAllVariables) {
Object value = null;
VariableInstance variable = getVariableInstanceLocal(variableName, fetchAllVariables);
if (variable != null) {
value = variable.getValue();
}
return value;
}
@Override
public VariableInstance getVariableInstanceLocal(String variableName, boolean fetchAllVariables) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return transientVariabes.get(variableName);
}
if (usedVariablesCache.containsKey(variableName)) {
return usedVariablesCache.get(variableName);
}
if (fetchAllVariables) {
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.get(variableName);
if (variableInstance != null) {
return variableInstance;
}
return null;
} else {
if (variableInstances != null && variableInstances.containsKey(variableName)) {
VariableInstanceEntity variable = variableInstances.get(variableName);
if (variable != null) {
return variableInstances.get(variableName);
}
}
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
usedVariablesCache.put(variableName, variable);
return variable;
}
return null;
}
}
@Override
public boolean hasVariables() {
if (transientVariabes != null && !transientVariabes.isEmpty()) {
return true;
}
ensureVariableInstancesInitialized();
if (!variableInstances.isEmpty()) {
return true;
}
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.hasVariables();
}
return false;
}
@Override
public boolean hasVariablesLocal() {
if (transientVariabes != null && !transientVariabes.isEmpty()) {
return true;
}
ensureVariableInstancesInitialized();
return !variableInstances.isEmpty();
}
@Override
public boolean hasVariable(String variableName) {
if (hasVariableLocal(variableName)) {
return true;
}
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.hasVariable(variableName);
}
return false;
}
@Override
public boolean hasVariableLocal(String variableName) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return true;
}
ensureVariableInstancesInitialized();
return variableInstances.containsKey(variableName);
}
protected Set<String> collectVariableNames(Set<String> variableNames) {
if (transientVariabes != null) {
variableNames.addAll(transientVariabes.keySet());
}
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variableNames.addAll(parentScope.collectVariableNames(variableNames));
}
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variableNames.add(variableInstance.getName());
}
return variableNames;
}
@Override
public Set<String> getVariableNames() {
return collectVariableNames(new HashSet<String>());
}
@Override
public Map<String, Object> getVariablesLocal() {
Map<String, Object> variables = new HashMap<>();
ensureVariableInstancesInitialized();
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance.getValue());
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName).getValue());
}
if (transientVariabes != null) {
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
}
return variables;
}
@Override
public Map<String, VariableInstance> getVariableInstancesLocal() {
Map<String, VariableInstance> variables = new HashMap<>();
ensureVariableInstancesInitialized();
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance);
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName));
}
if (transientVariabes != null) {
variables.putAll(transientVariabes);
}
return variables;
}
@Override
public Map<String, Object> getVariablesLocal(Collection<String> variableNames) {
return getVariablesLocal(variableNames, true);
}
@Override
public Map<String, VariableInstance> getVariableInstancesLocal(Collection<String> variableNames) {
return getVariableInstancesLocal(variableNames, true);
}
@Override
public Map<String, Object> getVariablesLocal(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, Object> requestedVariables = new HashMap<>();
// The values in the fetch-cache will be more recent, so they can override any existing ones
Set<String> variableNamesToFetch = new HashSet<>(variableNames);
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables) {
Map<String, Object> allVariables = getVariablesLocal();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
} else {
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable.getValue());
}
}
return requestedVariables;
}
@Override
public Map<String, VariableInstance> getVariableInstancesLocal(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, VariableInstance> requestedVariables = new HashMap<>();
// The values in the fetch-cache will be more recent, so they can override any existing ones
Set<String> variableNamesToFetch = new HashSet<>(variableNames);
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName));
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName));
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables) {
Map<String, VariableInstance> allVariables = getVariableInstancesLocal();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
} else {
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable);
}
}
return requestedVariables;
}
protected abstract List<VariableInstanceEntity> getSpecificVariables(Collection<String> variableNames);
@Override
public Set<String> getVariableNamesLocal() {
Set<String> variableNames = new HashSet<>();
if (transientVariabes != null) {
variableNames.addAll(transientVariabes.keySet());
}
ensureVariableInstancesInitialized();
variableNames.addAll(variableInstances.keySet());
return variableNames;
}
public Map<String, VariableInstanceEntity> getVariableInstanceEntities() {
ensureVariableInstancesInitialized();
return Collections.unmodifiableMap(variableInstances);
}
public Map<String, VariableInstanceEntity> getUsedVariablesCache() {
return usedVariablesCache;
}
public void createVariablesLocal(Map<String, ? extends Object> variables) {
if (variables != null) {
for (Map.Entry<String, ? extends Object> entry : variables.entrySet()) {
createVariableLocal(entry.getKey(), entry.getValue());
}
}
}
@Override
public void setVariables(Map<String, ? extends Object> variables) {
if (variables != null) {
for (String variableName : variables.keySet()) {
setVariable(variableName, variables.get(variableName));
}
}
}
@Override
public void setVariablesLocal(Map<String, ? extends Object> variables) {
if (variables != null) {
for (String variableName : variables.keySet()) {
setVariableLocal(variableName, variables.get(variableName));
}
}
}
@Override
public void removeVariables() {
ensureVariableInstancesInitialized();
Set<String> variableNames = new HashSet<>(variableInstances.keySet());
for (String variableName : variableNames) {
removeVariable(variableName);
}
}
@Override
public void removeVariablesLocal() {
List<String> variableNames = new ArrayList<>(getVariableNamesLocal());
for (String variableName : variableNames) {
removeVariableLocal(variableName);
}
}
@Override
public void removeVariables(Collection<String> variableNames) {
if (variableNames != null) {
for (String variableName : variableNames) {
removeVariable(variableName);
}
}
}
@Override
public void removeVariablesLocal(Collection<String> variableNames) {
if (variableNames != null) {
for (String variableName : variableNames) {
removeVariableLocal(variableName);
}
}
}
@Override
public void setVariable(String variableName, Object value) {
setVariable(variableName, value, true);
}
/**
* The default {@link #setVariable(String, Object)} fetches all variables (for historical and backwards-compatibility reasons) while setting the variable.
*
* Passing true for the fetchAllVariables parameter keeps that default behaviour (i.e. all variables are fetched); passing false only fetches and updates the specific variable.
*/
@Override
public void setVariable(String variableName, Object value, boolean fetchAllVariables) {
if (fetchAllVariables) {
// If it's in the cache, it's more recent
if (usedVariablesCache.containsKey(variableName)) {
updateVariableInstance(usedVariablesCache.get(variableName), value);
}
// If the variable exists on this scope, replace it
if (hasVariableLocal(variableName)) {
setVariableLocal(variableName, value, true);
return;
}
// Otherwise, go up the hierarchy (we're trying to put it as high as possible)
VariableScopeImpl parentVariableScope = getParentVariableScope();
if (parentVariableScope != null) {
parentVariableScope.setVariable(variableName, value);
return;
}
// We're as high as possible and the variable doesn't exist yet, so we're creating it
createVariableLocal(variableName, value);
} else {
// Check local cache first
if (usedVariablesCache.containsKey(variableName)) {
updateVariableInstance(usedVariablesCache.get(variableName), value);
} else if (variableInstances != null && variableInstances.containsKey(variableName)) {
updateVariableInstance(variableInstances.get(variableName), value);
} else {
// Not in local cache, check if defined on this scope
// Create it if it doesn't exist yet
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
updateVariableInstance(variable, value);
usedVariablesCache.put(variableName, variable);
} else {
VariableScopeImpl parent = getParentVariableScope();
if (parent != null) {
parent.setVariable(variableName, value, fetchAllVariables);
return;
}
variable = createVariableInstance(variableName, value);
usedVariablesCache.put(variableName, variable);
}
}
}
}
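// Illustrative sketch (not part of the original file): setVariable() walks up the parent scopes
// and creates a missing variable as high as possible in the hierarchy, while setVariableLocal()
// always stores it on the current scope. `execution` stands for any concrete subclass.
//
//     execution.setVariable("status", "APPROVED");      // may end up on the highest parent scope
//     execution.setVariableLocal("attempts", 3);         // always stored on this scope only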
@Override
public Object setVariableLocal(String variableName, Object value) {
return setVariableLocal(variableName, value, true);
}
/**
* The default {@link #setVariableLocal(String, Object)} fetches all variables (for historical and backwards-compatibility reasons) while setting the variable.
*
* Passing true for the fetchAllVariables parameter keeps that default behaviour (i.e. all variables are fetched); passing false only fetches and updates the specific variable.
*/
@Override
public Object setVariableLocal(String variableName, Object value, boolean fetchAllVariables) {
if (fetchAllVariables) {
// If it's in the cache, it's more recent
if (usedVariablesCache.containsKey(variableName)) {
updateVariableInstance(usedVariablesCache.get(variableName), value);
}
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.get(variableName);
if (variableInstance == null) {
variableInstance = usedVariablesCache.get(variableName);
}
if (variableInstance == null) {
createVariableLocal(variableName, value);
} else {
updateVariableInstance(variableInstance, value);
}
return null;
} else {
if (usedVariablesCache.containsKey(variableName)) {
updateVariableInstance(usedVariablesCache.get(variableName), value);
} else if (variableInstances != null && variableInstances.containsKey(variableName)) {
updateVariableInstance(variableInstances.get(variableName), value);
} else {
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
updateVariableInstance(variable, value);
} else {
variable = createVariableInstance(variableName, value);
}
usedVariablesCache.put(variableName, variable);
}
return null;
}
}
/**
* Only called when a new variable is created on this variable scope. This method is also responsible for propagating the creation of this variable to the history.
*/
protected void createVariableLocal(String variableName, Object value) {
ensureVariableInstancesInitialized();
if (variableInstances.containsKey(variableName)) {
throw new FlowableException("variable '" + variableName + "' already exists. Use setVariableLocal if you want to overwrite the value");
}
createVariableInstance(variableName, value);
}
@Override
public void removeVariable(String variableName) {
ensureVariableInstancesInitialized();
if (variableInstances.containsKey(variableName)) {
removeVariableLocal(variableName);
return;
}
VariableScopeImpl parentVariableScope = getParentVariableScope();
if (parentVariableScope != null) {
parentVariableScope.removeVariable(variableName);
}
}
@Override
public void removeVariableLocal(String variableName) {
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.remove(variableName);
if (variableInstance != null) {
deleteVariableInstanceForExplicitUserCall(variableInstance);
}
}
protected void deleteVariableInstanceForExplicitUserCall(VariableInstanceEntity variableInstance) {
CommandContextUtil.getVariableInstanceEntityManager().delete(variableInstance);
variableInstance.setValue(null);
initializeVariableInstanceBackPointer(variableInstance);
if (isPropagateToHistoricVariable()) {
VariableServiceConfiguration variableServiceConfiguration = CommandContextUtil.getVariableServiceConfiguration();
if (variableServiceConfiguration.getInternalHistoryVariableManager() != null) {
variableServiceConfiguration.getInternalHistoryVariableManager().recordVariableRemoved(variableInstance);
}
}
}
protected void updateVariableInstance(VariableInstanceEntity variableInstance, Object value) {
// Always check if the type should be altered. It's possible that the previous type is lower in the type
// checking chain (e.g. serializable) and will return true on isAbleToStore(), even though another type
// higher in the chain is eligible for storage.
VariableTypes variableTypes = CommandContextUtil.getVariableServiceConfiguration().getVariableTypes();
VariableType newType = variableTypes.findVariableType(value);
if (newType != null && !newType.equals(variableInstance.getType())) {
variableInstance.setValue(null);
variableInstance.setType(newType);
variableInstance.forceUpdate();
variableInstance.setValue(value);
} else {
variableInstance.setValue(value);
}
initializeVariableInstanceBackPointer(variableInstance);
VariableServiceConfiguration variableServiceConfiguration = CommandContextUtil.getVariableServiceConfiguration();
if (isPropagateToHistoricVariable()) {
if (variableServiceConfiguration.getInternalHistoryVariableManager() != null) {
variableServiceConfiguration.getInternalHistoryVariableManager().recordVariableUpdate(variableInstance);
}
}
// Dispatch event, if needed
if (variableServiceConfiguration.getEventDispatcher() != null && variableServiceConfiguration.getEventDispatcher().isEnabled()) {
variableServiceConfiguration.getEventDispatcher().dispatchEvent(
FlowableVariableEventBuilder.createVariableEvent(FlowableEngineEventType.VARIABLE_UPDATED, variableInstance.getName(), value,
variableInstance.getType(), variableInstance.getTaskId(), variableInstance.getExecutionId(),
variableInstance.getProcessInstanceId(), variableInstance.getProcessDefinitionId()));
}
}
protected VariableInstanceEntity createVariableInstance(String variableName, Object value) {
VariableTypes variableTypes = CommandContextUtil.getVariableServiceConfiguration().getVariableTypes();
VariableType type = variableTypes.findVariableType(value);
VariableInstanceEntityManager variableInstanceEntityManager = CommandContextUtil.getVariableInstanceEntityManager();
VariableInstanceEntity variableInstance = variableInstanceEntityManager.create(variableName, type, value);
initializeVariableInstanceBackPointer(variableInstance);
variableInstanceEntityManager.insert(variableInstance);
if (variableInstances != null) {
variableInstances.put(variableName, variableInstance);
}
VariableServiceConfiguration variableServiceConfiguration = CommandContextUtil.getVariableServiceConfiguration();
if (isPropagateToHistoricVariable()) {
if (variableServiceConfiguration.getInternalHistoryVariableManager() != null) {
variableServiceConfiguration.getInternalHistoryVariableManager().recordVariableCreate(variableInstance);
}
}
if (variableServiceConfiguration.getEventDispatcher() != null && variableServiceConfiguration.getEventDispatcher().isEnabled()) {
variableServiceConfiguration.getEventDispatcher().dispatchEvent(
FlowableVariableEventBuilder.createVariableEvent(FlowableEngineEventType.VARIABLE_CREATED, variableName, value,
variableInstance.getType(), variableInstance.getTaskId(), variableInstance.getExecutionId(),
variableInstance.getProcessInstanceId(), variableInstance.getProcessDefinitionId()));
}
return variableInstance;
}
/*
* Transient variables
*/
@Override
public void setTransientVariablesLocal(Map<String, Object> transientVariables) {
for (String variableName : transientVariables.keySet()) {
setTransientVariableLocal(variableName, transientVariables.get(variableName));
}
}
@Override
public void setTransientVariableLocal(String variableName, Object variableValue) {
if (transientVariabes == null) {
transientVariabes = new HashMap<>();
}
transientVariabes.put(variableName, new TransientVariableInstance(variableName, variableValue));
}
@Override
public void setTransientVariables(Map<String, Object> transientVariables) {
for (String variableName : transientVariables.keySet()) {
setTransientVariable(variableName, transientVariables.get(variableName));
}
}
@Override
public void setTransientVariable(String variableName, Object variableValue) {
VariableScopeImpl parentVariableScope = getParentVariableScope();
if (parentVariableScope != null) {
parentVariableScope.setTransientVariable(variableName, variableValue);
return;
}
setTransientVariableLocal(variableName, variableValue);
}
@Override
public Object getTransientVariableLocal(String variableName) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
    return transientVariabes.get(variableName).getValue();
}
return null;
}
@Override
public Map<String, Object> getTransientVariablesLocal() {
if (transientVariabes != null) {
Map<String, Object> variables = new HashMap<>();
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
return variables;
} else {
return Collections.emptyMap();
}
}
@Override
public Object getTransientVariable(String variableName) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return transientVariabes.get(variableName).getValue();
}
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.getTransientVariable(variableName);
}
return null;
}
@Override
public Map<String, Object> getTransientVariables() {
return collectTransientVariables(new HashMap<String, Object>());
}
protected Map<String, Object> collectTransientVariables(HashMap<String, Object> variables) {
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variables.putAll(parentScope.collectTransientVariables(variables)); // collect only transient variables from the parent chain
}
if (transientVariabes != null) {
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
}
return variables;
}
@Override
public void removeTransientVariableLocal(String variableName) {
if (transientVariabes != null) {
transientVariabes.remove(variableName);
}
}
@Override
public void removeTransientVariablesLocal() {
if (transientVariabes != null) {
transientVariabes.clear();
}
}
@Override
public void removeTransientVariable(String variableName) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
removeTransientVariableLocal(variableName);
return;
}
VariableScopeImpl parentVariableScope = getParentVariableScope();
if (parentVariableScope != null) {
parentVariableScope.removeTransientVariable(variableName);
}
}
@Override
public void removeTransientVariables() {
removeTransientVariablesLocal();
VariableScopeImpl parentVariableScope = getParentVariableScope();
if (parentVariableScope != null) {
parentVariableScope.removeTransientVariablesLocal();
}
}
/**
* Return whether changes to the variables are propagated to the history storage.
*/
protected abstract boolean isPropagateToHistoricVariable();
// getters and setters
// //////////////////////////////////////////////////////
public ELContext getCachedElContext() {
return cachedElContext;
}
public void setCachedElContext(ELContext cachedElContext) {
this.cachedElContext = cachedElContext;
}
@Override
public <T> T getVariable(String variableName, Class<T> variableClass) {
return variableClass.cast(getVariable(variableName));
}
@Override
public <T> T getVariableLocal(String variableName, Class<T> variableClass) {
return variableClass.cast(getVariableLocal(variableName));
}
}
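// Hypothetical usage sketch, not part of the original sources: `scope` stands for whatever concrete
// VariableScopeImpl subclass the owning engine provides (e.g. an execution or task entity); this
// class itself is abstract, and `payload` is an arbitrary in-memory object. Transient variables are
// kept in memory only and shadow persistent ones.
//
//     scope.setVariable("customer", "acme");                  // persisted and recorded in history
//     scope.setTransientVariable("rawResponse", payload);     // in-memory only, never persisted
//     Object customer = scope.getVariable("customer");
//     Map<String, Object> all = scope.getVariables();         // includes the transient values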
|
|
package com.clockwork.scene;
import com.clockwork.bounding.BoundingVolume;
import com.clockwork.collision.Collidable;
import com.clockwork.collision.CollisionResults;
import com.clockwork.export.CWExporter;
import com.clockwork.export.CWImporter;
import com.clockwork.export.Savable;
import com.clockwork.material.Material;
import com.clockwork.util.SafeArrayList;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Node defines an internal node of a scene graph. The internal
* node maintains a collection of children and handles merging said children
* into a single bound to allow for very fast culling of multiple nodes. Node
* allows for any number of children to be attached.
*
*/
public class Node extends Spatial implements Savable {
private static final Logger logger = Logger.getLogger(Node.class.getName());
/**
* This node's children.
*/
protected SafeArrayList<Spatial> children = new SafeArrayList<Spatial>(Spatial.class);
/**
* Serialisation only. Do not use.
*/
public Node() {
}
/**
* Constructor instantiates a new Node with a default empty
* list for containing children.
*
* @param name
* the name of the scene element. This is required for
*            identification and comparison purposes.
*/
public Node(String name) {
super(name);
}
/**
*
* getQuantity returns the number of children this node
* maintains.
*
* @return the number of children this node maintains.
*/
public int getQuantity() {
return children.size();
}
@Override
protected void setTransformRefresh(){
super.setTransformRefresh();
for (Spatial child : children.getArray()){
if ((child.refreshFlags & RF_TRANSFORM) != 0)
continue;
child.setTransformRefresh();
}
}
@Override
protected void setLightListRefresh(){
super.setLightListRefresh();
for (Spatial child : children.getArray()){
if ((child.refreshFlags & RF_LIGHTLIST) != 0)
continue;
child.setLightListRefresh();
}
}
@Override
protected void updateWorldBound(){
super.updateWorldBound();
// for a node, the world bound is a combination of all its children's bounds
BoundingVolume resultBound = null;
for (Spatial child : children.getArray()) {
// child bound is assumed to be updated
assert (child.refreshFlags & RF_BOUND) == 0;
if (resultBound != null) {
// merge current world bound with child world bound
resultBound.mergeLocal(child.getWorldBound());
} else {
// set world bound to first non-null child world bound
if (child.getWorldBound() != null) {
resultBound = child.getWorldBound().clone(this.worldBound);
}
}
}
this.worldBound = resultBound;
}
@Override
public void updateLogicalState(float tpf){
super.updateLogicalState(tpf);
if (children.isEmpty()) {
return;
}
for (Spatial child : children.getArray()) {
child.updateLogicalState(tpf);
}
}
@Override
public void updateGeometricState(){
if ((refreshFlags & RF_LIGHTLIST) != 0){
updateWorldLightList();
}
if ((refreshFlags & RF_TRANSFORM) != 0){
// combine with parent transforms- same for all spatial
// subclasses.
updateWorldTransforms();
}
if (!children.isEmpty()) {
// the important part- make sure child geometric state is refreshed
// first before updating own world bound. This saves
// a round-trip later on.
// NOTE 9/19/09
// Although it does save a round trip,
for (Spatial child : children.getArray()) {
child.updateGeometricState();
}
}
if ((refreshFlags & RF_BOUND) != 0){
updateWorldBound();
}
assert refreshFlags == 0;
}
/**
* getTriangleCount returns the number of triangles contained
* in all sub-branches of this node that contain geometry.
*
* @return the triangle count of this branch.
*/
@Override
public int getTriangleCount() {
int count = 0;
if(children != null) {
for(int i = 0; i < children.size(); i++) {
count += children.get(i).getTriangleCount();
}
}
return count;
}
/**
* getVertexCount returns the number of vertices contained
* in all sub-branches of this node that contain geometry.
*
* @return the vertex count of this branch.
*/
@Override
public int getVertexCount() {
int count = 0;
if(children != null) {
for(int i = 0; i < children.size(); i++) {
count += children.get(i).getVertexCount();
}
}
return count;
}
/**
* attachChild attaches a child to this node. This node
* becomes the child's parent. The current number of children maintained is
* returned.
*
* If the child already had a parent it is detached from that former parent.
*
* @param child
* the child to attach to this node.
* @return the number of children maintained by this node.
* @throws IllegalArgumentException if child is null.
*/
public int attachChild(Spatial child) {
if (child == null)
throw new IllegalArgumentException("child cannot be null");
if (child.getParent() != this && child != this) {
if (child.getParent() != null) {
child.getParent().detachChild(child);
}
child.setParent(this);
children.add(child);
// XXX: Not entirely correct? Forces bound update up the
// tree stemming from the attached child. Also forces
// transform update down the tree-
child.setTransformRefresh();
child.setLightListRefresh();
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE,"Child ({0}) attached to this node ({1})",
new Object[]{child.getName(), getName()});
}
}
return children.size();
}
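    // Illustrative sketch (not part of the original file): building a small scene graph and looking
    // a child up by name. `playerModel` and `weapon` stand for Spatial instances created elsewhere.
    //
    //     Node root = new Node("root");
    //     Node player = new Node("player");
    //     root.attachChild(player);
    //     player.attachChild(playerModel);
    //     player.attachChild(weapon);
    //
    //     Spatial found = root.getChild("player");   // depth-first search by exact name
    //     root.detachChildNamed("player");           // detaches the first child with that name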
/**
*
* attachChildAt attaches a child to this node at an index. This node
* becomes the child's parent. The current number of children maintained is
* returned.
*
* If the child already had a parent it is detached from that former parent.
*
* @param child
*            the child to attach to this node.
* @param index
*            the index at which to attach the child.
* @return the number of children maintained by this node.
* @throws NullPointerException if child is null.
*/
public int attachChildAt(Spatial child, int index) {
if (child == null)
throw new NullPointerException();
if (child.getParent() != this && child != this) {
if (child.getParent() != null) {
child.getParent().detachChild(child);
}
child.setParent(this);
children.add(index, child);
child.setTransformRefresh();
child.setLightListRefresh();
if (logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE,"Child ({0}) attached to this node ({1})",
new Object[]{child.getName(), getName()});
}
}
return children.size();
}
/**
* detachChild removes a given child from the node's list.
* This child will no longer be maintained.
*
* @param child
* the child to remove.
* @return the index the child was at. -1 if the child was not in the list.
*/
public int detachChild(Spatial child) {
if (child == null)
throw new NullPointerException();
if (child.getParent() == this) {
int index = children.indexOf(child);
if (index != -1) {
detachChildAt(index);
}
return index;
}
return -1;
}
/**
* detachChildNamed removes the first child with a matching
* name from the node's list. This child will no longer be maintained.
*
* @param childName
*            the name of the child to remove.
* @return the index the child was at. -1 if the child was not in the list.
*/
public int detachChildNamed(String childName) {
if (childName == null)
throw new NullPointerException();
for (int x = 0, max = children.size(); x < max; x++) {
Spatial child = children.get(x);
if (childName.equals(child.getName())) {
detachChildAt( x );
return x;
}
}
return -1;
}
/**
*
* detachChildAt removes a child at a given index. That child
* is returned for saving purposes.
*
* @param index
* the index of the child to be removed.
* @return the child at the supplied index.
*/
public Spatial detachChildAt(int index) {
Spatial child = children.remove(index);
if ( child != null ) {
child.setParent( null );
logger.log(Level.FINE, "{0}: Child removed.", this.toString());
// since a child with a bound was detached;
// our own bound will probably change.
setBoundRefresh();
// our world transform no longer influences the child.
// XXX: Not necessary? Since the child will have its transform updated
// when attached anyway.
child.setTransformRefresh();
// lights are also inherited from parent
child.setLightListRefresh();
}
return child;
}
/**
*
* detachAllChildren removes all children attached to this
* node.
*/
public void detachAllChildren() {
for ( int i = children.size() - 1; i >= 0; i-- ) {
detachChildAt(i);
}
logger.log(Level.FINE, "{0}: All children removed.", this.toString());
}
/**
* getChildIndex returns the index of the given spatial
* in this node's list of children.
* @param sp
* The spatial to look up
* @return
* The index of the spatial in the node's children, or -1
* if the spatial is not attached to this node
*/
public int getChildIndex(Spatial sp) {
return children.indexOf(sp);
}
/**
* swapChildren swaps the children at the two given indices.
* More efficient than detaching and re-attaching, as no refresh updates are needed.
*
* @param index1 The index of the first child to swap
* @param index2 The index of the second child to swap
*/
public void swapChildren(int index1, int index2) {
Spatial c2 = children.get(index2);
Spatial c1 = children.remove(index1);
children.add(index1, c2);
children.remove(index2);
children.add(index2, c1);
}
/**
*
* getChild returns a child at a given index.
*
* @param i
* the index to retrieve the child from.
* @return the child at a specified index.
*/
public Spatial getChild(int i) {
return children.get(i);
}
/**
* getChild returns the first child found with exactly the
* given name (case sensitive). This method does a depth-first recursive
* search of all descendants of this node and returns the first spatial
* found with a matching name.
*
* @param name
* the name of the child to retrieve. If null, we'll return null.
* @return the child if found, or null.
*/
public Spatial getChild(String name) {
if (name == null)
return null;
for (Spatial child : children.getArray()) {
if (name.equals(child.getName())) {
return child;
} else if(child instanceof Node) {
Spatial out = ((Node)child).getChild(name);
if(out != null) {
return out;
}
}
}
return null;
}
/**
* determines if the provided Spatial is contained in the children list of
* this node or of any node in its subtree.
*
* @param spat
* the child object to look for.
* @return true if the object is contained, false otherwise.
*/
public boolean hasChild(Spatial spat) {
if (children.contains(spat))
return true;
for (Spatial child : children.getArray()) {
if (child instanceof Node && ((Node) child).hasChild(spat))
return true;
}
return false;
}
/**
* Returns all children of this node. Note that the returned
* list must not be modified.
*
* @return a list containing all children to this node
*/
public List<Spatial> getChildren() {
return children;
}
@Override
public void setMaterial(Material mat){
for (int i = 0; i < children.size(); i++){
children.get(i).setMaterial(mat);
}
}
@Override
public void setLodLevel(int lod){
super.setLodLevel(lod);
for (Spatial child : children.getArray()) {
child.setLodLevel(lod);
}
}
public int collideWith(Collidable other, CollisionResults results){
int total = 0;
for (Spatial child : children.getArray()){
total += child.collideWith(other, results);
}
return total;
}
/**
* Returns a flat list of all descendant Spatials that are instances of the
* specified class AND whose name matches the specified pattern.
*
* Note that we are <i>matching</i> the pattern, therefore the name
* must match the entire pattern (i.e. it behaves as if it is sandwiched
* between "^" and "$").
* You can set regex modes, like case insensitivity, by using the (?X)
* or (?X:Y) constructs.
*
* By design, it is always safe to code loops like:
*     for (Spatial spatial : node.descendantMatches(AClass.class, "regex"))
*
* "Descendants" does not include self, per the definition of the word.
* To test for descendants AND self, you must combine
* node.matches(aClass, aRegex) with
* node.descendantMatches(aClass, aRegex).
*
* @param spatialSubclass Subclass which matching Spatials must implement.
*                        Null causes all Spatials to qualify.
* @param nameRegex Regular expression to match Spatial names against.
*                  Null causes all names to qualify.
* @return Non-null, but possibly empty, list of matching Spatials
*         (instances of Spatial subclasses included).
*
* @see java.util.regex.Pattern
* @see Spatial#matches(java.lang.Class, java.lang.String)
*/
@SuppressWarnings("unchecked")
public <T extends Spatial>List<T> descendantMatches(
Class<T> spatialSubclass, String nameRegex) {
List<T> newList = new ArrayList<T>();
if (getQuantity() < 1) return newList;
for (Spatial child : getChildren()) {
if (child.matches(spatialSubclass, nameRegex))
newList.add((T)child);
if (child instanceof Node)
newList.addAll(((Node) child).descendantMatches(
spatialSubclass, nameRegex));
}
return newList;
}
/**
* Convenience wrapper.
*
* @see #descendantMatches(java.lang.Class, java.lang.String)
*/
public <T extends Spatial>List<T> descendantMatches(
Class<T> spatialSubclass) {
return descendantMatches(spatialSubclass, null);
}
/**
* Convenience wrapper.
*
* @see #descendantMatches(java.lang.Class, java.lang.String)
*/
public <T extends Spatial>List<T> descendantMatches(String nameRegex) {
return descendantMatches(null, nameRegex);
}
@Override
public Node clone(boolean cloneMaterials){
Node nodeClone = (Node) super.clone(cloneMaterials);
// nodeClone.children = new ArrayList<Spatial>();
// for (Spatial child : children){
// Spatial childClone = child.clone();
// childClone.parent = nodeClone;
// nodeClone.children.add(childClone);
// }
return nodeClone;
}
@Override
public Spatial deepClone(){
Node nodeClone = (Node) super.clone();
nodeClone.children = new SafeArrayList<Spatial>(Spatial.class);
for (Spatial child : children){
Spatial childClone = child.deepClone();
childClone.parent = nodeClone;
nodeClone.children.add(childClone);
}
return nodeClone;
}
@Override
public void write(CWExporter e) throws IOException {
super.write(e);
e.getCapsule(this).writeSavableArrayList(new ArrayList(children), "children", null);
}
@Override
public void read(CWImporter e) throws IOException {
// XXX: Load children before loading itself!!
// This prevents empty children list if controls query
// it in Control.setSpatial().
children = new SafeArrayList( Spatial.class,
e.getCapsule(this).readSavableArrayList("children", null) );
// go through children and set parent to this node
if (children != null) {
for (Spatial child : children.getArray()) {
child.parent = this;
}
}
super.read(e);
}
@Override
public void setModelBound(BoundingVolume modelBound) {
if(children != null) {
for (Spatial child : children.getArray()) {
child.setModelBound(modelBound != null ? modelBound.clone(null) : null);
}
}
}
@Override
public void updateModelBound() {
if(children != null) {
for (Spatial child : children.getArray()) {
child.updateModelBound();
}
}
}
@Override
public void depthFirstTraversal(SceneGraphVisitor visitor) {
for (Spatial child : children.getArray()) {
child.depthFirstTraversal(visitor);
}
visitor.visit(this);
}
@Override
protected void breadthFirstTraversal(SceneGraphVisitor visitor, Queue<Spatial> queue) {
queue.addAll(children);
}
}
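/*
 * Editor's note: a minimal usage sketch, not part of the original source, showing how the
 * attach/detach, search and traversal methods above fit together. It assumes a Node(String name)
 * constructor and a single-method SceneGraphVisitor#visit(Spatial) callback; the class and
 * method names below (SceneGraphUsageSketch, buildScene) are hypothetical.
 */
class SceneGraphUsageSketch {
    static Node buildScene() {
        Node root = new Node("root");
        Node enemies = new Node("enemies");
        Node enemy1 = new Node("enemy-1");
        Node enemy2 = new Node("enemy-2");

        // attachChild reparents automatically: attaching enemy1 to enemies after it was
        // attached to root detaches it from root first.
        root.attachChild(enemy1);
        enemies.attachChild(enemy1);
        enemies.attachChild(enemy2);
        root.attachChild(enemies);

        // getChild(String) searches depth-first, so grandchildren are found too.
        Spatial found = root.getChild("enemy-2");
        System.out.println("found: " + (found != null ? found.getName() : "none"));

        // descendantMatches: the whole name must match the regex, so "enemy-\\d"
        // matches "enemy-1" and "enemy-2" but a bare "enemy" would match neither.
        for (Node n : root.descendantMatches(Node.class, "enemy-\\d")) {
            System.out.println("matched: " + n.getName());
        }

        // Visit every spatial in the subtree, children before parents (post-order).
        root.depthFirstTraversal(new SceneGraphVisitor() {
            @Override
            public void visit(Spatial spatial) {
                System.out.println("visited: " + spatial.getName());
            }
        });

        // detachChildNamed removes only the first child with that exact name.
        enemies.detachChildNamed("enemy-1");
        return root;
    }
}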
|
|
package com.amee.domain.environment;
import com.amee.base.utils.XMLUtils;
import com.amee.domain.AMEEEntity;
import com.amee.domain.ObjectType;
import org.joda.time.DateTime;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.util.Date;
/**
* @deprecated Environments have been removed from the platform and are only retained for backwards compatibility
* in representations.
*/
@Deprecated
public class Environment extends AMEEEntity implements Comparable {
/**
* A mock Environment 'entity' so that existing API responses continue to look the same.
*/
public final static Environment ENVIRONMENT =
new Environment(
2L,
"5F5887BCF726",
"AMEE",
"",
"",
10,
10,
new DateTime(2007, 7, 27, 8, 30, 44, 0).toDate(),
new DateTime(2007, 7, 27, 8, 30, 44, 0).toDate(),
"");
private String name;
private String path;
private String description;
private String owner;
private Integer itemsPerPage = 10;
private Integer itemsPerFeed = 10;
public Environment() {
super();
setName("");
setPath("");
setDescription("");
setOwner("");
setItemsPerPage(10);
setItemsPerFeed(10);
}
public Environment(String name) {
this();
setName(name);
}
public Environment(
Long id,
String uid,
String name,
String path,
String description,
Integer itemsPerPage,
Integer itemsPerFeed,
Date created,
Date modified,
String owner) {
this();
setId(id);
setUid(uid);
setName(name);
setPath(path);
setDescription(description);
setItemsPerPage(itemsPerPage);
setItemsPerFeed(itemsPerFeed);
setCreated(created);
setModified(modified);
setOwner(owner);
}
public int compareTo(Object o) {
if (this == o) return 0;
if (equals(o)) return 0;
Environment environment = (Environment) o;
return getUid().compareTo(environment.getUid());
}
public JSONObject getJSONObject() throws JSONException {
return getJSONObject(true);
}
public JSONObject getJSONObject(boolean detailed) throws JSONException {
JSONObject obj = new JSONObject();
obj.put("uid", getUid());
obj.put("name", getName());
obj.put("path", getPath());
obj.put("description", getDescription());
obj.put("owner", getOwner());
obj.put("itemsPerPage", getItemsPerPage());
obj.put("itemsPerFeed", getItemsPerFeed());
if (detailed) {
obj.put("created", getCreated().toString());
obj.put("modified", getModified().toString());
}
return obj;
}
public JSONObject getIdentityJSONObject() throws JSONException {
return XMLUtils.getIdentityJSONObject(this);
}
public Element getElement(Document document) {
return getElement(document, true);
}
public Element getElement(Document document, boolean detailed) {
Element element = document.createElement("Environment");
element.setAttribute("uid", getUid());
element.appendChild(XMLUtils.getElement(document, "Name", getName()));
element.appendChild(XMLUtils.getElement(document, "Path", getPath()));
element.appendChild(XMLUtils.getElement(document, "Description", getDescription()));
element.appendChild(XMLUtils.getElement(document, "Owner", getOwner()));
element.appendChild(XMLUtils.getElement(document, "ItemsPerPage", getItemsPerPage().toString()));
element.appendChild(XMLUtils.getElement(document, "ItemsPerFeed", getItemsPerFeed().toString()));
if (detailed) {
element.setAttribute("created", getCreated().toString());
element.setAttribute("modified", getModified().toString());
}
return element;
}
public Element getIdentityElement(Document document) {
return XMLUtils.getIdentityElement(document, this);
}
public String getName() {
return name;
}
public void setName(String name) {
if (name == null) {
name = "";
}
this.name = name;
}
public String getPath() {
return path;
}
public void setPath(String path) {
if (path == null) {
path = "";
}
this.path = path;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
if (description == null) {
description = "";
}
this.description = description;
}
public Integer getItemsPerPage() {
return itemsPerPage;
}
public void setItemsPerPage(Integer itemsPerPage) {
if (itemsPerPage != null) {
this.itemsPerPage = itemsPerPage;
}
}
public void setItemsPerPage(String itemsPerPage) {
try {
setItemsPerPage(Integer.parseInt(itemsPerPage));
} catch (NumberFormatException e) {
// swallow
}
}
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
if (owner == null) {
owner = "";
}
this.owner = owner;
}
public Integer getItemsPerFeed() {
return itemsPerFeed;
}
public void setItemsPerFeed(Integer itemsPerFeed) {
this.itemsPerFeed = itemsPerFeed;
}
public void setItemsPerFeed(String itemsPerFeed) {
try {
setItemsPerFeed(Integer.parseInt(itemsPerFeed));
} catch (NumberFormatException e) {
// swallow
}
}
public ObjectType getObjectType() {
return ObjectType.ENV;
}
}
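/*
 * Editor's note: a minimal sketch, not part of the original source, showing how the JSON and DOM
 * representations above are produced. The class and method names (EnvironmentRepresentationSketch,
 * print) are hypothetical; the sketch relies only on the constructors and the
 * getJSONObject/getElement methods defined in this file plus the standard javax.xml.parsers API.
 */
class EnvironmentRepresentationSketch {
    static void print() throws Exception {
        // The mock singleton keeps legacy API responses stable.
        Environment env = Environment.ENVIRONMENT;

        // JSON representation; detailed=false omits the created/modified timestamps.
        System.out.println(env.getJSONObject(false).toString(2));

        // XML representation rooted at an <Environment> element.
        org.w3c.dom.Document doc = javax.xml.parsers.DocumentBuilderFactory
                .newInstance().newDocumentBuilder().newDocument();
        doc.appendChild(env.getElement(doc, true));
        System.out.println(doc.getDocumentElement().getTagName());
    }
}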
|
|
package user_interface;
import interfaces.ChatEventListener;
import interfaces.ClientEventListener;
import interfaces.PeerEventListener;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import javax.swing.border.LineBorder;
import main.Main;
import net.miginfocom.swing.MigLayout;
import network_communication.ClientThread;
import network_communication.CommunicationController;
import network_communication.ServerSideThread;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JRadioButton;
import javax.swing.JButton;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import javax.swing.SwingConstants;
import application_logic.SetMyShipsManager;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import javax.swing.event.ChangeListener;
import javax.swing.event.ChangeEvent;
import utilities.BattleShipStatus;
public class SetMyShipsFrame extends JFrame {
//for updating the labels when I score a HIT
boolean unistenSam = false;
public ImageIcon cover = new ImageIcon(getClass().getResource("/resources/cover.jpg"));
Main main = null;
public SetMyShipsFrame me = this;
SetMyShipsManager myShipsManager = new SetMyShipsManager(me);
JButton[][] buttonGameBoard = myShipsManager.initializeButtonsforGameBoard();
String myUserName = "";
String mainServerIP = "192.168.1.181";
boolean opponent1Destroyed = false;
boolean opponent2Destroyed = false;
private JPanel contentPane;
private JPanel centerPanel;
private JPanel upPanel;
private JRadioButton rdbtnOneCellShips;
private JRadioButton rdbtnTwoCellShips;
private JRadioButton rdbtnThreeCellsShips;
private JRadioButton rdbtnFiveCellShips;
private JPanel leftPanel;
public ButtonGroup groupShips;
public ButtonGroup groupHV;
public int shipSize = 1;
public char orijentation = 'H';
// /**
// * Launch the application.
// */
// public static void main(String[] args) {
// EventQueue.invokeLater(new Runnable() {
// public void run() {
// try {
// SetMyShipsFrame frame = new SetMyShipsFrame(null);
// frame.setVisible(true);
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
// });
// }
/**
* Create the frame.
*/
public SetMyShipsFrame(Main main) {
this.main = main;
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setBounds(100, 100, 485, 381);
contentPane = new JPanel();
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
setContentPane(contentPane);
contentPane.setLayout(new BorderLayout(0, 0));
contentPane.add(getCenterPanel(), BorderLayout.CENTER);
contentPane.add(getUpPanel(), BorderLayout.NORTH);
contentPane.add(getLeftPanel(), BorderLayout.WEST);
groupShips = new ButtonGroup();
groupShips.add(getRdbtnOneCellShips());
groupShips.add(rdbtnTwoCellShips);
groupShips.add(rdbtnThreeCellsShips);
groupShips.add(rdbtnFiveCellShips);
groupHV = new ButtonGroup();
groupHV.add(rdbtnHorisontal);
groupHV.add(rdbtnVertical);
}
private JRadioButton rdbtnHorisontal;
private JRadioButton rdbtnVertical;
private JLabel lblLinija;
public JLabel lblOneCellsShip;
public JLabel lblTwoCellsShip;
public JLabel lblThreeCellsShip;
public JLabel lblFrourCellsShip;
public JButton btnDone;
private JLabel label;
private JPanel getCenterPanel() {
if (centerPanel == null) {
centerPanel = new JPanel();
// centerPanel.setLayout(new BorderLayout(0, 0)); //<-
//centerPanel.setLayout(new MigLayout("", "[left][38.00][]", "[][][]"));
centerPanel.setLayout(new MigLayout("gap 0px 0px", "[][][]", "[][][][][][]"));
for(int i=0;i<10;i++){
for (int j=0;j<10;j++){
buttonGameBoard[i][j].setSize(40, 40);
centerPanel.add(buttonGameBoard[i][j], "cell "+j+" "+i+"");
}
}
}
return centerPanel;
}
private JPanel getUpPanel() {
if (upPanel == null) {
upPanel = new JPanel();
}
return upPanel;
}
private JRadioButton getRdbtnOneCellShips() {
if (rdbtnOneCellShips == null) {
rdbtnOneCellShips = new JRadioButton("One cell ships:");
rdbtnOneCellShips.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
refreshShipSize();
}
});
rdbtnOneCellShips.setSelected(true);
}
return rdbtnOneCellShips;
}
private JRadioButton getRdbtnTwoCellShips() {
if (rdbtnTwoCellShips == null) {
rdbtnTwoCellShips = new JRadioButton("Two cell ships:");
rdbtnTwoCellShips.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
refreshShipSize();
}
});
}
return rdbtnTwoCellShips;
}
private JRadioButton getRdbtnThreeCellsShips() {
if (rdbtnThreeCellsShips == null) {
rdbtnThreeCellsShips = new JRadioButton("Three cells ships:");
rdbtnThreeCellsShips.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
refreshShipSize();
}
});
}
return rdbtnThreeCellsShips;
}
private JRadioButton getRdbtnFiveCellShips() {
if (rdbtnFiveCellShips == null) {
rdbtnFiveCellShips = new JRadioButton("Five cell ships:");
rdbtnFiveCellShips.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
refreshShipSize();
}
});
}
return rdbtnFiveCellShips;
}
private JPanel getLeftPanel() {
if (leftPanel == null) {
leftPanel = new JPanel();
leftPanel.setLayout(new MigLayout("", "[][]", "[][][][][][][][][][][][]"));
leftPanel.add(getRdbtnOneCellShips(), "cell 0 1");
leftPanel.add(getLblOneCellsShip(), "cell 1 1");
leftPanel.add(getRdbtnTwoCellShips(), "cell 0 2");
leftPanel.add(getLblTwoCellsShip(), "cell 1 2");
leftPanel.add(getRdbtnThreeCellsShips(), "cell 0 3");
leftPanel.add(getLblThreeCellsShip(), "cell 1 3");
leftPanel.add(getRdbtnFiveCellShips(), "cell 0 4");
leftPanel.add(getLblFrourCellsShip(), "cell 1 4");
leftPanel.add(getLblLinija(), "cell 0 5,growx,aligny center");
leftPanel.add(getRdbtnHorisontal(), "cell 0 7");
leftPanel.add(getRdbtnVertical(), "cell 0 8");
leftPanel.add(getLabel(), "cell 0 9,growx");
leftPanel.add(getBtnDone(), "cell 0 10,growx");
}
return leftPanel;
}
private JRadioButton getRdbtnHorisontal() {
if (rdbtnHorisontal == null) {
rdbtnHorisontal = new JRadioButton("Horisontal");
rdbtnHorisontal.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
refreshOrjientation();
}
});
rdbtnHorisontal.setSelected(true);
}
return rdbtnHorisontal;
}
private JRadioButton getRdbtnVertical() {
if (rdbtnVertical == null) {
rdbtnVertical = new JRadioButton("Vertical");
rdbtnVertical.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
refreshOrjientation();
}
});
}
return rdbtnVertical;
}
private JLabel getLblLinija() {
if (lblLinija == null) {
lblLinija = new JLabel("-----------------");
lblLinija.setHorizontalTextPosition(SwingConstants.CENTER);
lblLinija.setHorizontalAlignment(SwingConstants.CENTER);
}
return lblLinija;
}
private JLabel getLblOneCellsShip() {
if (lblOneCellsShip == null) {
lblOneCellsShip = new JLabel("1");
}
return lblOneCellsShip;
}
private JLabel getLblTwoCellsShip() {
if (lblTwoCellsShip == null) {
lblTwoCellsShip = new JLabel("1");
}
return lblTwoCellsShip;
}
private JLabel getLblThreeCellsShip() {
if (lblThreeCellsShip == null) {
lblThreeCellsShip = new JLabel("2");
}
return lblThreeCellsShip;
}
private JLabel getLblFrourCellsShip() {
if (lblFrourCellsShip == null) {
lblFrourCellsShip = new JLabel("1");
}
return lblFrourCellsShip;
}
/**
* Sets the int field shipSize depending on which radio button is checked.
*/
public void refreshShipSize(){
if(rdbtnOneCellShips.isSelected()) shipSize=1;
if(rdbtnTwoCellShips.isSelected()) shipSize=2;
if(rdbtnThreeCellsShips.isSelected()) shipSize=3;
if(rdbtnFiveCellShips.isSelected()) shipSize=5;
}
public void refreshOrjientation(){
if(rdbtnHorisontal.isSelected()) orijentation='H';
if(rdbtnVertical.isSelected()) orijentation='V';
}
/**
* Checks which radio button is selected (ship size) and keeps track of the number of ships already placed.
* Used by SetMyShipsManager.
* @return int index - the ship code, or -1 if no more ships of that size are available
*/
public int updateLabels(){
int indeks = 0;
JRadioButton randomJB = null;
int moreShips = 0;
JLabel randomL = null;
if(rdbtnOneCellShips.isSelected()){
indeks=1;
randomJB=rdbtnOneCellShips;
randomL=lblOneCellsShip;
}
if(rdbtnTwoCellShips.isSelected()){
indeks=2;
randomJB=rdbtnTwoCellShips;
randomL=lblTwoCellsShip;
}
if(rdbtnThreeCellsShips.isSelected()){
randomJB=rdbtnThreeCellsShips;
randomL=lblThreeCellsShip;
if(randomL.getText().equals("2")) indeks=3;
else indeks=4;//when a three-cell ship is being placed for the second time
}
if(rdbtnFiveCellShips.isSelected()){
indeks=5;
randomJB=rdbtnFiveCellShips;
randomL=lblFrourCellsShip;
}
moreShips=Integer.parseInt(randomL.getText());
if(moreShips > 0){
moreShips--;
randomL.setText(""+moreShips);
if(moreShips==0){
randomJB.setEnabled(false);
randomJB.setSelected(false);
}
dopustiDoneAkoTreba();
return indeks;
}else{
randomJB.setSelected(false);
randomJB.setEnabled(false);
dopustiDoneAkoTreba();
return -1;}
}
//------------------------------------------------
private JButton getBtnDone() {
if (btnDone == null) {
btnDone = new JButton("DONE");
btnDone.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
// copy the logic matrix from SetMyShipsManager into the main GUI
main.mainGui.logicMatrixMine = myShipsManager.logicMatrix;
main.mainGui.popuniniMojePoljeBrodicima();
//CONDITIONS: check whether all the ships have been placed
main.mainGui.labelMe.setText(myUserName);
sveKrene();
main.mainGui.setVisible(true);
setVisible(false);
}
});
btnDone.setHorizontalTextPosition(SwingConstants.CENTER);
}
return btnDone;
}
private JLabel getLabel() {
if (label == null) {
label = new JLabel("-----------------");
label.setHorizontalTextPosition(SwingConstants.CENTER);
label.setHorizontalAlignment(SwingConstants.CENTER);
}
return label;
}
//
BlockingQueue<String> queue;
CommunicationController controler;
ServerSideThread myServer;
ClientThread myClient;
public void setUserAndServerIP(String userName, String serverIP) {
this.myUserName = userName;
this.mainServerIP = serverIP;
main.mainGui.myUserName=userName;
}
public void sveKrene(){
queue = new ArrayBlockingQueue<String>(10);
controler = new CommunicationController();
myServer = new ServerSideThread(queue,0,myUserName);
myClient = new ClientThread(controler, queue, mainServerIP, 9080); // use the configured server IP instead of a hard-coded address
myServer.setChatEventListener(new ChatEventListener() {
@Override
public void onChatMessageReceived(String username, String message) {
// TODO Auto-generated method stub
main.mainGui.textPaneChat.setText(username+">: "+message+'\n'+main.mainGui.textPaneChat.getText());
}
});
myClient.setClientEventListener(new ClientEventListener() {
@Override
public void onWait(String message) {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Prijavljeni ste na glavni server ali se ceka jos igraca.\n" + main.mainGui.textPane.getText());
}
});
}
@Override
public void onStart(String message) {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Postoji dovoljan broj igraca na serveru.\n" + main.mainGui.textPane.getText());
}
});
}
@Override
public void onBye(String message) {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Sisao si sa servera.\n" + main.mainGui.textPane.getText());
}
});
}
});
myClient.setPeerEventListener(new PeerEventListener() {
@Override
public void onSynchronized() {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Sinhronizovani !\n" + main.mainGui.textPane.getText());
}
});
}
@Override
public void onRnd(final boolean myTurn, final int myRND, final int myIndex,
final String peer1Username, final int peer1Index, final String peer2Username,
final int peer2Index) {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.labelOpponent1.setText(peer1Username);
main.mainGui.labelOpponent2.setText(peer2Username);
if(myTurn){
main.mainGui.seaFieldOpponent1.setEnableToAll(true);
main.mainGui.seaFieldOpponent2.setEnableToAll(true);
main.mainGui.textPane.setText("Ti si na potezu.\n" + main.mainGui.textPane.getText());
}
else{
main.mainGui.seaFieldOpponent1.setEnableToAll(false);
main.mainGui.seaFieldOpponent2.setEnableToAll(false);
}
main.mainGui.praviRedosledIgranja(peer1Username,peer2Username,peer1Index,peer2Index,myIndex);
//at the start, dim the labels to show whose turn it is
// main.mainGui.zatamniLabele();
}
});
}
@Override
public void onNext(final String username, final boolean myTurn) {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Igrac "+ username +" je propustio potez ili je diskonektovan.\n" + main.mainGui.textPane.getText());
//main.mainGui.koJeNaPotezu();
if(myTurn){
if(!opponent1Destroyed) main.mainGui.seaFieldOpponent1.setEnableToAll(true);
if(!opponent2Destroyed) main.mainGui.seaFieldOpponent2.setEnableToAll(true);
main.mainGui.textPane.setText("Ti si na potezu.\n" + main.mainGui.textPane.getText());
}
}
});
}
@Override
public void onBye() {
EventQueue.invokeLater(new Runnable() {
public void run() {
main.mainGui.textPane.setText("Kraj igre. ??? \n" + main.mainGui.textPane.getText());
}
});
}
//Someone is shooting at me: check my logic matrix and return the status of the move (see the BattleShipStatus interface)
@Override
public int onAttacked(int coorI, int coorJ) {
int sifra;
sifra = myShipsManager.gameBoardMask.takeAHit(coorI, coorJ);
main.mainGui.azurirajMojaPolja(sifra,coorI,coorJ);
if(sifra==BattleShipStatus.FLEET_DESTROYED){
JOptionPane.showMessageDialog(centerPanel,"Unisten si !");
unistenSam = true;
}
return sifra;
}
// the player who was shot at, their coordinates and the shot status
//if myTurn is true, unlock all the opponent fields
@Override
public void onAttackResponse(String username, int coorI, int coorJ,
int status, boolean myTurn) {
if(!username.equals(myUserName)){
if(username.equals(main.mainGui.labelOpponent1.getText())){
main.mainGui.azurirajOpponentsPolja(status,coorI,coorJ,1);
}
else{//it is opponent 2
if(username.equals(main.mainGui.labelOpponent2.getText()))
main.mainGui.azurirajOpponentsPolja(status,coorI,coorJ,2);
}
}
if(status == BattleShipStatus.SHIP_SUNKED) {
main.mainGui.textPane.setText("Igracu "+username+" je potopljen brod!\n"+main.mainGui.textPane.getText());
}
// if(myTurn && unistenSam) main.mainGui.koJeNaPotezu();
//if(status==BattleShipStatus.SHIP_MISSED) main.mainGui.koJeNaPotezu();
if(status==BattleShipStatus.FLEET_DESTROYED) {
if(main.mainGui.labelOpponent1.getText().equals(username)) opponent1Destroyed=true;
if(main.mainGui.labelOpponent2.getText().equals(username)) opponent2Destroyed=true;
if(opponent1Destroyed && opponent2Destroyed) main.mainGui.textPane.setText("POBEDIO SI !\n" + main.mainGui.textPane.getText());
}
if(myTurn){
if(!opponent1Destroyed) main.mainGui.seaFieldOpponent1.setEnableToAll(true);
if(!opponent2Destroyed) main.mainGui.seaFieldOpponent2.setEnableToAll(true);
main.mainGui.textPane.setText("Ti si na potezu.\n" + main.mainGui.textPane.getText() );
}
//when someone loses
}
});
Thread server = new Thread(myServer);
Thread client = new Thread(myClient);
server.start();
client.start();
}
public void dopustiDoneAkoTreba(){
if(lblOneCellsShip.getText().equals("0") &&
lblTwoCellsShip.getText().equals("0") &&
lblThreeCellsShip.getText().equals("0") &&
lblFrourCellsShip.getText().equals("0") )
btnDone.setEnabled(true);
}
}
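/*
 * Editor's note: a hypothetical, stripped-down sketch (not part of the original source) of the
 * counter bookkeeping used by updateLabels() and dopustiDoneAkoTreba() above: each ship size has
 * a JLabel holding the remaining count and a JRadioButton that is disabled once the count reaches
 * zero; the DONE button should be unlocked only when every counter reads "0".
 */
class ShipCounterSketch {
    static boolean place(javax.swing.JRadioButton button, javax.swing.JLabel remainingLabel) {
        int remaining = Integer.parseInt(remainingLabel.getText());
        if (remaining <= 0) {
            // nothing left to place for this ship size
            button.setSelected(false);
            button.setEnabled(false);
            return false;
        }
        remaining--;
        remainingLabel.setText(String.valueOf(remaining));
        if (remaining == 0) {
            button.setSelected(false);
            button.setEnabled(false);
        }
        return true;
    }

    static boolean allPlaced(javax.swing.JLabel... remainingLabels) {
        for (javax.swing.JLabel label : remainingLabels) {
            if (!"0".equals(label.getText())) {
                return false;
            }
        }
        return true; // enable the DONE button when this returns true
    }
}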
|
|
package think.rpgitems.power.impl;
import org.bukkit.NamespacedKey;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerSwapHandItemsEvent;
import org.bukkit.event.player.PlayerToggleSneakEvent;
import org.bukkit.event.player.PlayerToggleSprintEvent;
import org.bukkit.inventory.ItemStack;
import think.rpgitems.power.*;
import static think.rpgitems.power.Utils.checkCooldownByString;
/**
* Power dummy.
* <p>
* Won't do anything but give you fine control.
* </p>
*/
@PowerMeta(defaultTrigger = "RIGHT_CLICK", generalInterface = {PowerLivingEntity.class, PowerPlain.class})
public class PowerDummy extends BasePower implements PowerHit, PowerHitTaken, PowerLeftClick, PowerRightClick, PowerOffhandClick, PowerProjectileHit, PowerSneak, PowerSneaking, PowerSprint, PowerOffhandItem, PowerMainhandItem, PowerTick, PowerPlain, PowerLivingEntity, PowerHurt, PowerBowShoot {
/**
* Cooldown time of this power
*/
@Property
public long cooldown = 0;
/**
* Cost of this power
*/
@Property
public int cost = 0;
@Property
public boolean checkDurabilityBound = true;
/**
* Display message on item
*/
@Property
public String display;
/**
* Whether enchantments can determine cost
*/
@Property
public boolean costByEnchantment = false;
/**
* If reversed, enchantment reduces the cost instead of increasing
*/
@Property
public boolean doEnchReduceCost = false;
/**
* Percentage of cost per level of enchantment
*/
@Property
public double enchCostPercentage = 6;
/**
* Type of enchantment that reduces cost
*/
@Property
public String enchantmentType = "unbreaking";
/**
* Whether cost by damage
*/
@Property
public boolean costByDamage = false;
/**
* Whether to require hurt by entity for HURT trigger
*/
@Property
public boolean requireHurtByEntity = true;
@Property
public String cooldownKey = "dummy";
@Property
public TriggerResult successResult = TriggerResult.OK;
@Property
public TriggerResult costResult = TriggerResult.COST;
@Property
public TriggerResult cooldownResult = TriggerResult.COOLDOWN;
@Property
public boolean showCDWarning = true;
@Override
public PowerResult<Void> fire(Player player, ItemStack stack) {
return fire(player, stack, null, null);
}
@Override
public PowerResult<Void> fire(Player player, ItemStack stack, LivingEntity entity, Double damage) {
if (!checkCooldownByString(this, player, cooldownKey, cooldown, showCDWarning, false))
return PowerResult.of(cooldownResult);
int damageCost = cost;
if (damage != null && costByDamage) {
if (damage < 0) damage = 0d;
damageCost = (int) Math.round(damage * cost / 100d);
}
int finalCost = damageCost;
if (costByEnchantment) {
Enchantment ench = Enchantment.getByKey(NamespacedKey.minecraft(enchantmentType));
if (ench == null) return PowerResult.fail();
double costPercentage = (stack.getEnchantmentLevel(ench) * enchCostPercentage / 100d);
if (finalCost < 0){
finalCost = (int) Math.round(Math.random() <= costPercentage ? Math.floor(damageCost * costPercentage) : Math.ceil(finalCost * costPercentage));
} else {
finalCost = (int) Math.round(Math.random() <= costPercentage ? Math.ceil(damageCost * costPercentage) : Math.floor(finalCost * costPercentage));
}
if (doEnchReduceCost) finalCost = damageCost - finalCost;
}
if (!getItem().consumeDurability(stack, finalCost, checkDurabilityBound)) return PowerResult.of(costResult);
return PowerResult.of(successResult);
}
@Override
public PowerResult<Void> leftClick(Player player, ItemStack stack, PlayerInteractEvent event) {
return fire(player, stack);
}
@Override
public PowerResult<Void> rightClick(Player player, ItemStack stack, PlayerInteractEvent event) {
return fire(player, stack);
}
@Override
public String getName() {
return "dummy";
}
@Override
public String displayText() {
return display;
}
@Override
public PowerResult<Double> hit(Player player, ItemStack stack, LivingEntity entity, double damage, EntityDamageByEntityEvent event) {
return fire(player, stack, entity, damage).with(damage);
}
@Override
public PowerResult<Double> takeHit(Player target, ItemStack stack, double damage, EntityDamageEvent event) {
if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) {
return fire(target, stack, null, damage).with(damage);
}
return PowerResult.noop();
}
@Override
public PowerResult<Void> hurt(Player target, ItemStack stack, EntityDamageEvent event) {
if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) {
return fire(target, stack, null, event.getDamage());
}
return PowerResult.noop();
}
@Override
public PowerResult<Void> offhandClick(Player player, ItemStack stack, PlayerInteractEvent event) {
return fire(player, stack);
}
@Override
public PowerResult<Void> projectileHit(Player player, ItemStack stack, ProjectileHitEvent event) {
return fire(player, stack);
}
@Override
public PowerResult<Void> sneak(Player player, ItemStack stack, PlayerToggleSneakEvent event) {
return fire(player, stack);
}
@Override
public PowerResult<Void> sprint(Player player, ItemStack stack, PlayerToggleSprintEvent event) {
return fire(player, stack);
}
@Override
public PowerResult<Boolean> swapToMainhand(Player player, ItemStack stack, PlayerSwapHandItemsEvent event) {
return fire(player, stack).with(true);
}
@Override
public PowerResult<Boolean> swapToOffhand(Player player, ItemStack stack, PlayerSwapHandItemsEvent event) {
return fire(player, stack).with(true);
}
@Override
public PowerResult<Void> tick(Player player, ItemStack stack) {
return fire(player, stack);
}
@Override
public PowerResult<Void> sneaking(Player player, ItemStack stack) {
return fire(player, stack);
}
@Override
public void init(ConfigurationSection section) {
if (section.isBoolean("ignoreDurabilityBound")) {
// isBoolean only reports that the legacy key exists; read its value with getBoolean
checkDurabilityBound = section.getBoolean("ignoreDurabilityBound");
}
super.init(section);
}
@Override
public PowerResult<Float> bowShoot(Player player, ItemStack itemStack, EntityShootBowEvent e) {
return fire(player, itemStack).with(e.getForce());
}
}
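/*
 * Editor's note: a Bukkit-free sketch (not part of the plugin source) of the cost scaling used in
 * fire(...) above, so the probabilistic rounding can be inspected in isolation. The class and
 * method names are hypothetical; "level" stands in for stack.getEnchantmentLevel(ench), and
 * baseCost plays the role of damageCost before enchantment scaling.
 */
class DummyCostSketch {
    static int scaledCost(int baseCost, int level, double enchCostPercentage,
                          boolean doEnchReduceCost, java.util.Random rng) {
        int finalCost = baseCost;
        // e.g. level 3 at 6% per level -> 0.18
        double costPercentage = level * enchCostPercentage / 100d;
        if (finalCost < 0) {
            finalCost = (int) Math.round(rng.nextDouble() <= costPercentage
                    ? Math.floor(baseCost * costPercentage)
                    : Math.ceil(finalCost * costPercentage));
        } else {
            finalCost = (int) Math.round(rng.nextDouble() <= costPercentage
                    ? Math.ceil(baseCost * costPercentage)
                    : Math.floor(finalCost * costPercentage));
        }
        // When reversed, the enchantment subtracts the scaled part from the base cost.
        if (doEnchReduceCost) {
            finalCost = baseCost - finalCost;
        }
        return finalCost;
    }
}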
|