gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Erik Ramfelt, Xavier Le Vourch, Tom Huybrechts
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.tasks.junit;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;

/**
 * Result of one test suite.
 *
 * <p>
 * The notion of "test suite" is rather arbitrary in JUnit ant task.
 * It's basically one invocation of junit.
 *
 * <p>
 * This object is really only used as a part of the persisted
 * object tree.
 *
 * @author Kohsuke Kawaguchi
 */
@ExportedBean
public final class SuiteResult implements Serializable {

    /** Absolute path of the report file this suite was parsed from; null when built programmatically. */
    private final String file;
    /** Suite name, sanitized via {@link TestObject#safe(String)} when parsed from XML. */
    private final String name;
    private final String stdout;
    private final String stderr;
    /** Sum of the durations of all contained cases; accumulated in {@link #addCase(CaseResult)}. */
    private float duration;

    /**
     * The 'timestamp' attribute of the test suite.
     * AFAICT, this is not a required attribute in XML, so the value may be null.
     */
    private String timestamp;

    /**
     * All test cases.
     */
    private final List<CaseResult> cases = new ArrayList<CaseResult>();

    /** Owning {@link TestResult}; null until {@link #freeze(TestResult)} is called. */
    private transient TestResult parent;

    SuiteResult(String name, String stdout, String stderr) {
        this.name = name;
        this.stderr = stderr;
        this.stdout = stdout;
        this.file = null;
    }

    /**
     * Parses the JUnit XML file into {@link SuiteResult}s.
     * This method returns a collection, as a single XML may have multiple &lt;testsuite>
     * elements wrapped into the top-level &lt;testsuites>.
     *
     * @param xmlReport the JUnit (or TestNG) XML report file to parse.
     * @return one {@link SuiteResult} per &lt;testsuite> element found.
     * @throws DocumentException if the file cannot be parsed as XML.
     */
    @SuppressWarnings("unchecked") // dom4j's pre-generics API: elements() returns a raw List of Element
    static List<SuiteResult> parse(File xmlReport) throws DocumentException {
        List<SuiteResult> r = new ArrayList<SuiteResult>();

        // parse into DOM
        SAXReader saxReader = new SAXReader();
        // install EntityResolver for resolving DTDs, which are in files created by TestNG.
        // (see https://hudson.dev.java.net/servlets/ReadMsg?listName=users&msgNo=5530)
        XMLEntityResolver resolver = new XMLEntityResolver();
        saxReader.setEntityResolver(resolver);
        Document result = saxReader.read(xmlReport);
        Element root = result.getRootElement();

        if (root.getName().equals("testsuites")) {
            // multi-suite file
            for (Element suite : (List<Element>) root.elements("testsuite"))
                r.add(new SuiteResult(xmlReport, suite));
        } else {
            // single suite file
            r.add(new SuiteResult(xmlReport, root));
        }

        return r;
    }

    /**
     * Builds one suite from a single &lt;testsuite> element.
     *
     * @param xmlReport the report file (used for the {@link #getFile()} path and as a
     *                  fallback suite name when the XML carries none).
     * @param suite     the &lt;testsuite> DOM element.
     */
    @SuppressWarnings("unchecked") // dom4j's pre-generics API: elements() returns a raw List of Element
    private SuiteResult(File xmlReport, Element suite) throws DocumentException {
        this.file = xmlReport.getAbsolutePath();
        String name = suite.attributeValue("name");
        if (name == null)
            // some user reported that name is null in their environment.
            // see http://www.nabble.com/Unexpected-Null-Pointer-Exception-in-Hudson-1.131-tf4314802.html
            name = '(' + xmlReport.getName() + ')';
        else {
            String pkg = suite.attributeValue("package");
            if (pkg != null && pkg.length() > 0) name = pkg + '.' + name;
        }
        this.name = TestObject.safe(name);
        this.timestamp = suite.attributeValue("timestamp");

        stdout = suite.elementText("system-out");
        stderr = suite.elementText("system-err");

        Element ex = suite.element("error");
        if (ex != null) {
            // according to junit-noframes.xsl l.229, this happens when the test class failed to load
            addCase(new CaseResult(this, suite, "<init>"));
        }

        for (Element e : (List<Element>) suite.elements("testcase")) {
            // https://hudson.dev.java.net/issues/show_bug.cgi?id=1233 indicates that
            // when <testsuites> is present, we are better off using @classname on the
            // individual testcase class.

            // https://hudson.dev.java.net/issues/show_bug.cgi?id=1463 indicates that
            // @classname may not exist in individual testcase elements. We now
            // also test if the testsuite element has a package name that can be used
            // as the class name instead of the file name which is default.
            String classname = e.attributeValue("classname");
            if (classname == null) {
                classname = suite.attributeValue("name");
            }

            // https://hudson.dev.java.net/issues/show_bug.cgi?id=1233 and
            // http://www.nabble.com/difference-in-junit-publisher-and-ant-junitreport-tf4308604.html#a12265700
            // are at odds with each other --- when both are present,
            // one wants to use @name from <testsuite>,
            // the other wants to use @classname from <testcase>.
            addCase(new CaseResult(this, e, classname));
        }
    }

    /** Adds a case and folds its duration into the suite total. */
    /*package*/ void addCase(CaseResult cr) {
        cases.add(cr);
        duration += cr.getDuration();
    }

    @Exported
    public String getName() {
        return name;
    }

    @Exported
    public float getDuration() {
        return duration;
    }

    /**
     * The stdout of this test.
     *
     * @since 1.281
     * @see CaseResult#getStdout()
     */
    @Exported
    public String getStdout() {
        return stdout;
    }

    /**
     * The stderr of this test.
     *
     * @since 1.281
     * @see CaseResult#getStderr()
     */
    @Exported
    public String getStderr() {
        return stderr;
    }

    /**
     * The absolute path to the original test report. OS-dependent.
     */
    public String getFile() {
        return file;
    }

    public TestResult getParent() {
        return parent;
    }

    @Exported
    public String getTimestamp() {
        return timestamp;
    }

    @Exported(inline = true)
    public List<CaseResult> getCases() {
        return cases;
    }

    /**
     * Returns the suite of the same name from the previous build, or null.
     */
    public SuiteResult getPreviousResult() {
        // FIX: guard against NPE when this suite has not been frozen into a parent yet.
        if (parent == null) return null;
        TestResult pr = parent.getPreviousResult();
        if (pr == null) return null;
        return pr.getSuite(name);
    }

    /**
     * Returns the {@link CaseResult} whose {@link CaseResult#getName()}
     * is the same as the given string.
     *
     * <p>
     * Note that test name needs not be unique.
     */
    public CaseResult getCase(String name) {
        for (CaseResult c : cases) {
            if (c.getName().equals(name))
                return c;
        }
        return null;
    }

    /** Distinct class names of all contained cases. */
    public Set<String> getClassNames() {
        Set<String> result = new HashSet<String>();
        for (CaseResult c : cases) {
            result.add(c.getClassName());
        }
        return result;
    }

    /**
     * Attaches this suite (and its cases) to the owning {@link TestResult}.
     *
     * @return false if the suite was already frozen; true on first call.
     */
    /*package*/ boolean freeze(TestResult owner) {
        if (this.parent != null)
            return false; // already frozen

        this.parent = owner;
        for (CaseResult c : cases)
            c.freeze(this);
        return true;
    }

    private static final long serialVersionUID = 1L;
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.testframework.sm.runner; import com.intellij.execution.process.ProcessOutputTypes; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.testframework.Printable; import com.intellij.execution.testframework.Printer; import com.intellij.execution.testframework.TestConsoleProperties; import com.intellij.execution.testframework.sm.runner.events.*; import com.intellij.execution.testframework.sm.runner.ui.MockPrinter; import com.intellij.execution.testframework.sm.runner.ui.SMTRunnerConsoleView; import com.intellij.execution.testframework.sm.runner.ui.SMTestRunnerResultsForm; import com.intellij.execution.testframework.ui.TestsOutputConsolePrinter; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.openapi.util.Disposer; import org.jetbrains.annotations.NotNull; /** * @author Roman Chernyatchik */ public class SMTRunnerConsoleTest extends BaseSMTRunnerTestCase { private MyConsoleView myConsole; private GeneralToSMTRunnerEventsConvertor myEventsProcessor; private MockPrinter myMockResettablePrinter; private SMTestProxy myRootSuite; private SMTestRunnerResultsForm myResultsViewer; private class MyConsoleView extends SMTRunnerConsoleView { private final TestsOutputConsolePrinter myTestsOutputConsolePrinter; private MyConsoleView(final TestConsoleProperties consoleProperties, final ExecutionEnvironment 
environment) { super(consoleProperties); myTestsOutputConsolePrinter = new TestsOutputConsolePrinter(MyConsoleView.this, consoleProperties, null) { @Override public void print(final String text, final ConsoleViewContentType contentType) { myMockResettablePrinter.print(text, contentType); } }; } @Override public TestsOutputConsolePrinter getPrinter() { return myTestsOutputConsolePrinter; } } @Override protected void setUp() throws Exception { super.setUp(); final TestConsoleProperties consoleProperties = createConsoleProperties(); final ExecutionEnvironment environment = new ExecutionEnvironment(); myMockResettablePrinter = new MockPrinter(true); myConsole = new MyConsoleView(consoleProperties, environment); myConsole.initUI(); myResultsViewer = myConsole.getResultsViewer(); myRootSuite = myResultsViewer.getTestsRootNode(); myEventsProcessor = new GeneralToSMTRunnerEventsConvertor(consoleProperties.getProject(), myResultsViewer.getTestsRootNode(), "SMTestFramework"); myEventsProcessor.onStartTesting(); } @Override protected void tearDown() throws Exception { Disposer.dispose(myEventsProcessor); Disposer.dispose(myConsole); super.tearDown(); } public void testPrintTestProxy() { mySimpleTest.setPrinter(myMockResettablePrinter); mySimpleTest.addLast(new Printable() { @Override public void printOn(final Printer printer) { printer.print("std out", ConsoleViewContentType.NORMAL_OUTPUT); printer.print("std err", ConsoleViewContentType.ERROR_OUTPUT); printer.print("std sys", ConsoleViewContentType.SYSTEM_OUTPUT); } }); assertAllOutputs(myMockResettablePrinter, "std out", "std err", "std sys"); } public void testAddStdOut() { mySimpleTest.setPrinter(myMockResettablePrinter); mySimpleTest.addStdOutput("one", ProcessOutputTypes.STDOUT); assertStdOutput(myMockResettablePrinter, "one"); mySimpleTest.addStdErr("two"); assertStdErr(myMockResettablePrinter, "two"); mySimpleTest.addStdOutput("one", ProcessOutputTypes.STDOUT); mySimpleTest.addStdOutput("one", 
ProcessOutputTypes.STDOUT); mySimpleTest.addStdErr("two"); mySimpleTest.addStdErr("two"); assertAllOutputs(myMockResettablePrinter, "oneone", "twotwo", ""); } public void testAddStdSys() { mySimpleTest.setPrinter(myMockResettablePrinter); mySimpleTest.addSystemOutput("sys"); assertAllOutputs(myMockResettablePrinter, "", "", "sys"); } public void testPrintTestProxy_Order() { mySimpleTest.setPrinter(myMockResettablePrinter); sendToTestProxyStdOut(mySimpleTest, "first "); sendToTestProxyStdOut(mySimpleTest, "second"); assertStdOutput(myMockResettablePrinter, "first second"); } public void testSetPrintListener_ForExistingChildren() { mySuite.addChild(mySimpleTest); mySuite.setPrinter(myMockResettablePrinter); sendToTestProxyStdOut(mySimpleTest, "child "); sendToTestProxyStdOut(mySuite, "root"); assertStdOutput(myMockResettablePrinter, "child root"); } public void testSetPrintListener_OnNewChild() { mySuite.setPrinter(myMockResettablePrinter); sendToTestProxyStdOut(mySuite, "root "); sendToTestProxyStdOut(mySimpleTest, "[child old msg] "); mySuite.addChild(mySimpleTest); sendToTestProxyStdOut(mySuite, "{child added} "); sendToTestProxyStdOut(mySimpleTest, "[child new msg]"); // printer for parent have been already set, thus new // child should immediately print himself on this printer assertStdOutput(myMockResettablePrinter, "root [child old msg] {child added} [child new msg]"); } public void testDeferredPrint() { sendToTestProxyStdOut(mySimpleTest, "one "); sendToTestProxyStdOut(mySimpleTest, "two "); sendToTestProxyStdOut(mySimpleTest, "three"); myMockResettablePrinter.onNewAvailable(mySimpleTest); assertStdOutput(myMockResettablePrinter, "one two three"); myMockResettablePrinter.resetIfNecessary(); assertFalse(myMockResettablePrinter.hasPrinted()); myMockResettablePrinter.onNewAvailable(mySimpleTest); assertStdOutput(myMockResettablePrinter, "one two three"); } public void testProcessor_OnTestStdOutput() { startTestWithPrinter("my_test"); 
myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout2", true)); assertStdOutput(myMockResettablePrinter, "stdout1 stdout2"); } public void testProcessor_OnTestStdErr() { startTestWithPrinter("my_test"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr2", false)); assertStdErr(myMockResettablePrinter, "stderr1 stderr2"); } public void testProcessor_OnTestMixedStd() { startTestWithPrinter("my_test"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout2", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr2", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 stdout2", "stderr1 stderr2", ""); } public void testProcessor_OnFailure() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test", "error msg", "method1:1\nmethod2:2", false, null, null)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nerror msg\nmethod1:1\nmethod2:2\nstderr1 ", ""); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); //other output order final SMTestProxy myTest2 = startTestWithPrinter("my_test2"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stderr1 ", false)); myEventsProcessor.onTestFailure(new 
TestFailedEvent("my_test2", "error msg", "method1:1\nmethod2:2", false, null, null)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); final MockPrinter mockPrinter2 = new MockPrinter(true); mockPrinter2.onNewAvailable(myTest2); assertAllOutputs(mockPrinter2, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); } public void testProcessor_OnFailure_EmptyStacktrace() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test", "error msg", "\n\n", false, null, null)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nerror msg\nstderr1 ", ""); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 \nerror msg\n", ""); } public void testProcessor_OnFailure_Comparision_Strings() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test", "error msg", "method1:1\nmethod2:2", false, "actual", "expected")); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, // std out "stdout1 ", // std err "\n" + "error msg\n" + "expected\n" + "actual\n" + " \n" + "\n" + "method1:1\n" + "method2:2\n" + "stderr1 ", // std sys "Expected :Actual :"); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, // std out "stdout1 ", // std err "stderr1 \nerror msg\n" + "expected\n" + "actual\n" + " \n" + "\n" + "method1:1\nmethod2:2\n", // std sys "Expected :Actual :"); } public void 
testProcessor_OnFailure_Comparision_MultilineTexts() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test", "error msg", "method1:1\nmethod2:2", false, "this is:\nactual", "this is:\nexpected")); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nerror msg \n" + "\n" + "method1:1\n" + "method2:2\n" + "stderr1 ", ""); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 \n" + "error msg \n" + "\n" + "method1:1\n" + "method2:2\n", ""); } public void testProcessor_OnError() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test", "error msg", "method1:1\nmethod2:2", true, null, null)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nerror msg\nmethod1:1\nmethod2:2\nstderr1 ", ""); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); //other output order final SMTestProxy myTest2 = startTestWithPrinter("my_test2"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stderr1 ", false)); myEventsProcessor.onTestFailure(new TestFailedEvent("my_test2", "error msg", "method1:1\nmethod2:2", true, null, null)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); final MockPrinter mockPrinter2 = new MockPrinter(true); 
mockPrinter2.onNewAvailable(myTest2); assertAllOutputs(mockPrinter2, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); } public void testProcessor_OnErrorMsg() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onError("error msg", "method1:1\nmethod2:2", true); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nerror msg\nmethod1:1\nmethod2:2\nstderr1 ", ""); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "\n" + "error msg\n" + "method1:1\n" + "method2:2\n" + "stderr1 ", ""); myEventsProcessor.onTestFinished(new TestFinishedEvent("my_test", 1l)); myTest1.setFinished(); //other output order final SMTestProxy myTest2 = startTestWithPrinter("my_test2"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stderr1 ", false)); myEventsProcessor.onError("error msg", "method1:1\nmethod2:2", true); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); final MockPrinter mockPrinter2 = new MockPrinter(true); mockPrinter2.onNewAvailable(myTest2); assertAllOutputs(mockPrinter2, "stdout1 ", "stderr1 \nerror msg\nmethod1:1\nmethod2:2\n", ""); } public void testProcessor_Suite_OnErrorMsg() { myEventsProcessor.onError("error msg:root", "method1:1\nmethod2:2", true); myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite", null)); final SMTestProxy suite = myEventsProcessor.getCurrentSuite(); suite.setPrinter(myMockResettablePrinter); myEventsProcessor.onError("error msg:suite", "method1:1\nmethod2:2", true); assertAllOutputs(myMockResettablePrinter, "", "\n" + "error msg:suite\n" + "method1:1\n" + "method2:2\n", ""); final 
MockPrinter mockSuitePrinter = new MockPrinter(true); mockSuitePrinter.onNewAvailable(suite); assertAllOutputs(mockSuitePrinter, "", "\n" + "error msg:suite\n" + "method1:1\n" + "method2:2\n", ""); final MockPrinter mockRootSuitePrinter = new MockPrinter(true); mockRootSuitePrinter.onNewAvailable(myRootSuite); assertAllOutputs(mockRootSuitePrinter, "", "\n" + "error msg:root\n" + "method1:1\n" + "method2:2\n" + "\n" + "error msg:suite\n" + "method1:1\n" + "method2:2\n", ""); } public void testProcessor_OnIgnored() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestIgnored(new TestIgnoredEvent("my_test", "ignored msg", null)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 ", "\nignored msg"); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 ", "\nignored msg"); //other output order final SMTestProxy myTest2 = startTestWithPrinter("my_test2"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stderr1 ", false)); myEventsProcessor.onTestIgnored(new TestIgnoredEvent("my_test2", "ignored msg", null)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 ", "\nignored msg"); final MockPrinter mockPrinter2 = new MockPrinter(true); mockPrinter2.onNewAvailable(myTest2); assertAllOutputs(mockPrinter2, "stdout1 ", "stderr1 ", "\nignored msg"); } public void testProcessor_OnIgnored_WithStacktrace() { final SMTestProxy myTest1 = startTestWithPrinter("my_test"); myEventsProcessor.onTestIgnored(new TestIgnoredEvent("my_test", "ignored2 msg", "method1:1\nmethod2:2")); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stdout1 ", true)); 
myEventsProcessor.onTestOutput(new TestOutputEvent("my_test", "stderr1 ", false)); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "\nmethod1:1\nmethod2:2\nstderr1 ", "\nignored2 msg"); final MockPrinter mockPrinter1 = new MockPrinter(true); mockPrinter1.onNewAvailable(myTest1); assertAllOutputs(mockPrinter1, "stdout1 ", "stderr1 \nmethod1:1\nmethod2:2\n", "\nignored2 msg"); //other output order final SMTestProxy myTest2 = startTestWithPrinter("my_test2"); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stdout1 ", true)); myEventsProcessor.onTestOutput(new TestOutputEvent("my_test2", "stderr1 ", false)); myEventsProcessor.onTestIgnored(new TestIgnoredEvent("my_test2", "ignored msg", "method1:1\nmethod2:2")); assertAllOutputs(myMockResettablePrinter, "stdout1 ", "stderr1 \nmethod1:1\nmethod2:2\n", "\nignored msg"); final MockPrinter mockPrinter2 = new MockPrinter(true); mockPrinter2.onNewAvailable(myTest2); assertAllOutputs(mockPrinter2, "stdout1 ", "stderr1 \nmethod1:1\nmethod2:2\n", "\nignored msg"); } public void testOnUncapturedOutput_BeforeProcessStarted() { myRootSuite.setPrinter(myMockResettablePrinter); assertOnUncapturedOutput(); } public void testOnUncapturedOutput_BeforeFirstSuiteStarted() { myRootSuite.setPrinter(myMockResettablePrinter); myEventsProcessor.onStartTesting(); assertOnUncapturedOutput(); } public void testOnUncapturedOutput_SomeSuite() { myEventsProcessor.onStartTesting(); myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("my suite", null)); final SMTestProxy mySuite = myEventsProcessor.getCurrentSuite(); assertTrue(mySuite != myRootSuite); mySuite.setPrinter(myMockResettablePrinter); assertOnUncapturedOutput(); } public void testOnUncapturedOutput_SomeTest() { myEventsProcessor.onStartTesting(); myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("my suite", null)); startTestWithPrinter("my test"); assertOnUncapturedOutput(); } public void assertOnUncapturedOutput() { 
myEventsProcessor.onUncapturedOutput("stdout", ProcessOutputTypes.STDOUT); myEventsProcessor.onUncapturedOutput("stderr", ProcessOutputTypes.STDERR); myEventsProcessor.onUncapturedOutput("system", ProcessOutputTypes.SYSTEM); assertAllOutputs(myMockResettablePrinter, "stdout", "stderr", "system"); } public static void assertStdOutput(final MockPrinter printer, final String out) { assertAllOutputs(printer, out, "", ""); } public static void assertStdErr(final MockPrinter printer, final String out) { assertAllOutputs(printer, "", out, ""); } public static void assertAllOutputs(final MockPrinter printer, final String out, final String err, final String sys) { assertTrue(printer.hasPrinted()); assertEquals(out, printer.getStdOut()); assertEquals(err, printer.getStdErr()); assertEquals(sys, printer.getStdSys()); printer.resetIfNecessary(); } public void testStopCollectingOutput() { myResultsViewer.selectAndNotify(myResultsViewer.getTestsRootNode()); //noinspection NullableProblems myConsole.attachToProcess(null); myEventsProcessor.onStartTesting(); myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite", null)); final SMTestProxy suite = myEventsProcessor.getCurrentSuite(); myEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent("suite")); myEventsProcessor.onUncapturedOutput("preved", ProcessOutputTypes.STDOUT); myEventsProcessor.onFinishTesting(); //myResultsViewer.selectAndNotify(suite); //the string above doesn't update tree immediately so we should simulate update myConsole.getPrinter().updateOnTestSelected(suite); //Lets reset printer /clear console/ before selection changed to //get after selection event only actual ouptut myMockResettablePrinter.resetIfNecessary(); //myResultsViewer.selectAndNotify(myResultsViewer.getTestsRootNode()); //the string above doesn't update tree immediately so we should simulate update myConsole.getPrinter().updateOnTestSelected(myResultsViewer.getTestsRootNode()); assertAllOutputs(myMockResettablePrinter, "preved", 
"","Empty test suite.\n"); } @NotNull private SMTestProxy startTestWithPrinter(final String testName) { myEventsProcessor.onTestStarted(new TestStartedEvent(testName, null)); final SMTestProxy proxy = myEventsProcessor.getProxyByFullTestName(myEventsProcessor.getFullTestName(testName)); assertNotNull(proxy); proxy.setPrinter(myMockResettablePrinter); return proxy; } private static void sendToTestProxyStdOut(final SMTestProxy proxy, final String text) { proxy.addLast(new Printable() { @Override public void printOn(final Printer printer) { printer.print(text, ConsoleViewContentType.NORMAL_OUTPUT); } }); } }
/* The contents of this file are subject to the license and copyright terms
 * detailed in the license directory at the root of the source tree (also
 * available online at http://fedora-commons.org/license/).
 */
package org.fcrepo.client.objecteditor;

import java.awt.Container;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import javax.swing.JComboBox;
import javax.swing.JComponent;

import org.apache.axis.types.NonNegativeInteger;
import org.fcrepo.client.Administrator;
import org.fcrepo.client.objecteditor.types.DatastreamInputSpec;
import org.fcrepo.client.objecteditor.types.MethodDefinition;
import org.fcrepo.server.types.gen.ComparisonOperator;
import org.fcrepo.server.types.gen.Condition;
import org.fcrepo.server.types.gen.FieldSearchQuery;
import org.fcrepo.server.types.gen.FieldSearchResult;
import org.fcrepo.server.types.gen.ObjectFields;

/**
 * Some static utility methods that might be needed across several classes in
 * this package.
 */
public abstract class Util {

    /**
     * Get a map of pid-to-label for all service definition objects in the
     * repository.
     *
     * <p>NOTE: currently always throws UnsupportedOperationException (wrapped
     * in IOException) because the fType-based field search it relies on is
     * obsolete; the query-building code is retained until a replacement exists.
     *
     * @throws IOException if the search fails for any reason.
     */
    public static Map getSDefLabelMap() throws IOException {
        try {
            HashMap labelMap = new HashMap();
            FieldSearchQuery query = new FieldSearchQuery();
            Condition[] conditions = new Condition[1];
            conditions[0] = new Condition();
            conditions[0].setProperty("fType");
            conditions[0].setOperator(ComparisonOperator.fromValue("eq"));
            conditions[0].setValue("D");
            query.setConditions(conditions);
            String[] fields = new String[] {"pid", "label"};

            if (true) {
                /* FIXME: find some other way to do this */
                throw new UnsupportedOperationException("This operation uses obsolete field search semantics");
            }

            FieldSearchResult result =
                    Administrator.APIA
                            .findObjects(fields, new NonNegativeInteger("50"), query);
            while (result != null) {
                ObjectFields[] resultList = result.getResultList();
                for (ObjectFields element : resultList) {
                    labelMap.put(element.getPid(), element.getLabel());
                }
                if (result.getListSession() != null) {
                    result = Administrator.APIA.resumeFindObjects(result
                            .getListSession().getToken());
                } else {
                    result = null;
                }
            }
            return labelMap;
        } catch (Exception e) {
            // FIX: chain the original exception instead of discarding it
            // (initCause keeps compatibility with pre-Java-6 IOException).
            IOException ioe = new IOException(e.getMessage());
            ioe.initCause(e);
            throw ioe;
        }
    }

    /**
     * Get a map of pid-to-label of service deployments that implement the
     * service defined by the indicated sDef.
     *
     * <p>NOTE: currently always throws UnsupportedOperationException (wrapped
     * in IOException) — see {@link #getSDefLabelMap()}.
     *
     * @param sDefPID the pid of the service definition object.
     * @throws IOException if the search fails for any reason.
     */
    public static Map getDeploymentLabelMap(String sDefPID) throws IOException {
        try {
            HashMap labelMap = new HashMap();
            FieldSearchQuery query = new FieldSearchQuery();
            Condition[] conditions = new Condition[2];
            conditions[0] = new Condition();
            conditions[0].setProperty("fType");
            conditions[0].setOperator(ComparisonOperator.fromValue("eq"));
            conditions[0].setValue("M");
            conditions[1] = new Condition();
            conditions[1].setProperty("bDef");
            conditions[1].setOperator(ComparisonOperator.fromValue("has"));
            conditions[1].setValue(sDefPID);
            query.setConditions(conditions);
            String[] fields = new String[] {"pid", "label"};

            if (true) {
                /*
                 * FIXME: find some other way to do this, if we care. it uses
                 * fType and bDef, which are no longer in field search,
                 */
                throw new UnsupportedOperationException("This operation uses obsolete field search semantics");
            }

            FieldSearchResult result =
                    Administrator.APIA
                            .findObjects(fields, new NonNegativeInteger("50"), query);
            while (result != null) {
                ObjectFields[] resultList = result.getResultList();
                for (ObjectFields element : resultList) {
                    labelMap.put(element.getPid(), element.getLabel());
                }
                if (result.getListSession() != null) {
                    result = Administrator.APIA.resumeFindObjects(result
                            .getListSession().getToken());
                } else {
                    result = null;
                }
            }
            return labelMap;
        } catch (Exception e) {
            // FIX: chain the original exception instead of discarding it.
            IOException ioe = new IOException(e.getMessage());
            ioe.initCause(e);
            throw ioe;
        }
    }

    /**
     * Get a map of deployment-pid to {@link DatastreamInputSpec} for each of
     * the given deployment pids.
     *
     * @param deploymentPIDs set of deployment pid Strings.
     * @throws IOException if any spec cannot be retrieved or parsed.
     */
    public static Map getInputSpecMap(Set deploymentPIDs) throws IOException {
        HashMap specMap = new HashMap();
        Iterator iter = deploymentPIDs.iterator();
        while (iter.hasNext()) {
            String pid = (String) iter.next();
            specMap.put(pid, getInputSpec(pid));
        }
        return specMap;
    }

    /**
     * Get the datastream input spec declared by the indicated deployment,
     * read from its DSINPUTSPEC datastream.
     *
     * @throws IOException if the datastream cannot be retrieved or parsed.
     */
    public static DatastreamInputSpec getInputSpec(String deploymentPID)
            throws IOException {
        // Reads the DSINPUTSPEC datastream directly; an older implementation
        // went through the "getItem" dissemination of fedora-system:3 instead.
        return DatastreamInputSpec
                .parse(Administrator.DOWNLOADER
                        .getDatastreamDissemination(deploymentPID,
                                                    "DSINPUTSPEC",
                                                    null));
    }

    /**
     * Get the list of MethodDefinition objects defined by the indicated service
     * definition.
     */
    public static java.util.List getMethodDefinitions(String sDefPID)
            throws IOException {
        return MethodDefinition.parse(Administrator.DOWNLOADER
                .getDatastreamDissemination(sDefPID, "METHODMAP", null));
    }

    /**
     * Get the indicated fields of the indicated object from the repository.
     *
     * @throws IOException if the object is not found or the search fails.
     */
    public static ObjectFields getObjectFields(String pid, String[] fields)
            throws IOException {
        FieldSearchQuery query = new FieldSearchQuery();
        Condition[] conditions = new Condition[1];
        conditions[0] = new Condition();
        conditions[0].setProperty("pid");
        conditions[0].setOperator(ComparisonOperator.fromValue("eq"));
        conditions[0].setValue(pid);
        query.setConditions(conditions);
        FieldSearchResult result =
                Administrator.APIA.findObjects(fields,
                                               new NonNegativeInteger("1"),
                                               query);
        ObjectFields[] resultList = result.getResultList();
        if (resultList == null || resultList.length == 0) {
            throw new IOException("Object not found in repository");
        }
        return resultList[0];
    }

    /**
     * Layout the provided components in two columns, each left-aligned, where
     * the left column's cells are as narrow as possible. If north is true, all
     * cells will be laid out to the NORTHwest. This is useful when some rows'
     * cells aren't the same size vertically. If allowStretching is true,
     * components on the right will be stretched if they can be.
     */
    public static void addRows(JComponent[] left,
                               JComponent[] right,
                               GridBagLayout gridBag,
                               Container container,
                               boolean north,
                               boolean allowStretching) {
        GridBagConstraints c = new GridBagConstraints();
        c.insets = new Insets(0, 4, 4, 4);
        if (north) {
            c.anchor = GridBagConstraints.NORTHWEST;
        } else {
            c.anchor = GridBagConstraints.WEST;
        }
        for (int i = 0; i < left.length; i++) {
            c.gridwidth = GridBagConstraints.RELATIVE; //next-to-last
            c.fill = GridBagConstraints.NONE; //reset to default
            c.weightx = 0.0; //reset to default
            gridBag.setConstraints(left[i], c);
            container.add(left[i]);

            c.gridwidth = GridBagConstraints.REMAINDER; //end row
            // Combo boxes are only stretched when explicitly allowed;
            // every other component type always fills horizontally.
            if (right[i] instanceof JComboBox) {
                if (allowStretching) {
                    c.fill = GridBagConstraints.HORIZONTAL;
                }
            } else {
                c.fill = GridBagConstraints.HORIZONTAL;
            }
            c.weightx = 1.0;
            gridBag.setConstraints(right[i], c);
            container.add(right[i]);
        }
    }
}
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.python; import java.awt.event.KeyEvent; import java.io.*; import java.util.List; import javax.swing.ImageIcon; import org.python.core.PySystemState; import docking.ActionContext; import docking.DockingUtils; import docking.action.*; import generic.jar.ResourceFile; import ghidra.app.CorePluginPackage; import ghidra.app.plugin.PluginCategoryNames; import ghidra.app.plugin.ProgramPlugin; import ghidra.app.plugin.core.console.CodeCompletion; import ghidra.app.plugin.core.interpreter.*; import ghidra.app.script.GhidraState; import ghidra.framework.options.OptionsChangeListener; import ghidra.framework.options.ToolOptions; import ghidra.framework.plugintool.PluginInfo; import ghidra.framework.plugintool.PluginTool; import ghidra.framework.plugintool.util.PluginStatus; import ghidra.util.HelpLocation; import ghidra.util.task.*; import resources.ResourceManager; /** * This plugin provides the interactive Python interpreter. 
*/
//@formatter:off
@PluginInfo(
	status = PluginStatus.RELEASED,
	packageName = CorePluginPackage.NAME,
	category = PluginCategoryNames.INTERPRETERS,
	shortDescription = "Python Interpreter",
	description = "Provides an interactive Python Interpreter that is tightly integrated with a loaded Ghidra program.",
	servicesRequired = { InterpreterPanelService.class },
	isSlowInstallation = true
)
//@formatter:on
public class PythonPlugin extends ProgramPlugin
		implements InterpreterConnection, OptionsChangeListener {

	// NOTE(review): not referenced in this file; presumably consumed by
	// PythonPluginInputThread's shutdown handling -- confirm before removing.
	private final static int INPUT_THREAD_SHUTDOWN_TIMEOUT_MS = 1000;

	// Console panel the user types into; created lazily in init().
	private InterpreterConsole console;

	// The live interpreter; null until the console is first activated.
	private GhidraPythonInterpreter interpreter;

	// Script object that mirrors the current program/location/selection state.
	private PythonScript interactiveScript;

	// Monitor used to cancel interactive executions.
	private TaskMonitor interactiveTaskMonitor;

	// Thread that feeds console input lines to the interpreter.
	private PythonPluginInputThread inputThread;

	// Plugin options
	private final static String INCLUDE_BUILTINS_LABEL =
		"Include \"builtins\" in code completion?";
	private final static String INCLUDE_BUILTINS_DESCRIPTION =
		"Whether or not to include Python's built-in functions and properties in the pop-up code completion window.";
	private final static boolean INCLUDE_BUILTINS_DEFAULT = true;

	// Cached value of the INCLUDE_BUILTINS option; kept in sync by optionsChanged().
	private boolean includeBuiltins = INCLUDE_BUILTINS_DEFAULT;

	/**
	 * Creates a new PythonPlugin object.
	 *
	 * @param tool The tool associated with this plugin.
	 */
	public PythonPlugin(PluginTool tool) {
		super(tool, true, true, true);
	}

	/**
	 * Gets the plugin's interpreter console.
	 *
	 * @return The plugin's interpreter console.
	 */
	InterpreterConsole getConsole() {
		return console;
	}

	/**
	 * Gets the plugin's Python interpreter.
	 *
	 * @return The plugin's Python interpreter. May be null.
	 */
	GhidraPythonInterpreter getInterpreter() {
		return interpreter;
	}

	/**
	 * Gets the plugin's interactive script
	 *
	 * @return The plugin's interactive script.
	 */
	PythonScript getInteractiveScript() {
		return interactiveScript;
	}

	/**
	 * Gets the plugin's interactive task monitor.
	 *
	 * @return The plugin's interactive task monitor.
	 */
	TaskMonitor getInteractiveTaskMonitor() {
		return interactiveTaskMonitor;
	}

	@Override
	protected void init() {
		super.init();

		console = getTool().getService(InterpreterPanelService.class).createInterpreterPanel(this,
			false);
		welcome();
		// Interpreter creation is deferred until the console is first used,
		// because loading Jython is slow (see resetInterpreter()).
		console.addFirstActivationCallback(() -> resetInterpreter());
		createActions();
	}

	/**
	 * Creates various actions for the plugin.
	 */
	private void createActions() {
		// Interrupt Interpreter
		DockingAction interruptAction = new DockingAction("Interrupt Interpreter", getName()) {
			@Override
			public void actionPerformed(ActionContext context) {
				interrupt();
			}
		};
		interruptAction.setDescription("Interrupt Interpreter");
		interruptAction.setToolBarData(
			new ToolBarData(ResourceManager.loadImage("images/dialog-cancel.png"), null));
		interruptAction.setEnabled(true);
		interruptAction.setKeyBindingData(
			new KeyBindingData(KeyEvent.VK_I, DockingUtils.CONTROL_KEY_MODIFIER_MASK));
		interruptAction.setHelpLocation(new HelpLocation(getTitle(), "Interrupt_Interpreter"));
		console.addAction(interruptAction);

		// Reset Interpreter
		DockingAction resetAction = new DockingAction("Reset Interpreter", getName()) {
			@Override
			public void actionPerformed(ActionContext context) {
				reset();
			}
		};
		resetAction.setDescription("Reset Interpreter");
		resetAction.setToolBarData(
			new ToolBarData(ResourceManager.loadImage("images/reload3.png"), null));
		resetAction.setEnabled(true);
		resetAction.setKeyBindingData(
			new KeyBindingData(KeyEvent.VK_D, DockingUtils.CONTROL_KEY_MODIFIER_MASK));
		resetAction.setHelpLocation(new HelpLocation(getTitle(), "Reset_Interpreter"));
		console.addAction(resetAction);
	}

	/**
	 * Resets the interpreter to a new starting state. This is used when the plugin is first
	 * initialized, as well as when an existing interpreter receives a Python exit command.
	 * We used to try to reset the same interpreter, but it was really hard to do that correctly
	 * so we now just create a brand new one.
	 * <p>
	 * NOTE: Loading Jython for the first time can be quite slow the first time, so we do this
	 * when the user wants to first interact with the interpreter (rather than when the plugin loads).
	 */
	private void resetInterpreter() {
		TaskLauncher.launchModal("Resetting Python...", () -> {
			resetInterpreterInBackground();
		});
	}

	// we expect this to be called from off the Swing thread
	private void resetInterpreterInBackground() {
		// Reset the interpreter by creating a new one. Clean up the old one if present.
		if (interpreter == null) {
			// First-time setup: register and read the plugin options.
			ToolOptions options = tool.getOptions("Python");
			includeBuiltins = options.getBoolean(INCLUDE_BUILTINS_LABEL, INCLUDE_BUILTINS_DEFAULT);
			options.registerOption(INCLUDE_BUILTINS_LABEL, INCLUDE_BUILTINS_DEFAULT, null,
				INCLUDE_BUILTINS_DESCRIPTION);
			options.addOptionsChangeListener(this);

			interpreter = GhidraPythonInterpreter.get();

			// Setup code completion. This currently has to be done after the interpreter
			// is created. Otherwise an exception will occur.
			PythonCodeCompletionFactory.setupOptions(this, options);
		}
		else {
			// Re-reset: tear down the old input thread and interpreter first.
			inputThread.shutdown();
			inputThread = null;
			interpreter.cleanup();
			interpreter = GhidraPythonInterpreter.get();
		}

		// Reset the console.
		console.clear();
		console.setPrompt(interpreter.getPrimaryPrompt());

		// Tie the interpreter's input/output to the plugin's console.
		interpreter.setIn(console.getStdin());
		interpreter.setOut(console.getStdOut());
		interpreter.setErr(console.getStdErr());

		// Print a welcome message.
		welcome();

		// Setup the PythonScript describing the state of the interactive prompt.
		// This allows things like currentProgram and currentAddress to dynamically reflect
		// what's happening in the listing.
		interactiveScript = new PythonScript();
		interactiveTaskMonitor = new PythonInteractiveTaskMonitor(console.getStdOut());

		// Start the input thread that receives python commands to execute.
		inputThread = new PythonPluginInputThread(this);
		inputThread.start();
	}

	/**
	 * Handle a change in one of our options.
	 *
	 * @param options the options handle
	 * @param optionName name of the option changed
	 * @param oldValue the old value
	 * @param newValue the new value
	 */
	@Override
	public void optionsChanged(ToolOptions options, String optionName, Object oldValue,
			Object newValue) {
		// Completion-related options are delegated to the completion factory;
		// only INCLUDE_BUILTINS is tracked directly by this plugin.
		if (optionName.startsWith(PythonCodeCompletionFactory.COMPLETION_LABEL)) {
			PythonCodeCompletionFactory.changeOptions(options, optionName, oldValue, newValue);
		}
		else if (optionName.equals(PythonCodeCompletionFactory.INCLUDE_TYPES_LABEL)) {
			PythonCodeCompletionFactory.changeOptions(options, optionName, oldValue, newValue);
		}
		else if (optionName.equals(INCLUDE_BUILTINS_LABEL)) {
			includeBuiltins = ((Boolean) newValue).booleanValue();
		}
	}

	/**
	 * Returns a list of possible command completion values.
	 *
	 * @param cmd current command line (without prompt)
	 * @return A list of possible command completion values. Could be empty if there aren't any.
	 */
	@Override
	public List<CodeCompletion> getCompletions(String cmd) {
		// Refresh the environment so completions see the current program state.
		interactiveScript.setSourceFile(new ResourceFile(new File("python")));
		interactiveScript.set(
			new GhidraState(tool, tool.getProject(), currentProgram, currentLocation,
				currentSelection, currentHighlight),
			interactiveTaskMonitor, console.getOutWriter());

		return interpreter.getCommandCompletions(cmd, includeBuiltins);
	}

	@Override
	protected void dispose() {
		// Do an interrupt in case there is a loop or something running
		interrupt();

		// Terminate the input thread
		if (inputThread != null) {
			inputThread.shutdown();
			inputThread = null;
		}

		// Dispose of the console
		if (console != null) {
			console.dispose();
			console = null;
		}

		// Cleanup the interpreter
		if (interpreter != null) {
			interpreter.cleanup();
			interpreter = null;
		}

		super.dispose();
	}

	/**
	 * Interrupts what the interpreter is currently doing.
	 * <p>
	 * NOTE(review): assumes {@code inputThread} is non-null whenever
	 * {@code interpreter} is non-null (both are created together in
	 * resetInterpreterInBackground()) -- confirm no caller breaks this.
	 */
	public void interrupt() {
		if (interpreter == null) {
			return;
		}
		interpreter.interrupt(inputThread.getPythonPluginExecutionThread());
		console.setPrompt(interpreter.getPrimaryPrompt());
	}

	/**
	 * Resets the interpreter's state.
	 */
	public void reset() {
		// Do an interrupt in case there is a loop or something running
		interrupt();
		resetInterpreter();
	}

	@Override
	public String getTitle() {
		return "Python";
	}

	@Override
	public String toString() {
		return getPluginDescription().getName();
	}

	@Override
	public ImageIcon getIcon() {
		return ResourceManager.loadImage("images/python.png");
	}

	/**
	 * Prints a welcome message to the console.
	 */
	private void welcome() {
		console.getOutWriter().println("Python Interpreter for Ghidra");
		console.getOutWriter().println("Based on Jython version " + PySystemState.version);
		console.getOutWriter().println("Press 'F1' for usage instructions");
	}

	/**
	 * Support for cancelling execution using a TaskMonitor.
	 */
	class PythonInteractiveTaskMonitor extends TaskMonitorAdapter {
		// Writer that progress messages are echoed to (the console's stdout).
		private PrintWriter output = null;

		public PythonInteractiveTaskMonitor(PrintWriter stdOut) {
			output = stdOut;
		}

		public PythonInteractiveTaskMonitor(OutputStream stdout) {
			this(new PrintWriter(stdout));
		}

		@Override
		public void setMessage(String message) {
			output.println("<python-interactive>: " + message);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.aggregation.mean;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.collect.Utils;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.VectorAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;

import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Factory for the "doubleMean" aggregation, which accumulates a running
 * sum/count pair in a {@link DoubleMeanHolder} and finalizes to the mean.
 */
public class DoubleMeanAggregatorFactory extends AggregatorFactory
{
  private final String name;
  private final String fieldName;

  @JsonCreator
  public DoubleMeanAggregatorFactory(
      @JsonProperty("name") String name,
      @JsonProperty("fieldName") final String fieldName
  )
  {
    this.name = Preconditions.checkNotNull(name, "null name");
    this.fieldName = Preconditions.checkNotNull(fieldName, "null fieldName");
  }

  @Override
  @JsonProperty
  public String getName()
  {
    return name;
  }

  @JsonProperty
  public String getFieldName()
  {
    return fieldName;
  }

  @Override
  public List<String> requiredFields()
  {
    // The only input column is the one being averaged.
    return Collections.singletonList(fieldName);
  }

  @Override
  public String getComplexTypeName()
  {
    return "doubleMean";
  }

  /**
   * actual type is {@link DoubleMeanHolder}
   */
  @Override
  public ValueType getType()
  {
    return ValueType.COMPLEX;
  }

  @Override
  public ValueType getFinalizedType()
  {
    return ValueType.DOUBLE;
  }

  @Override
  public int getMaxIntermediateSize()
  {
    return DoubleMeanHolder.MAX_INTERMEDIATE_SIZE;
  }

  @Override
  public Aggregator factorize(ColumnSelectorFactory metricFactory)
  {
    return new DoubleMeanAggregator(metricFactory.makeColumnValueSelector(fieldName));
  }

  @Override
  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
  {
    return new DoubleMeanBufferAggregator(metricFactory.makeColumnValueSelector(fieldName));
  }

  @Override
  public VectorAggregator factorizeVector(final VectorColumnSelectorFactory selectorFactory)
  {
    return new DoubleMeanVectorAggregator(selectorFactory.makeValueSelector(fieldName));
  }

  @Override
  public boolean canVectorize(ColumnInspector columnInspector)
  {
    // Vectorize when the column is numeric, or unknown (null capabilities).
    final ColumnCapabilities caps = columnInspector.getColumnCapabilities(fieldName);
    if (caps == null) {
      return true;
    }
    return caps.getType().isNumeric();
  }

  @Override
  public Comparator getComparator()
  {
    return DoubleMeanHolder.COMPARATOR;
  }

  @Nullable
  @Override
  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
  {
    // Both sides must already be mean holders; anything else is a caller bug.
    if (!(lhs instanceof DoubleMeanHolder) || !(rhs instanceof DoubleMeanHolder)) {
      throw new IAE(
          "lhs[%s] or rhs[%s] not of type [%s]",
          Utils.safeObjectClassGetName(lhs),
          Utils.safeObjectClassGetName(rhs),
          DoubleMeanHolder.class.getName()
      );
    }
    return ((DoubleMeanHolder) lhs).update((DoubleMeanHolder) rhs);
  }

  @Override
  public AggregatorFactory getCombiningFactory()
  {
    // At the combining stage the input column is this aggregator's own output.
    return new DoubleMeanAggregatorFactory(name, name);
  }

  @Override
  public List<AggregatorFactory> getRequiredColumns()
  {
    return Collections.singletonList(new DoubleMeanAggregatorFactory(fieldName, fieldName));
  }

  @Override
  public Object deserialize(Object object)
  {
    // Accept the raw holder, its byte[] form, or a base64-encoded string.
    if (object instanceof byte[]) {
      return DoubleMeanHolder.fromBytes((byte[]) object);
    }
    if (object instanceof String) {
      return DoubleMeanHolder.fromBytes(StringUtils.decodeBase64(StringUtils.toUtf8((String) object)));
    }
    if (object instanceof DoubleMeanHolder) {
      return object;
    }
    throw new IAE("Unknown object type [%s]", Utils.safeObjectClassGetName(object));
  }

  @Nullable
  @Override
  public Object finalizeComputation(@Nullable Object object)
  {
    // null propagates; otherwise reduce the holder (possibly serialized) to its mean.
    if (object == null) {
      return null;
    }
    if (object instanceof byte[]) {
      return DoubleMeanHolder.fromBytes((byte[]) object).mean();
    }
    if (object instanceof DoubleMeanHolder) {
      return ((DoubleMeanHolder) object).mean();
    }
    throw new IAE("Unknown object type [%s]", object.getClass().getName());
  }

  @Override
  public byte[] getCacheKey()
  {
    return new CacheKeyBuilder(AggregatorUtil.MEAN_CACHE_TYPE_ID)
        .appendString(name)
        .appendString(fieldName)
        .build();
  }
}
/*
 * Copyright (c) 2008-2014 MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mongodb.acceptancetest.querying;

import com.mongodb.client.DatabaseTestCase;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import org.bson.BsonObjectId;
import org.bson.BsonReader;
import org.bson.BsonWriter;
import org.bson.Document;
import org.bson.codecs.Codec;
import org.bson.codecs.CollectibleCodec;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.DocumentCodecProvider;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.ValueCodecProvider;
import org.bson.codecs.configuration.CodecProvider;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.types.ObjectId;
import org.junit.Ignore;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static com.mongodb.client.model.Filters.or;
import static com.mongodb.client.model.Filters.type;
import static com.mongodb.client.model.Sorts.descending;
import static java.util.Arrays.asList;
import static org.bson.BsonType.INT32;
import static org.bson.BsonType.INT64;
import static org.bson.codecs.configuration.CodecRegistries.fromProviders;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * Acceptance tests exercising query construction against a live MongoDB
 * collection: raw {@link Document} filters, {@code $type} filters, sorting,
 * the query-builder helpers, and querying a typed collection via a custom codec.
 */
public class QueryAcceptanceTest extends DatabaseTestCase {

    @Test
    public void shouldBeAbleToQueryWithDocumentSpecification() {
        collection.insertOne(new Document("name", "Bob"));

        Document query = new Document("name", "Bob");
        MongoCursor<Document> results = collection.find().filter(query).iterator();

        assertThat(results.next().get("name").toString(), is("Bob"));
    }

    @Test
    public void shouldBeAbleToQueryWithDocument() {
        collection.insertOne(new Document("name", "Bob"));

        Document query = new Document("name", "Bob");
        MongoCursor<Document> results = collection.find(query).iterator();

        assertThat(results.next().get("name").toString(), is("Bob"));
    }

    @Test
    public void shouldBeAbleToQueryTypedCollectionWithDocument() {
        // Register a codec so the collection can round-trip Person objects directly.
        CodecRegistry codecRegistry = fromProviders(asList(new ValueCodecProvider(),
                                                           new DocumentCodecProvider(),
                                                           new PersonCodecProvider()));
        MongoCollection<Person> collection = database
                .getCollection(getCollectionName(), Person.class)
                .withCodecRegistry(codecRegistry);
        collection.insertOne(new Person("Bob"));

        MongoCursor<Person> results = collection.find(new Document("name", "Bob")).iterator();

        assertThat(results.next().name, is("Bob"));
    }

    @Test
    public void shouldBeAbleToFilterByType() {
        collection.insertOne(new Document("product", "Book").append("numTimesOrdered", "some"));
        collection.insertOne(new Document("product", "CD").append("numTimesOrdered", "6"));
        collection.insertOne(new Document("product", "DVD").append("numTimesOrdered", 9));
        collection.insertOne(new Document("product", "SomethingElse").append("numTimesOrdered", 10));

        List<Document> results = new ArrayList<Document>();
        // Previously used the magic number 16; INT32.getValue() is the same BSON
        // type code and matches the style of shouldUseFriendlyQueryType below.
        collection.find(new Document("numTimesOrdered", new Document("$type", INT32.getValue())))
                  .sort(new Document("numTimesOrdered", -1)).into(results);

        assertThat(results.size(), is(2));
        assertThat(results.get(0).get("product").toString(), is("SomethingElse"));
        assertThat(results.get(1).get("product").toString(), is("DVD"));
    }

    @Test
    public void shouldUseFriendlyQueryType() {
        collection.insertOne(new Document("product", "Book").append("numTimesOrdered", "some"));
        collection.insertOne(new Document("product", "CD").append("numTimesOrdered", "6"));
        collection.insertOne(new Document("product", "DVD").append("numTimesOrdered", 9));
        collection.insertOne(new Document("product", "SomethingElse").append("numTimesOrdered", 10));
        collection.insertOne(new Document("product", "VeryPopular").append("numTimesOrdered", 7843273657286478L));

        List<Document> results = new ArrayList<Document>();
        //TODO make BSON type serializable
        // Match both 32- and 64-bit integer values of numTimesOrdered.
        Document filter = new Document("$or", asList(new Document("numTimesOrdered", new Document("$type", INT32.getValue())),
                                                     new Document("numTimesOrdered", new Document("$type", INT64.getValue()))));
        collection.find(filter).sort(new Document("numTimesOrdered", -1)).into(results);

        assertThat(results.size(), is(3));
        assertThat(results.get(0).get("product").toString(), is("VeryPopular"));
        assertThat(results.get(1).get("product").toString(), is("SomethingElse"));
        assertThat(results.get(2).get("product").toString(), is("DVD"));
    }

    @Test
    public void shouldBeAbleToSortAscending() {
        collection.insertOne(new Document("product", "Book"));
        collection.insertOne(new Document("product", "DVD"));
        collection.insertOne(new Document("product", "CD"));

        List<Document> results = new ArrayList<Document>();
        collection.find().sort(new Document("product", 1)).into(results);

        assertThat(results.size(), is(3));
        assertThat(results.get(0).get("product").toString(), is("Book"));
        assertThat(results.get(1).get("product").toString(), is("CD"));
        assertThat(results.get(2).get("product").toString(), is("DVD"));
    }

    @Test
    public void shouldBeAbleToUseQueryBuilderForFilter() {
        collection.insertOne(new Document("product", "Book").append("numTimesOrdered", "some"));
        collection.insertOne(new Document("product", "CD").append("numTimesOrdered", "6"));
        collection.insertOne(new Document("product", "DVD").append("numTimesOrdered", 9));
        collection.insertOne(new Document("product", "SomethingElse").append("numTimesOrdered", 10));
        collection.insertOne(new Document("product", "VeryPopular").append("numTimesOrdered", 7843273657286478L));

        List<Document> results = new ArrayList<Document>();
        collection.find(or(type("numTimesOrdered", INT32), type("numTimesOrdered", INT64)))
                  .sort(descending("numTimesOrdered")).into(results);

        assertThat(results.size(), is(3));
        assertThat(results.get(0).get("product").toString(), is("VeryPopular"));
        assertThat(results.get(1).get("product").toString(), is("SomethingElse"));
        assertThat(results.get(2).get("product").toString(), is("DVD"));
    }

    @Test
    @Ignore("JSON stuff not implemented")
    public void shouldBeAbleToQueryWithJSON() {
    }

    /** Provides {@link PersonCodec} for {@link Person}; static — needs no outer instance. */
    private static class PersonCodecProvider implements CodecProvider {
        @Override
        @SuppressWarnings("unchecked")
        public <T> Codec<T> get(final Class<T> clazz, final CodecRegistry registry) {
            if (clazz.equals(Person.class)) {
                return (Codec<T>) new PersonCodec();
            }
            return null;
        }
    }

    /** Encodes/decodes {@link Person} as {_id: ObjectId, name: String}. */
    private static class PersonCodec implements CollectibleCodec<Person> {
        @Override
        public boolean documentHasId(final Person document) {
            // Person always carries an ObjectId (generated at construction).
            return true;
        }

        @Override
        public BsonObjectId getDocumentId(final Person document) {
            return new BsonObjectId(document.id);
        }

        @Override
        public Person generateIdIfAbsentFromDocument(final Person person) {
            return person;
        }

        @Override
        public void encode(final BsonWriter writer, final Person value, final EncoderContext encoderContext) {
            writer.writeStartDocument();
            writer.writeObjectId("_id", value.id);
            writer.writeString("name", value.name);
            writer.writeEndDocument();
        }

        @Override
        public Person decode(final BsonReader reader, final DecoderContext decoderContext) {
            reader.readStartDocument();
            ObjectId id = reader.readObjectId("_id");
            String name = reader.readString("name");
            reader.readEndDocument();
            return new Person(id, name);
        }

        @Override
        public Class<Person> getEncoderClass() {
            return Person.class;
        }
    }

    /** Minimal fixture type with a generated id and a name. */
    private static class Person {
        private ObjectId id = new ObjectId();
        private final String name;

        public Person(final String name) {
            this.name = name;
        }

        public Person(final ObjectId id, final String name) {
            this.id = id;
            this.name = name;
        }
    }
}
/*    Copyright 2014-2015 ARM Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.arm.wlauto.uiauto.vellamo;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import java.util.concurrent.TimeUnit;
import java.util.ArrayList;

// Import the uiautomator libraries
import com.android.uiautomator.core.UiObject;
import com.android.uiautomator.core.UiObjectNotFoundException;
import com.android.uiautomator.core.UiScrollable;
import com.android.uiautomator.core.UiSelector;
import com.android.uiautomator.core.UiDevice;
import com.android.uiautomator.core.UiWatcher;
import com.android.uiautomator.testrunner.UiAutomatorTestCase;

import com.arm.wlauto.uiauto.BaseUiAutomation;

/**
 * UI automation for the Vellamo benchmark (versions 2.0.3, 3.x). Drives the
 * app through its chapters (Browser/Multicore/Metal), dismisses dialogs, and
 * logs each chapter score as "VELLAMO RESULT: ..." for the host to collect.
 */
public class UiAutomation extends BaseUiAutomation {

    public static String TAG = "vellamo";
    // Collected "<metric> <score>" strings, logged at the end of the run.
    // Was a raw ArrayList; parameterized to match the declared type.
    public static ArrayList<String> scores = new ArrayList<String>();
    // Set by the crash watcher when an "Unfortunately..." dialog appears.
    public static Boolean wasError = false;

    /**
     * Entry point: reads the workload parameters (version, which chapters to
     * run, which browser to use) and drives the matching UI flow.
     */
    public void runUiAutomation() throws Exception {
        Bundle parameters = getParams();
        String version = parameters.getString("version");
        Boolean browser = parameters.getBoolean("browser");
        Boolean metal = parameters.getBoolean("metal");
        Boolean multicore = parameters.getBoolean("multicore");
        // Parameter is 1-based; UI instance indices are 0-based.
        Integer browserToUse = parameters.getInt("browserToUse") - 1;

        dismissEULA();

        if (version.equals("2.0.3")) {
            // v2 runs all chapters in one pass and shows both scores at the end.
            dissmissWelcomebanner();
            startTest();
            dismissNetworkConnectionDialogIfNecessary();
            dismissExplanationDialogIfNecessary();
            waitForTestCompletion(15 * 60, "com.quicinc.vellamo:id/act_ba_results_btn_no");
            getScore("html5", "com.quicinc.vellamo:id/act_ba_results_img_0");
            getScore("metal", "com.quicinc.vellamo:id/act_ba_results_img_1");
        } else {
            // v3 runs each selected chapter individually.
            dismissLetsRoll();
            if (version.equals("3.2.4")) {
                dismissArrow();
            }
            if (browser) {
                startBrowserTest(browserToUse, version);
                proccessTest("Browser");
            }
            if (multicore) {
                startTestV3(1, version);
                proccessTest("Multicore");
            }
            if (metal) {
                startTestV3(2, version);
                proccessTest("Metal");
            }
        }

        for (String result : scores) {
            Log.v(TAG, String.format("VELLAMO RESULT: %s", result));
        }
        if (wasError) {
            Log.v("vellamoWatcher", "VELLAMO ERROR: Something crashed while running browser benchmark");
        }
    }

    /**
     * Starts the v2 benchmark by tapping "Run All Chapters", swiping the
     * pager into view first if the button is not immediately present.
     */
    public void startTest() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject runButton = new UiObject(selector.textContains("Run All Chapters"));

        if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            UiObject pager = new UiObject(selector.className("android.support.v4.view.ViewPager"));
            pager.swipeLeft(2);
            if (!runButton.exists()) {
                throw new UiObjectNotFoundException("Could not find \"Run All Chapters\" button.");
            }
        }
        runButton.click();
    }

    /**
     * Selects the requested browser and starts the Browser chapter, with a
     * watcher installed to dismiss "Unfortunately ... stopped" crash dialogs.
     */
    public void startBrowserTest(int browserToUse, String version) throws Exception {
        //Ensure chrome is selected as "browser" fails to run the benchmark
        UiSelector selector = new UiSelector();
        UiObject browserToUseButton = new UiObject(selector.className("android.widget.ImageButton")
                                                           .longClickable(true).instance(browserToUse));
        UiObject browserButton = new UiObject(selector.className("android.widget.ImageButton")
                                                      .longClickable(true).selected(true));
        //Disable browsers
        while (browserButton.exists()) {
            browserButton.click();
        }
        if (browserToUseButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (browserToUseButton.exists()) {
                browserToUseButton.click();
            }
        }

        //enable a watcher to dismiss browser dialogs
        UiWatcher stoppedWorkingDialogWatcher = new UiWatcher() {
            @Override
            public boolean checkForCondition() {
                UiObject stoppedWorkingDialog = new UiObject(new UiSelector().textStartsWith("Unfortunately"));
                if (stoppedWorkingDialog.exists()) {
                    wasError = true;
                    UiObject okButton = new UiObject(new UiSelector().className("android.widget.Button").text("OK"));
                    try {
                        okButton.click();
                    } catch (UiObjectNotFoundException e) {
                        // Dialog vanished between exists() and click(); log and move on.
                        e.printStackTrace();
                    }
                    return (stoppedWorkingDialog.waitUntilGone(25000));
                }
                return false;
            }
        };
        // Register watcher
        UiDevice.getInstance().registerWatcher("stoppedWorkingDialogWatcher", stoppedWorkingDialogWatcher);

        // Run watcher
        UiDevice.getInstance().runWatchers();

        startTestV3(0, version);
    }

    /**
     * Starts a v3 chapter by index (0=Browser, 1=Multicore, 2=Metal) and
     * dismisses the per-chapter tutorial screen.
     */
    public void startTestV3(int run, String version) throws Exception {
        UiSelector selector = new UiSelector();

        // All three chapter cards must be on screen before picking one by index.
        UiObject thirdRunButton = new UiObject(selector.resourceId("com.quicinc.vellamo:id/card_launcher_run_button").instance(2));
        if (!thirdRunButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (!thirdRunButton.exists()) {
                throw new UiObjectNotFoundException("Could not find three \"Run\" buttons.");
            }
        }

        //Run benchmarks
        UiObject runButton = new UiObject(selector.resourceId("com.quicinc.vellamo:id/card_launcher_run_button").instance(run));
        if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (!runButton.exists()) {
                throw new UiObjectNotFoundException("Could not find correct \"Run\" button.");
            }
        }
        runButton.click();

        //Skip tutorial screen
        if (version.equals("3.2.4")) {
            UiObject gotItButton = new UiObject(selector.textContains("Got it"));
            if (!gotItButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
                if (!gotItButton.exists()) {
                    throw new UiObjectNotFoundException("Could not find correct \"GOT IT\" button.");
                }
            }
            gotItButton.click();
        } else {
            UiObject swipeScreen = new UiObject(selector.textContains("Swipe left to continue"));
            if (!swipeScreen.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
                if (!swipeScreen.exists()) {
                    throw new UiObjectNotFoundException("Could not find \"Swipe screen\".");
                }
            }
            sleep(1);
            swipeScreen.swipeLeft(2);
            sleep(1);
            swipeScreen.swipeLeft(2);
        }
    }

    /**
     * Waits for a v3 chapter to finish, records its score under the given
     * metric name, and backs out to the chapter list.
     */
    public void proccessTest(String metric) throws Exception {
        waitForTestCompletion(15 * 60, "com.quicinc.vellamo:id/button_no");

        //Remove watcher
        UiDevice.getInstance().removeWatcher("stoppedWorkingDialogWatcher");

        getScore(metric, "com.quicinc.vellamo:id/card_score_score");
        getUiDevice().pressBack();
        getUiDevice().pressBack();
        getUiDevice().pressBack();
    }

    /**
     * Reads the score text from the given resource id and appends
     * "<metric> <score>" to {@link #scores}.
     */
    public void getScore(String metric, String resourceID) throws Exception {
        UiSelector selector = new UiSelector();
        UiObject score = new UiObject(selector.resourceId(resourceID));
        if (!score.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (!score.exists()) {
                throw new UiObjectNotFoundException("Could not find score on screen.");
            }
        }
        scores.add(metric + " " + score.getText().trim());
    }

    /**
     * Blocks until the results screen appears (identified by resourceID),
     * up to timeout seconds.
     */
    public void waitForTestCompletion(int timeout, String resourceID) throws Exception {
        UiSelector selector = new UiSelector();
        UiObject resultsNoButton = new UiObject(selector.resourceId(resourceID));
        if (!resultsNoButton.waitForExists(TimeUnit.SECONDS.toMillis(timeout))) {
            throw new UiObjectNotFoundException("Did not see results screen.");
        }
    }

    /** Accepts the EULA dialog if it is shown. */
    public void dismissEULA() throws Exception {
        UiSelector selector = new UiSelector();
        waitText("Vellamo EULA");
        UiObject acceptButton = new UiObject(selector.text("Accept")
                                                     .className("android.widget.Button"));
        if (acceptButton.exists()) {
            acceptButton.click();
        }
    }

    /** Swipes past the v2 WELCOME banner pager if it is shown. */
    public void dissmissWelcomebanner() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject welcomeBanner = new UiObject(selector.textContains("WELCOME"));
        if (welcomeBanner.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            UiObject pager = new UiObject(selector.className("android.support.v4.view.ViewPager"));
            pager.swipeLeft(2);
            pager.swipeLeft(2);
        }
    }

    /** Taps the v3 "LET'S ROLL" intro button (falls back to old capitalization). */
    public void dismissLetsRoll() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject letsRollButton = new UiObject(selector.className("android.widget.Button")
                                                       .textContains("LET'S ROLL"));
        if (!letsRollButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (!letsRollButton.exists()) {
                // As a fall-back look for the old capitalization
                letsRollButton = new UiObject(selector.className("android.widget.Button")
                                                      .textContains("Let's Roll"));
                if (!letsRollButton.exists()) {
                    throw new UiObjectNotFoundException("Could not find \"Let's Roll\" button.");
                }
            }
        }
        letsRollButton.click();
    }

    /** Taps the card container to dismiss the 3.2.4 tutorial arrow overlay. */
    public void dismissArrow() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject cardContainer = new UiObject(selector.resourceId("com.quicinc.vellamo:id/cards_container"));
        if (!cardContainer.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
            if (!cardContainer.exists()) {
                throw new UiObjectNotFoundException("Could not find vellamo main screen");
            }
        }
        cardContainer.click();
    }

    /** Answers "Yes" to the "No Network Connection" dialog if it is shown. */
    public void dismissNetworkConnectionDialogIfNecessary() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject dialog = new UiObject(selector.className("android.widget.TextView")
                                               .textContains("No Network Connection"));
        if (dialog.exists()) {
            UiObject yesButton = new UiObject(selector.className("android.widget.Button")
                                                      .text("Yes"));
            yesButton.click();
        }
    }

    /** Answers "No" to the "Benchmarks Explanation" dialog if it is shown. */
    public void dismissExplanationDialogIfNecessary() throws Exception {
        UiSelector selector = new UiSelector();
        UiObject dialog = new UiObject(selector.className("android.widget.TextView")
                                               .textContains("Benchmarks Explanation"));
        if (dialog.exists()) {
            UiObject noButton = new UiObject(selector.className("android.widget.Button")
                                                     .text("No"));
            noButton.click();
        }
    }
}
/* * Copyright (C) 2017 Synacts GmbH, Switzerland (info@synacts.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.digitalid.utility.generator.information.type; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeVariable; import net.digitalid.utility.annotations.method.Pure; import net.digitalid.utility.annotations.state.Unmodifiable; import net.digitalid.utility.circumfixes.Brackets; import net.digitalid.utility.collaboration.annotations.TODO; import net.digitalid.utility.collaboration.enumerations.Author; import net.digitalid.utility.functional.iterables.FiniteIterable; import net.digitalid.utility.generator.annotations.generators.GenerateBuilder; import net.digitalid.utility.generator.annotations.generators.GenerateSubclass; import net.digitalid.utility.generator.exceptions.FailedClassGenerationException; import net.digitalid.utility.generator.generators.BuilderGenerator; import net.digitalid.utility.generator.generators.ConverterGenerator; import net.digitalid.utility.generator.generators.SubclassGenerator; import net.digitalid.utility.generator.information.ElementInformation; import net.digitalid.utility.generator.information.ElementInformationImplementation; import 
net.digitalid.utility.generator.information.field.FieldInformation; import net.digitalid.utility.generator.information.field.GeneratedDerivedFieldInformation; import net.digitalid.utility.generator.information.field.GeneratedRepresentingFieldInformation; import net.digitalid.utility.generator.information.filter.InformationFilter; import net.digitalid.utility.generator.information.filter.MethodSignatureMatcher; import net.digitalid.utility.generator.information.method.ConstructorInformation; import net.digitalid.utility.generator.information.method.ExecutableInformation; import net.digitalid.utility.generator.information.method.MethodInformation; import net.digitalid.utility.generator.information.variable.VariableElementInformation; import net.digitalid.utility.processing.logging.ProcessingLog; import net.digitalid.utility.processing.logging.SourcePosition; import net.digitalid.utility.string.Strings; import net.digitalid.utility.tuples.Pair; import net.digitalid.utility.validation.annotations.generation.Derive; import net.digitalid.utility.validation.annotations.generation.Recover; import net.digitalid.utility.validation.annotations.size.MinSize; import net.digitalid.utility.validation.annotations.type.Immutable; import net.digitalid.utility.validation.processing.AnnotationHandlerUtility; /** * This type collects the relevant information about a type for generating a {@link SubclassGenerator subclass}, {@link BuilderGenerator builder} and {@link ConverterGenerator converter}. 
* * @see InterfaceInformation * @see ClassInformation */ @Immutable @TODO(task = "The type validators are never loaded, which means their usage is never checked.", date = "2016-05-16", author = Author.KASPAR_ETTER) public abstract class TypeInformation extends ElementInformationImplementation { /* -------------------------------------------------- Recover Method or Constructor -------------------------------------------------- */ public @Nullable ExecutableInformation getRecoverConstructorOrMethod() { if (getRecoverMethod() != null) { return getRecoverMethod(); } else { @Nullable ConstructorInformation recoverConstructor = null; if (getConstructors().size() == 1) { recoverConstructor = getConstructors().getFirst(); } else if (getConstructors().size() > 1) { for (@Nonnull ConstructorInformation constructorInformation : getConstructors()) { if (constructorInformation.hasAnnotation(Recover.class)) { if (recoverConstructor != null) { throw FailedClassGenerationException.with("Only one recover constructor allowed, but multiple @Recover annotations found in type $", SourcePosition.of(getElement()), getName()); } recoverConstructor = constructorInformation; } } if (recoverConstructor == null) { throw FailedClassGenerationException.with("Found multiple constructors, but none is annotated with @Recover in type $", SourcePosition.of(getElement()), getName()); } } return recoverConstructor; } } /* -------------------------------------------------- Recover Method -------------------------------------------------- */ /** * Returns the recover method, if one exists. */ @Pure public abstract @Nullable MethodInformation getRecoverMethod(); /* -------------------------------------------------- Constructors -------------------------------------------------- */ /** * Returns an iterable of constructor information objects. 
*/ @Pure public abstract @Nonnull @MinSize(0) FiniteIterable<ConstructorInformation> getConstructors(); /* -------------------------------------------------- Constructor Parameters -------------------------------------------------- */ /** * Return parameters required for the recovery of an object of the class. */ @Pure public abstract @Nonnull FiniteIterable<VariableElementInformation> getRecoverParameters(); /** * Return parameters required for the construction of the class. */ @Pure public abstract @Nonnull FiniteIterable<VariableElementInformation> getConstructorParameters(); /* -------------------------------------------------- Representing Field Information -------------------------------------------------- */ /** * Returns an iterable of the representing field information objects. */ @Pure public abstract @Nonnull FiniteIterable<FieldInformation> getRepresentingFieldInformation(); /* -------------------------------------------------- Accessible Field Information -------------------------------------------------- */ /** * Returns an iterable of the accessible field information objects. */ @Pure public abstract @Nonnull FiniteIterable<FieldInformation> getAccessibleFieldInformation(); /* -------------------------------------------------- Field Information -------------------------------------------------- */ /** * Returns an iterable of the field information objects. */ @Pure public abstract @Nonnull FiniteIterable<FieldInformation> getFieldInformation(); /* -------------------------------------------------- Overridden Methods -------------------------------------------------- */ /** * Returns an iterable of the overridden method information objects. 
*/ @Pure public abstract @Nonnull FiniteIterable<MethodInformation> getOverriddenMethods(); /* -------------------------------------------------- Element -------------------------------------------------- */ @Pure @Override public @Nonnull TypeElement getElement() { return (TypeElement) super.getElement(); } /* -------------------------------------------------- Type -------------------------------------------------- */ @Pure @Override public @Nonnull DeclaredType getType() { return (DeclaredType) super.getType(); } /** * Returns the type arguments of the represented declared type. */ @Pure public @Nonnull FiniteIterable<@Nonnull TypeVariable> getTypeArguments() { return FiniteIterable.of(getType().getTypeArguments()).instanceOf(TypeVariable.class); } /* -------------------------------------------------- Subclass -------------------------------------------------- */ /** * Returns the simple name of the generated subclass. */ @Pure public @Nonnull String getSimpleNameOfGeneratedSubclass() { return getName() + "Subclass"; } /** * Returns the qualified name of the generated subclass. */ @Pure public @Nonnull String getQualifiedNameOfGeneratedSubclass() { return getQualifiedPackageName() + "." + getSimpleNameOfGeneratedSubclass(); } /* -------------------------------------------------- Builder -------------------------------------------------- */ /** * Returns the simple name of the generated builder. */ @Pure public @Nonnull String getSimpleNameOfGeneratedBuilder() { return getName() + "Builder"; } /** * Returns the qualified name of the generated builder. */ @Pure public @Nonnull String getQualifiedNameOfGeneratedBuilder() { return getQualifiedPackageName() + "." + getSimpleNameOfGeneratedBuilder(); } /* -------------------------------------------------- Converter -------------------------------------------------- */ /** * Returns the simple name of the generated converter. 
*/ @Pure public @Nonnull String getSimpleNameOfGeneratedConverter() { return getName() + "Converter"; } /** * Returns the qualified name of the generated converter. */ @Pure public @Nonnull String getQualifiedNameOfGeneratedConverter() { return getQualifiedPackageName() + "." + getSimpleNameOfGeneratedConverter(); } /* -------------------------------------------------- Generated Field Information -------------------------------------------------- */ /** * An iterable of all generated field information objects. */ public final @Nonnull FiniteIterable<GeneratedRepresentingFieldInformation> generatedRepresentingFieldInformation; /* -------------------------------------------------- Derived Field Information -------------------------------------------------- */ /** * An iterable of all derived field information objects. */ public final @Nonnull FiniteIterable<GeneratedDerivedFieldInformation> derivedFieldInformation; /* -------------------------------------------------- Abstract Getter -------------------------------------------------- */ /** * Returns a map of indexed abstract getters. */ public final @Unmodifiable @Nonnull Map<@Nonnull String, @Nonnull MethodInformation> abstractGetters; /* -------------------------------------------------- Abstract Setter -------------------------------------------------- */ /** * Returns a map of indexed abstract setters. */ public final @Unmodifiable @Nonnull Map<@Nonnull String, @Nonnull MethodInformation> abstractSetters; /* -------------------------------------------------- Abstract Methods -------------------------------------------------- */ /** * Returns an iterable of methods that must be implemented. */ public final @Unmodifiable @Nonnull FiniteIterable<@Nonnull MethodInformation> generatedMethods; /* -------------------------------------------------- Initialization Marker -------------------------------------------------- */ /** * Returns true, iff the object is fully initialized. 
This means, implementing classes should only return true at the end of the object construction. To avoid errors, this should only be implemented and returned by classes that are final. */ @Pure public abstract boolean isInitialized(); /* -------------------------------------------------- Instance Code -------------------------------------------------- */ /** * Returns a string that can be used in generated code to instantiate this type. */ public @Nonnull String getInstantiationCode(boolean useBuilderIfAvailable, boolean useSubclassIfAvailable, boolean useRecoverMethodIfAvailable, @Nullable FiniteIterable<@Nonnull FieldInformation> variables) { // The expected result is: // - the call to the generated builder, if useBuilderIfAvailable flag is set to true and the @GenerateBuilder annotation is available for the type, // - the call to the recover method, if useRecoverMethodIfAvailable is true and a @Recover annotated method is available, or // - the call to the generated subclass constructor, if useSubclassIfAvailable is true and the @GenerateSubclass annotation is available for the type, // - the call to the constructor. if (useRecoverMethodIfAvailable && getRecoverMethod() != null) { final @Nonnull MethodInformation recoverMethod = getRecoverMethod(); return "return " + getName() + "." + recoverMethod.getName() + getRecoverParameters().map(ElementInformation::getName).join(Brackets.ROUND); } else if (useBuilderIfAvailable && hasAnnotation(GenerateBuilder.class)) { final @Nonnull StringBuilder assignedParameters = new StringBuilder(); final @Nonnull StringBuilder optionalParameters = new StringBuilder(); for (@Nonnull VariableElementInformation constructorParameter : getRecoverParameters()) { if (variables == null || variables.map(FieldInformation::getName).contains(constructorParameter.getName())) { (constructorParameter.isMandatory() ? 
assignedParameters : optionalParameters).append(".with").append(Strings.capitalizeFirstLetters(constructorParameter.getName())).append("(").append(constructorParameter.getName()).append(")"); } } assignedParameters.append(optionalParameters); return "return " + getSimpleNameOfGeneratedBuilder() + assignedParameters.append(".build()").toString(); } else if (getElement().getKind() == ElementKind.ENUM) { // maybe move to subclass, because that's a special case of EnumInformation return "return " + getName() + ".valueOf(value)"; } else { final @Nonnull String nameOfConstructor; if (useSubclassIfAvailable && hasAnnotation(GenerateSubclass.class)) { nameOfConstructor = getSimpleNameOfGeneratedSubclass(); } else { nameOfConstructor = getName(); } return "return new " + nameOfConstructor + getConstructorParameters().map(ElementInformation::getName).join(Brackets.ROUND); } } /* -------------------------------------------------- Constructor -------------------------------------------------- */ /** * Creates a new type information instance and initializes the abstract getters, abstract setters, generated and derived field information. */ protected TypeInformation(@Nonnull TypeElement typeElement, @Nonnull DeclaredType containingType) { super(typeElement, typeElement.asType(), containingType); // Make the usage checks of the type annotations: AnnotationHandlerUtility.getTypeValidators(typeElement); // TODO: Enforce that every type has an @Immutable, @Stateless, @Utility and the like annotation? 
final @Nonnull FiniteIterable<@Nonnull MethodInformation> methodInformation = InformationFilter.getMethodInformation(typeElement, containingType, this); this.abstractGetters = methodInformation.filter((method) -> method.isGetter() && method.isAbstract()).toMap(MethodInformation::getFieldName); this.abstractSetters = methodInformation.filter((method) -> method.isSetter() && method.isAbstract()).toMap(MethodInformation::getFieldName); this.generatedMethods = methodInformation.filter((method) -> method.isAbstract() && method.canBeImplemented()); final @Nonnull List<@Nonnull Pair<@Nonnull MethodInformation, @Nullable MethodInformation>> gettersAndSetters = new ArrayList<>(); for (Map.@Nonnull Entry<String, MethodInformation> indexedGetter : abstractGetters.entrySet()) { final @Nonnull MethodInformation getter = indexedGetter.getValue(); final @Nullable MethodInformation setter = abstractSetters.get(indexedGetter.getKey()); ProcessingLog.debugging("For field '" + indexedGetter.getKey() + "', adding getter: '" + getter + "' and setter '" + setter + "'."); gettersAndSetters.add(Pair.of(getter, setter)); } this.generatedRepresentingFieldInformation = FiniteIterable.of(gettersAndSetters).filter(pair -> !pair.get0().hasAnnotation(Derive.class)).map((pair) -> (GeneratedRepresentingFieldInformation.of(pair.get0().getContainingType(), pair.get0(), pair.get1()))); this.derivedFieldInformation = FiniteIterable.of(abstractGetters.entrySet()).filter(entry -> entry.getValue().hasAnnotation(Derive.class)).map(entry -> GeneratedDerivedFieldInformation.of(entry.getValue().getContainingType(), entry.getValue())); final @Nonnull FiniteIterable<MethodInformation> allRemainingAbstractMethods = methodInformation.filter((method) -> (method.isAbstract() && !method.isSetter() && !method.isGetter() && (!hasAnnotation(GenerateSubclass.class) || !method.canBeImplemented()))).filter(MethodSignatureMatcher.of("equals", 
Object.class).and(MethodSignatureMatcher.of("toString")).and(MethodSignatureMatcher.of("hashCode"))); if (allRemainingAbstractMethods.size() != 0) { ProcessingLog.debugging("Found $ abstract methods which cannot be generated", allRemainingAbstractMethods.size()); for (MethodInformation remainingAbstractMethod : allRemainingAbstractMethods) { ProcessingLog.debugging("Remaining method $ cannot be generated", remainingAbstractMethod); } throw FailedClassGenerationException.with("Found abstract methods which cannot be generates: ", SourcePosition.of(typeElement), allRemainingAbstractMethods.join()); } } }
/*
 * Copyright 2012 Dynastream Innovations Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.dsi.ant.sample.acquirechannels;

import com.dsi.ant.channel.ChannelNotAvailableException;
import com.dsi.ant.sample.acquirechannels.ChannelService.ChannelChangedListener;
import com.dsi.ant.sample.acquirechannels.ChannelService.ChannelServiceComm;

import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.ListView;
import android.widget.Toast;
import android.widget.ToggleButton;

import java.util.ArrayList;

/**
 * Activity that displays the list of acquired ANT channels and lets the user
 * add (as master or slave) or clear channels. All channel work is delegated to
 * {@link ChannelService}, which this activity binds to.
 */
public class ChannelList extends Activity {
    private static final String TAG = ChannelList.class.getSimpleName();

    // Preference key used to persist the master/slave toggle across app restarts.
    private final String PREF_TX_BUTTON_CHECKED_KEY = "ChannelList.TX_BUTTON_CHECKED";

    private boolean mCreateChannelAsMaster;

    // Binder interface to ChannelService; null while unbound.
    private ChannelServiceComm mChannelService;

    // Display strings backing the ListView, kept in sync with channel state.
    private ArrayList<String> mChannelDisplayList = new ArrayList<String>();
    private ArrayAdapter<String> mChannelListAdapter;
    // Maps a channel's device number to its row index in mChannelDisplayList.
    private SparseArray<Integer> mIdChannelListIndexMap = new SparseArray<Integer>();

    private boolean mChannelServiceBound = false;

    /** Wires up the master/slave toggle and the add/clear buttons; enabled only while bound. */
    private void initButtons() {
        Log.v(TAG, "initButtons...");

        //Register Master/Slave Toggle handler
        ToggleButton toggleButton_MasterSlave = (ToggleButton)findViewById(R.id.toggleButton_MasterSlave);
        toggleButton_MasterSlave.setEnabled(mChannelServiceBound);
        toggleButton_MasterSlave.setChecked(mCreateChannelAsMaster);
        toggleButton_MasterSlave.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton arg0, boolean enabled) {
                mCreateChannelAsMaster = enabled;
            }
        });

        //Register Add Channel Button handler
        Button button_addChannel = (Button)findViewById(R.id.button_AddChannel);
        button_addChannel.setEnabled(mChannelServiceBound);
        button_addChannel.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                addNewChannel(mCreateChannelAsMaster);
            }
        });

        //Register Clear Channels Button handler
        Button button_clearChannels = (Button)findViewById(R.id.button_ClearChannels);
        button_clearChannels.setEnabled(mChannelServiceBound);
        button_clearChannels.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                clearAllChannels();
            }
        });

        Log.v(TAG, "...initButtons");
    }

    private void initPrefs() {
        Log.v(TAG, "initPrefs...");

        // Retrieves the app's current state of channel transmission mode
        // from preferences to handle app resuming.
        SharedPreferences preferences = getPreferences(MODE_PRIVATE);
        mCreateChannelAsMaster = preferences.getBoolean(PREF_TX_BUTTON_CHECKED_KEY, true);

        Log.v(TAG, "...initPrefs");
    }

    private void savePrefs() {
        Log.v(TAG, "savePrefs...");

        // Saves the app's current state of channel transmission mode to preferences
        SharedPreferences preferences = getPreferences(MODE_PRIVATE);
        SharedPreferences.Editor editor = preferences.edit();
        editor.putBoolean(PREF_TX_BUTTON_CHECKED_KEY, mCreateChannelAsMaster);
        editor.commit();

        Log.v(TAG, "...savePrefs");
    }

    private void doBindChannelService() {
        Log.v(TAG, "doBindChannelService...");

        // Binds to ChannelService. ChannelService binds and manages connection between the
        // app and the ANT Radio Service
        Intent bindIntent = new Intent(this, ChannelService.class);
        // startService keeps the service alive independently of this binding.
        startService(bindIntent);
        mChannelServiceBound = bindService(bindIntent, mChannelServiceConnection, Context.BIND_AUTO_CREATE);

        if(!mChannelServiceBound)   //If the bind returns false, run the unbind method to update the GUI
            doUnbindChannelService();

        Log.i(TAG, "  Channel Service binding = "+ mChannelServiceBound);

        Log.v(TAG, "...doBindChannelService");
    }

    /** Unbinds from ChannelService and disables all channel-related controls. */
    private void doUnbindChannelService() {
        Log.v(TAG, "doUnbindChannelService...");

        if(mChannelServiceBound) {
            unbindService(mChannelServiceConnection);

            mChannelServiceBound = false;
        }

        ((Button)findViewById(R.id.button_ClearChannels)).setEnabled(false);
        ((Button)findViewById(R.id.button_AddChannel)).setEnabled(false);
        ((Button)findViewById(R.id.toggleButton_MasterSlave)).setEnabled(false);

        Log.v(TAG, "...doUnbindChannelService");
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.v(TAG, "onCreate...");

        mChannelServiceBound = false;

        setContentView(R.layout.activity_channel_list);

        initPrefs();

        mChannelListAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, android.R.id.text1, mChannelDisplayList);
        ListView listView_channelList = (ListView)findViewById(R.id.listView_channelList);
        listView_channelList.setAdapter(mChannelListAdapter);

        if(!mChannelServiceBound) doBindChannelService();

        initButtons();

        Log.v(TAG, "...onCreate");
    }

    public void onBack() {
        finish();
    }

    @Override
    public void onDestroy() {
        Log.v(TAG, "onDestroy...");

        doUnbindChannelService();

        // Only stop the backing service when the activity is going away for good.
        if(isFinishing()) {
            stopService(new Intent(this, ChannelService.class));
        }

        mChannelServiceConnection = null;

        savePrefs();

        Log.v(TAG, "...onDestroy");

        super.onDestroy();
    }

    // Connection callbacks for ChannelService; registers a listener that keeps
    // the channel list UI in sync with channel events.
    private ServiceConnection mChannelServiceConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder serviceBinder) {
            Log.v(TAG, "mChannelServiceConnection.onServiceConnected...");
            mChannelService = (ChannelServiceComm) serviceBinder;

            // Sets a listener that handles channel events
            mChannelService.setOnChannelChangedListener(new ChannelChangedListener() {
                // Occurs when a channel has new info/data
                @Override
                public void onChannelChanged(final ChannelInfo newInfo) {
                    Integer index = mIdChannelListIndexMap.get(newInfo.deviceNumber);

                    if(null != index && index.intValue() < mChannelDisplayList.size()) {
                        mChannelDisplayList.set(index.intValue(), getDisplayText(newInfo));
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                // Adapter change must be published on the UI thread.
                                mChannelListAdapter.notifyDataSetChanged();
                            }
                        });
                    }
                }

                // Updates the UI to allow/disallow acquiring new channels
                @Override
                public void onAllowAddChannel(boolean addChannelAllowed) {
                    // Enable Add Channel button and Master/Slave toggle if
                    // adding channels is allowed
                    ((Button)findViewById(R.id.button_AddChannel)).setEnabled(addChannelAllowed);
                    ((Button)findViewById(R.id.toggleButton_MasterSlave)).setEnabled(addChannelAllowed);
                }
            });

            // Initial check when connecting to ChannelService if adding channels is allowed
            boolean allowAcquireChannel = mChannelService.isAddChannelAllowed();
            ((Button)findViewById(R.id.button_AddChannel)).setEnabled(allowAcquireChannel);
            ((Button)findViewById(R.id.toggleButton_MasterSlave)).setEnabled(allowAcquireChannel);

            refreshList();

            Log.v(TAG, "...mChannelServiceConnection.onServiceConnected");
        }

        @Override
        public void onServiceDisconnected(ComponentName arg0) {
            Log.v(TAG, "mChannelServiceConnection.onServiceDisconnected...");

            // Clearing and disabling when disconnecting from ChannelService
            mChannelService = null;

            ((Button)findViewById(R.id.button_ClearChannels)).setEnabled(false);
            ((Button)findViewById(R.id.button_AddChannel)).setEnabled(false);
            ((Button)findViewById(R.id.toggleButton_MasterSlave)).setEnabled(false);

            Log.v(TAG, "...mChannelServiceConnection.onServiceDisconnected");
        }
    };

    // This method is called when 'Add Channel' button is clicked
    private void addNewChannel(final boolean isMaster) {
        Log.v(TAG, "addNewChannel...");

        if(null != mChannelService) {
            ChannelInfo newChannelInfo;
            try {
                // Telling the ChannelService to add a new channel. This method
                // in ChannelService contains code required to acquire an ANT
                // channel from ANT Radio Service.
                newChannelInfo = mChannelService.addNewChannel(isMaster);
            } catch (ChannelNotAvailableException e) {
                // Occurs when a channel is not available. Printing out the
                // stack trace will show why no channels are available.
                Toast.makeText(this, "Channel Not Available", Toast.LENGTH_SHORT).show();
                return;
            }

            if(null != newChannelInfo) {
                // Adding new channel info to the list
                addChannelToList(newChannelInfo);
                mChannelListAdapter.notifyDataSetChanged();
            }
        }

        Log.v(TAG, "...addNewChannel");
    }

    /** Rebuilds the display list from the service's current view of all channels. */
    private void refreshList() {
        Log.v(TAG, "refreshList...");

        if(null != mChannelService) {
            ArrayList<ChannelInfo> chInfoList = mChannelService.getCurrentChannelInfoForAllChannels();

            mChannelDisplayList.clear();
            for(ChannelInfo i: chInfoList) {
                addChannelToList(i);
            }
            mChannelListAdapter.notifyDataSetChanged();
        }

        Log.v(TAG, "...refreshList");
    }

    /** Appends one channel row and records its index for later in-place updates. */
    private void addChannelToList(ChannelInfo channelInfo) {
        Log.v(TAG, "addChannelToList...");

        mIdChannelListIndexMap.put(channelInfo.deviceNumber, mChannelDisplayList.size());
        mChannelDisplayList.add(getDisplayText(channelInfo));

        Log.v(TAG, "...addChannelToList");
    }

    /** Formats one list row: device number plus error, Tx payload, or Rx payload. */
    private static String getDisplayText(ChannelInfo channelInfo) {
        Log.v(TAG, "getDisplayText...");

        String displayText = null;

        if(channelInfo.error) {
            displayText = String.format("#%-6d !:%s", channelInfo.deviceNumber, channelInfo.getErrorString());
        } else {
            if(channelInfo.isMaster) {
                displayText = String.format("#%-6d Tx:[%2d]", channelInfo.deviceNumber, channelInfo.broadcastData[0] & 0xFF);
            } else {
                displayText = String.format("#%-6d Rx:[%2d]", channelInfo.deviceNumber, channelInfo.broadcastData[0] & 0xFF);
            }
        }

        Log.v(TAG, "...getDisplayText");
        return displayText;
    }

    /** Tells the service to close every channel and empties the UI list. */
    private void clearAllChannels() {
        Log.v(TAG, "clearAllChannels...");

        if(null != mChannelService) {
            // Telling ChannelService to close all the channels
            mChannelService.clearAllChannels();

            mChannelDisplayList.clear();
            mIdChannelListIndexMap.clear();
            mChannelListAdapter.notifyDataSetChanged();
        }

        Log.v(TAG, "...clearAllChannels");
    }
}
/*_############################################################################
  _##
  _##  SNMP4J-Agent - SnmpTargetMIB.java
  _##
  _##  Copyright (C) 2005-2009  Frank Fock (SNMP4J.org)
  _##
  _##  Licensed under the Apache License, Version 2.0 (the "License");
  _##  you may not use this file except in compliance with the License.
  _##  You may obtain a copy of the License at
  _##
  _##      http://www.apache.org/licenses/LICENSE-2.0
  _##
  _##  Unless required by applicable law or agreed to in writing, software
  _##  distributed under the License is distributed on an "AS IS" BASIS,
  _##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  _##  See the License for the specific language governing permissions and
  _##  limitations under the License.
  _##
  _##########################################################################*/

package org.snmp4j.agent.mo.snmp;

//--AgentGen BEGIN=_BEGIN
//--AgentGen END

import java.util.*;

import org.snmp4j.*;
import org.snmp4j.agent.*;
import org.snmp4j.agent.mo.*;
import org.snmp4j.agent.request.*;
import org.snmp4j.event.*;
import org.snmp4j.log.*;
import org.snmp4j.mp.*;
import org.snmp4j.security.*;
import org.snmp4j.smi.*;

//--AgentGen BEGIN=_IMPORT
//--AgentGen END

// Implementation of the SNMP-TARGET-MIB (RFC 3413): manages the
// snmpTargetAddrTable and snmpTargetParamsTable plus related scalars.
// NOTE(review): this chunk is truncated — the class continues past the end of
// removeTargetAddress(OctetString) below.
public class SnmpTargetMIB implements MOGroup, CounterListener, MOTableRowListener {

  private static final LogAdapter logger =
      LogFactory.getLogger(SnmpTargetMIB.class);

  // Constants
  private static final OID oidSnmpTargetSpinLock =
      new OID(new int[] {1, 3, 6, 1, 6, 3, 12, 1, 1, 0});
  private MOScalar snmpTargetSpinLock;
  private static final OID oidSnmpUnknownContexts =
      new OID(new int[] {1, 3, 6, 1, 6, 3, 12, 1, 5, 0});
  private MOScalar snmpUnknownContexts;
  private static final OID oidSnmpUnavailableContexts =
      new OID(new int[] {1, 3, 6, 1, 6, 3, 12, 1, 4, 0});
  private MOScalar snmpUnavailableContexts;

  private static final OID oidSnmpTargetParamsEntry =
      new OID(new int[] {1, 3, 6, 1, 6, 3, 12, 1, 3, 1});

  // Column sub-identifier definitions for snmpTargetParamsEntry:
  private static final int colSnmpTargetParamsMPModel = 2;
  private static final int colSnmpTargetParamsSecurityModel = 3;
  private static final int colSnmpTargetParamsSecurityName = 4;
  private static final int colSnmpTargetParamsSecurityLevel = 5;
  private static final int colSnmpTargetParamsStorageType = 6;
  private static final int colSnmpTargetParamsRowStatus = 7;

  // Column index definitions for snmpTargetParamsEntry:
  static final int idxSnmpTargetParamsMPModel = 0;
  static final int idxSnmpTargetParamsSecurityModel = 1;
  static final int idxSnmpTargetParamsSecurityName = 2;
  static final int idxSnmpTargetParamsSecurityLevel = 3;
  static final int idxSnmpTargetParamsStorageType = 4;
  static final int idxSnmpTargetParamsRowStatus = 5;

  // The table is indexed by a single OCTET STRING (length 1..32): the params name.
  private static MOTableSubIndex[] snmpTargetParamsEntryIndexes =
      new MOTableSubIndex[] {
      new MOTableSubIndex(SMIConstants.SYNTAX_OCTET_STRING, 1, 32)
  };

  private static MOTableIndex snmpTargetParamsEntryIndex =
      new MOTableIndex(snmpTargetParamsEntryIndexes, true);

  private DefaultMOTable snmpTargetParamsEntry;
  private DefaultMOMutableTableModel snmpTargetParamsEntryModel;
  private static final OID oidSnmpTargetAddrEntry =
      new OID(new int[] {1, 3, 6, 1, 6, 3, 12, 1, 2, 1});

  // Column sub-identifier definitions for snmpTargetAddrEntry:
  private static final int colSnmpTargetAddrTDomain = 2;
  private static final int colSnmpTargetAddrTAddress = 3;
  private static final int colSnmpTargetAddrTimeout = 4;
  private static final int colSnmpTargetAddrRetryCount = 5;
  private static final int colSnmpTargetAddrTagList = 6;
  private static final int colSnmpTargetAddrParams = 7;
  private static final int colSnmpTargetAddrStorageType = 8;
  private static final int colSnmpTargetAddrRowStatus = 9;

  // Column index definitions for snmpTargetAddrEntry:
  static final int idxSnmpTargetAddrTDomain = 0;
  static final int idxSnmpTargetAddrTAddress = 1;
  static final int idxSnmpTargetAddrTimeout = 2;
  static final int idxSnmpTargetAddrRetryCount = 3;
  static final int idxSnmpTargetAddrTagList = 4;
  static final int idxSnmpTargetAddrParams = 5;
  static final int idxSnmpTargetAddrStorageType = 6;
  static final int idxSnmpTargetAddrRowStatus = 7;

  // The address table is likewise indexed by a single OCTET STRING (1..32): the target name.
  private static MOTableSubIndex[] snmpTargetAddrEntryIndexes =
      new MOTableSubIndex[] {
      new MOTableSubIndex(SMIConstants.SYNTAX_OCTET_STRING, 1, 32)
  };

  private static MOTableIndex snmpTargetAddrEntryIndex =
      new MOTableIndex(snmpTargetAddrEntryIndexes, true);

  // Transport domains registered by default via addDefaultTDomains().
  private static final OID[] DEFAULT_TDOMAINS = {
      TransportDomains.snmpUDPDomain,
      TransportDomains.transportDomainTcpIpv4,
      TransportDomains.transportDomainTcpIpv6,
      TransportDomains.transportDomainUdpIpv4,
      TransportDomains.transportDomainUdpIpv6
  };

  private DefaultMOTable snmpTargetAddrEntry;
  private DefaultMOMutableTableModel snmpTargetAddrEntryModel;
  // Maps tag (OctetString) to the collection of address rows carrying that tag.
  private Hashtable snmpTargetAddrTagIndex = new Hashtable();

  // maps TDomain OIDs to TDomainAddressFactory instances
  private Hashtable supportedAddressClasses = new Hashtable();

  private MessageDispatcher messageDispatcher;
  private CoexistenceInfoProvider coexistenceProvider;

  /**
   * Creates the target MIB group with its scalars and tables. The dispatcher
   * is needed later to look up the local engine ID (see getLocalEngineID()).
   */
  public SnmpTargetMIB(MessageDispatcher messageDispatcher) {
    this.messageDispatcher = messageDispatcher;
    snmpTargetSpinLock = new TestAndIncr(oidSnmpTargetSpinLock);
    snmpUnknownContexts = new MOScalar(oidSnmpUnknownContexts,
                                       MOAccessImpl.ACCESS_READ_ONLY,
                                       new Counter32());
    snmpUnavailableContexts = new MOScalar(oidSnmpUnavailableContexts,
                                           MOAccessImpl.ACCESS_READ_ONLY,
                                           new Counter32());
    createSnmpTargetParamsEntry();
    createSnmpTargetAddrEntry();
  }

  /** Returns a snapshot of the address rows carrying the given tag (empty set if none). */
  public Collection getTargetAddrRowsForTag(OctetString tag) {
    Collection l = (Collection)snmpTargetAddrTagIndex.get(tag);
    if (l == null) {
      return Collections.EMPTY_SET;
    }
    else {
      synchronized (l) {
        // Copy under the tag-index lock so callers get a stable snapshot.
        l = new ArrayList(l);
      }
    }
    return l;
  }

  /**
   * Returns the local SNMPv3 engine ID.
   * @return
   *    the SNMPv3 local engine ID, if the {@link MPv3} is available or
   *    <code>null</code> otherwise.
   * @since 1.2
   */
  public byte[] getLocalEngineID() {
    MPv3 mpv3 = (MPv3)
        messageDispatcher.getMessageProcessingModel(MessageProcessingModel.MPv3);
    if (mpv3 == null) {
      return null;
    }
    return mpv3.getLocalEngineID();
  }

  /** Looks up the transport address for the named target, or null if no such row. */
  public Address getTargetAddress(OctetString name) {
    OID index = name.toSubIndex(true);
    SnmpTargetAddrEntryRow trow =
        (SnmpTargetAddrEntryRow) this.snmpTargetAddrEntryModel.getRow(index);
    if (trow != null) {
      return trow.getAddress();
    }
    return null;
  }

  /** Builds a Target for the named entry and context, or null if no such row. */
  public Target getTarget(OctetString name, OctetString contextEngineID,
                          OctetString contextName) {
    OID index = name.toSubIndex(true);
    SnmpTargetAddrEntryRow trow =
        (SnmpTargetAddrEntryRow) this.snmpTargetAddrEntryModel.getRow(index);
    if (trow != null) {
      return trow.getTarget(contextEngineID, contextName);
    }
    return null;
  }

  /** Registers one shared address factory for all built-in UDP/TCP transport domains. */
  public void addDefaultTDomains() {
    TDomainAddressFactoryImpl factory = new TDomainAddressFactoryImpl();
    for (int i=0; i<DEFAULT_TDOMAINS.length; i++) {
      supportedAddressClasses.put(DEFAULT_TDOMAINS[i], factory);
    }
  }

  /** Registers (or replaces) the address factory for a single transport domain. */
  public void addSupportedTDomain(OID transportDomain,
                                  TDomainAddressFactory factory) {
    supportedAddressClasses.put(transportDomain, factory);
  }

  /**
   * Adds an active row to the snmpTargetAddrTable. Columns are filled in
   * table-column order; row status is always set to active.
   */
  public boolean addTargetAddress(OctetString name,
                                  OID transportDomain,
                                  OctetString address,
                                  int timeout,
                                  int retries,
                                  OctetString tagList,
                                  OctetString params,
                                  int storageType) {
    Variable[] vbs = new Variable[snmpTargetAddrEntry.getColumnCount()];
    int n=0;
    vbs[n++] = transportDomain;
    vbs[n++] = address;
    vbs[n++] = new Integer32(timeout);
    vbs[n++] = new Integer32(retries);
    vbs[n++] = tagList;
    vbs[n++] = params;
    vbs[n++] = new Integer32(storageType);
    vbs[n++] = new Integer32(RowStatus.active);
    OID index = name.toSubIndex(true);
    MOTableRow row = snmpTargetAddrEntry.createRow(index, vbs);
    snmpTargetAddrEntry.addRow(row);
    return true;
  }

  /** Removes the named row from the snmpTargetAddrTable and returns it (null if absent). */
  public MOTableRow removeTargetAddress(OctetString name) {
    OID index = name.toSubIndex(true);
    MOTableRow removedRow = snmpTargetAddrEntry.removeRow(index);
    if (removedRow != null) {
    // NOTE(review): source chunk ends here — the remainder of removeTargetAddress
    // and the rest of the class lie beyond this excerpt.
removeRowFromTargetAddrTagIndex(removedRow); } return removedRow; } protected void removeRowFromTargetAddrTagIndex(MOTableRow removedRow) { OctetString tagList = (OctetString) removedRow.getValue(idxSnmpTargetAddrTagList); Set tags = SnmpTagList.getTags(tagList); if ((tags != null) && (this.snmpTargetAddrTagIndex != null)) { for (Iterator it = tags.iterator(); it.hasNext(); ) { Object item = it.next(); Collection indexRows = (Collection) this.snmpTargetAddrTagIndex.get(item); if (indexRows != null) { synchronized (indexRows) { indexRows.remove(removedRow); if (indexRows.isEmpty()) { this.snmpTargetAddrTagIndex.remove(item); } } } } } } public boolean addTargetParams(OctetString name, int mpModel, int secModel, OctetString secName, int secLevel, int storageType) { Variable[] vbs = new Variable[snmpTargetParamsEntry.getColumnCount()]; int n=0; vbs[n++] = new Integer32(mpModel); vbs[n++] = new Integer32(secModel); vbs[n++] = secName; vbs[n++] = new Integer32(secLevel); vbs[n++] = new Integer32(storageType); vbs[n++] = new Integer32(RowStatus.active); OID index = name.toSubIndex(true); MOTableRow row = snmpTargetParamsEntry.createRow(index, vbs); snmpTargetParamsEntry.addRow(row); return true; } public MOTableRow removeTargetParams(OctetString name) { OID index = name.toSubIndex(true); return snmpTargetParamsEntry.removeRow(index); } private void createSnmpTargetParamsEntry() { MOColumn[] snmpTargetParamsEntryColumns = new MOColumn[6]; snmpTargetParamsEntryColumns[idxSnmpTargetParamsMPModel] = new MOMutableColumn(colSnmpTargetParamsMPModel, SMIConstants.SYNTAX_INTEGER, MOAccessImpl.ACCESS_READ_CREATE, null, false); ((MOMutableColumn) snmpTargetParamsEntryColumns[idxSnmpTargetParamsMPModel]). 
addMOValueValidationListener(new SnmpTargetParamsMPModelValidator()); snmpTargetParamsEntryColumns[idxSnmpTargetParamsSecurityModel] = new MOMutableColumn(colSnmpTargetParamsSecurityModel, SMIConstants.SYNTAX_INTEGER, MOAccessImpl.ACCESS_READ_CREATE, null, false); ((MOMutableColumn) snmpTargetParamsEntryColumns[ idxSnmpTargetParamsSecurityModel]). addMOValueValidationListener(new SnmpTargetParamsSecurityModelValidator()); snmpTargetParamsEntryColumns[idxSnmpTargetParamsSecurityName] = new SnmpAdminString(colSnmpTargetParamsSecurityName, MOAccessImpl.ACCESS_READ_CREATE, null, false); snmpTargetParamsEntryColumns[idxSnmpTargetParamsSecurityLevel] = new Enumerated(colSnmpTargetParamsSecurityLevel, MOAccessImpl.ACCESS_READ_CREATE, (Integer32)null, false, new int[] { SnmpTargetParamsSecurityLevelEnum.noAuthNoPriv, SnmpTargetParamsSecurityLevelEnum.authPriv, SnmpTargetParamsSecurityLevelEnum.authNoPriv}); snmpTargetParamsEntryColumns[idxSnmpTargetParamsStorageType] = new StorageType(colSnmpTargetParamsStorageType, MOAccessImpl.ACCESS_READ_CREATE, new Integer32(3), true); snmpTargetParamsEntryColumns[idxSnmpTargetParamsRowStatus] = new RowStatus(colSnmpTargetParamsRowStatus, MOAccessImpl.ACCESS_READ_CREATE); ((RowStatus)snmpTargetParamsEntryColumns[idxSnmpTargetParamsRowStatus]). 
addRowStatusListener(new SnmpTargetParamsEntryRowStatusListener()); snmpTargetParamsEntry = new DefaultMOTable(oidSnmpTargetParamsEntry, snmpTargetParamsEntryIndex, snmpTargetParamsEntryColumns); snmpTargetParamsEntryModel = new DefaultMOMutableTableModel(); snmpTargetParamsEntryModel.setRowFactory(new DefaultMOMutableRow2PCFactory()); snmpTargetParamsEntry.setModel(snmpTargetParamsEntryModel); } private void createSnmpTargetAddrEntry() { MOColumn[] snmpTargetAddrEntryColumns = new MOColumn[8]; snmpTargetAddrEntryColumns[idxSnmpTargetAddrTDomain] = new MOMutableColumn(colSnmpTargetAddrTDomain, SMIConstants.SYNTAX_OBJECT_IDENTIFIER, MOAccessImpl.ACCESS_READ_CREATE, null, false); ((MOMutableColumn) snmpTargetAddrEntryColumns[idxSnmpTargetAddrTDomain]). addMOValueValidationListener(new SnmpTargetAddrTDomainValidator()); snmpTargetAddrEntryColumns[idxSnmpTargetAddrTAddress] = new MOMutableColumn(colSnmpTargetAddrTAddress, SMIConstants.SYNTAX_OCTET_STRING, MOAccessImpl.ACCESS_READ_CREATE, null, false); ((MOMutableColumn) snmpTargetAddrEntryColumns[idxSnmpTargetAddrTAddress]). addMOValueValidationListener(new SnmpTargetAddrTAddressValidator()); snmpTargetAddrEntryColumns[idxSnmpTargetAddrTimeout] = new MOMutableColumn(colSnmpTargetAddrTimeout, SMIConstants.SYNTAX_INTEGER, MOAccessImpl.ACCESS_READ_CREATE, new Integer32(1500), true); ((MOMutableColumn) snmpTargetAddrEntryColumns[idxSnmpTargetAddrTimeout]). addMOValueValidationListener(new SnmpTargetAddrTimeoutValidator()); snmpTargetAddrEntryColumns[idxSnmpTargetAddrRetryCount] = new MOMutableColumn(colSnmpTargetAddrRetryCount, SMIConstants.SYNTAX_INTEGER32, MOAccessImpl.ACCESS_READ_CREATE, new Integer32(3), true); ((MOMutableColumn) snmpTargetAddrEntryColumns[idxSnmpTargetAddrRetryCount]). 
addMOValueValidationListener(new SnmpTargetAddrRetryCountValidator()); snmpTargetAddrEntryColumns[idxSnmpTargetAddrTagList] = new SnmpTagList(colSnmpTargetAddrTagList, MOAccessImpl.ACCESS_READ_CREATE, new OctetString(new byte[] {}), true); snmpTargetAddrEntryColumns[idxSnmpTargetAddrParams] = new MOMutableColumn(colSnmpTargetAddrParams, SMIConstants.SYNTAX_OCTET_STRING, MOAccessImpl.ACCESS_READ_CREATE, new OctetString(), true); ((MOMutableColumn) snmpTargetAddrEntryColumns[idxSnmpTargetAddrParams]). addMOValueValidationListener(new SnmpTargetAddrParamsValidator()); snmpTargetAddrEntryColumns[idxSnmpTargetAddrStorageType] = new StorageType(colSnmpTargetAddrStorageType, MOAccessImpl.ACCESS_READ_CREATE, new Integer32(3), true); snmpTargetAddrEntryColumns[idxSnmpTargetAddrRowStatus] = new RowStatus(colSnmpTargetAddrRowStatus); snmpTargetAddrEntry = new DefaultMOTable(oidSnmpTargetAddrEntry, snmpTargetAddrEntryIndex, snmpTargetAddrEntryColumns); snmpTargetAddrEntryModel = new DefaultMOMutableTableModel(); snmpTargetAddrEntryModel.setRowFactory(new SnmpTargetAddrEntryFactory()); snmpTargetAddrEntry.setModel(snmpTargetAddrEntryModel); } public void registerMOs(MOServer server, OctetString context) throws DuplicateRegistrationException { // Scalar Objects server.register(this.snmpTargetSpinLock, context); server.register(this.snmpUnknownContexts, context); server.register(this.snmpUnavailableContexts, context); server.register(this.snmpTargetParamsEntry, context); server.register(this.snmpTargetAddrEntry, context); } public void unregisterMOs(MOServer server, OctetString context) { // Scalar Objects server.unregister(this.snmpTargetSpinLock, context); server.unregister(this.snmpUnknownContexts, context); server.unregister(this.snmpUnavailableContexts, context); server.unregister(this.snmpTargetParamsEntry, context); server.unregister(this.snmpTargetAddrEntry, context); } class SnmpTargetAddrEntryFactory implements MOTableRowFactory { public MOTableRow createRow(OID index, 
Variable[] values) throws UnsupportedOperationException { SnmpTargetAddrEntryRow row = new SnmpTargetAddrEntryRow(index, values); return row; } public void freeRow(MOTableRow row) { } } class SnmpTargetParamsEntryRowStatusListener implements RowStatusListener { public void rowStatusChanged(RowStatusEvent event) { if (event.getNewStatus() == RowStatus.destroy) { OID index = event.getRow().getIndex(); OctetString paramsIndex = (OctetString) snmpTargetParamsEntryIndex.getIndexValues(index)[0]; synchronized (snmpTargetAddrEntryModel) { for (Iterator it = snmpTargetAddrEntryModel.iterator(); it.hasNext(); ) { MOTableRow r = (MOTableRow) it.next(); Integer32 rowStatus = (Integer32) r.getValue(idxSnmpTargetAddrRowStatus); if ((rowStatus == null) || (rowStatus.getValue() != RowStatus.active)) { continue; } if (paramsIndex.equals(r.getValue(idxSnmpTargetAddrParams))) { event.setDenyReason(SnmpConstants.SNMP_ERROR_INCONSISTENT_VALUE); return; } } } } } } public class SnmpTargetAddrEntryRow extends DefaultMOMutableRow2PC { public SnmpTargetAddrEntryRow(OID index, Variable[] values) { super(index, values); updateUserObject(this); } private void updateUserObject(MOTableRow changeSet) { Variable tagList = changeSet.getValue(idxSnmpTargetAddrTagList); if (tagList != null) { Set obsolete = (Set) getUserObject(); Set tags = SnmpTagList.getTags((OctetString)tagList); if (obsolete != null) { obsolete.removeAll(tags); } setUserObject(tags); updateIndex(obsolete, tags); } } public void commitRow(SubRequest subRequest, MOTableRow changeSet) { super.commitRow(subRequest, changeSet); updateUserObject(changeSet); } private void updateIndex(Set remove, Set tags) { if (remove != null) { for (Iterator it = remove.iterator(); it.hasNext(); ) { Object next = it.next(); Collection list = (Collection) snmpTargetAddrTagIndex.get(next); if (list != null) { synchronized (list) { if (!list.remove(this)) { logger.error("Inconsistent tag value '" + next + "' for rows: " + list); } } } else { 
logger.error("Tag value '" + next + "' not found in tag index"); } } } for (Iterator it = tags.iterator(); it.hasNext(); ) { Object next = it.next(); Set list = (Set) snmpTargetAddrTagIndex.get(next); if (list == null) { list = new HashSet(); } synchronized (list) { if (!list.add(this)) { // make sure this version of the row is part of the index list.remove(this); list.add(this); } } snmpTargetAddrTagIndex.put(next, list); } } public void prepareRow(SubRequest subRequest, MOTableRow changeSet) { OID tdomain = (OID) getResultingValue(idxSnmpTargetAddrTDomain, changeSet); OctetString taddress = (OctetString) getResultingValue(idxSnmpTargetAddrTAddress, changeSet); if (tdomain != null) { TDomainAddressFactory factory = (TDomainAddressFactory) supportedAddressClasses.get(tdomain); if ((factory == null) || (!factory.isValidAddress(tdomain, taddress))) { subRequest.getStatus(). setErrorStatus(SnmpConstants.SNMP_ERROR_INCONSISTENT_VALUE); } } else if (taddress != null) { subRequest.getStatus(). 
setErrorStatus(SnmpConstants.SNMP_ERROR_INCONSISTENT_VALUE); } } public Address getAddress() { OID tdomain = (OID) getValue(idxSnmpTargetAddrTDomain); TDomainAddressFactory factory = (TDomainAddressFactory) supportedAddressClasses.get(tdomain); if (factory != null) { OctetString addr = (OctetString) getValue(idxSnmpTargetAddrTAddress); return factory.createAddress(tdomain, addr); } return null; } public OctetString getTAddress(Address address) { OID tdomain = (OID) getValue(idxSnmpTargetAddrTDomain); TDomainAddressFactory factory = (TDomainAddressFactory) supportedAddressClasses.get(tdomain); OID domain = factory.getTransportDomain(address); if (!tdomain.equals(domain)) { return null; } return factory.getAddress(address); } public Target getTarget(OctetString contextEngineID, OctetString contextName) { Address addr = getAddress(); OctetString addrParams = (OctetString) getValue(idxSnmpTargetAddrParams); OID paramsIndex = addrParams.toSubIndex(true); MOTableRow paramsRow = snmpTargetParamsEntryModel.getRow(paramsIndex); if (paramsRow == null) { return null; } Target t; if (paramsRow.getValue(idxSnmpTargetParamsMPModel).toInt() == MPv3.ID) { t = new UserTarget(addr, (OctetString) paramsRow.getValue(idxSnmpTargetParamsSecurityName), contextEngineID.getValue(), paramsRow.getValue(idxSnmpTargetParamsSecurityLevel). 
toInt()); } else { OctetString community = (OctetString) paramsRow.getValue(idxSnmpTargetParamsSecurityName); if (coexistenceProvider != null) { community = coexistenceProvider.getCommunity(community, contextEngineID, contextName); if (community == null) { return null; } } t = new CommunityTarget(addr, community); } t.setRetries(getValue(idxSnmpTargetAddrRetryCount).toInt()); t.setTimeout(getValue(idxSnmpTargetAddrTimeout).toInt()*10); t.setVersion(paramsRow.getValue(idxSnmpTargetParamsMPModel).toInt()); return t; } } // Value Validators /** * The <code>SnmpTargetParamsMPModelValidator</code> implements the value * validation for <code>SnmpTargetParamsMPModel</code>. */ class SnmpTargetParamsMPModelValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); int v = ((Integer32) newValue).getValue(); if (messageDispatcher.getMessageProcessingModel(v) == null) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_VALUE); return; } //--AgentGen BEGIN=snmpTargetParamsMPModel::validate //--AgentGen END } } /** * The <code>SnmpTargetParamsSecurityModelValidator</code> implements the value validation * for <code>SnmpTargetParamsSecurityModel</code>. */ static class SnmpTargetParamsSecurityModelValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { //--AgentGen BEGIN=snmpTargetParamsSecurityModel::validate Variable newValue = validationEvent.getNewValue(); switch (((Integer32)newValue).getValue()) { case SecurityModel.SECURITY_MODEL_USM: { if (SecurityModels.getInstance(). getSecurityModel((Integer32) newValue) == null) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_VALUE); return; } break; } default: break; } //--AgentGen END } } /** * The <code>SnmpTargetAddrTDomainValidator</code> implements the value validation * for <code>SnmpTargetAddrTDomain</code>. 
*/ class SnmpTargetAddrTDomainValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); //--AgentGen BEGIN=snmpTargetAddrTDomain::validate if (newValue instanceof OID) { OID tdomain = (OID)newValue; if (!supportedAddressClasses.containsKey(tdomain)) { validationEvent.setValidationStatus(SnmpConstants.SNMP_ERROR_BAD_VALUE); } } else { validationEvent.setValidationStatus(SnmpConstants.SNMP_ERROR_WRONG_TYPE); } //--AgentGen END } } /** * The <code>SnmpTargetAddrTAddressValidator</code> implements the value * validation for <code>SnmpTargetAddrTAddress</code>. */ static class SnmpTargetAddrTAddressValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); OctetString os = (OctetString) newValue; if (!(((os.length() >= 1) && (os.length() <= 255)))) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_LENGTH); return; } //--AgentGen BEGIN=snmpTargetAddrTAddress::validate //--AgentGen END } } /** * The <code>SnmpTargetAddrTimeoutValidator</code> implements the value * validation for <code>SnmpTargetAddrTimeout</code>. */ static class SnmpTargetAddrTimeoutValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); long v = ((Integer32) newValue).getValue(); if (!(((v >= 0L) /*&& (v <= 2147483647L)*/))) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_VALUE); return; } //--AgentGen BEGIN=snmpTargetAddrTimeout::validate //--AgentGen END } } /** * The <code>SnmpTargetAddrRetryCountValidator</code> implements the value validation * for <code>SnmpTargetAddrRetryCount</code>. 
*/ static class SnmpTargetAddrRetryCountValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); long v = ((Integer32) newValue).getValue(); if (!(((v >= 0L) && (v <= 255L)))) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_VALUE); return; } //--AgentGen BEGIN=snmpTargetAddrRetryCount::validate //--AgentGen END } } /** * The <code>SnmpTargetAddrParamsValidator</code> implements the value validation * for <code>SnmpTargetAddrParams</code>. */ class SnmpTargetAddrParamsValidator implements MOValueValidationListener { public void validate(MOValueValidationEvent validationEvent) { Variable newValue = validationEvent.getNewValue(); OctetString os = (OctetString) newValue; if (!(((os.length() >= 1) && (os.length() <= 32)))) { validationEvent.setValidationStatus(SnmpConstants. SNMP_ERROR_WRONG_LENGTH); return; } //--AgentGen BEGIN=snmpTargetAddrParams::validate OID paramsIndexOID = os.toSubIndex(true); MOTableRow paramsRow = snmpTargetParamsEntryModel.getRow(paramsIndexOID); if (paramsRow == null) { validationEvent. 
setValidationStatus(SnmpConstants.SNMP_ERROR_INCONSISTENT_VALUE); } //--AgentGen END } } public void incrementCounter(CounterEvent event) { if (event.getOid().equals(snmpUnavailableContexts.getOid())) { ((Counter32) snmpUnavailableContexts.getValue()).increment(); event.setCurrentValue(snmpUnavailableContexts.getValue()); } else if (event.getOid().equals(snmpUnknownContexts.getOid())) { ((Counter32) snmpUnknownContexts.getValue()).increment(); event.setCurrentValue(snmpUnknownContexts.getValue()); } } // Enumerations public static final class SnmpTargetParamsSecurityLevelEnum { public static final int noAuthNoPriv = 1; public static final int authNoPriv = 2; public static final int authPriv = 3; } //--AgentGen BEGIN=_CLASSES public void rowChanged(MOTableRowEvent event) { if ((event.getType() == MOTableRowEvent.DELETE) && (event.getRow() instanceof SnmpTargetAddrEntryRow)) { Variable[] vbs = new Variable[event.getRow().size()]; vbs[idxSnmpTargetAddrTagList] = new OctetString(); MOTableRow dummyRow = new DefaultMOTableRow(event.getRow().getIndex(), vbs); ((SnmpTargetAddrEntryRow)event.getRow()).updateUserObject(dummyRow); } } /** * Returns the SNMP Target Parameters row for the specified name. * @param paramsName * the name of the parameters set to return. * @return * if the row containing the target parameters if such an entry * exists or <code>null</code> if no such entry exists */ public MOTableRow getTargetParamsRow(OctetString paramsName) { if (paramsName == null) { return null; } OID paramsIndex = paramsName.toSubIndex(true); return snmpTargetParamsEntryModel.getRow(paramsIndex); } /** * Returns the SNMP Target Parameters row for the specified name. * @param paramsName * the name of the parameters set to return. * @param activeOnly * if <code>true</code> only an active row will be returned. 
* @return * if the row containing the target parameters if such an entry * exists or <code>null</code> if no such entry exists */ public MOTableRow getTargetParamsRow(OctetString paramsName, boolean activeOnly) { MOTableRow row = getTargetParamsRow(paramsName); if (activeOnly && (row != null)) { if (((Integer32)row.getValue(idxSnmpTargetParamsRowStatus)).getValue() != RowStatus.active) { return null; } } return row; } public DefaultMOTable getSnmpTargetAddrEntry() { return snmpTargetAddrEntry; } public CoexistenceInfoProvider getCoexistenceProvider() { return coexistenceProvider; } public void setCoexistenceProvider(CoexistenceInfoProvider coexistenceProvider) { this.coexistenceProvider = coexistenceProvider; } //--AgentGen END //--AgentGen BEGIN=_END //--AgentGen END }
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.util.scopeChooser; import com.intellij.ide.DataManager; import com.intellij.ide.IdeBundle; import com.intellij.ide.favoritesTreeView.FavoritesManager; import com.intellij.ide.projectView.impl.AbstractUrl; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.packageDependencies.ChangeListsScopesProvider; import com.intellij.packageDependencies.DependencyValidationManager; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiWhiteSpace; import com.intellij.psi.search.*; import com.intellij.psi.search.scope.packageSet.NamedScope; import com.intellij.psi.search.scope.packageSet.NamedScopeManager; import com.intellij.psi.search.scope.packageSet.NamedScopesHolder; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtilCore; 
import com.intellij.ui.ComboboxWithBrowseButton; import com.intellij.ui.ListCellRendererWrapper; import com.intellij.usages.Usage; import com.intellij.usages.UsageView; import com.intellij.usages.UsageViewManager; import com.intellij.usages.rules.PsiElementUsage; import com.intellij.util.TreeItem; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.*; import java.util.List; public class ScopeChooserCombo extends ComboboxWithBrowseButton implements Disposable { private Project myProject; private boolean mySuggestSearchInLibs; private boolean myPrevSearchFiles; private NamedScopesHolder.ScopeListener myScopeListener; private NamedScopeManager myNamedScopeManager; private DependencyValidationManager myValidationManager; public ScopeChooserCombo() { super(new IgnoringComboBox(){ @Override protected boolean isIgnored(Object item) { return item instanceof ScopeSeparator; } }); } public ScopeChooserCombo(final Project project, boolean suggestSearchInLibs, boolean prevSearchWholeFiles, String preselect) { this(); init(project, suggestSearchInLibs, prevSearchWholeFiles, preselect); } public void init(final Project project, final String preselect){ init(project, false, true, preselect); } public void init(final Project project, final boolean suggestSearchInLibs, final boolean prevSearchWholeFiles, final String preselect) { mySuggestSearchInLibs = suggestSearchInLibs; myPrevSearchFiles = prevSearchWholeFiles; myProject = project; myScopeListener = new NamedScopesHolder.ScopeListener() { @Override public void scopesChanged() { final SearchScope selectedScope = getSelectedScope(); rebuildModel(); if (selectedScope != null) { selectScope(selectedScope.getDisplayName()); } } }; myNamedScopeManager = NamedScopeManager.getInstance(project); myNamedScopeManager.addScopeListener(myScopeListener); myValidationManager = 
DependencyValidationManager.getInstance(project); myValidationManager.addScopeListener(myScopeListener); addActionListener(createScopeChooserListener()); final JComboBox combo = getComboBox(); combo.setRenderer(new ScopeDescriptionWithDelimiterRenderer(combo.getRenderer())); rebuildModel(); selectScope(preselect); } @Override public void dispose() { super.dispose(); if (myValidationManager != null) { myValidationManager.removeScopeListener(myScopeListener); myValidationManager = null; } if (myNamedScopeManager != null) { myNamedScopeManager.removeScopeListener(myScopeListener); myNamedScopeManager = null; } myScopeListener = null; } private void selectScope(String preselect) { if (preselect != null) { final JComboBox combo = getComboBox(); DefaultComboBoxModel model = (DefaultComboBoxModel)combo.getModel(); for (int i = 0; i < model.getSize(); i++) { ScopeDescriptor descriptor = (ScopeDescriptor)model.getElementAt(i); if (preselect.equals(descriptor.getDisplay())) { combo.setSelectedIndex(i); break; } } } } private ActionListener createScopeChooserListener() { return new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final String selection = getSelectedScopeName(); final EditScopesDialog dlg = EditScopesDialog.showDialog(myProject, selection); if (dlg.isOK()){ rebuildModel(); final NamedScope namedScope = dlg.getSelectedScope(); if (namedScope != null) { selectScope(namedScope.getName()); } } } }; } private void rebuildModel() { getComboBox().setModel(createModel()); } private DefaultComboBoxModel createModel() { final DefaultComboBoxModel model = new DefaultComboBoxModel(); createPredefinedScopeDescriptors(model); model.addElement(new ScopeSeparator("VCS Scopes")); final List<NamedScope> changeLists = ChangeListsScopesProvider.getInstance(myProject).getCustomScopes(); for (NamedScope changeListScope : changeLists) { final GlobalSearchScope scope = GlobalSearchScopes.filterScope(myProject, changeListScope); model.addElement(new 
ScopeDescriptor(scope)); }
    // (continuation of the model-building method begun above this chunk)
    // Append user-defined scopes after the predefined ones, behind a visual separator.
    final List<ScopeDescriptor> customScopes = new ArrayList<ScopeDescriptor>();
    final NamedScopesHolder[] holders = NamedScopesHolder.getAllNamedScopeHolders(myProject);
    for (NamedScopesHolder holder : holders) {
      final NamedScope[] scopes = holder.getEditableScopes(); // predefined scopes already included
      for (NamedScope scope : scopes) {
        final GlobalSearchScope searchScope = GlobalSearchScopes.filterScope(myProject, scope);
        customScopes.add(new ScopeDescriptor(searchScope));
      }
    }
    if (!customScopes.isEmpty()) {
      // Separator entry is only added when at least one custom scope exists.
      model.addElement(new ScopeSeparator("Custom Scopes"));
      for (ScopeDescriptor scope : customScopes) {
        model.addElement(scope);
      }
    }
    return model;
  }

  /**
   * Caps the combo's preferred width at 400px so very long scope names do not
   * stretch the surrounding dialog; an explicitly set preferred size wins.
   */
  @Override
  public Dimension getPreferredSize() {
    if (isPreferredSizeSet()) {
      return super.getPreferredSize();
    }
    Dimension preferredSize = super.getPreferredSize();
    return new Dimension(Math.min(400, preferredSize.width), preferredSize.height);
  }

  /**
   * Caps the minimum width at 200px, mirroring {@link #getPreferredSize()};
   * an explicitly set minimum size wins.
   */
  @Override
  public Dimension getMinimumSize() {
    if (isMinimumSizeSet()) {
      return super.getMinimumSize();
    }
    Dimension minimumSize = super.getMinimumSize();
    return new Dimension(Math.min(200, minimumSize.width), minimumSize.height);
  }

  /**
   * Fills {@code model} with the predefined scopes for this project plus any
   * scopes contributed through the {@code ScopeDescriptorProvider} extension point.
   */
  private void createPredefinedScopeDescriptors(DefaultComboBoxModel model) {
    // getDataContext() without a component is deprecated; suppressed because no
    // focus component is available here.
    @SuppressWarnings("deprecation")
    final DataContext context = DataManager.getInstance().getDataContext();
    for (SearchScope scope : getPredefinedScopes(myProject, context, mySuggestSearchInLibs, myPrevSearchFiles, true, true)) {
      model.addElement(new ScopeDescriptor(scope));
    }
    for (ScopeDescriptorProvider provider : Extensions.getExtensions(ScopeDescriptorProvider.EP_NAME)) {
      for (ScopeDescriptor scopeDescriptor : provider.getScopeDescriptors(myProject)) {
        model.addElement(scopeDescriptor);
      }
    }
  }

  /**
   * Builds the list of built-in search scopes for {@code project}.
   *
   * @param project             project to build scopes for
   * @param dataContext         optional context used to derive module / current-file /
   *                            selected-files scopes; may be null
   * @param suggestSearchInLibs when true, also offers the all-scope (project + libraries)
   * @param prevSearchFiles     when true, the previous-usage-view scope is file-based
   *                            rather than element-based
   * @param currentSelection    when true, adds a scope for the current editor selection
   * @param usageView           when true, adds scopes derived from the selected usage view
   * @return mutable list of scopes in presentation order
   */
  public static List<SearchScope> getPredefinedScopes(@NotNull final Project project,
                                                      @Nullable final DataContext dataContext,
                                                      boolean suggestSearchInLibs,
                                                      boolean prevSearchFiles,
                                                      boolean currentSelection,
                                                      boolean usageView) {
    ArrayList<SearchScope> result = new ArrayList<SearchScope>();
    result.add(GlobalSearchScope.projectScope(project));
    if (suggestSearchInLibs) {
      result.add(GlobalSearchScope.allScope(project));
    }
    result.add(GlobalSearchScopes.projectProductionScope(project));
    result.add(GlobalSearchScopes.projectTestScope(project));
    result.add(GlobalSearchScopes.openFilesScope(project));

    if (dataContext != null) {
      // Prefer the PSI file from the context; fall back to a bare PSI element.
      PsiElement dataContextElement = LangDataKeys.PSI_FILE.getData(dataContext);
      if (dataContextElement == null) {
        dataContextElement = LangDataKeys.PSI_ELEMENT.getData(dataContext);
      }
      if (dataContextElement != null) {
        Module module = ModuleUtilCore.findModuleForPsiElement(dataContextElement);
        if (module == null) {
          module = LangDataKeys.MODULE.getData(dataContext);
        }
        if (module != null) {
          result.add(module.getModuleScope());
        }
        if (dataContextElement.getContainingFile() != null) {
          result.add(new LocalSearchScope(dataContextElement, IdeBundle.message("scope.current.file")));
        }
      }
    }

    if (currentSelection) {
      // "Selection" scope: direct children of the common parent of the selection
      // endpoints, skipping whitespace.
      FileEditorManager fileEditorManager = FileEditorManager.getInstance(project);
      final Editor selectedTextEditor = fileEditorManager.getSelectedTextEditor();
      if (selectedTextEditor != null) {
        final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(selectedTextEditor.getDocument());
        if (psiFile != null) {
          if (selectedTextEditor.getSelectionModel().hasSelection()) {
            final PsiElement startElement = psiFile.findElementAt(selectedTextEditor.getSelectionModel().getSelectionStart());
            if (startElement != null) {
              final PsiElement endElement = psiFile.findElementAt(selectedTextEditor.getSelectionModel().getSelectionEnd());
              if (endElement != null) {
                final PsiElement parent = PsiTreeUtil.findCommonParent(startElement, endElement);
                if (parent != null) {
                  final List<PsiElement> elements = new ArrayList<PsiElement>();
                  final PsiElement[] children = parent.getChildren();
                  for (PsiElement child : children) {
                    if (!(child instanceof PsiWhiteSpace) && child.getContainingFile() != null) {
                      elements.add(child);
                    }
                  }
                  if (!elements.isEmpty()) {
                    SearchScope local = new LocalSearchScope(PsiUtilCore.toPsiElementArray(elements), IdeBundle.message("scope.selection"));
                    result.add(local);
                  }
                }
              }
            }
          }
        }
      }
    }

    if (usageView) {
      // Scopes derived from the currently selected usage view (previous search results).
      UsageView selectedUsageView = UsageViewManager.getInstance(project).getSelectedUsageView();
      if (selectedUsageView != null && !selectedUsageView.isSearchInProgress()) {
        final Set<Usage> usages = selectedUsageView.getUsages();
        final List<PsiElement> results = new ArrayList<PsiElement>(usages.size());
        if (prevSearchFiles) {
          // File-based variant: collect the files containing each valid usage element.
          final Set<VirtualFile> files = new HashSet<VirtualFile>();
          for (Usage usage : usages) {
            if (usage instanceof PsiElementUsage) {
              PsiElement psiElement = ((PsiElementUsage)usage).getElement();
              if (psiElement != null && psiElement.isValid()) {
                PsiFile psiFile = psiElement.getContainingFile();
                if (psiFile != null) {
                  VirtualFile file = psiFile.getVirtualFile();
                  if (file != null) files.add(file);
                }
              }
            }
          }
          if (!files.isEmpty()) {
            // Anonymous scope backed by the captured file set; searches both
            // module content and libraries.
            GlobalSearchScope prev = new GlobalSearchScope(project) {
              @Override
              public String getDisplayName() {
                return IdeBundle.message("scope.files.in.previous.search.result");
              }

              @Override
              public boolean contains(@NotNull VirtualFile file) {
                return files.contains(file);
              }

              @Override
              public int compare(@NotNull VirtualFile file1, @NotNull VirtualFile file2) {
                return 0;
              }

              @Override
              public boolean isSearchInModuleContent(@NotNull Module aModule) {
                return true;
              }

              @Override
              public boolean isSearchInLibraries() {
                return true;
              }
            };
            result.add(prev);
          }
        }
        else {
          // Element-based variant: a local scope over the usage elements themselves.
          for (Usage usage : usages) {
            if (usage instanceof PsiElementUsage) {
              final PsiElement element = ((PsiElementUsage)usage).getElement();
              if (element != null && element.isValid() && element.getContainingFile() != null) {
                results.add(element);
              }
            }
          }
          if (!results.isEmpty()) {
            result.add(new LocalSearchScope(PsiUtilCore.toPsiElementArray(results), IdeBundle.message("scope.previous.search.results")));
          }
        }
      }
    }

    // One scope per non-empty favorites list.
    final FavoritesManager favoritesManager = FavoritesManager.getInstance(project);
    if (favoritesManager != null) {
      for (final String favorite : favoritesManager.getAvailableFavoritesListNames()) {
        final Collection<TreeItem<Pair<AbstractUrl,String>>> rootUrls = favoritesManager.getFavoritesListRootUrls(favorite);
        if (rootUrls.isEmpty()) continue; // ignore unused root
        result.add(new GlobalSearchScope(project) {
          @Override
          public String getDisplayName() {
            return "Favorite \'" + favorite + "\'";
          }

          @Override
          public boolean contains(@NotNull final VirtualFile file) {
            return favoritesManager.contains(favorite, file);
          }

          @Override
          public int compare(@NotNull final VirtualFile file1, @NotNull final VirtualFile file2) {
            return 0;
          }

          @Override
          public boolean isSearchInModuleContent(@NotNull final Module aModule) {
            return true;
          }

          @Override
          public boolean isSearchInLibraries() {
            return true;
          }
        });
      }
    }

    if (dataContext != null) {
      // Scope over the files currently selected in the context (e.g. project view).
      final VirtualFile[] files = PlatformDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
      if (files != null) {
        final List<VirtualFile> openFiles = Arrays.asList(files);
        result.add(new DelegatingGlobalSearchScope(GlobalSearchScope.filesScope(project, openFiles)){
          @Override
          public String getDisplayName() {
            return "Selected Files";
          }
        });
      }
    }

    return result;
  }

  /** @return the scope of the selected combo item, or null when nothing is selected */
  @Nullable
  public SearchScope getSelectedScope() {
    final JComboBox combo = getComboBox();
    int idx = combo.getSelectedIndex();
    return idx < 0 ? null : ((ScopeDescriptor)combo.getSelectedItem()).getScope();
  }

  /** @return display name of the selected scope, or null when nothing is selected */
  @Nullable
  public String getSelectedScopeName() {
    final JComboBox combo = getComboBox();
    int idx = combo.getSelectedIndex();
    return idx < 0 ? null : ((ScopeDescriptor)combo.getSelectedItem()).getDisplay();
  }

  /**
   * Non-selectable heading row ("Custom Scopes") in the combo model; carries no
   * real scope (constructed with a null descriptor) and only overrides the
   * display text.
   */
  private static class ScopeSeparator extends ScopeDescriptor {
    private final String myText;

    public ScopeSeparator(final String text) {
      super(null);
      myText = text;
    }

    @Override
    public String getDisplay() {
      return myText;
    }
  }

  /**
   * Renders each {@link ScopeDescriptor} by its display name and draws a
   * separator line above {@link ScopeSeparator} entries.
   */
  private static class ScopeDescriptionWithDelimiterRenderer extends ListCellRendererWrapper<ScopeDescriptor> {
    // NOTE(review): the 'original' renderer parameter is accepted but never used
    // (only the no-arg super() is called) — confirm whether it should be passed
    // through to the wrapper.
    public ScopeDescriptionWithDelimiterRenderer(final ListCellRenderer original) {
      super();
    }

    @Override
    public void customize(JList list, ScopeDescriptor value, int index, boolean selected, boolean hasFocus) {
      setText(value.getDisplay());
      if (value instanceof ScopeSeparator) {
        setSeparator();
      }
    }
  }
}
/**
 * Copyright (C) 2016 Rizki Mufrizal (https://rizkimufrizal.github.io/) (mufrizalrizki@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.rizki.mufrizal.aplikasi.inventory.view;

import com.rizki.mufrizal.aplikasi.inventory.controller.PenjualanController;
import java.awt.Dimension;
import java.awt.Toolkit;

/**
 * Internal frame for recording a sale ("penjualan"): a pageable/searchable table of
 * products on the left, the in-progress sale items on the right, plus sale date and
 * buyer-name inputs. All behavior is delegated to {@link PenjualanController};
 * this class is view-only, with its layout generated by the NetBeans GUI builder.
 *
 * @author Rizki Mufrizal <mufrizalrizki@gmail.com>
 */
public class PenjualanSimpanView extends javax.swing.JInternalFrame {

    // Controller receives this view and drives all table/paging/transaction logic.
    private final PenjualanController penjualanController = new PenjualanController(this);
    // Screen size, used once to center the frame.
    private final Dimension dimension;
    // Lazily created singleton instance (NOTE(review): not thread-safe; assumed
    // to be used from the Swing EDT only — confirm).
    private static PenjualanSimpanView penjualanSimpanView;

    /**
     * Returns the shared instance, creating it on first use.
     */
    public static PenjualanSimpanView getInstancePenjualanSimpanView() {
        if (penjualanSimpanView == null) {
            penjualanSimpanView = new PenjualanSimpanView();
        }
        return penjualanSimpanView;
    }

    /**
     * Builds the form, centers it on screen, and asks the controller to load the
     * product table and the pending-sale table.
     */
    @SuppressWarnings("OverridableMethodCallInConstructor")
    public PenjualanSimpanView() {
        initComponents();
        dimension = Toolkit.getDefaultToolkit().getScreenSize();
        setLocation((dimension.width / 2) - (getSize().width / 2), (dimension.height / 2) - (getSize().height / 2));
        penjualanController.ambilDataBarang();
        penjualanController.tampilPenjualanSementara();
    }

    // NetBeans-generated layout code below; do not edit by hand — changes will be
    // overwritten by the GUI builder.
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel1 = new javax.swing.JPanel();
        jScrollPane1 = new javax.swing.JScrollPane();
        tabelBarang = new javax.swing.JTable();
        value = new javax.swing.JTextField();
        refresh = new javax.swing.JButton();
        last = new javax.swing.JButton();
        next = new javax.swing.JButton();
        previous = new javax.swing.JButton();
        first = new javax.swing.JButton();
        perPage = new javax.swing.JComboBox<>();
        labelPaging = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        key = new javax.swing.JComboBox<>();
        jLabel1 = new javax.swing.JLabel();
        jPanel2 = new javax.swing.JPanel();
        jScrollPane2 = new javax.swing.JScrollPane();
        tabelPenjualanSementara = new javax.swing.JTable();
        transaksi = new javax.swing.JButton();
        jLabel4 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        tanggalPenjualan = new com.toedter.calendar.JDateChooser();
        namaPembeli = new javax.swing.JTextField();
        jLabel5 = new javax.swing.JLabel();

        setClosable(true);
        setIconifiable(true);

        jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder(""));

        tabelBarang.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {null, null, null, null},
                {null, null, null, null},
                {null, null, null, null},
                {null, null, null, null}
            },
            new String [] {
                "Title 1", "Title 2", "Title 3", "Title 4"
            }
        ));
        tabelBarang.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
        tabelBarang.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                tabelBarangMouseClicked(evt);
            }
        });
        jScrollPane1.setViewportView(tabelBarang);

        value.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyReleased(java.awt.event.KeyEvent evt) {
                valueKeyReleased(evt);
            }
        });

        refresh.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/refresh.png"))); // NOI18N
        refresh.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                refreshActionPerformed(evt);
            }
        });

        last.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/last.png"))); // NOI18N
        last.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                lastActionPerformed(evt);
            }
        });

        next.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/next.png"))); // NOI18N
        next.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                nextActionPerformed(evt);
            }
        });

        previous.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/previous.png"))); // NOI18N
        previous.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                previousActionPerformed(evt);
            }
        });

        first.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/first.png"))); // NOI18N
        first.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                firstActionPerformed(evt);
            }
        });

        perPage.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "10", "50", "100" }));

        jLabel6.setText("Cari");

        key.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "ID Barang", "Nama Barang" }));

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane1)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addGap(8, 8, 8)
                        .addComponent(jLabel6)
                        .addGap(18, 18, 18)
                        .addComponent(value, javax.swing.GroupLayout.PREFERRED_SIZE, 251, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(key, javax.swing.GroupLayout.PREFERRED_SIZE, 113, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(0, 0, Short.MAX_VALUE))
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
                        .addComponent(perPage, 0, 57, Short.MAX_VALUE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(first)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(previous)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(labelPaging, javax.swing.GroupLayout.PREFERRED_SIZE, 141, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(next)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(last)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(refresh)))
                .addContainerGap())
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(value, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel6)
                    .addComponent(key, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(10, 10, 10)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 164, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                        .addComponent(first, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(previous, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(next)
                            .addComponent(last)
                            .addComponent(perPage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addComponent(refresh, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addComponent(labelPaging, javax.swing.GroupLayout.PREFERRED_SIZE, 27, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        jLabel1.setFont(new java.awt.Font("DejaVu Sans", 1, 18)); // NOI18N
        jLabel1.setText("Transaksi Penjualan");

        jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder(""));

        tabelPenjualanSementara.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {null, null, null, null},
                {null, null, null, null},
                {null, null, null, null},
                {null, null, null, null}
            },
            new String [] {
                "Title 1", "Title 2", "Title 3", "Title 4"
            }
        ));
        tabelPenjualanSementara.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
        jScrollPane2.setViewportView(tabelPenjualanSementara);

        transaksi.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/save.png"))); // NOI18N
        transaksi.setText("Lakukan Transaksi");
        transaksi.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        transaksi.setVerticalAlignment(javax.swing.SwingConstants.BOTTOM);
        transaksi.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        transaksi.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                transaksiMouseClicked(evt);
            }
        });

        jLabel4.setFont(new java.awt.Font("DejaVu Sans", 1, 18)); // NOI18N
        jLabel4.setText("Daftar Barang Yang Dibeli");

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                        .addComponent(transaksi)
                        .addContainerGap())
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                        .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addContainerGap())
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                        .addComponent(jLabel4)
                        .addGap(95, 95, 95))))
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addComponent(jLabel4)
                .addGap(15, 15, 15)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 162, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(transaksi, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        jLabel2.setText("Tanggal Penjualan");

        jLabel3.setText("Nama Pembeli");

        jLabel5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/penjualan.png"))); // NOI18N

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(16, 16, 16)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(10, 10, 10)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel2)
                            .addComponent(jLabel3))
                        .addGap(18, 18, 18)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                            .addComponent(tanggalPenjualan, javax.swing.GroupLayout.DEFAULT_SIZE, 175, Short.MAX_VALUE)
                            .addComponent(namaPembeli))
                        .addGap(80, 80, 80)
                        .addComponent(jLabel1)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 115, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(141, 141, 141))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(18, 18, 18)
                        .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addGap(18, 18, 18))
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addGroup(layout.createSequentialGroup()
                                .addGap(30, 30, 30)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                    .addComponent(tanggalPenjualan, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addComponent(jLabel2)))
                            .addGroup(layout.createSequentialGroup()
                                .addGap(9, 9, 9)
                                .addComponent(jLabel1)))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel3)
                            .addComponent(namaPembeli, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGap(30, 30, 30)))
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Event handlers below simply forward to the controller.

    private void refreshActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_refreshActionPerformed
        penjualanController.refreshBarang();
    }//GEN-LAST:event_refreshActionPerformed

    private void lastActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_lastActionPerformed
        penjualanController.lastPagingBarang();
    }//GEN-LAST:event_lastActionPerformed

    private void nextActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nextActionPerformed
        penjualanController.nextPagingBarang();
    }//GEN-LAST:event_nextActionPerformed

    private void previousActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_previousActionPerformed
        penjualanController.PreviousPagingBarang();
    }//GEN-LAST:event_previousActionPerformed

    private void firstActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_firstActionPerformed
        penjualanController.firstPagingBarang();
    }//GEN-LAST:event_firstActionPerformed

    // Clicking a product row adds it to the pending sale.
    private void tabelBarangMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tabelBarangMouseClicked
        penjualanController.tambahPenjualanSementara();
    }//GEN-LAST:event_tabelBarangMouseClicked

    // "Lakukan Transaksi" button commits the sale.
    private void transaksiMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_transaksiMouseClicked
        penjualanController.simpanTransaksi();
    }//GEN-LAST:event_transaksiMouseClicked

    // Live search: every key release in the search field re-queries the products.
    private void valueKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_valueKeyReleased
        penjualanController.cariDataBarang();
    }//GEN-LAST:event_valueKeyReleased

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton first;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JComboBox<String> key;
    private javax.swing.JLabel labelPaging;
    private javax.swing.JButton last;
    private javax.swing.JTextField namaPembeli;
    private javax.swing.JButton next;
    private javax.swing.JComboBox<String> perPage;
    private javax.swing.JButton previous;
    private javax.swing.JButton refresh;
    private javax.swing.JTable tabelBarang;
    private javax.swing.JTable tabelPenjualanSementara;
    private com.toedter.calendar.JDateChooser tanggalPenjualan;
    private javax.swing.JButton transaksi;
    private javax.swing.JTextField value;
    // End of variables declaration//GEN-END:variables

    // Accessors used by PenjualanController to read/update the widgets.

    /**
     * @return the first
     */
    public javax.swing.JButton getFirst() {
        return first;
    }

    /**
     * @return the labelPaging
     */
    public javax.swing.JLabel getLabelPaging() {
        return labelPaging;
    }

    /**
     * @return the last
     */
    public javax.swing.JButton getLast() {
        return last;
    }

    /**
     * @return the namaPembeli
     */
    public javax.swing.JTextField getNamaPembeli() {
        return namaPembeli;
    }

    /**
     * @return the next
     */
    public javax.swing.JButton getNext() {
        return next;
    }

    /**
     * @return the perPage
     */
    public javax.swing.JComboBox<String> getPerPage() {
        return perPage;
    }

    /**
     * @return the previous
     */
    public javax.swing.JButton getPrevious() {
        return previous;
    }

    /**
     * @return the tabelBarang
     */
    public javax.swing.JTable getTabelBarang() {
        return tabelBarang;
    }

    /**
     * @return the tanggalPenjualan
     */
    public com.toedter.calendar.JDateChooser getTanggalPenjualan() {
        return tanggalPenjualan;
    }

    /**
     * @return the tabelPenjualanSementara
     */
    public javax.swing.JTable getTabelPenjualanSementara() {
        return tabelPenjualanSementara;
    }

    /**
     * @return the value
     */
    public javax.swing.JTextField getValue() {
        return value;
    }

    /**
     * @return the key
     */
    public javax.swing.JComboBox<String> getKey() {
        return key;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.io;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.ValidCompactorWriteIdList;
import org.apache.hadoop.hive.common.ValidReadTxnList;
import org.apache.hadoop.hive.common.ValidReaderWriteIdList;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.AcidUtils.AcidOperationalProperties;
import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile;
import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem;
import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath;
import org.apache.hadoop.hive.ql.io.orc.TestOrcRawRecordMerger;
import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId;
import org.junit.Test;

/**
 * Unit tests for {@link AcidUtils}: filename generation for ACID base/delta/
 * delete-delta directories, parsing those names back into options, and
 * computing the ACID directory state (base, current deltas, obsolete
 * directories, original files) over a {@code MockFileSystem}.
 */
public class TestAcidUtils {

  /**
   * createFilename: old-style bucket files ("/tmp/000001_0") and new-style
   * base/delta/delete_delta paths, including the optional statement-id suffix
   * (absent when statementId is -1).
   */
  @Test
  public void testCreateFilename() throws Exception {
    Path p = new Path("/tmp");
    Configuration conf = new Configuration();
    AcidOutputFormat.Options options = new AcidOutputFormat.Options(conf)
        .setOldStyle(true).bucket(1);
    assertEquals("/tmp/000001_0",
        AcidUtils.createFilename(p, options).toString());
    options.bucket(123);
    assertEquals("/tmp/000123_0",
        AcidUtils.createFilename(p, options).toString());
    options.bucket(23)
        .minimumWriteId(100)
        .maximumWriteId(200)
        .writingBase(true)
        .setOldStyle(false);
    assertEquals("/tmp/base_0000200/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingBase(false);
    assertEquals("/tmp/delta_0000100_0000200_0000/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingDeleteDelta(true);
    assertEquals("/tmp/delete_delta_0000100_0000200_0000/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingDeleteDelta(false);
    options.statementId(-1);
    // statementId == -1 suppresses the statement-id suffix
    assertEquals("/tmp/delta_0000100_0000200/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingDeleteDelta(true);
    assertEquals("/tmp/delete_delta_0000100_0000200/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingDeleteDelta(false);
    options.statementId(7);
    assertEquals("/tmp/delta_0000100_0000200_0007/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingDeleteDelta(true);
    assertEquals("/tmp/delete_delta_0000100_0000200_0007/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
  }

  /**
   * createFilename with write ids wider than the 7-digit zero-padding: the
   * id is emitted at full width rather than being truncated.
   */
  @Test
  public void testCreateFilenameLargeIds() throws Exception {
    Path p = new Path("/tmp");
    Configuration conf = new Configuration();
    AcidOutputFormat.Options options = new AcidOutputFormat.Options(conf)
        .setOldStyle(true).bucket(123456789);
    assertEquals("/tmp/123456789_0",
        AcidUtils.createFilename(p, options).toString());
    options.bucket(23)
        .minimumWriteId(1234567880)
        .maximumWriteId(1234567890)
        .writingBase(true)
        .setOldStyle(false);
    assertEquals("/tmp/base_1234567890/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
    options.writingBase(false);
    assertEquals("/tmp/delta_1234567880_1234567890_0000/bucket_00023",
        AcidUtils.createFilename(p, options).toString());
  }

  /**
   * Round-trip parsing: base names via ParsedBaseLight and base/delta/
   * delete_delta/old-style bucket paths via parseBaseOrDeltaBucketFilename,
   * including old-style files sitting inside a delta dir (LOAD DATA).
   */
  @Test
  public void testParsing() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        //new MockFile("mock:/tmp/base_000123/bucket_00001", 500, new byte[0]),
        new MockFile("mock:/tmp/delta_000005_000006/bucket_00001", 500, new byte[0]),
        new MockFile("mock:/tmp/delete_delta_000005_000006/bucket_00001", 500, new byte[0]));
    assertEquals(123, AcidUtils.ParsedBaseLight.parseBase(new Path("/tmp/base_000123")).getWriteId());
    assertEquals(0, AcidUtils.ParsedBaseLight.parseBase(new Path("/tmp/base_000123")).getVisibilityTxnId());
    Path dir = new Path("mock:/tmp/");
    AcidOutputFormat.Options opts =
        AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "base_567/bucket_123"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(true, opts.isWritingBase());
    assertEquals(567, opts.getMaximumWriteId());
    assertEquals(0, opts.getMinimumWriteId());
    assertEquals(123, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(
        new MockPath(fs, dir + "/delta_000005_000006/bucket_00001"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(6, opts.getMaximumWriteId());
    assertEquals(5, opts.getMinimumWriteId());
    assertEquals(1, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(
        new MockPath(fs, dir + "/delete_delta_000005_000006/bucket_00001"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(6, opts.getMaximumWriteId());
    assertEquals(5, opts.getMinimumWriteId());
    assertEquals(1, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "000123_0"), conf);
    assertEquals(true, opts.getOldStyle());
    assertEquals(true, opts.isWritingBase());
    assertEquals(123, opts.getBucketId());
    assertEquals(0, opts.getMinimumWriteId());
    assertEquals(0, opts.getMaximumWriteId());
    // Test handling files generated by LOAD DATA command
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(
        new MockPath(fs, dir + "/delta_0000002_0000002_0000/000012_0"), conf);
    assertEquals(true, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(12, opts.getBucketId());
    assertEquals(2, opts.getMinimumWriteId());
    assertEquals(2, opts.getMaximumWriteId());
  }

  /**
   * getAcidState on a pre-ACID (original) directory: no base, no deltas,
   * nothing obsolete; original files (including _COPY_ files and files in
   * subdirs) are returned in order, while "_done" is excluded.
   */
  @Test
  public void testOriginal() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "1", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "2", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000002_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/random", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/_done", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/subdir/000000_0", 0, new byte[0]));
    AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "/tbl/part1"), conf,
        new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false);
    assertEquals(null, dir.getBaseDirectory());
    assertEquals(0, dir.getCurrentDirectories().size());
    assertEquals(0, dir.getObsolete().size());
    List<HdfsFileStatusWithId> result = dir.getOriginalFiles();
    assertEquals(7, result.size());
    assertEquals("mock:/tbl/part1/000000_0", result.get(0).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "1",
        result.get(1).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "2",
        result.get(2).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000001_1", result.get(3).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000002_0", result.get(4).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/random", result.get(5).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/subdir/000000_0",
        result.get(6).getFileStatus().getPath().toString());
  }

  /**
   * Original files mixed with deltas: deltas whose write-id range is covered
   * by a wider delta (025_025, 029_029 covered by 025_030) become obsolete;
   * the remaining deltas are current, original files stay visible.
   */
  @Test
  public void testOriginalDeltas() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/000002_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/random", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/_done", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/subdir/000000_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_050_100/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_101_101/bucket_0", 0, new byte[0]));
    conf.set(ValidTxnList.VALID_TXNS_KEY,
        new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf,
        new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false);
    assertEquals(null, dir.getBaseDirectory());
    List<Path> obsolete = dir.getObsolete();
    assertEquals(2, obsolete.size());
    assertEquals("mock:/tbl/part1/delta_025_025", obsolete.get(0).toString());
    assertEquals("mock:/tbl/part1/delta_029_029", obsolete.get(1).toString());
    List<HdfsFileStatusWithId> result = dir.getOriginalFiles();
    assertEquals(5, result.size());
    assertEquals("mock:/tbl/part1/000000_0", result.get(0).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000001_1", result.get(1).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/000002_0", result.get(2).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/random", result.get(3).getFileStatus().getPath().toString());
    assertEquals("mock:/tbl/part1/subdir/000000_0",
        result.get(4).getFileStatus().getPath().toString());
    List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories();
    assertEquals(2, deltas.size());
    AcidUtils.ParsedDelta delt = deltas.get(0);
    assertEquals("mock:/tbl/part1/delta_025_030", delt.getPath().toString());
    assertEquals(25, delt.getMinWriteId());
    assertEquals(30, delt.getMaxWriteId());
    delt = deltas.get(1);
    assertEquals("mock:/tbl/part1/delta_050_100", delt.getPath().toString());
    assertEquals(50, delt.getMinWriteId());
    assertEquals(100, delt.getMaxWriteId());
  }

  /**
   * Multiple bases + deltas: the newest valid base (base_49) wins; older
   * bases and deltas entirely below it are obsolete; only the delta range
   * above the base remains current.
   */
  @Test
  public void testBaseDeltas() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/base_49/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_050_105/bucket_0", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_90_120/bucket_0", 0, new byte[0]));
    conf.set(ValidTxnList.VALID_TXNS_KEY,
        new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf,
        new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false);
    assertEquals("mock:/tbl/part1/base_49", dir.getBaseDirectory().toString());
    List<Path> obsoletes = dir.getObsolete();
    assertEquals(5, obsoletes.size());
    Set<String> obsoletePathNames = new HashSet<String>();
    for (Path obsolete : obsoletes) {
      obsoletePathNames.add(obsolete.toString());
    }
    assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_5"));
    assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_10"));
    assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_030"));
    assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_025"));
    assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_029_029"));
    assertEquals(0, dir.getOriginalFiles().size());
    List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories();
    assertEquals(1, deltas.size());
    AcidUtils.ParsedDelta delt = deltas.get(0);
    assertEquals("mock:/tbl/part1/delta_050_105", delt.getPath().toString());
    assertEquals(50, delt.getMinWriteId());
    assertEquals(105, delt.getMaxWriteId());
  }

  /**
   * With snapshot reuse, computing the ACID state must not open any files:
   * getNumOpenFileCalls() stays 0 while the base directory is still resolved.
   */
  @Test
  public void testRecursiveDirListingIsReusedWhenSnapshotTrue() throws IOException {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/base_0/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/base_0/_orc_acid_version", 10, new byte[0]));
    conf.set(ValidTxnList.VALID_TXNS_KEY,
        new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf,
        new ValidReaderWriteIdList(), null, false);
    assertEquals("mock:/tbl/part1/base_0", dir.getBaseDirectory().toString());
    assertEquals(0, dir.getObsolete().size());
    assertEquals(0, dir.getOriginalFiles().size());
    assertEquals(0, dir.getCurrentDirectories().size());
    assertEquals(0, fs.getNumOpenFileCalls());
  }

  // (method continues beyond this chunk)
  @Test
  public void testObsoleteOriginals() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]),
        new
MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0])); Path part = new MockPath(fs, "/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:150:" + Long.MAX_VALUE + ":"), null, false); // Obsolete list should include the two original bucket files, and the old base dir List<Path> obsoletes = dir.getObsolete(); assertEquals(3, obsoletes.size()); assertEquals("mock:/tbl/part1/base_5", obsoletes.get(0).toString()); assertEquals("mock:/tbl/part1/base_10", dir.getBaseDirectory().toString()); } @Test public void testOverlapingDelta() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(2, obsolete.size()); assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_0060_60", 
obsolete.get(1).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(4, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(3).getPath().toString()); } /** * Hive 1.3.0 delta dir naming scheme which supports multi-statement txns * @throws Exception */ @Test public void testOverlapingDelta2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_4/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_058_58/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(5, obsolete.size()); 
assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_058_58", obsolete.get(1).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_1", obsolete.get(2).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_4", obsolete.get(3).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_7", obsolete.get(4).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(5, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_00061_61_0", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62_0", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62_3", delts.get(3).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63_0", delts.get(4).getPath().toString()); } @Test public void deltasWithOpenTxnInRead() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString()); } /** * @since 1.3.0 * @throws Exception */ @Test public void deltasWithOpenTxnInRead2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new 
MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString()); } @Test public void deltasWithOpenTxnsNotInCompact() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:4:" + Long.MAX_VALUE), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); } @Test public void deltasWithOpenTxnsNotInCompact2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new 
MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0" + AcidUtils.DELTA_SIDE_FILE_SUFFIX, 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_6_10/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:3:" + Long.MAX_VALUE), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); } @Test public void testBaseWithDeleteDeltas() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_49/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_050_105/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_050_105/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_110_110/bucket_0", 0, new byte[0])); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, 
"mock:/tbl/part1"), conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false); assertEquals("mock:/tbl/part1/base_49", dir.getBaseDirectory().toString()); List<Path> obsoletes = dir.getObsolete(); assertEquals(7, obsoletes.size()); Set<String> obsoletePathNames = new HashSet<String>(); for (Path obsolete : obsoletes) { obsoletePathNames.add(obsolete.toString()); } assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_5")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_10")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delete_delta_025_030")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_030")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_025")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delete_delta_029_029")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_029_029")); assertEquals(0, dir.getOriginalFiles().size()); List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories(); assertEquals(2, deltas.size()); assertEquals("mock:/tbl/part1/delete_delta_050_105", deltas.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_050_105", deltas.get(1).getPath().toString()); // The delete_delta_110_110 should not be read because it is greater than the high watermark. 
} @Test public void testOverlapingDeltaAndDeleteDelta() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_00064_64/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(3, obsolete.size()); assertEquals("mock:/tbl/part1/delete_delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(1).toString()); assertEquals("mock:/tbl/part1/delta_0060_60", obsolete.get(2).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(6, delts.size()); assertEquals("mock:/tbl/part1/delete_delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(1).getPath().toString()); 
assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(3).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(4).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_00064_64", delts.get(5).getPath().toString()); } @Test public void testMinorCompactedDeltaMakesInBetweenDelteDeltaObsolete() throws Exception { // This test checks that if we have a minor compacted delta for the txn range [40,60] // then it will make any delete delta in that range as obsolete. Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_50_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false); List<Path> obsolete = dir.getObsolete(); assertEquals(1, obsolete.size()); assertEquals("mock:/tbl/part1/delete_delta_50_50", obsolete.get(0).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); } @Test public void deltasAndDeleteDeltasWithOpenTxnsNotInCompact() throws Exception { // This tests checks that appropriate delta and delete_deltas are included when minor // compactions specifies a valid open txn range. 
Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_2/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0" + AcidUtils.DELTA_SIDE_FILE_SUFFIX, 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_7_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_6_10/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:4:" + Long.MAX_VALUE + ":"), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_2_2", delts.get(1).getPath().toString()); } @Test public void deleteDeltasWithOpenTxnInRead() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_3_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0", 500, new byte[0]), new 
MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(3, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_2_5", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(2).getPath().toString()); // Note that delete_delta_3_3 should not be read, when a minor compacted // [delete_]delta_2_5 is present. } @Test public void testDeleteDeltaSubdirPathGeneration() throws Exception { String deleteDeltaSubdirPath = AcidUtils.deleteDeltaSubdir(1, 10); assertEquals("delete_delta_0000001_0000010", deleteDeltaSubdirPath); deleteDeltaSubdirPath = AcidUtils.deleteDeltaSubdir(1, 10, 5); assertEquals("delete_delta_0000001_0000010_0005", deleteDeltaSubdirPath); } @Test public void testDeleteEventDeltaDirPathFilter() throws Exception { Path positivePath = new Path("delete_delta_000001_000010"); Path negativePath = new Path("delta_000001_000010"); assertEquals(true, AcidUtils.deleteEventDeltaDirFilter.accept(positivePath)); assertEquals(false, AcidUtils.deleteEventDeltaDirFilter.accept(negativePath)); } @Test public void testAcidOperationalProperties() throws Exception { AcidUtils.AcidOperationalProperties testObj = AcidUtils.AcidOperationalProperties.getDefault(); assertsForAcidOperationalProperties(testObj, "default"); testObj = AcidUtils.AcidOperationalProperties.parseInt(1); assertsForAcidOperationalProperties(testObj, "split_update"); testObj = 
AcidUtils.AcidOperationalProperties.parseString("default"); assertsForAcidOperationalProperties(testObj, "default"); } private void assertsForAcidOperationalProperties(AcidUtils.AcidOperationalProperties testObj, String type) throws Exception { switch(type) { case "split_update": case "default": assertEquals(true, testObj.isSplitUpdate()); assertEquals(false, testObj.isHashBasedMerge()); assertEquals(1, testObj.toInt()); assertEquals("|split_update", testObj.toString()); break; default: break; } } @Test public void testAcidOperationalPropertiesSettersAndGetters() throws Exception { AcidUtils.AcidOperationalProperties oprProps = AcidUtils.AcidOperationalProperties.getDefault(); Configuration testConf = new Configuration(); // Test setter for configuration object. AcidUtils.setAcidOperationalProperties(testConf, true, oprProps); assertEquals(1, testConf.getInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, -1)); // Test getter for configuration object. assertEquals(oprProps.toString(), AcidUtils.getAcidOperationalProperties(testConf).toString()); Map<String, String> parameters = new HashMap<String, String>(); // Test setter for map object. AcidUtils.setAcidOperationalProperties(parameters, true, oprProps); assertEquals(oprProps.toString(), parameters.get(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname)); // Test getter for map object. assertEquals(1, AcidUtils.getAcidOperationalProperties(parameters).toInt()); parameters.put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES, oprProps.toString()); // Set the appropriate key in the map and test that we are able to read it back correctly. assertEquals(1, AcidUtils.getAcidOperationalProperties(parameters).toInt()); } /** * See {@link TestOrcRawRecordMerger#testGetLogicalLength()} */ @Test public void testGetLogicalLength() throws Exception { } }
package org.docksidestage.sqlite.dbflute.readonly.allcommon;

import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
import java.lang.reflect.Method;

import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.DBMetaProvider;
import org.dbflute.exception.DBMetaNotFoundException;
import org.dbflute.helper.StringKeyMap;
import org.dbflute.util.DfAssertUtil;

/**
 * The handler of the instance of DB meta.
 * Generated code: resolves DBMeta instances lazily by table DB-name or entity type,
 * backed by a class-name registry built in the static initializer below.
 * @author DBFlute(AutoGenerator)
 */
public class RoyDBMetaInstanceHandler implements DBMetaProvider {

    // ===================================================================================
    //                                                                        Resource Map
    //                                                                        ============
    /** The map of DB meta instance by key 'table DB-name'. (NotNull, LazyLoaded) */
    protected static final Map<String, DBMeta> _tableDbNameInstanceMap = newHashMap();

    /** The map of DB meta instance by key 'entity type'. (NotNull, LazyLoaded) */
    protected static final Map<Class<?>, DBMeta> _entityTypeInstanceMap = newHashMap();

    /** The map of table DB name and DB meta class name. (NotNull) */
    protected static final Map<String, String> _tableDbNameClassNameMap;
    static {
        final Map<String, String> tmpMap = newHashMap();
        tmpMap.put("MEMBER", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberDbm");
        tmpMap.put("MEMBER_ADDRESS", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberAddressDbm");
        tmpMap.put("MEMBER_LOGIN", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberLoginDbm");
        tmpMap.put("MEMBER_SECURITY", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberSecurityDbm");
        tmpMap.put("MEMBER_SERVICE", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberServiceDbm");
        tmpMap.put("MEMBER_STATUS", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberStatusDbm");
        tmpMap.put("MEMBER_WITHDRAWAL", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyMemberWithdrawalDbm");
        tmpMap.put("PRODUCT", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyProductDbm");
        tmpMap.put("PRODUCT_CATEGORY", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyProductCategoryDbm");
        tmpMap.put("PRODUCT_STATUS", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyProductStatusDbm");
        tmpMap.put("PURCHASE", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyPurchaseDbm");
        tmpMap.put("PURCHASE_PAYMENT", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyPurchasePaymentDbm");
        tmpMap.put("REGION", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyRegionDbm");
        tmpMap.put("SERVICE_RANK", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyServiceRankDbm");
        tmpMap.put("SUMMARY_PRODUCT", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoySummaryProductDbm");
        tmpMap.put("VENDOR_CHECK", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyVendorCheckDbm");
        tmpMap.put("WITHDRAWAL_REASON", "org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.RoyWithdrawalReasonDbm");
        _tableDbNameClassNameMap = Collections.unmodifiableMap(tmpMap);
    }

    /** The flexible map of table DB name for conversion in finding process. (NotNull) */
    protected static final Map<String, String> _tableDbNameFlexibleMap = StringKeyMap.createAsFlexible();
    static {
        // seed the flexible (case/notation-insensitive) map with identity entries
        for (String tableDbName : _tableDbNameClassNameMap.keySet()) {
            _tableDbNameFlexibleMap.put(tableDbName, tableDbName);
        }
    }

    /**
     * Get the unmodifiable map of DB meta. map:{tableDbName = DBMeta}
     * @return The unmodifiable map that contains all instances of DB meta. (NotNull, NotEmpty)
     */
    public static Map<String, DBMeta> getUnmodifiableDBMetaMap() {
        initializeDBMetaMap();
        synchronized (_tableDbNameInstanceMap) {
            return Collections.unmodifiableMap(_tableDbNameInstanceMap);
        }
    }

    /**
     * Initialize the map of DB meta. Forces every registered table's DBMeta to
     * be loaded, then verifies the instance map is fully populated.
     */
    protected static void initializeDBMetaMap() {
        if (isInitialized()) {
            return;
        }
        synchronized (_tableDbNameInstanceMap) {
            for (String tableDbName : _tableDbNameClassNameMap.keySet()) {
                findDBMeta(tableDbName); // initialize
            }
            if (!isInitialized()) {
                String msg = "Failed to initialize tableDbNameInstanceMap: " + _tableDbNameInstanceMap;
                throw new IllegalStateException(msg);
            }
        }
    }

    // initialized means one cached instance per registered table name
    protected static boolean isInitialized() {
        return _tableDbNameInstanceMap.size() == _tableDbNameClassNameMap.size();
    }

    // ===================================================================================
    //                                                                  Provider Singleton
    //                                                                  ==================
    protected static final DBMetaProvider _provider = new RoyDBMetaInstanceHandler();

    public static DBMetaProvider getProvider() {
        return _provider;
    }

    public DBMeta provideDBMeta(String tableFlexibleName) {
        return byTableFlexibleName(tableFlexibleName);
    }

    public DBMeta provideDBMeta(Class<?> entityType) {
        return byEntityType(entityType);
    }

    public DBMeta provideDBMetaChecked(String tableFlexibleName) {
        return findDBMeta(tableFlexibleName);
    }

    public DBMeta provideDBMetaChecked(Class<?> entityType) {
        return findDBMeta(entityType);
    }

    // ===================================================================================
    //                                                                         Find DBMeta
    //                                                                         ===========
    /**
     * Find DB meta by table flexible name. (accept quoted name and schema prefix)
     * @param tableFlexibleName The flexible name of table. (NotNull)
     * @return The instance of DB meta. (NotNull)
     * @throws org.dbflute.exception.DBMetaNotFoundException When the DB meta is not found.
     */
    public static DBMeta findDBMeta(String tableFlexibleName) {
        DBMeta dbmeta = byTableFlexibleName(tableFlexibleName);
        if (dbmeta == null) {
            String msg = "The DB meta was not found by the table flexible name: key=" + tableFlexibleName;
            throw new DBMetaNotFoundException(msg);
        }
        return dbmeta;
    }

    /**
     * Find DB meta by entity type.
     * @param entityType The entity type of table, which should implement the {@link Entity} interface. (NotNull)
     * @return The instance of DB meta. (NotNull)
     * @throws org.dbflute.exception.DBMetaNotFoundException When the DB meta is not found.
     */
    public static DBMeta findDBMeta(Class<?> entityType) {
        DBMeta dbmeta = byEntityType(entityType);
        if (dbmeta == null) {
            String msg = "The DB meta was not found by the entity type: key=" + entityType;
            throw new DBMetaNotFoundException(msg);
        }
        return dbmeta;
    }

    // ===================================================================================
    //                                                                       by Table Name
    //                                                                       =============
    /**
     * @param tableFlexibleName The flexible name of table. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byTableFlexibleName(String tableFlexibleName) {
        assertStringNotNullAndNotTrimmedEmpty("tableFlexibleName", tableFlexibleName);
        String tableDbName = _tableDbNameFlexibleMap.get(tableFlexibleName);
        if (tableDbName == null) {
            // not a direct hit: strip quoting/schema prefix and retry
            tableDbName = retryByNormalizedName(tableFlexibleName);
        }
        return tableDbName != null ? byTableDbName(tableDbName) : null;
    }

    // retry lookup with schema-qualified form first, then the pure (unqualified) name
    protected static String retryByNormalizedName(String tableFlexibleName) {
        String tableDbName = null;
        String pureName = normalizeTableFlexibleName(tableFlexibleName);
        String schema = extractSchemaIfExists(tableFlexibleName);
        if (schema != null) { // first, find by qualified name
            tableDbName = _tableDbNameFlexibleMap.get(schema + "." + pureName);
        }
        if (tableDbName == null) { // next, find by pure name
            tableDbName = _tableDbNameFlexibleMap.get(pureName);
        }
        return tableDbName;
    }

    // normalization = drop schema prefix, then drop surrounding quotes/brackets
    protected static String normalizeTableFlexibleName(String tableFlexibleName) {
        return removeQuoteIfExists(removeSchemaIfExists(tableFlexibleName));
    }

    protected static String removeQuoteIfExists(String name) {
        if (name.startsWith("\"") && name.endsWith("\"")) {
            return strip(name);
        } else if (name.startsWith("[") && name.endsWith("]")) {
            return strip(name);
        }
        return name;
    }

    protected static String removeSchemaIfExists(String name) {
        int dotLastIndex = name.lastIndexOf(".");
        return dotLastIndex >= 0 ? name.substring(dotLastIndex + ".".length()) : name;
    }

    protected static String extractSchemaIfExists(String name) {
        int dotLastIndex = name.lastIndexOf(".");
        return dotLastIndex >= 0 ? name.substring(0, dotLastIndex) : null;
    }

    // remove the first and last character (the quote/bracket pair)
    protected static String strip(String name) {
        return name.substring(1, name.length() - 1);
    }

    /**
     * @param tableDbName The DB name of table. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byTableDbName(String tableDbName) {
        assertStringNotNullAndNotTrimmedEmpty("tableDbName", tableDbName);
        return getCachedDBMeta(tableDbName);
    }

    // ===================================================================================
    //                                                                      by Entity Type
    //                                                                      ==============
    /**
     * @param entityType The entity type of table, which should implement the entity interface. (NotNull)
     * @return The instance of DB meta. (NullAllowed: If the DB meta is not found, it returns null)
     */
    protected static DBMeta byEntityType(Class<?> entityType) {
        assertObjectNotNull("entityType", entityType);
        return getCachedDBMeta(entityType);
    }

    // ===================================================================================
    //                                                                       Cached DBMeta
    //                                                                       =============
    protected static DBMeta getCachedDBMeta(String tableDbName) { // lazy-load (thank you koyak!)
        // NOTE(review): first read is deliberately outside the lock (DBFlute convention);
        // the synchronized block below re-checks before instantiating.
        DBMeta dbmeta = _tableDbNameInstanceMap.get(tableDbName);
        if (dbmeta != null) {
            return dbmeta;
        }
        synchronized (_tableDbNameInstanceMap) {
            dbmeta = _tableDbNameInstanceMap.get(tableDbName);
            if (dbmeta != null) {
                // an other thread might have initialized
                // or reading might failed by same-time writing
                return dbmeta;
            }
            String dbmetaName = _tableDbNameClassNameMap.get(tableDbName);
            if (dbmetaName == null) {
                return null;
            }
            _tableDbNameInstanceMap.put(tableDbName, toDBMetaInstance(dbmetaName));
            return _tableDbNameInstanceMap.get(tableDbName);
        }
    }

    // reflectively call the generated DBMeta's static getInstance() singleton accessor
    protected static DBMeta toDBMetaInstance(String dbmetaName) {
        try {
            Class<?> dbmetaType = Class.forName(dbmetaName);
            Method method = dbmetaType.getMethod("getInstance", (Class[])null);
            Object result = method.invoke(null, (Object[])null);
            return (DBMeta)result;
        } catch (Exception e) {
            String msg = "Failed to get the instance: " + dbmetaName;
            throw new IllegalStateException(msg, e);
        }
    }

    protected static DBMeta getCachedDBMeta(Class<?> entityType) { // lazy-load same as by-name
        DBMeta dbmeta = _entityTypeInstanceMap.get(entityType);
        if (dbmeta != null) {
            return dbmeta;
        }
        synchronized (_entityTypeInstanceMap) {
            dbmeta = _entityTypeInstanceMap.get(entityType);
            if (dbmeta != null) {
                // an other thread might have initialized
                // or reading might failed by same-time writing
                return dbmeta;
            }
            if (Entity.class.isAssignableFrom(entityType)) { // required
                // resolve via the entity's own table DB-name, reusing the by-name cache
                Entity entity = newEntity(entityType);
                dbmeta = getCachedDBMeta(entity.asTableDbName());
            }
            if (dbmeta == null) {
                return null;
            }
            _entityTypeInstanceMap.put(entityType, dbmeta);
return _entityTypeInstanceMap.get(entityType); } } protected static Entity newEntity(Class<?> entityType) { try { return (Entity)entityType.getDeclaredConstructor().newInstance(); } catch (Exception e) { String msg = "Failed to new the instance: " + entityType; throw new IllegalStateException(msg, e); } } // =================================================================================== // General Helper // ============== protected static <KEY, VALUE> HashMap<KEY, VALUE> newHashMap() { return new HashMap<KEY, VALUE>(); } // ----------------------------------------------------- // Assert Object // ------------- protected static void assertObjectNotNull(String variableName, Object value) { DfAssertUtil.assertObjectNotNull(variableName, value); } // ----------------------------------------------------- // Assert String // ------------- protected static void assertStringNotNullAndNotTrimmedEmpty(String variableName, String value) { DfAssertUtil.assertStringNotNullAndNotTrimmedEmpty(variableName, value); } }
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * SpotInstanceRequestIdSetType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.1  Built on : Oct 19, 2009 (10:59:34 EDT)
 */

package com.amazon.ec2;

/**
 * SpotInstanceRequestIdSetType bean class.
 *
 * NOTE(review): auto-generated Axis2 ADB bean — do not hand-edit behavior; regenerate
 * from the WSDL instead. The class wraps a single repeated &lt;item&gt; element.
 */
public class SpotInstanceRequestIdSetType implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
       name = SpotInstanceRequestIdSetType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1 */

    /** Returns the canonical "ns1" prefix for the EC2 namespace, a fresh unique prefix otherwise. */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for Item
     * This was an Array!
     */
    protected com.amazon.ec2.SpotInstanceRequestIdSetItemType[] localItem ;

    /*  This tracker boolean wil be used to detect whether the user called the set method
     *  for this attribute. It will be used to determine whether to include this field
     *  in the serialized XML
     */
    protected boolean localItemTracker = false ;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.SpotInstanceRequestIdSetItemType[]
     */
    public com.amazon.ec2.SpotInstanceRequestIdSetItemType[] getItem(){
        return localItem;
    }

    /**
     * validate the array for Item
     */
    // intentionally empty: generated hook for schema facet validation
    protected void validateItem(com.amazon.ec2.SpotInstanceRequestIdSetItemType[] param){
    }

    /**
     * Auto generated setter method
     * @param param Item
     */
    public void setItem(com.amazon.ec2.SpotInstanceRequestIdSetItemType[] param){
        validateItem(param);
        if (param != null){
            //update the setting tracker
            localItemTracker = true;
        } else {
            // null clears the tracker, so the element is omitted from serialized XML
            localItemTracker = false;
        }
        this.localItem=param;
    }

    /**
     * Auto generated add method for the array for convenience.
     * Copies the existing array into a list, appends, and copies back (O(n) per add).
     * @param param com.amazon.ec2.SpotInstanceRequestIdSetItemType
     */
    public void addItem(com.amazon.ec2.SpotInstanceRequestIdSetItemType param){
        if (localItem == null){
            localItem = new com.amazon.ec2.SpotInstanceRequestIdSetItemType[]{};
        }
        //update the setting tracker
        localItemTracker = true;
        java.util.List list = org.apache.axis2.databinding.utils.ConverterUtil.toList(localItem);
        list.add(param);
        this.localItem = (com.amazon.ec2.SpotInstanceRequestIdSetItemType[])list.toArray(
                new com.amazon.ec2.SpotInstanceRequestIdSetItemType[list.size()]);
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // reader does not recognize the property: treat as not MTOM-aware
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement (serialization happens on demand).
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                            throws javax.xml.stream.XMLStreamException {
                        SpotInstanceRequestIdSetType.this.serialize(parentQName,factory,xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    /** Convenience overload: serialize without writing an xsi:type attribute. */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    /**
     * Writes this bean as XML: start element (binding/reusing a namespace prefix),
     * optional xsi:type attribute, each non-null item child, then the end element.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                // namespace already bound by an ancestor: reuse it
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            // no namespace on the parent QName: plain unqualified element
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":SpotInstanceRequestIdSetType",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "SpotInstanceRequestIdSetType",
                        xmlWriter);
            }
        }
        if (localItemTracker){
            if (localItem!=null){
                for (int i = 0;i < localItem.length;i++){
                    if (localItem[i] != null){
                        localItem[i].serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","item"),
                                factory,xmlWriter);
                    } else {
                        // we don't have to do any thing since minOccures is zero
                    }
                }
            } else {
                throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
            }
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName,attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (prefix:localPart) without the ns prefix.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e. this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    /** Writes a space-separated QName list as character data, binding prefixes first. */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix; retries with fresh unique prefixes until one is unbound.
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        if (localItemTracker){
            if (localItem!=null) {
                // elementList alternates QName / value pairs, as ADBXMLStreamReaderImpl expects
                for (int i = 0;i < localItem.length;i++){
                    if (localItem[i] != null){
                        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                                "item"));
                        elementList.add(localItem[i]);
                    } else {
                        // nothing to do: null array entries are simply skipped
                    }
                }
            } else {
                throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
            }
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{
        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static SpotInstanceRequestIdSetType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            SpotInstanceRequestIdSetType object = new SpotInstanceRequestIdSetType();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // an xsi:type naming a different schema type dispatches to the extension mapper
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"SpotInstanceRequestIdSetType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (SpotInstanceRequestIdSetType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                reader.next();
                java.util.ArrayList list1 = new java.util.ArrayList();
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","item").equals(reader.getName())){
                    // Process the array and step past its final element's end.
                    list1.add(com.amazon.ec2.SpotInstanceRequestIdSetItemType.Factory.parse(reader));
                    //loop until we find a start element that is not part of this array
                    boolean loopDone1 = false;
                    while(!loopDone1){
                        // We should be at the end element, but make sure
                        while (!reader.isEndElement())
                            reader.next();
                        // Step out of this element
                        reader.next();
                        // Step to next element event.
                        while (!reader.isStartElement() && !reader.isEndElement())
                            reader.next();
                        if (reader.isEndElement()){
                            //two continuous end elements means we are exiting the xml structure
                            loopDone1 = true;
                        } else {
                            if (new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","item").equals(reader.getName())){
                                list1.add(com.amazon.ec2.SpotInstanceRequestIdSetItemType.Factory.parse(reader));
                            }else{
                                loopDone1 = true;
                            }
                        }
                    }
                    // call the converter utility to convert and set the array
                    object.setItem((com.amazon.ec2.SpotInstanceRequestIdSetItemType[])
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToArray(
                                    com.amazon.ec2.SpotInstanceRequestIdSetItemType.class,
                                    list1));
                }  // End of if for expected property start element
                else {
                    // element absent: minOccurs is zero, so nothing to set
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
/** * Copyright 2003-2007 Jive Software. * * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx.workgroup.util; import java.util.*; /** * Utility methods frequently used by data classes and design-time classes. */ public final class ModelUtil { private ModelUtil() { // Prevents instantiation. } /** * This is a utility method that compares two objects when one or both of the objects might be * <CODE>null</CODE> The result of this method is determined as follows: * <OL> * <LI>If <CODE>o1</CODE> and <CODE>o2</CODE> are the same object according to the * <CODE>==</CODE> operator, return <CODE>true</CODE>. * <LI>Otherwise, if either <CODE>o1</CODE> or <CODE>o2</CODE> is <CODE>null</CODE>, return * <CODE>false</CODE>. * <LI>Otherwise, return <CODE>o1.equals(o2)</CODE>. * </OL> * <p/> * This method produces the exact logically inverted result as the * {@link #areDifferent(Object, Object)} method. * <P> * <p/> * For array types, one of the <CODE>equals</CODE> methods in {@link java.util.Arrays} should be * used instead of this method. Note that arrays with more than one dimension will require some * custom code in order to implement <CODE>equals</CODE> properly. 
*/ public static final boolean areEqual(Object o1, Object o2) { if (o1 == o2) { return true; } else if (o1 == null || o2 == null) { return false; } else { return o1.equals(o2); } } /** * This is a utility method that compares two Booleans when one or both of the objects might be * <CODE>null</CODE> The result of this method is determined as follows: * <OL> * <LI>If <CODE>b1</CODE> and <CODE>b2</CODE> are both TRUE or neither <CODE>b1</CODE> nor * <CODE>b2</CODE> is TRUE, return <CODE>true</CODE>. * <LI>Otherwise, return <CODE>false</CODE>. * </OL> * <p/> */ public static final boolean areBooleansEqual(Boolean b1, Boolean b2) { // !jwetherb treat NULL the same as Boolean.FALSE return (b1 == Boolean.TRUE && b2 == Boolean.TRUE) || (b1 != Boolean.TRUE && b2 != Boolean.TRUE); } /** * This is a utility method that compares two objects when one or both of the objects might be * <CODE>null</CODE>. The result returned by this method is determined as follows: * <OL> * <LI>If <CODE>o1</CODE> and <CODE>o2</CODE> are the same object according to the * <CODE>==</CODE> operator, return <CODE>false</CODE>. * <LI>Otherwise, if either <CODE>o1</CODE> or <CODE>o2</CODE> is <CODE>null</CODE>, return * <CODE>true</CODE>. * <LI>Otherwise, return <CODE>!o1.equals(o2)</CODE>. * </OL> * <p/> * This method produces the exact logically inverted result as the * {@link #areEqual(Object, Object)} method. * <P> * <p/> * For array types, one of the <CODE>equals</CODE> methods in {@link java.util.Arrays} should be * used instead of this method. Note that arrays with more than one dimension will require some * custom code in order to implement <CODE>equals</CODE> properly. 
*/ public static final boolean areDifferent(Object o1, Object o2) { return !areEqual(o1, o2); } /** * This is a utility method that compares two Booleans when one or both of the objects might be * <CODE>null</CODE> The result of this method is determined as follows: * <OL> * <LI>If <CODE>b1</CODE> and <CODE>b2</CODE> are both TRUE or neither <CODE>b1</CODE> nor * <CODE>b2</CODE> is TRUE, return <CODE>false</CODE>. * <LI>Otherwise, return <CODE>true</CODE>. * </OL> * <p/> * This method produces the exact logically inverted result as the * {@link #areBooleansEqual(Boolean, Boolean)} method. * <P> */ public static final boolean areBooleansDifferent(Boolean b1, Boolean b2) { return !areBooleansEqual(b1, b2); } /** * Returns <CODE>true</CODE> if the specified array is not null and contains a non-null element. * Returns <CODE>false</CODE> if the array is null or if all the array elements are null. */ public static final boolean hasNonNullElement(Object[] array) { if (array != null) { final int n = array.length; for (int i = 0; i < n; i++) { if (array[i] != null) { return true; } } } return false; } /** * Returns a single string that is the concatenation of all the strings in the specified string * array. A single space is put between each string array element. Null array elements are * skipped. If the array itself is null, the empty string is returned. This method is guaranteed * to return a non-null value, if no expections are thrown. */ public static final String concat(String[] strs) { return concat(strs, " "); // NOTRANS } /** * Returns a single string that is the concatenation of all the strings in the specified string * array. The strings are separated by the specified delimiter. Null array elements are skipped. * If the array itself is null, the empty string is returned. This method is guaranteed to * return a non-null value, if no expections are thrown. 
*/ public static final String concat(String[] strs, String delim) { if (strs != null) { final StringBuilder buf = new StringBuilder(); final int n = strs.length; for (int i = 0; i < n; i++) { final String str = strs[i]; if (str != null) { buf.append(str).append(delim); } } final int length = buf.length(); if (length > 0) { // Trim trailing space. buf.setLength(length - 1); } return buf.toString(); } else { return ""; // NOTRANS } } /** * Returns <CODE>true</CODE> if the specified {@link String} is not <CODE>null</CODE> and has a * length greater than zero. This is a very frequently occurring check. */ public static final boolean hasLength(String s) { return (s != null && s.length() > 0); } /** * Returns <CODE>null</CODE> if the specified string is empty or <CODE>null</CODE>. Otherwise * the string itself is returned. */ public static final String nullifyIfEmpty(String s) { return ModelUtil.hasLength(s) ? s : null; } /** * Returns <CODE>null</CODE> if the specified object is null or if its <CODE>toString()</CODE> * representation is empty. Otherwise, the <CODE>toString()</CODE> representation of the object * itself is returned. */ public static final String nullifyingToString(Object o) { return o != null ? nullifyIfEmpty(o.toString()) : null; } /** * Determines if a string has been changed. * @param oldString is the initial value of the String * @param newString is the new value of the String * @return true If both oldString and newString are null or if they are both not null and equal * to each other. Otherwise returns false. 
*/ public static boolean hasStringChanged(String oldString, String newString) { if (oldString == null && newString == null) { return false; } else if ((oldString == null && newString != null) || (oldString != null && newString == null)) { return true; } else { return !oldString.equals(newString); } } public static String getTimeFromLong(long diff) { final String HOURS = "h"; final String MINUTES = "min"; final String SECONDS = "sec"; final long MS_IN_A_DAY = 1000 * 60 * 60 * 24; final long MS_IN_AN_HOUR = 1000 * 60 * 60; final long MS_IN_A_MINUTE = 1000 * 60; final long MS_IN_A_SECOND = 1000; long numDays = diff / MS_IN_A_DAY; diff = diff % MS_IN_A_DAY; long numHours = diff / MS_IN_AN_HOUR; diff = diff % MS_IN_AN_HOUR; long numMinutes = diff / MS_IN_A_MINUTE; diff = diff % MS_IN_A_MINUTE; long numSeconds = diff / MS_IN_A_SECOND; diff = diff % MS_IN_A_SECOND; long numMilliseconds = diff; StringBuilder buf = new StringBuilder(); if (numHours > 0) { buf.append(numHours + " " + HOURS + ", "); } if (numMinutes > 0) { buf.append(numMinutes + " " + MINUTES + ", "); } buf.append(numSeconds + " " + SECONDS); String result = buf.toString(); return result; } /** * Build a List of all elements in an Iterator. */ public static List iteratorAsList(Iterator i) { ArrayList list = new ArrayList(10); while (i.hasNext()) { list.add(i.next()); } return list; } /** * Creates an Iterator that is the reverse of a ListIterator. */ public static Iterator reverseListIterator(ListIterator i) { return new ReverseListIterator(i); } } /** * An Iterator that is the reverse of a ListIterator. */ class ReverseListIterator implements Iterator { private ListIterator _i; ReverseListIterator(ListIterator i) { _i = i; while (_i.hasNext()) _i.next(); } @Override public boolean hasNext() { return _i.hasPrevious(); } @Override public Object next() { return _i.previous(); } @Override public void remove() { _i.remove(); } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.spongepowered.common.service.pagination;

import com.flowpowered.math.GenericMath;
import com.google.common.annotations.VisibleForTesting;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextComponentTranslation;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.commented.CommentedConfigurationNode;
import ninja.leaping.configurate.hocon.HoconConfigurationLoader;
import ninja.leaping.configurate.loader.ConfigurationLoader;
import ninja.leaping.configurate.loader.HeaderMode;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.channel.MessageReceiver;
import org.spongepowered.common.interfaces.text.IMixinTextComponent;
import org.spongepowered.common.text.SpongeTexts;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.PrimitiveIterator;

/**
 * Pagination calculator for players. Handles calculation of text widths,
 * centering text, adding padding, adding spacing, and more.
 */
class PaginationCalculator {

    /** Characters covered by the non-unicode (default MC font) width table, loaded from font-sizes.json. */
    private static final String NON_UNICODE_CHARS;
    /** Pixel widths for each character of {@link #NON_UNICODE_CHARS}, index-aligned. */
    private static final int[] NON_UNICODE_CHAR_WIDTHS;
    /** Packed start/end column nibbles per unicode code point (BMP only), loaded from font-sizes.json. */
    private static final byte[] UNICODE_CHAR_WIDTHS;
    /** Width of a chat line in character pixels/columns. */
    private static final int LINE_WIDTH = 320;

    private final int linesPerPage;

    /**
     * Constructs a new pagination calculator.
     *
     * @param linesPerPage The amount of lines per page there should be
     */
    PaginationCalculator(int linesPerPage) {
        this.linesPerPage = linesPerPage;
    }

    static {
        // Width tables ship as a HOCON/JSON resource next to this class.
        ConfigurationLoader<CommentedConfigurationNode> loader = HoconConfigurationLoader.builder()
                .setURL(PaginationCalculator.class.getResource("font-sizes.json"))
                .setHeaderMode(HeaderMode.NONE)
                .build();
        try {
            ConfigurationNode node = loader.load();
            NON_UNICODE_CHARS = node.getNode("non-unicode").getString();

            List<? extends ConfigurationNode> charWidths = node.getNode("char-widths").getChildrenList();
            int[] nonUnicodeCharWidths = new int[charWidths.size()];
            for (int i = 0; i < nonUnicodeCharWidths.length; ++i) {
                nonUnicodeCharWidths[i] = charWidths.get(i).getInt();
            }
            NON_UNICODE_CHAR_WIDTHS = nonUnicodeCharWidths;

            List<? extends ConfigurationNode> glyphWidths = node.getNode("glyph-widths").getChildrenList();
            byte[] unicodeCharWidths = new byte[glyphWidths.size()];
            for (int i = 0; i < unicodeCharWidths.length; ++i) {
                unicodeCharWidths[i] = (byte) glyphWidths.get(i).getInt();
            }
            UNICODE_CHAR_WIDTHS = unicodeCharWidths;
        } catch (IOException e) {
            // Static init failure must surface as an initializer error, not a swallowed IOException.
            throw new ExceptionInInitializerError(e);
        }
    }

    int getLinesPerPage(MessageReceiver source) {
        return this.linesPerPage;
    }

    /**
     * Gets the number of lines the specified text flows into.
     *
     * @param text The text to calculate the number of lines for
     * @return The number of lines that this text flows into
     */
    int getLines(Text text) {
        //TODO: this needs fixing as well.
        return (int) Math.ceil((double) this.getWidth(text) / LINE_WIDTH);
    }

    /**
     * Gets the width of a character with the specified code
     * point, accounting for if its text is bold our not.
     *
     * <p>Code points outside the glyph-width table (negative values or
     * supplementary-plane characters) have no known glyph and report a
     * width of 0 rather than throwing.</p>
     *
     * @param codePoint The code point of the character
     * @param isBold Whether or not the character is bold or not
     * @return The width of the character at the code point
     */
    @VisibleForTesting
    int getWidth(int codePoint, boolean isBold) {
        int nonUnicodeIdx = NON_UNICODE_CHARS.indexOf(codePoint);
        int width;
        if (codePoint == 32) {
            // Space is always 4 columns wide.
            width = 4;
        } else if (codePoint > 0 && nonUnicodeIdx != -1) {
            width = NON_UNICODE_CHAR_WIDTHS[nonUnicodeIdx];
        } else if (codePoint >= 0 && codePoint < UNICODE_CHAR_WIDTHS.length && UNICODE_CHAR_WIDTHS[codePoint] != 0) {
            // Bounds check added: the table only covers the BMP; indexing it with a
            // supplementary code point previously threw ArrayIndexOutOfBoundsException.
            //from 1.9 & 255 to avoid strange signed int math ruining things.
            //https://bugs.mojang.com/browse/MC-7181
            final int temp = UNICODE_CHAR_WIDTHS[codePoint] & 255;
            // Split into high and low nibbles.
            //bit digits
            //87654321 >>> 4 = 00008765
            int startColumn = temp >>> 4;
            //87654321 & 00001111 = 00004321
            int endColumn = temp & 15;

            width = (endColumn + 1) - startColumn;
            //Why does this scaling happen?
            //I believe it makes unicode fonts skinnier to better match the character widths of the default Minecraft
            // font however there is a int math vs float math bug in the Minecraft FontRenderer.
            //The float math is adjusted for rendering, they attempt to do the same thing for calculating string widths
            //using integer math, this has potential rounding errors, but we should copy it and use ints as well.
            width = (width / 2) + 1;
        } else {
            width = 0;
        }
        //if bolded width gets 1 added.
        if (isBold && width > 0) {
            width = width + 1;
        }
        return width;
    }

    /**
     * Calculates the width of a given text as the number of character
     * pixels/columns the line takes up.
     *
     * @param text The text to get the width of
     * @return The amount of character pixels/columns the text takes up
     */
    @VisibleForTesting
    int getWidth(Text text) {
        ITextComponent component = SpongeTexts.toComponent(text);
        Iterable<ITextComponent> children = ((IMixinTextComponent) component).withChildren();
        int total = 0;

        for (ITextComponent child : children) {
            PrimitiveIterator.OfInt i_it;
            if (child instanceof TextComponentString || child instanceof TextComponentTranslation) {
                i_it = child.getUnformattedComponentText().codePoints().iterator();
            } else {
                // Other component types (scores, selectors, ...) carry no measurable literal text.
                continue;
            }

            boolean bold = child.getStyle().getBold();

            int cp;
            boolean newLine = false;
            while (i_it.hasNext()) {
                // nextInt() avoids the Integer boxing that next() would incur on every character.
                cp = i_it.nextInt();
                if (cp == '\n') {
                    // if the previous character is a '\n'
                    if (newLine) {
                        total += LINE_WIDTH;
                    } else {
                        // Round the running total up to the next full line.
                        total = ((int) Math.ceil((double) total / LINE_WIDTH)) * LINE_WIDTH;
                        newLine = true;
                    }
                } else {
                    int width = getWidth(cp, bold);
                    total += width;
                    newLine = false;
                }
            }
        }

        return total;
    }

    /**
     * Centers a text within the middle of the chat box.
     *
     * <p>Generally used for titles and footers.</p>
     *
     * <p>To use no heading, just pass in a 0 width text for
     * the first argument.</p>
     *
     * @param text The text to center
     * @param padding A padding character with a width &gt;1
     * @return The centered text, or if too big, the original text
     */
    //TODO: Probably should completely rewrite this to not compute padding, but loop until the padding is done, unless
    //we can get accurate computation of padding ahead of time.
    Text center(Text text, Text padding) {
        int inputLength = getWidth(text);
        //Minecraft breaks lines when the next character would be > then LINE_WIDTH, this seems most graceful way to fail
        if (inputLength >= LINE_WIDTH) {
            return text;
        }
        final Text textWithSpaces = addSpaces(Text.of(" "), text);

        //Minecraft breaks lines when the next character would be > then LINE_WIDTH
        boolean addSpaces = getWidth(textWithSpaces) <= LINE_WIDTH;

        //TODO: suspect, why are we changing the style of the padding, they may want different styles on the padding.
        Text styledPadding = withStyle(padding, text);
        int paddingLength = getWidth(styledPadding);
        final Text.Builder output = Text.builder();

        //Using 0 width unicode symbols as padding throws us into an unending loop, replace them with the default padding
        if (paddingLength < 1) {
            padding = Text.of("=");
            styledPadding = withColor(withStyle(padding, text), text);
            paddingLength = getWidth(styledPadding);
        }

        //if we only need padding
        if (inputLength == 0) {
            addPadding(padding, output, GenericMath.floor((double) LINE_WIDTH / paddingLength));
        } else {
            if (addSpaces) {
                text = textWithSpaces;
                inputLength = getWidth(textWithSpaces);
            }

            int paddingNecessary = LINE_WIDTH - inputLength;

            int paddingCount = GenericMath.floor(paddingNecessary / paddingLength);
            //pick a halfway point
            int beforePadding = GenericMath.floor(paddingCount / 2.0);
            //Do not use ceil, this prevents floating point errors.
            int afterPadding = paddingCount - beforePadding;

            addPadding(styledPadding, output, beforePadding);
            output.append(text);
            addPadding(styledPadding, output, afterPadding);
        }

        return this.finalizeBuilder(text, output);
    }

    /**
     * Gives the first text argument the style of the second.
     *
     * @param text The text to stylize
     * @param styled The styled text
     * @return The original text now stylized
     */
    private Text withStyle(Text text, Text styled) {
        return text.toBuilder()
                .style(styled.getStyle())
                .build();
    }

    /**
     * Gives the first text argument the color of the second.
     *
     * @param text The text to color
     * @param colored The colored text
     * @return The original text now colored
     */
    private Text withColor(Text text, Text colored) {
        return text.toBuilder()
                .color(colored.getColor())
                .build();
    }

    /**
     * Finalizes the builder used in centering text.
     *
     * @param text The text to get the style from
     * @param build The work in progress text builder
     * @return The finalized, properly styled text.
     */
    private Text finalizeBuilder(Text text, Text.Builder build) {
        return build.style(text.getStyle()).build();
    }

    /**
     * Adds spaces to both sides of the specified text.
     *
     * <p>Overrides all color and style with the
     * text's color and style.</p>
     *
     * @param spaces The spaces to use
     * @param text The text to add to
     * @return The text with the added spaces
     */
    private Text addSpaces(Text spaces, Text text) {
        return Text.builder()
                .append(spaces)
                .append(text)
                .append(spaces)
                .color(text.getColor())
                .style(text.getStyle())
                .build();
    }

    /**
     * Adds the specified padding text to a piece of text being built
     * up to a certain amount specified by a count.
     *
     * @param padding The padding text to use
     * @param build The work in progress text to add to
     * @param count The amount of padding to add
     */
    private void addPadding(Text padding, Text.Builder build, int count) {
        if (count > 0) {
            build.append(Collections.nCopies(count, padding));
        }
    }
}
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package com.github.os72.protobuf261;

import protobuf_unittest.UnittestProto.BoolMessage;
import protobuf_unittest.UnittestProto.Int32Message;
import protobuf_unittest.UnittestProto.Int64Message;
import protobuf_unittest.UnittestProto.TestAllTypes;
import protobuf_unittest.UnittestProto.TestRecursiveMessage;

import junit.framework.TestCase;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

/**
 * Unit test for {@link CodedInputStream}.
 *
 * @author kenton@google.com Kenton Varda
 */
public class CodedInputStreamTest extends TestCase {
  /**
   * Helper to construct a byte array from a bunch of bytes. The inputs are
   * actually ints so that I can use hex notation and not get stupid errors
   * about precision.
   */
  private byte[] bytes(int... bytesAsInts) {
    byte[] bytes = new byte[bytesAsInts.length];
    for (int i = 0; i < bytesAsInts.length; i++) {
      bytes[i] = (byte) bytesAsInts[i];
    }
    return bytes;
  }

  /**
   * An InputStream which limits the number of bytes it reads at a time.
   * We use this to make sure that CodedInputStream doesn't screw up when
   * reading in small blocks.
   */
  private static final class SmallBlockInputStream extends FilterInputStream {
    private final int blockSize;

    public SmallBlockInputStream(byte[] data, int blockSize) {
      this(new ByteArrayInputStream(data), blockSize);
    }

    public SmallBlockInputStream(InputStream in, int blockSize) {
      super(in);
      this.blockSize = blockSize;
    }

    // Cap whole-array reads at blockSize.
    public int read(byte[] b) throws IOException {
      return super.read(b, 0, Math.min(b.length, blockSize));
    }

    // Cap ranged reads at blockSize.
    public int read(byte[] b, int off, int len) throws IOException {
      return super.read(b, off, Math.min(len, blockSize));
    }
  }

  /** Asserts that the input's byte counter matches {@code data} and the stream is exhausted. */
  private void assertDataConsumed(byte[] data, CodedInputStream input)
      throws IOException {
    assertEquals(data.length, input.getTotalBytesRead());
    assertTrue(input.isAtEnd());
  }

  /**
   * Parses the given bytes using readRawVarint32() and readRawVarint64() and
   * checks that the result matches the given value.
   */
  private void assertReadVarint(byte[] data, long value) throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(data);
    assertEquals((int) value, input.readRawVarint32());
    assertDataConsumed(data, input);

    input = CodedInputStream.newInstance(data);
    assertEquals(value, input.readRawVarint64());
    assertDataConsumed(data, input);

    input = CodedInputStream.newInstance(data);
    assertEquals(value, input.readRawVarint64SlowPath());
    assertDataConsumed(data, input);

    input = CodedInputStream.newInstance(data);
    assertTrue(input.skipField(WireFormat.WIRETYPE_VARINT));
    assertDataConsumed(data, input);

    // Try different block sizes.
    for (int blockSize = 1; blockSize <= 16; blockSize *= 2) {
      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertEquals((int) value, input.readRawVarint32());
      assertDataConsumed(data, input);

      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertEquals(value, input.readRawVarint64());
      assertDataConsumed(data, input);

      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertEquals(value, input.readRawVarint64SlowPath());
      assertDataConsumed(data, input);

      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertTrue(input.skipField(WireFormat.WIRETYPE_VARINT));
      assertDataConsumed(data, input);
    }

    // Try reading direct from an InputStream.  We want to verify that it
    // doesn't read past the end of the input, so we copy to a new, bigger
    // array first.
    byte[] longerData = new byte[data.length + 1];
    System.arraycopy(data, 0, longerData, 0, data.length);
    InputStream rawInput = new ByteArrayInputStream(longerData);
    assertEquals((int) value, CodedInputStream.readRawVarint32(rawInput));
    assertEquals(1, rawInput.available());
  }

  /**
   * Parses the given bytes using readRawVarint32() and readRawVarint64() and
   * expects them to fail with an InvalidProtocolBufferException whose
   * description matches the given one.
   */
  private void assertReadVarintFailure(
      InvalidProtocolBufferException expected, byte[] data)
      throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(data);
    try {
      input.readRawVarint32();
      fail("Should have thrown an exception.");
    } catch (InvalidProtocolBufferException e) {
      assertEquals(expected.getMessage(), e.getMessage());
    }

    input = CodedInputStream.newInstance(data);
    try {
      input.readRawVarint64();
      fail("Should have thrown an exception.");
    } catch (InvalidProtocolBufferException e) {
      assertEquals(expected.getMessage(), e.getMessage());
    }

    input = CodedInputStream.newInstance(data);
    try {
      input.readRawVarint64SlowPath();
      fail("Should have thrown an exception.");
    } catch (InvalidProtocolBufferException e) {
      assertEquals(expected.getMessage(), e.getMessage());
    }

    // Make sure we get the same error when reading direct from an InputStream.
    try {
      CodedInputStream.readRawVarint32(new ByteArrayInputStream(data));
      fail("Should have thrown an exception.");
    } catch (InvalidProtocolBufferException e) {
      assertEquals(expected.getMessage(), e.getMessage());
    }
  }

  /** Tests readRawVarint32() and readRawVarint64(). */
  public void testReadVarint() throws Exception {
    assertReadVarint(bytes(0x00), 0);
    assertReadVarint(bytes(0x01), 1);
    assertReadVarint(bytes(0x7f), 127);
    // 14882
    assertReadVarint(bytes(0xa2, 0x74), (0x22 << 0) | (0x74 << 7));
    // 2961488830
    assertReadVarint(bytes(0xbe, 0xf7, 0x92, 0x84, 0x0b),
      (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) |
      (0x0bL << 28));

    // 64-bit
    // 7256456126
    assertReadVarint(bytes(0xbe, 0xf7, 0x92, 0x84, 0x1b),
      (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) |
      (0x1bL << 28));
    // 41256202580718336
    assertReadVarint(
      bytes(0x80, 0xe6, 0xeb, 0x9c, 0xc3, 0xc9, 0xa4, 0x49),
      (0x00 << 0) | (0x66 << 7) | (0x6b << 14) | (0x1c << 21) |
      (0x43L << 28) | (0x49L << 35) | (0x24L << 42) | (0x49L << 49));
    // 11964378330978735131
    assertReadVarint(
      bytes(0x9b, 0xa8, 0xf9, 0xc2, 0xbb, 0xd6, 0x80, 0x85, 0xa6, 0x01),
      (0x1b << 0) | (0x28 << 7) | (0x79 << 14) | (0x42 << 21) |
      (0x3bL << 28) | (0x56L << 35) | (0x00L << 42) |
      (0x05L << 49) | (0x26L << 56) | (0x01L << 63));

    // Failures
    assertReadVarintFailure(
      InvalidProtocolBufferException.malformedVarint(),
      bytes(0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
            0x00));
    assertReadVarintFailure(
      InvalidProtocolBufferException.truncatedMessage(),
      bytes(0x80));
  }

  /**
   * Parses the given bytes using readRawLittleEndian32() and checks
   * that the result matches the given value.
   */
  private void assertReadLittleEndian32(byte[] data, int value)
      throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(data);
    assertEquals(value, input.readRawLittleEndian32());
    assertTrue(input.isAtEnd());

    // Try different block sizes.
    for (int blockSize = 1; blockSize <= 16; blockSize *= 2) {
      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertEquals(value, input.readRawLittleEndian32());
      assertTrue(input.isAtEnd());
    }
  }

  /**
   * Parses the given bytes using readRawLittleEndian64() and checks
   * that the result matches the given value.
   */
  private void assertReadLittleEndian64(byte[] data, long value)
      throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(data);
    assertEquals(value, input.readRawLittleEndian64());
    assertTrue(input.isAtEnd());

    // Try different block sizes.
    for (int blockSize = 1; blockSize <= 16; blockSize *= 2) {
      input = CodedInputStream.newInstance(
        new SmallBlockInputStream(data, blockSize));
      assertEquals(value, input.readRawLittleEndian64());
      assertTrue(input.isAtEnd());
    }
  }

  /** Tests readRawLittleEndian32() and readRawLittleEndian64(). */
  public void testReadLittleEndian() throws Exception {
    assertReadLittleEndian32(bytes(0x78, 0x56, 0x34, 0x12), 0x12345678);
    assertReadLittleEndian32(bytes(0xf0, 0xde, 0xbc, 0x9a), 0x9abcdef0);

    assertReadLittleEndian64(
      bytes(0xf0, 0xde, 0xbc, 0x9a, 0x78, 0x56, 0x34, 0x12),
      0x123456789abcdef0L);
    assertReadLittleEndian64(
      bytes(0x78, 0x56, 0x34, 0x12, 0xf0, 0xde, 0xbc, 0x9a),
      0x9abcdef012345678L);
  }

  /** Test decodeZigZag32() and decodeZigZag64(). */
  public void testDecodeZigZag() throws Exception {
    assertEquals( 0, CodedInputStream.decodeZigZag32(0));
    assertEquals(-1, CodedInputStream.decodeZigZag32(1));
    assertEquals( 1, CodedInputStream.decodeZigZag32(2));
    assertEquals(-2, CodedInputStream.decodeZigZag32(3));
    assertEquals(0x3FFFFFFF, CodedInputStream.decodeZigZag32(0x7FFFFFFE));
    assertEquals(0xC0000000, CodedInputStream.decodeZigZag32(0x7FFFFFFF));
    assertEquals(0x7FFFFFFF, CodedInputStream.decodeZigZag32(0xFFFFFFFE));
    assertEquals(0x80000000, CodedInputStream.decodeZigZag32(0xFFFFFFFF));

    assertEquals( 0, CodedInputStream.decodeZigZag64(0));
    assertEquals(-1, CodedInputStream.decodeZigZag64(1));
    assertEquals( 1, CodedInputStream.decodeZigZag64(2));
    assertEquals(-2, CodedInputStream.decodeZigZag64(3));
    assertEquals(0x000000003FFFFFFFL,
                 CodedInputStream.decodeZigZag64(0x000000007FFFFFFEL));
    assertEquals(0xFFFFFFFFC0000000L,
                 CodedInputStream.decodeZigZag64(0x000000007FFFFFFFL));
    assertEquals(0x000000007FFFFFFFL,
                 CodedInputStream.decodeZigZag64(0x00000000FFFFFFFEL));
    assertEquals(0xFFFFFFFF80000000L,
                 CodedInputStream.decodeZigZag64(0x00000000FFFFFFFFL));
    assertEquals(0x7FFFFFFFFFFFFFFFL,
                 CodedInputStream.decodeZigZag64(0xFFFFFFFFFFFFFFFEL));
    assertEquals(0x8000000000000000L,
                 CodedInputStream.decodeZigZag64(0xFFFFFFFFFFFFFFFFL));
  }

  /** Tests reading and parsing a whole message with every field type. */
  public void testReadWholeMessage() throws Exception {
    TestAllTypes message = TestUtil.getAllSet();

    byte[] rawBytes = message.toByteArray();
    assertEquals(rawBytes.length, message.getSerializedSize());

    TestAllTypes message2 = TestAllTypes.parseFrom(rawBytes);
    TestUtil.assertAllFieldsSet(message2);

    // Try different block sizes.
    for (int blockSize = 1; blockSize < 256; blockSize *= 2) {
      message2 = TestAllTypes.parseFrom(
        new SmallBlockInputStream(rawBytes, blockSize));
      TestUtil.assertAllFieldsSet(message2);
    }
  }

  /** Tests skipField(). */
  public void testSkipWholeMessage() throws Exception {
    TestAllTypes message = TestUtil.getAllSet();
    byte[] rawBytes = message.toByteArray();

    // Create two parallel inputs.  Parse one as unknown fields while using
    // skipField() to skip each field on the other.  Expect the same tags.
    CodedInputStream input1 = CodedInputStream.newInstance(rawBytes);
    CodedInputStream input2 = CodedInputStream.newInstance(rawBytes);
    UnknownFieldSet.Builder unknownFields = UnknownFieldSet.newBuilder();

    while (true) {
      int tag = input1.readTag();
      assertEquals(tag, input2.readTag());
      if (tag == 0) {
        break;
      }
      unknownFields.mergeFieldFrom(tag, input1);
      input2.skipField(tag);
    }
  }

  /**
   * Test that a bug in skipRawBytes() has been fixed:  if the skip skips
   * exactly up to a limit, this should not break things.
   */
  public void testSkipRawBytesBug() throws Exception {
    byte[] rawBytes = new byte[] { 1, 2 };
    CodedInputStream input = CodedInputStream.newInstance(rawBytes);

    int limit = input.pushLimit(1);
    input.skipRawBytes(1);
    input.popLimit(limit);
    assertEquals(2, input.readRawByte());
  }

  /**
   * Test that a bug in skipRawBytes() has been fixed:  if the skip skips
   * past the end of a buffer with a limit that has been set past the end of
   * that buffer, this should not break things.
   */
  public void testSkipRawBytesPastEndOfBufferWithLimit() throws Exception {
    byte[] rawBytes = new byte[] { 1, 2, 3, 4, 5 };
    CodedInputStream input = CodedInputStream.newInstance(
        new SmallBlockInputStream(rawBytes, 3));

    int limit = input.pushLimit(4);
    // In order to expose the bug we need to read at least one byte to prime the
    // buffer inside the CodedInputStream.
    assertEquals(1, input.readRawByte());
    // Skip to the end of the limit.
    input.skipRawBytes(3);
    assertTrue(input.isAtEnd());
    input.popLimit(limit);
    assertEquals(5, input.readRawByte());
  }

  /** Tests that a 1MB bytes field round-trips through buffered InputStream parsing. */
  public void testReadHugeBlob() throws Exception {
    // Allocate and initialize a 1MB blob.
    byte[] blob = new byte[1 << 20];
    for (int i = 0; i < blob.length; i++) {
      blob[i] = (byte) i;
    }

    // Make a message containing it.
    TestAllTypes.Builder builder = TestAllTypes.newBuilder();
    TestUtil.setAllFields(builder);
    builder.setOptionalBytes(ByteString.copyFrom(blob));
    TestAllTypes message = builder.build();

    // Serialize and parse it.  Make sure to parse from an InputStream, not
    // directly from a ByteString, so that CodedInputStream uses buffered
    // reading.
    TestAllTypes message2 =
      TestAllTypes.parseFrom(message.toByteString().newInput());

    assertEquals(message.getOptionalBytes(), message2.getOptionalBytes());

    // Make sure all the other fields were parsed correctly.
    TestAllTypes message3 = TestAllTypes.newBuilder(message2)
      .setOptionalBytes(TestUtil.getAllSet().getOptionalBytes())
      .build();
    TestUtil.assertAllFieldsSet(message3);
  }

  /** Tests that a length-delimited field claiming a huge size fails instead of allocating. */
  public void testReadMaliciouslyLargeBlob() throws Exception {
    ByteString.Output rawOutput = ByteString.newOutput();
    CodedOutputStream output = CodedOutputStream.newInstance(rawOutput);

    int tag = WireFormat.makeTag(1, WireFormat.WIRETYPE_LENGTH_DELIMITED);
    output.writeRawVarint32(tag);
    output.writeRawVarint32(0x7FFFFFFF);
    output.writeRawBytes(new byte[32]);  // Pad with a few random bytes.
    output.flush();

    CodedInputStream input = rawOutput.toByteString().newCodedInput();
    assertEquals(tag, input.readTag());

    try {
      input.readBytes();
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException e) {
      // success.
    }
  }

  /** Builds a TestRecursiveMessage nested {@code depth} levels deep. */
  private TestRecursiveMessage makeRecursiveMessage(int depth) {
    if (depth == 0) {
      return TestRecursiveMessage.newBuilder().setI(5).build();
    } else {
      return TestRecursiveMessage.newBuilder()
        .setA(makeRecursiveMessage(depth - 1)).build();
    }
  }

  /** Asserts that {@code message} is nested exactly {@code depth} levels deep. */
  private void assertMessageDepth(TestRecursiveMessage message, int depth) {
    if (depth == 0) {
      assertFalse(message.hasA());
      assertEquals(5, message.getI());
    } else {
      assertTrue(message.hasA());
      assertMessageDepth(message.getA(), depth - 1);
    }
  }

  /** Tests that the recursion limit rejects overly nested messages. */
  public void testMaliciousRecursion() throws Exception {
    ByteString data64 = makeRecursiveMessage(64).toByteString();
    ByteString data65 = makeRecursiveMessage(65).toByteString();

    assertMessageDepth(TestRecursiveMessage.parseFrom(data64), 64);

    try {
      TestRecursiveMessage.parseFrom(data65);
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException e) {
      // success.
    }

    CodedInputStream input = data64.newCodedInput();
    input.setRecursionLimit(8);
    try {
      TestRecursiveMessage.parseFrom(input);
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException e) {
      // success.
    }
  }

  /** Asserts that {@code e} is the canonical size-limit-exceeded error. */
  private void checkSizeLimitExceeded(InvalidProtocolBufferException e) {
    assertEquals(
        InvalidProtocolBufferException.sizeLimitExceeded().getMessage(),
        e.getMessage());
  }

  /** Tests that setSizeLimit() rejects messages larger than the limit. */
  public void testSizeLimit() throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(
        new SmallBlockInputStream(
            TestUtil.getAllSet().toByteString().newInput(), 16));
    input.setSizeLimit(16);

    try {
      TestAllTypes.parseFrom(input);
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException expected) {
      checkSizeLimitExceeded(expected);
    }
  }

  /** Tests that resetSizeCounter() restarts the size-limit accounting. */
  public void testResetSizeCounter() throws Exception {
    CodedInputStream input = CodedInputStream.newInstance(
        new SmallBlockInputStream(new byte[256], 8));
    input.setSizeLimit(16);
    input.readRawBytes(16);
    assertEquals(16, input.getTotalBytesRead());

    try {
      input.readRawByte();
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException expected) {
      checkSizeLimitExceeded(expected);
    }

    input.resetSizeCounter();
    assertEquals(0, input.getTotalBytesRead());
    input.readRawByte();  // No exception thrown.
    input.resetSizeCounter();
    assertEquals(0, input.getTotalBytesRead());
    input.readRawBytes(16);
    assertEquals(16, input.getTotalBytesRead());
    input.resetSizeCounter();

    try {
      input.readRawBytes(17);  // Hits limit again.
      fail("Should have thrown an exception!");
    } catch (InvalidProtocolBufferException expected) {
      checkSizeLimitExceeded(expected);
    }
  }

  /** Tests that multiple messages can be read under one limit if the counter is reset between them. */
  public void testSizeLimitMultipleMessages() throws Exception {
    byte[] bytes = new byte[256];
    for (int i = 0; i < bytes.length; i++) {
      bytes[i] = (byte) i;
    }
    CodedInputStream input = CodedInputStream.newInstance(
        new SmallBlockInputStream(bytes, 7));
    input.setSizeLimit(16);
    for (int i = 0; i < 256 / 16; i++) {
      byte[] message = input.readRawBytes(16);
      for (int j = 0; j < message.length; j++) {
        assertEquals(i * 16 + j, message[j] & 0xff);
      }
      assertEquals(16, input.getTotalBytesRead());
      input.resetSizeCounter();
      assertEquals(0, input.getTotalBytesRead());
    }
  }

  /**
   * Tests that if we readString invalid UTF-8 bytes, no exception
   * is thrown.  Instead, the invalid bytes are replaced with the Unicode
   * "replacement character" U+FFFD.
   */
  public void testReadStringInvalidUtf8() throws Exception {
    ByteString.Output rawOutput = ByteString.newOutput();
    CodedOutputStream output = CodedOutputStream.newInstance(rawOutput);

    int tag = WireFormat.makeTag(1, WireFormat.WIRETYPE_LENGTH_DELIMITED);
    output.writeRawVarint32(tag);
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[] { (byte) 0x80 });
    output.flush();

    CodedInputStream input = rawOutput.toByteString().newCodedInput();
    assertEquals(tag, input.readTag());
    String text = input.readString();
    assertEquals(0xfffd, text.charAt(0));
  }

  /**
   * Tests that if we readStringRequireUtf8 invalid UTF-8 bytes, an
   * InvalidProtocolBufferException is thrown.
   */
  public void testReadStringRequireUtf8InvalidUtf8() throws Exception {
    ByteString.Output rawOutput = ByteString.newOutput();
    CodedOutputStream output = CodedOutputStream.newInstance(rawOutput);

    int tag = WireFormat.makeTag(1, WireFormat.WIRETYPE_LENGTH_DELIMITED);
    output.writeRawVarint32(tag);
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[] { (byte) 0x80 });
    output.flush();

    CodedInputStream input = rawOutput.toByteString().newCodedInput();
    assertEquals(tag, input.readTag());
    try {
      input.readStringRequireUtf8();
      fail("Expected invalid UTF-8 exception.");
    } catch (InvalidProtocolBufferException exception) {
      assertEquals("Protocol message had invalid UTF-8.",
          exception.getMessage());
    }
  }

  /** Tests reading from an offset/length slice of a larger byte array. */
  public void testReadFromSlice() throws Exception {
    byte[] bytes = bytes(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
    CodedInputStream in = CodedInputStream.newInstance(bytes, 3, 5);
    assertEquals(0, in.getTotalBytesRead());
    for (int i = 3; i < 8; i++) {
      assertEquals(i, in.readRawByte());
      assertEquals(i - 2, in.getTotalBytesRead());
    }
    // eof
    assertEquals(0, in.readTag());
    assertEquals(5, in.getTotalBytesRead());
  }

  /** Tests that tags with field number zero are rejected. */
  public void testInvalidTag() throws Exception {
    // Any tag number which corresponds to field number zero is invalid and
    // should throw InvalidProtocolBufferException.
    for (int i = 0; i < 8; i++) {
      try {
        CodedInputStream.newInstance(bytes(i)).readTag();
        fail("Should have thrown an exception.");
      } catch (InvalidProtocolBufferException e) {
        assertEquals(InvalidProtocolBufferException.invalidTag().getMessage(),
                     e.getMessage());
      }
    }
  }

  /** Tests readByteArray() for empty, small, and larger-than-buffer fields. */
  public void testReadByteArray() throws Exception {
    ByteString.Output rawOutput = ByteString.newOutput();
    CodedOutputStream output = CodedOutputStream.newInstance(rawOutput);
    // Zero-sized bytes field.
    output.writeRawVarint32(0);
    // One one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[] { (byte) 23 });
    // Another one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[] { (byte) 45 });
    // A bytes field large enough that won't fit into the 4K buffer.
    final int bytesLength = 16 * 1024;
    byte[] bytes = new byte[bytesLength];
    bytes[0] = (byte) 67;
    bytes[bytesLength - 1] = (byte) 89;
    output.writeRawVarint32(bytesLength);
    output.writeRawBytes(bytes);
    output.flush();
    CodedInputStream inputStream = rawOutput.toByteString().newCodedInput();

    byte[] result = inputStream.readByteArray();
    assertEquals(0, result.length);
    result = inputStream.readByteArray();
    assertEquals(1, result.length);
    assertEquals((byte) 23, result[0]);
    result = inputStream.readByteArray();
    assertEquals(1, result.length);
    assertEquals((byte) 45, result[0]);
    result = inputStream.readByteArray();
    assertEquals(bytesLength, result.length);
    assertEquals((byte) 67, result[0]);
    assertEquals((byte) 89, result[bytesLength - 1]);
  }

  /** Tests readByteBuffer() for empty, small, and larger-than-buffer fields. */
  public void testReadByteBuffer() throws Exception {
    ByteString.Output rawOutput = ByteString.newOutput();
    CodedOutputStream output = CodedOutputStream.newInstance(rawOutput);
    // Zero-sized bytes field.
    output.writeRawVarint32(0);
    // One one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[]{(byte) 23});
    // Another one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[]{(byte) 45});
    // A bytes field large enough that won't fit into the 4K buffer.
    final int bytesLength = 16 * 1024;
    byte[] bytes = new byte[bytesLength];
    bytes[0] = (byte) 67;
    bytes[bytesLength - 1] = (byte) 89;
    output.writeRawVarint32(bytesLength);
    output.writeRawBytes(bytes);
    output.flush();
    CodedInputStream inputStream = rawOutput.toByteString().newCodedInput();

    ByteBuffer result = inputStream.readByteBuffer();
    assertEquals(0, result.capacity());
    result = inputStream.readByteBuffer();
    assertEquals(1, result.capacity());
    assertEquals((byte) 23, result.get());
    result = inputStream.readByteBuffer();
    assertEquals(1, result.capacity());
    assertEquals((byte) 45, result.get());
    result = inputStream.readByteBuffer();
    assertEquals(bytesLength, result.capacity());
    assertEquals((byte) 67, result.get());
    result.position(bytesLength - 1);
    assertEquals((byte) 89, result.get());
  }

  /** Tests that enableAliasing() controls whether readByteBuffer() shares the backing array. */
  public void testReadByteBufferAliasing() throws Exception {
    ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream();
    CodedOutputStream output = CodedOutputStream.newInstance(byteArrayStream);
    // Zero-sized bytes field.
    output.writeRawVarint32(0);
    // One one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[]{(byte) 23});
    // Another one-byte bytes field
    output.writeRawVarint32(1);
    output.writeRawBytes(new byte[]{(byte) 45});
    // A bytes field large enough that won't fit into the 4K buffer.
    final int bytesLength = 16 * 1024;
    byte[] bytes = new byte[bytesLength];
    bytes[0] = (byte) 67;
    bytes[bytesLength - 1] = (byte) 89;
    output.writeRawVarint32(bytesLength);
    output.writeRawBytes(bytes);
    output.flush();
    byte[] data = byteArrayStream.toByteArray();

    // Without aliasing
    CodedInputStream inputStream = CodedInputStream.newInstance(data);
    ByteBuffer result = inputStream.readByteBuffer();
    assertEquals(0, result.capacity());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() != data);
    assertEquals(1, result.capacity());
    assertEquals((byte) 23, result.get());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() != data);
    assertEquals(1, result.capacity());
    assertEquals((byte) 45, result.get());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() != data);
    assertEquals(bytesLength, result.capacity());
    assertEquals((byte) 67, result.get());
    result.position(bytesLength - 1);
    assertEquals((byte) 89, result.get());

    // Enable aliasing
    inputStream = CodedInputStream.newInstance(data);
    inputStream.enableAliasing(true);
    result = inputStream.readByteBuffer();
    assertEquals(0, result.capacity());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() == data);
    assertEquals(1, result.capacity());
    assertEquals((byte) 23, result.get());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() == data);
    assertEquals(1, result.capacity());
    assertEquals((byte) 45, result.get());
    result = inputStream.readByteBuffer();
    assertTrue(result.array() == data);
    assertEquals(bytesLength, result.capacity());
    assertEquals((byte) 67, result.get());
    result.position(bytesLength - 1);
    assertEquals((byte) 89, result.get());
  }

  /** Tests that an int64 field's wire encoding can be re-read as bool and int32. */
  public void testCompatibleTypes() throws Exception {
    long data = 0x100000000L;
    Int64Message message = Int64Message.newBuilder().setData(data).build();
    ByteString serialized = message.toByteString();

    // Test int64(long) is compatible with bool(boolean)
    BoolMessage msg2 = BoolMessage.parseFrom(serialized);
    assertTrue(msg2.getData());

    // Test int64(long) is compatible with int32(int)
    Int32Message msg3 = Int32Message.parseFrom(serialized);
    assertEquals((int) data, msg3.getData());
  }
}
/* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2010 Versant Corporation http://www.versant.com

db4o is free software; you can redistribute it and/or modify it under the
terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.

db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License along
with this program.  If not, see http://www.gnu.org/licenses/. */
package com.db4o.internal;

import java.io.*;

import com.db4o.*;
import com.db4o.defragment.*;
import com.db4o.foundation.*;
import com.db4o.internal.encoding.*;
import com.db4o.internal.mapping.*;
import com.db4o.internal.marshall.*;
import com.db4o.internal.slots.*;
import com.db4o.marshall.*;
import com.db4o.typehandlers.*;

/**
 * Read/write context used while defragmenting a database file.
 * <p>
 * Maintains two buffers that are kept positionally in sync: {@code _source}
 * (the bytes as read from the old file) and {@code _target} (a copy that is
 * rewritten in place with remapped object IDs/addresses and later flushed to
 * the defragmented file). Every read advances both buffers; every ID copy
 * reads from source and writes the mapped value to target at the same offset.
 *
 * @exclude
 */
public final class DefragmentContextImpl implements ReadWriteBuffer, DefragmentContext {

	// Buffer holding the original slot bytes; all reads come from here.
	private ByteArrayBuffer _source;

	// Same-length buffer that receives the rewritten (ID-remapped) bytes.
	private ByteArrayBuffer _target;

	// Provides source->target ID/address mappings and slot allocation.
	private DefragmentServices _services;

	// Header of the object currently being processed; may be null for
	// contexts created without one (see 2-arg constructor).
	private final ObjectHeader _objectHeader;

	// Aspect count declared by the class being processed (set externally).
	private int _declaredAspectCount;

	// Source ID of the current parent object; consumed by copyAddress().
	private int _currentParentSourceID;

	/**
	 * Creates a context for a new source buffer, inheriting services and
	 * object header from an existing context.
	 */
	public DefragmentContextImpl(ByteArrayBuffer source, DefragmentContextImpl context) {
		this(source, context._services, context._objectHeader);
	}

	/** Creates a context with no object header. */
	public DefragmentContextImpl(ByteArrayBuffer source, DefragmentServices services) {
		this(source, services, null);
	}

	/**
	 * Primary constructor: the target buffer starts as a byte-for-byte copy
	 * of the source, so untouched regions survive unchanged and only the
	 * explicitly rewritten positions (mapped IDs/addresses) differ.
	 */
	public DefragmentContextImpl(ByteArrayBuffer source, DefragmentServices services, ObjectHeader header) {
		_source = source;
		_services = services;
		_target = new ByteArrayBuffer(length());
		_source.copyTo(_target, 0, 0, length());
		_objectHeader = header;
	}

	/**
	 * Nested context sharing the parent's buffers and services but with its
	 * own object header (used when descending into member objects).
	 */
	public DefragmentContextImpl(DefragmentContextImpl parentContext, ObjectHeader header) {
		_source = parentContext._source;
		_target = parentContext._target;
		_services = parentContext._services;
		_objectHeader = header;
	}

	public int offset() {
		return _source.offset();
	}

	/** Positions both buffers at the same offset to keep them in lock-step. */
	public void seek(int offset) {
		_source.seek(offset);
		_target.seek(offset);
	}

	public void incrementOffset(int numBytes) {
		_source.incrementOffset(numBytes);
		_target.incrementOffset(numBytes);
	}

	public void incrementIntSize() {
		incrementOffset(Const4.INT_LENGTH);
	}

	/** Copies an ID that has no slot of its own; never registered as unindexed. */
	public int copySlotlessID() {
		return copyUnindexedId(false);
	}

	/** Copies an ID and registers it as unindexed if it has no mapping yet. */
	public int copyUnindexedID() {
		return copyUnindexedId(true);
	}

	/**
	 * Reads an ID from source and writes its mapped equivalent to target.
	 * If no strict mapping exists yet, a fresh target ID is allocated and
	 * the pair is recorded; optionally the original ID is also registered
	 * as unindexed so it gets processed later.
	 */
	private int copyUnindexedId(boolean doRegister) {
		int orig = _source.readInt();

		// TODO: There is no test case for the zero case
		if (orig == 0) {
			_target.writeInt(0);
			return 0;
		}

		int mapped = -1;
		try {
			mapped = _services.strictMappedID(orig);
		} catch (MappingNotFoundException exc) {
			// No mapping yet: allocate a new target ID and remember the pair.
			mapped = _services.targetNewId();
			_services.mapIDs(orig, mapped, false);
			if (doRegister) {
				_services.registerUnindexed(orig);
			}
		}
		_target.writeInt(mapped);
		return mapped;
	}

	public int copyID() {
		// This code is slightly redundant.
		// The profiler shows it's a hotspot.
		// The following would be non-redundant.
		// return copy(false, false);
		int id = _source.readInt();
		return writeMappedID(id);
	}

	public int copyID(boolean flipNegative) {
		int id = _source.readInt();
		return internalCopyID(flipNegative, id);
	}

	public int copyIDReturnOriginalID() {
		return copyIDReturnOriginalID(false);
	}

	/**
	 * Copies a (possibly sign-flipped) ID but returns the original source ID
	 * with the sign normalized back to positive.
	 */
	public int copyIDReturnOriginalID(boolean flipNegative) {
		int id = _source.readInt();
		internalCopyID(flipNegative, id);
		boolean flipped = flipNegative && (id < 0);
		if (flipped) {
			return -id;
		}
		return id;
	}

	/**
	 * Maps {@code id} to its target equivalent and writes it. Negative IDs
	 * (when {@code flipNegative}) are mapped on their absolute value and the
	 * sign is restored on the mapped result.
	 */
	private int internalCopyID(boolean flipNegative, int id) {
		boolean flipped = flipNegative && (id < 0);
		if (flipped) {
			id = -id;
		}
		int mapped = _services.mappedID(id);
		if (flipped) {
			mapped = -mapped;
		}
		_target.writeInt(mapped);
		return mapped;
	}

	public void readBegin(byte identifier) {
		_source.readBegin(identifier);
		_target.readBegin(identifier);
	}

	// The read* methods below read from source and merely advance target by
	// the same number of bytes (target already holds a copy of those bytes).

	public byte readByte() {
		byte value = _source.readByte();
		_target.incrementOffset(1);
		return value;
	}

	public void readBytes(byte[] bytes) {
		_source.readBytes(bytes);
		_target.incrementOffset(bytes.length);
	}

	public int readInt() {
		int value = _source.readInt();
		_target.incrementOffset(Const4.INT_LENGTH);
		return value;
	}

	// The write* methods below write to target and advance source past the
	// bytes being replaced.

	public void writeInt(int value) {
		_source.incrementOffset(Const4.INT_LENGTH);
		_target.writeInt(value);
	}

	/** Flushes the rewritten target buffer to the given file at {@code address}. */
	public void write(LocalObjectContainer file, int address) {
		file.writeBytes(_target, address, 0);
	}

	/** Skips over a length-prefixed string in both buffers. */
	public void incrementStringOffset(LatinStringIO sio) {
		incrementStringOffset(sio, _source);
		incrementStringOffset(sio, _target);
	}

	private void incrementStringOffset(LatinStringIO sio, ByteArrayBuffer buffer) {
		sio.readLengthAndString(buffer);
	}

	public ByteArrayBuffer sourceBuffer() {
		return _source;
	}

	public ByteArrayBuffer targetBuffer() {
		return _target;
	}

	public IDMapping mapping() {
		return _services;
	}

	public Transaction systemTrans() {
		return transaction();
	}

	public DefragmentServices services() {
		return _services;
	}

	/** Convenience overload that reads the source slot by ID first. */
	public static void processCopy(DefragmentServices context, int sourceID, SlotCopyHandler command) {
		ByteArrayBuffer sourceReader = context.sourceBufferByID(sourceID);
		processCopy(context, sourceID, command, sourceReader);
	}

	/**
	 * Copies one slot: allocates a target slot of the same length, records
	 * the ID-to-slot mapping, lets {@code command} rewrite the contents via
	 * a fresh context, then writes the result to the target file.
	 */
	public static void processCopy(DefragmentServices services, int sourceID, SlotCopyHandler command, ByteArrayBuffer sourceReader) {
		int targetID = services.strictMappedID(sourceID);
		Slot targetSlot = services.allocateTargetSlot(sourceReader.length());
		services.mapping().mapId(targetID, targetSlot);
		DefragmentContextImpl context = new DefragmentContextImpl(sourceReader, services);
		command.processCopy(context);
		services.targetWriteBytes(context, targetSlot.address());
	}

	public void writeByte(byte value) {
		_source.incrementOffset(1);
		_target.writeByte(value);
	}

	public long readLong() {
		long value = _source.readLong();
		_target.incrementOffset(Const4.LONG_LENGTH);
		return value;
	}

	public void writeLong(long value) {
		_source.incrementOffset(Const4.LONG_LENGTH);
		_target.writeLong(value);
	}

	public BitMap4 readBitMap(int bitCount) {
		BitMap4 value = _source.readBitMap(bitCount);
		_target.incrementOffset(value.marshalledLength());
		return value;
	}

	public void readEnd() {
		_source.readEnd();
		_target.readEnd();
	}

	/** Writes the target-file equivalent of {@code originalID} and returns it. */
	public int writeMappedID(int originalID) {
		int mapped = _services.mappedID(originalID);
		_target.writeInt(mapped);
		return mapped;
	}

	public int length() {
		return _source.length();
	}

	public Transaction transaction() {
		return services().systemTrans();
	}

	public ObjectContainerBase container() {
		return transaction().container();
	}

	public TypeHandler4 typeHandlerForId(int id) {
		return container().typeHandlerForClassMetadataID(id);
	}

	public int handlerVersion() {
		return _objectHeader.handlerVersion();
	}

	public boolean isLegacyHandlerVersion() {
		return handlerVersion() == 0;
	}

	public int mappedID(int origID) {
		return mapping().strictMappedID(origID);
	}

	public ObjectContainer objectContainer() {
		return container();
	}

	/**
	 * only used by old handlers: OpenTypeHandler0, StringHandler0, ArrayHandler0.
	 * Doesn't need to work with modern IdSystems.
	 */
	public Slot allocateTargetSlot(int length) {
		return _services.allocateTargetSlot(length);
	}

	/**
	 * only used by old handlers: OpenTypeHandler0, StringHandler0, ArrayHandler0.
	 * Doesn't need to work with modern IdSystems.
	 */
	public Slot allocateMappedTargetSlot(int sourceAddress, int length) {
		Slot slot = allocateTargetSlot(length);
		_services.mapIDs(sourceAddress, slot.address(), false);
		return slot;
	}

	/**
	 * Allocates a mapped target slot and copies the raw source bytes into it.
	 * Returns the target address.
	 */
	public int copySlotToNewMapped(int sourceAddress, int length) throws IOException {
		Slot slot = allocateMappedTargetSlot(sourceAddress, length);
		ByteArrayBuffer sourceBuffer = sourceBufferByAddress(sourceAddress, length);
		targetWriteBytes(slot.address(), sourceBuffer);
		return slot.address();
	}

	public void targetWriteBytes(int address, ByteArrayBuffer buffer) {
		_services.targetWriteBytes(buffer, address);
	}

	public ByteArrayBuffer sourceBufferByAddress(int sourceAddress, int length) throws IOException {
		ByteArrayBuffer sourceBuffer = _services.sourceBufferByAddress(sourceAddress, length);
		return sourceBuffer;
	}

	public ByteArrayBuffer sourceBufferById(int sourceId) throws IOException {
		ByteArrayBuffer sourceBuffer = _services.sourceBufferByID(sourceId);
		return sourceBuffer;
	}

	public void writeToTarget(int address) {
		_services.targetWriteBytes(this, address);
	}

	public void writeBytes(byte[] bytes) {
		_target.writeBytes(bytes);
		_source.incrementOffset(bytes.length);
	}

	public ReadBuffer buffer() {
		return _source;
	}

	/**
	 * Defragments one value via its type handler. Values with a dedicated
	 * slot are handled by copying just the (indexed or unindexed) ID; all
	 * others delegate to the version-corrected handler.
	 */
	public void defragment(TypeHandler4 handler) {
		final TypeHandler4 typeHandler = HandlerRegistry.correctHandlerVersion(this, handler);
		if (Handlers4.useDedicatedSlot(this, typeHandler)) {
			if (Handlers4.hasClassIndex(typeHandler)) {
				copyID();
			} else {
				copyUnindexedID();
			}
			return;
		}
		typeHandler.defragment(DefragmentContextImpl.this);
	}

	public void beginSlot() {
		// do nothing
	}

	public ClassMetadata classMetadata() {
		return _objectHeader.classMetadata();
	}

	public boolean isNull(int fieldIndex) {
		return _objectHeader._headerAttributes.isNull(fieldIndex);
	}

	public int declaredAspectCount() {
		return _declaredAspectCount;
	}

	public void declaredAspectCount(int count) {
		_declaredAspectCount = count;
	}

	public SlotFormat slotFormat() {
		return SlotFormat.forHandlerVersion(handlerVersion());
	}

	public void currentParentSourceID(int id) {
		_currentParentSourceID = id;
	}

	/** Returns the stored parent source ID and resets it to 0 (one-shot). */
	public int consumeCurrentParentSourceID() {
		int id = _currentParentSourceID;
		_currentParentSourceID = 0;
		return id;
	}

	/**
	 * Rewrites an absolute address that points inside the current parent
	 * object: the offset relative to the parent's source address is preserved
	 * and rebased onto the parent's target address.
	 */
	public void copyAddress() {
		int sourceEntryAddress = _source.readInt();
		int sourceId = consumeCurrentParentSourceID();
		int sourceObjectAddress = _services.sourceAddressByID(sourceId);
		int entryOffset = sourceEntryAddress - sourceObjectAddress;
		int targetObjectAddress = _services.targetAddressByID(_services.strictMappedID(sourceId));
		_target.writeInt(targetObjectAddress + entryOffset);
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.nodetype;

import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newTreeMap;
import static java.util.Collections.emptyList;
import static org.apache.jackrabbit.JcrConstants.JCR_CHILDNODEDEFINITION;
import static org.apache.jackrabbit.JcrConstants.JCR_HASORDERABLECHILDNODES;
import static org.apache.jackrabbit.JcrConstants.JCR_ISMIXIN;
import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_NODETYPENAME;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYITEMNAME;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_PROPERTYDEFINITION;
import static org.apache.jackrabbit.JcrConstants.JCR_SUPERTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_ABSTRACT;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERYABLE;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_DECLARING_NODE_TYPE;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_MIXIN_TYPES;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_PRIMARY_TYPE;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_UUID;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.RESIDUAL_NAME;

import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import javax.jcr.nodetype.ItemDefinition;
import javax.jcr.nodetype.NoSuchNodeTypeException;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.NodeTypeDefinition;
import javax.jcr.nodetype.NodeTypeIterator;
import javax.jcr.nodetype.PropertyDefinition;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import org.apache.jackrabbit.commons.cnd.CompactNodeTypeDefWriter;
import org.apache.jackrabbit.commons.iterator.NodeTypeIteratorAdapter;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.namepath.JcrNameParser;
import org.apache.jackrabbit.oak.namepath.JcrPathParser;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.apache.jackrabbit.oak.plugins.nodetype.constraint.Constraints;
import org.apache.jackrabbit.oak.util.TreeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Read-only {@link NodeType} implementation backed by an Oak {@link Tree}
 * of the following structure:
 * <pre>
 * [nt:nodeType]
 * - jcr:nodeTypeName (NAME) protected mandatory
 * - jcr:supertypes (NAME) protected multiple
 * - jcr:isAbstract (BOOLEAN) protected mandatory
 * - jcr:isQueryable (BOOLEAN) protected mandatory
 * - jcr:isMixin (BOOLEAN) protected mandatory
 * - jcr:hasOrderableChildNodes (BOOLEAN) protected mandatory
 * - jcr:primaryItemName (NAME) protected
 * + jcr:propertyDefinition (nt:propertyDefinition) = nt:propertyDefinition protected sns
 * + jcr:childNodeDefinition (nt:childNodeDefinition) = nt:childNodeDefinition protected sns
 * </pre>
 * The parent of the backing tree is assumed to contain all registered node
 * types as siblings, keyed by their Oak names.
 */
class NodeTypeImpl extends AbstractTypeDefinition implements NodeType {

    private static final Logger log = LoggerFactory.getLogger(NodeTypeImpl.class);

    /**
     * Name pattern for the property and child node definition nodes.
     * Used to pick out the SNS indices from the names so the definitions
     * can be sorted in the same order they were created. This in turn
     * makes accessing node type information more deterministic.
     */
    private static final Pattern DEFINITION_PATTERN = Pattern.compile(
            "(" + JCR_PROPERTYDEFINITION + "|" + JCR_CHILDNODEDEFINITION
            + ")\\[([1-9][0-9]*)\\]");

    // Shared zero-length arrays returned instead of allocating new ones.
    private static final PropertyDefinition[] NO_PROPERTY_DEFINITIONS =
            new PropertyDefinition[0];

    private static final NodeDefinition[] NO_NODE_DEFINITIONS =
            new NodeDefinition[0];

    private static final NodeType[] NO_NODE_TYPES = new NodeType[0];

    private static final String[] NO_NAMES = new String[0];

    NodeTypeImpl(Tree type, NamePathMapper mapper) {
        super(type, mapper);
    }

    /** Oak name of this node type (from the backing definition tree). */
    private String getOakName() {
        return getOakName(definition);
    }

    /**
     * Oak name of the given type tree: the jcr:nodeTypeName property if
     * present, otherwise the tree's own node name.
     */
    private String getOakName(Tree tree) {
        PropertyState property = tree.getProperty(JCR_NODETYPENAME);
        if (property != null) {
            return property.getValue(Type.NAME);
        } else {
            return tree.getName();
        }
    }

    //-----------------------------------------------------------< NodeType >---

    @Override
    public String getName() {
        return mapper.getJcrName(getOakName());
    }

    @Override
    public String[] getDeclaredSupertypeNames() {
        String[] names = getNames(JCR_SUPERTYPES);
        if (names != null) {
            // Map stored Oak names to session-visible JCR names in place.
            for (int i = 0; i < names.length; i++) {
                names[i] = mapper.getJcrName(names[i]);
            }
        } else {
            names = NO_NAMES;
        }
        return names;
    }

    @Override
    public boolean isAbstract() {
        return getBoolean(JCR_IS_ABSTRACT);
    }

    @Override
    public boolean isMixin() {
        return getBoolean(JCR_ISMIXIN);
    }

    @Override
    public boolean hasOrderableChildNodes() {
        return getBoolean(JCR_HASORDERABLECHILDNODES);
    }

    @Override
    public boolean isQueryable() {
        return getBoolean(JCR_IS_QUERYABLE);
    }

    @Override
    public String getPrimaryItemName() {
        String oakName = getName(JCR_PRIMARYITEMNAME);
        if (oakName != null) {
            return mapper.getJcrName(oakName);
        } else {
            return null;
        }
    }

    /**
     * Returns the declared property definitions in their original order.
     *
     * @return declared property definitions
     */
    @Override @Nonnull
    public PropertyDefinition[] getDeclaredPropertyDefinitions() {
        // TreeMap keyed by SNS index restores the original creation order.
        Map<Integer, PropertyDefinition> definitions = newTreeMap();
        for (Tree child : definition.getChildren()) {
            Matcher matcher = DEFINITION_PATTERN.matcher(child.getName());
            if (matcher.matches()
                    && JCR_PROPERTYDEFINITION.equals(matcher.group(1))) {
                definitions.put(
                        Integer.valueOf(matcher.group(2)),
                        new PropertyDefinitionImpl(child, this, mapper));
            }
        }
        return definitions.values().toArray(NO_PROPERTY_DEFINITIONS);
    }

    /**
     * Returns the declared child node definitions in their original order.
     *
     * @return declared child node definitions
     */
    @Override @Nonnull
    public NodeDefinition[] getDeclaredChildNodeDefinitions() {
        // TreeMap keyed by SNS index restores the original creation order.
        Map<Integer, NodeDefinition> definitions = newTreeMap();
        for (Tree child : definition.getChildren()) {
            Matcher matcher = DEFINITION_PATTERN.matcher(child.getName());
            if (matcher.matches()
                    && JCR_CHILDNODEDEFINITION.equals(matcher.group(1))) {
                definitions.put(
                        Integer.valueOf(matcher.group(2)),
                        new NodeTypeImpl.NodeDefinitionHolder(child, this, mapper).definition);
            }
        }
        return definitions.values().toArray(NO_NODE_DEFINITIONS);
    }

    @Override
    public NodeType[] getSupertypes() {
        // LinkedHashMap keeps a stable, duplicate-free traversal order.
        Map<String, NodeType> supertypes = Maps.newLinkedHashMap();
        addSupertypes(definition, supertypes);
        return supertypes.values().toArray(NO_NODE_TYPES);
    }

    /**
     * Recursively collects all (transitive) supertypes of {@code type} into
     * {@code supertypes}, keyed by Oak name to avoid revisiting.
     */
    private void addSupertypes(Tree type, Map<String, NodeType> supertypes) {
        PropertyState property = type.getProperty(JCR_SUPERTYPES);
        if (property != null) {
            // All registered types are siblings of this type's definition.
            Tree root = definition.getParent();
            for (String oakName : property.getValue(Type.NAMES)) {
                if (!supertypes.containsKey(oakName)) {
                    Tree supertype = root.getChild(oakName);
                    checkState(supertype.exists());
                    supertypes.put(
                            oakName, new NodeTypeImpl(supertype, mapper));
                    addSupertypes(supertype, supertypes);
                }
            }
        }
    }

    @Override
    public NodeType[] getDeclaredSupertypes() {
        NodeType[] supertypes = NO_NODE_TYPES;
        String[] oakNames = getNames(JCR_SUPERTYPES);
        if (oakNames != null && oakNames.length > 0) {
            supertypes = new NodeType[oakNames.length];
            Tree root = definition.getParent();
            for (int i = 0; i < oakNames.length; i++) {
                Tree type = root.getChild(oakNames[i]);
                checkState(type.exists());
                supertypes[i] = new NodeTypeImpl(type, mapper);
            }
        }
        return supertypes;
    }

    @Override
    public NodeTypeIterator getSubtypes() {
        // First pass: build a supertype-name -> subtype-names inheritance map
        // over all registered types.
        Map<String, Set<String>> inheritance = Maps.newHashMap();

        Tree root = definition.getParent();
        for (Tree child : root.getChildren()) {
            String oakName = getOakName(child);
            PropertyState supertypes = child.getProperty(JCR_SUPERTYPES);
            if (supertypes != null) {
                for (String supername : supertypes.getValue(Type.NAMES)) {
                    Set<String> subtypes = inheritance.get(supername);
                    if (subtypes == null) {
                        subtypes = Sets.newHashSet();
                        inheritance.put(supername, subtypes);
                    }
                    subtypes.add(oakName);
                }
            }
        }

        // Second pass: walk the map transitively starting from this type.
        Map<String, NodeType> subtypes = Maps.newHashMap();
        addSubtypes(getOakName(), subtypes, root, inheritance);
        return new NodeTypeIteratorAdapter(subtypes.values());
    }

    /**
     * Recursively collects the (transitive) subtypes of {@code typeName}
     * using the precomputed {@code inheritance} map.
     */
    private void addSubtypes(
            String typeName, Map<String, NodeType> subtypes,
            Tree root, Map<String, Set<String>> inheritance) {
        Set<String> subnames = inheritance.get(typeName);
        if (subnames != null) {
            for (String subname : subnames) {
                if (!subtypes.containsKey(subname)) {
                    Tree tree = root.getChild(subname);
                    subtypes.put(subname, new NodeTypeImpl(tree, mapper));
                }
            }
        }
    }

    @Override
    public NodeTypeIterator getDeclaredSubtypes() {
        List<NodeType> subtypes = Lists.newArrayList();

        String oakName = getOakName();
        Tree root = definition.getParent();
        for (Tree child : root.getChildren()) {
            PropertyState supertypes = child.getProperty(JCR_SUPERTYPES);
            if (supertypes != null) {
                for (String name : supertypes.getValue(Type.NAMES)) {
                    if (oakName.equals(name)) {
                        subtypes.add(new NodeTypeImpl(child, mapper));
                        break;
                    }
                }
            }
        }

        return new NodeTypeIteratorAdapter(subtypes);
    }

    @Override
    public boolean isNodeType(String nodeTypeName) {
        String oakName = mapper.getOakNameOrNull(nodeTypeName);
        return internalIsNodeType(oakName);
    }

    @Override
    public PropertyDefinition[] getPropertyDefinitions() {
        Collection<PropertyDefinition> definitions =
                internalGetPropertyDefinitions();
        return definitions.toArray(
                new PropertyDefinition[definitions.size()]);
    }

    @Override
    public NodeDefinition[] getChildNodeDefinitions() {
        Collection<NodeDefinition> definitions =
                internalGetChildDefinitions();
        return definitions.toArray(
                new NodeDefinition[definitions.size()]);
    }

    @Override
    public boolean canSetProperty(String propertyName, Value value) {
        // A null value means remove per JCR semantics.
        if (value == null) {
            return canRemoveProperty(propertyName);
        }

        try {
            EffectiveNodeType effective =
                    new EffectiveNodeType(this, getManager());
            PropertyDefinition def = effective.getPropertyDefinition(
                    propertyName, false, value.getType(), false);
            return !def.isProtected() &&
                    meetsTypeConstraints(value, def.getRequiredType()) &&
                    meetsValueConstraints(value, def.getValueConstraints());
        } catch (RepositoryException e) {  // TODO don't use exceptions for flow control. Use internal method in ReadOnlyNodeTypeManager instead.
            log.debug(e.getMessage());
            return false;
        }
    }

    @Override
    public boolean canSetProperty(String propertyName, Value[] values) {
        // A null array means remove per JCR semantics.
        if (values == null) {
            return canRemoveProperty(propertyName);
        }

        try {
            // An empty array carries no type information; default to STRING.
            int type =
                    (values.length == 0) ? PropertyType.STRING : values[0].getType();
            EffectiveNodeType effective =
                    new EffectiveNodeType(this, getManager());
            PropertyDefinition def = effective.getPropertyDefinition(
                    propertyName, true, type, false);
            return !def.isProtected() &&
                    meetsTypeConstraints(values, def.getRequiredType()) &&
                    meetsValueConstraints(values, def.getValueConstraints());
        } catch (RepositoryException e) {  // TODO don't use exceptions for flow control. Use internal method in ReadOnlyNodeTypeManager instead.
            log.debug(e.getMessage());
            return false;
        }
    }

    @Override
    public boolean canAddChildNode(String childNodeName) {
        // FIXME: properly calculate matching definition
        for (NodeDefinition definition : getChildNodeDefinitions()) {
            String name = definition.getName();
            if (matches(childNodeName, name) || RESIDUAL_NAME.equals(name)) {
                return !definition.isProtected() && definition.getDefaultPrimaryType() != null;
            }
        }
        return false;
    }

    @Override
    public boolean canAddChildNode(String childNodeName, String nodeTypeName) {
        NodeType type;
        try {
            type = getManager().getNodeType(nodeTypeName);
            if (type.isAbstract()) {
                // Abstract types can never be instantiated.
                return false;
            }
        } catch (NoSuchNodeTypeException e) {
            return false;
        } catch (RepositoryException e) {
            log.warn("Unable to access node type " + nodeTypeName, e);
            return false;
        }
        // FIXME: properly calculate matching definition
        for (NodeDefinition definition : getChildNodeDefinitions()) {
            String name = definition.getName();
            if (matches(childNodeName, name) || RESIDUAL_NAME.equals(name)) {
                if (definition.isProtected()) {
                    return false;
                }
                for (String required : definition.getRequiredPrimaryTypeNames()) {
                    if (type.isNodeType(required)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    @Override
    public boolean canRemoveItem(String itemName) {
        List<ItemDefinition> definitions = Lists.newArrayList();
        definitions.addAll(Arrays.asList(getChildNodeDefinitions()));
        definitions.addAll(Arrays.asList(getPropertyDefinitions()));
        return internalCanRemoveItem(itemName, definitions);
    }

    @Override
    public boolean canRemoveNode(String nodeName) {
        return internalCanRemoveItem(nodeName, Arrays.asList(getChildNodeDefinitions()));
    }

    @Override
    public boolean canRemoveProperty(String propertyName) {
        return internalCanRemoveItem(propertyName, Arrays.asList(getPropertyDefinitions()));
    }

    /**
     * Returns the namespace neutral CND of the given node type definition.
     * @param def the node type definition
     * @return the CND
     */
    private static String getCnd(NodeTypeDefinition def) {
        StringWriter out = new StringWriter();
        // Identity namespace mapping keeps the CND namespace neutral, which
        // makes it usable as an equality/hash key (see equals/hashCode).
        CompactNodeTypeDefWriter cndWriter = new CompactNodeTypeDefWriter(out, new CompactNodeTypeDefWriter.NamespaceMapping(){
            @Override
            public String getNamespaceURI(String s) {
                return s;
            }
        }, false);
        try {
            cndWriter.write(def);
        } catch (IOException e) {
            // should never occur
            log.error("Error generating CND of " + def, e);
            throw new IllegalStateException(e);
        }
        return out.toString();
    }

    //-------------------------------------------------------------< Object >---

    @Override
    public String toString() {
        return getName();
    }

    @Override
    public boolean equals(Object o) {
        // Two node types are equal iff their namespace-neutral CNDs match.
        return this == o || o instanceof NodeType && getCnd(this).equals(getCnd((NodeType) o));
    }

    @Override
    public int hashCode() {
        return getCnd(this).hashCode();
    }

    //-----------------------------------------------------------< internal >---

    private boolean internalCanRemoveItem(String itemName,
                                          Iterable<? extends ItemDefinition> definitions) {
        // FIXME: should properly calculate matching definition taking residual definitions into account.
        for (ItemDefinition definition : definitions) {
            String name = definition.getName();
            if (matches(itemName, name)) {
                if (definition.isMandatory() || definition.isProtected()) {
                    return false;
                }
            }
        }
        // NOTE(review): returns true whenever any definition exists, even if
        // none matched itemName — confirm this is the intended fallback.
        return definitions.iterator().hasNext();
    }

    /**
     * Anonymous read-only manager rooted at the parent of this type's
     * definition (i.e. the node type registry tree).
     */
    private ReadOnlyNodeTypeManager getManager() {
        final Tree types = definition.getParent();
        return new ReadOnlyNodeTypeManager() {
            @Override @CheckForNull
            protected Tree getTypes() {
                return types;
            }
        };
    }

    /** True if this type is, or transitively extends, {@code oakName}. */
    boolean internalIsNodeType(String oakName) {
        if (getOakName().equals(oakName)) {
            return true;
        }
        for (NodeType type : getDeclaredSupertypes()) {
            if (((NodeTypeImpl) type).internalIsNodeType(oakName)) {
                return true;
            }
        }
        return false;
    }

    Collection<NodeDefinition> internalGetChildDefinitions() {
        // TODO distinguish between additive and overriding node definitions. See 3.7.6.8 Item Definitions in Subtypes
        Collection<NodeDefinition> definitions = new ArrayList<NodeDefinition>();
        definitions.addAll(Arrays.asList(getDeclaredChildNodeDefinitions()));
        for (NodeType type : getSupertypes()) {
            definitions.addAll(Arrays.asList(type.getDeclaredChildNodeDefinitions()));
        }
        return definitions;
    }

    Collection<PropertyDefinition> internalGetPropertyDefinitions() {
        // TODO distinguish between additive and overriding property definitions. See 3.7.6.8 Item Definitions in Subtypes
        Collection<PropertyDefinition> definitions = new ArrayList<PropertyDefinition>();
        definitions.addAll(Arrays.asList(getDeclaredPropertyDefinitions()));
        for (NodeType type : getSupertypes()) {
            definitions.addAll(Arrays.asList(type.getDeclaredPropertyDefinitions()));
        }
        return definitions;
    }

    /**
     * Named property definitions declared by this type for the given Oak
     * name. jcr:primaryType, jcr:mixinTypes and jcr:uuid are stored under
     * escaped rep:* names and are translated here.
     */
    List<PropertyDefinition> getDeclaredNamedPropertyDefinitions(String oakName) {
        String escapedName = oakName;
        if (JCR_PRIMARYTYPE.equals(oakName)) {
            escapedName = REP_PRIMARY_TYPE;
        } else if (JCR_MIXINTYPES.equals(oakName)) {
            escapedName = REP_MIXIN_TYPES;
        } else if (JCR_UUID.equals(oakName)) {
            escapedName = REP_UUID;
        }
        return getDeclaredPropertyDefs(definition
                .getChild(REP_NAMED_PROPERTY_DEFINITIONS)
                .getChild(escapedName));
    }

    List<PropertyDefinition> getDeclaredResidualPropertyDefinitions() {
        return getDeclaredPropertyDefs(definition
                .getChild(REP_RESIDUAL_PROPERTY_DEFINITIONS));
    }

    List<NodeDefinition> getDeclaredNamedNodeDefinitions(String oakName) {
        return getDeclaredNodeDefs(definition
                .getChild(REP_NAMED_CHILD_NODE_DEFINITIONS)
                .getChild(oakName));
    }

    List<NodeDefinition> getDeclaredResidualNodeDefinitions() {
        return getDeclaredNodeDefs(definition
                .getChild(REP_RESIDUAL_CHILD_NODE_DEFINITIONS));
    }

    /**
     * Property definitions under {@code definitions} that are declared by
     * this type (filtered via rep:declaringNodeType).
     */
    private List<PropertyDefinition> getDeclaredPropertyDefs(Tree definitions) {
        if (definitions.exists()) {
            List<PropertyDefinition> list = newArrayList();
            String typeName = getOakName();
            for (Tree def : definitions.getChildren()) {
                String declaringTypeName =
                        TreeUtil.getName(def, REP_DECLARING_NODE_TYPE);
                if (typeName.equals(declaringTypeName)) {
                    list.add(new PropertyDefinitionImpl(def, this, mapper));
                }
            }
            return list;
        } else {
            return emptyList();
        }
    }

    /**
     * Node definitions under {@code defs} that are declared by this type
     * (filtered via rep:declaringNodeType).
     */
    private List<NodeDefinition> getDeclaredNodeDefs(Tree defs) {
        if (defs.exists()) {
            List<NodeDefinition> list = newArrayList();
            String typeName = getOakName();
            for (Tree def : defs.getChildren()) {
                String declaringTypeName =
                        TreeUtil.getName(def, REP_DECLARING_NODE_TYPE);
                if (typeName.equals(declaringTypeName)) {
                    list.add(new NodeDefinitionImpl(def, this, mapper));
                }
            }
            return list;
        } else {
            return emptyList();
        }
    }

    //--------------------------------------------------------------------------

    /**
     * Checks whether {@code value} can be converted to {@code requiredType}
     * by actually attempting the conversion (per JCR value conversion rules).
     */
    private static boolean meetsTypeConstraints(Value value, int requiredType) {
        try {
            switch (requiredType) {
                case PropertyType.STRING:
                    value.getString();
                    return true;
                case PropertyType.BINARY:
                    value.getBinary();
                    return true;
                case PropertyType.LONG:
                    value.getLong();
                    return true;
                case PropertyType.DOUBLE:
                    value.getDouble();
                    return true;
                case PropertyType.DATE:
                    value.getDate();
                    return true;
                case PropertyType.BOOLEAN:
                    value.getBoolean();
                    return true;
                case PropertyType.NAME: {
                    // Numeric/boolean values never convert to NAME.
                    int type = value.getType();
                    return type != PropertyType.DOUBLE
                            && type != PropertyType.LONG
                            && type != PropertyType.BOOLEAN
                            && JcrNameParser.validate(value.getString());
                }
                case PropertyType.PATH: {
                    // Numeric/boolean values never convert to PATH.
                    int type = value.getType();
                    return type != PropertyType.DOUBLE
                            && type != PropertyType.LONG
                            && type != PropertyType.BOOLEAN
                            && JcrPathParser.validate(value.getString());
                }
                case PropertyType.REFERENCE:
                case PropertyType.WEAKREFERENCE:
                    return IdentifierManager.isValidUUID(value.getString());
                case PropertyType.URI:
                    new URI(value.getString());
                    return true;
                case PropertyType.DECIMAL:
                    value.getDecimal();
                    return true;
                case PropertyType.UNDEFINED:
                    return true;
                default:
                    log.warn("Invalid property type value: " + requiredType);
                    return false;
            }
        } catch (RepositoryException e) {
            return false;
        } catch (URISyntaxException e) {
            return false;
        }
    }

    private static boolean meetsTypeConstraints(Value[] values, int requiredType) {
        // Constraints must be met by all values
        for (Value value : values) {
            if (!meetsTypeConstraints(value, requiredType)) {
                return false;
            }
        }
        return true;
    }

    private static boolean meetsValueConstraints(Value value, String[] constraints) {
        if (constraints == null || constraints.length == 0) {
            return true;
        }

        // Any of the constraints must be met
        for (String constraint : constraints) {
            if (Constraints.valueConstraint(value.getType(), constraint).apply(value)) {
                return true;
            }
        }
        return false;
    }

    private static boolean meetsValueConstraints(Value[] values, String[] constraints) {
        if (constraints == null || constraints.length == 0) {
            return true;
        }

        // Constraints must be met by all values
        for (Value value : values) {
            if (!meetsValueConstraints(value, constraints)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Name match between a JCR item name and a definition name, compared in
     * Oak-name space.
     */
    private boolean matches(String childNodeName, String name) {
        String oakChildName = mapper.getOakNameOrNull(childNodeName);
        String oakName = mapper.getOakNameOrNull(name);
        // TODO need a better way to handle SNS
        return oakChildName != null && oakChildName.startsWith(oakName);
    }
}
/*
 * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.maven.registry;

import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
import org.wso2.maven.capp.model.Artifact;
import org.wso2.maven.capp.mojo.AbstractPOMGenMojo;
import org.wso2.maven.capp.utils.WSO2MavenPluginConstantants;
import org.wso2.maven.core.utils.MavenUtils;
import org.wso2.maven.registry.beans.RegistryCollection;
import org.wso2.maven.registry.beans.RegistryElement;
import org.wso2.maven.registry.beans.RegistryItem;
import org.wso2.maven.registry.utils.GeneralProjectMavenUtils;

import java.io.File;
import java.io.IOException;
import java.sql.Time;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

/**
 * This is the Maven Mojo used for generating a pom for a sequence artifact
 * from the old CApp project structure
 *
 * @goal pom-gen
 *
 */
public class RegistryResourcePOMGenMojo extends AbstractPOMGenMojo {

	/**
	 * @parameter default-value="${project}"
	 */
	private MavenProject project;

	/**
	 * Maven ProjectHelper.
	 *
	 * @component
	 */
	private MavenProjectHelper projectHelper;

	/**
	 * The path of the location to output the pom
	 *
	 * @parameter expression="${project.build.directory}/artifacts"
	 */
	private File outputLocation;

	/**
	 * The resulting extension of the file
	 *
	 * @parameter
	 */
	private File artifactLocation;

	/**
	 * POM location for the module project
	 *
	 * @parameter expression="${project.build.directory}/pom.xml"
	 */
	private File moduleProject;

	/**
	 * Group id to use for the generated pom
	 *
	 * @parameter
	 */
	private String groupId;

	private static final String ARTIFACT_TYPE = "registry/resource";

	// Registry artifacts read from the project's artifact.xml.
	private List<RegistryArtifact> artifacts;

	// Maps each generated C-App Artifact back to the RegistryArtifact it
	// was created from, so copyResources() can find the right metadata.
	private Map<Artifact, RegistryArtifact> artifactToRegArtifactMap;

	private List<RegistryArtifact> retrieveArtifacts() {
		return GeneralProjectMavenUtils.retrieveArtifacts(getArtifactLocation());
	}

	/**
	 * Logs a timestamped debug message, but only when debug logging is
	 * enabled (avoids the Time allocation otherwise).
	 */
	private void logDebug(String message) {
		if (getLog().isDebugEnabled()) {
			getLog().debug(new Time(System.currentTimeMillis()) + message);
		}
	}

	/**
	 * Entry point: retrieves all registry artifacts, maps each to a C-App
	 * {@link Artifact}, and delegates POM generation to the superclass.
	 *
	 * @throws MojoExecutionException on processing failure
	 * @throws MojoFailureException on build failure
	 */
	public void execute() throws MojoExecutionException, MojoFailureException {
		// Retrieving all the existing ESB Artifacts for the given Maven project
		logDebug(" Starting Artifacts list retrieval process.");
		artifacts = retrieveArtifacts();
		logDebug(" Artifacts list retrieval completed");

		// Artifact list
		List<Artifact> mappedArtifacts = new ArrayList<Artifact>();

		// Initializing Artifacts to Registry Artifacts Map.
		artifactToRegArtifactMap = new Hashtable<Artifact, RegistryArtifact>();

		// Mapping ESBArtifacts to C-App artifacts so that we can reuse the maven-sequence-plugin
		for (RegistryArtifact registryArtifact : artifacts) {
			Artifact artifact = new Artifact();
			artifact.setName(registryArtifact.getName());
			artifact.setVersion(registryArtifact.getVersion());
			artifact.setType(registryArtifact.getType());
			artifact.setServerRole(registryArtifact.getServerRole());
			artifact.setFile("registry-info.xml");
			artifact.setSource(new File(getArtifactLocation(), "artifact.xml"));
			mappedArtifacts.add(artifact);

			// Add the mapping between C-App Artifact and Registry Artifact
			artifactToRegArtifactMap.put(artifact, registryArtifact);
		}
		logDebug(" Artifact model mapping completed");

		// Calling the process artifacts method of super type to continue the sequence.
		logDebug(" Starting Artifact Processing");
		super.processArtifacts(mappedArtifacts);
	}

	/**
	 * Generates registry-info.xml for the given artifact and copies its
	 * registry resources (files or directories) into the project's
	 * {@code resources} folder.
	 *
	 * @param project         the generated artifact's Maven project
	 * @param projectLocation output directory for the artifact project
	 * @param artifact        the C-App artifact being processed
	 * @throws IOException if copying a resource fails
	 */
	protected void copyResources(MavenProject project, File projectLocation, Artifact artifact) throws IOException {
		// POM file and Registry-info.xml in the outside
		// Creating the registry info file outside
		logDebug(" Starting to process the artifact copy process");
		File regInfoFile = new File(projectLocation, "registry-info.xml");
		RegistryInfo regInfo = new RegistryInfo();
		regInfo.setSource(regInfoFile);

		// Filling info sections
		logDebug(" Starting generation of Registry Resource Metadata");
		logDebug(" Reusing the previously collected Artifacts details.");
		RegistryArtifact mappedRegistryArtifact = artifactToRegArtifactMap.get(artifact);
		if (mappedRegistryArtifact != null) {
			logDebug(" C-App artifact to Registry Artifact Mapping available.");
			// This is the correct registry artifact for this C-App artifact.
			List<RegistryElement> allRegistryItems = mappedRegistryArtifact.getAllRegistryItems();
			for (RegistryElement registryItem : allRegistryItems) {
				regInfo.addESBArtifact(registryItem);
			}
		} else {
			logDebug(" C-App artifact to Registry Artifact Mapping not available.");
			// Fall back to reverse-matching the registry artifact by its fields.
			for (RegistryArtifact registryArtifact : artifacts) {
				// NOTE(review): the version comparison uses the project
				// version, not registryArtifact.getVersion() — looks like a
				// copy/paste slip; confirm before changing behavior.
				if (registryArtifact.getName().equalsIgnoreCase(artifact.getName())
						&& this.getProject().getVersion().equalsIgnoreCase(artifact.getVersion())
						&& registryArtifact.getType().equalsIgnoreCase(artifact.getType())
						&& registryArtifact.getServerRole().equalsIgnoreCase(artifact.getServerRole())) {
					// This is the correct registry artifact for this artifact:Yes this is reverse artifact to registry artifact mapping
					List<RegistryElement> allRegistryItems = registryArtifact.getAllRegistryItems();
					for (RegistryElement registryItem : allRegistryItems) {
						regInfo.addESBArtifact(registryItem);
					}
					break;
				}
			}
		}
		logDebug(" Registry Resource Metadata collection is complete.");

		logDebug(" Starting serialization of Registry Resource Metadata");
		try {
			regInfo.toFile();
		} catch (Exception e) {
			// Previously swallowed silently; surface the failure in the log.
			getLog().error("Error serializing registry-info.xml to " + regInfoFile, e);
		}
		logDebug(" Completed serialization of Registry Resource Metadata");

		logDebug(" Start copying the Registry Resource Process");
		List<RegistryElement> allESBArtifacts = regInfo.getAllESBArtifacts();
		for (RegistryElement registryItem : allESBArtifacts) {
			File file = null;
			if (registryItem instanceof RegistryItem) {
				file = new File(artifact.getSource().getParentFile().getPath(),
						((RegistryItem) registryItem).getFile());
				((RegistryItem) registryItem).setFile(file.getName());
			} else if (registryItem instanceof RegistryCollection) {
				file = new File(artifact.getSource().getParentFile().getPath(),
						((RegistryCollection) registryItem).getDirectory());
				((RegistryCollection) registryItem).setDirectory(file.getName());
			}
			// Guard against unknown RegistryElement subtypes: previously this
			// fell through to file.isFile() and threw a NullPointerException.
			if (file == null) {
				getLog().warn("Skipping unsupported registry element type: " + registryItem.getClass().getName());
				continue;
			}
			logDebug(" Metadata processing complete. Copying artifacts.");

			// If resource is a file
			if (file.isFile()) {
				File processedFile = processTokenReplacement(file);
				FileUtils.copy(processedFile,
						new File(projectLocation, "resources" + File.separator + file.getName()));
			} else {
				FileUtils.copyDirectory(file,
						new File(projectLocation, "resources" + File.separator + file.getName()));
			}
			logDebug(" Artifact Copying complete.");

			// Re-serialize after rewriting the item's relative file name.
			try {
				regInfo.toFile();
			} catch (Exception e) {
				// Previously swallowed silently; surface the failure in the log.
				getLog().error("Error re-serializing registry-info.xml to " + regInfoFile, e);
			}
			logDebug(" Metadata file serialization completed.");
		}
		logDebug(" Artifact copy process is completed");
	}

	/**
	 * Adds the maven-registry-plugin entry (with the artifact file name as
	 * configuration) to the generated artifact POM.
	 */
	protected void addPlugins(MavenProject artifactMavenProject, Artifact artifact) {
		Plugin plugin = MavenUtils.createPluginEntry(artifactMavenProject, "org.wso2.maven", "maven-registry-plugin",
				WSO2MavenPluginConstantants.MAVEN_REGISTRY_PLUGIN_VERSION, true);
		Xpp3Dom configuration = (Xpp3Dom) plugin.getConfiguration();
		// add configuration
		Xpp3Dom aritfact = MavenUtils.createConfigurationNode(configuration, "artifact");
		aritfact.setValue(artifact.getFile().getName());
	}

	protected String getArtifactType() {
		return ARTIFACT_TYPE;
	}
}
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.hash; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.annotations.Beta; import com.google.common.annotations.VisibleForTesting; import com.google.common.primitives.UnsignedInts; import java.nio.ByteBuffer; import java.security.MessageDigest; import java.util.Iterator; /** * Static methods to obtain {@link HashFunction} instances, and other static * hashing-related utilities. * * @author Kevin Bourrillion * @author Dimitris Andreou * @author Kurt Alfred Kluever * @since 11.0 */ @Beta public final class Hashing { private Hashing() {} /** * Used to randomize {@link #goodFastHash} instances, so that programs which persist anything * dependent on hashcodes of those, will fail sooner than later. */ private static final int GOOD_FAST_HASH_SEED = (int) System.currentTimeMillis(); // Used by goodFastHash when minimumBits == 32. private static final HashFunction GOOD_FAST_HASH_FUNCTION_32 = murmur3_32(GOOD_FAST_HASH_SEED); // Used by goodFastHash when 32 < minimumBits <= 128. private static final HashFunction GOOD_FAST_HASH_FUNCTION_128 = murmur3_128(GOOD_FAST_HASH_SEED); /** * Returns a general-purpose, <b>non-cryptographic-strength</b>, streaming hash function that * produces hash codes of length at least {@code minimumBits}. 
Users without specific * compatibility requirements and who do not persist the hash codes are encouraged to * choose this hash function. * * <p>Repeated calls to {@link #goodFastHash} with the same {@code minimumBits} value will * return {@link HashFunction} instances with identical behavior (but not necessarily the * same instance) for the duration of the current virtual machine. * * <p><b>Warning: the implementation is unspecified and is subject to change.</b> * * @throws IllegalArgumentException if {@code minimumBits} is not positive */ public static HashFunction goodFastHash(int minimumBits) { int bits = checkPositiveAndMakeMultipleOf32(minimumBits); if (bits == 32) { return GOOD_FAST_HASH_FUNCTION_32; } if (bits <= 128) { return GOOD_FAST_HASH_FUNCTION_128; } // Otherwise, join together some 128-bit murmur3s int hashFunctionsNeeded = (bits + 127) / 128; HashFunction[] hashFunctions = new HashFunction[hashFunctionsNeeded]; hashFunctions[0] = GOOD_FAST_HASH_FUNCTION_128; int seed = GOOD_FAST_HASH_SEED; for (int i = 1; i < hashFunctionsNeeded; i++) { seed += 1500450271; // a prime; shouldn't matter hashFunctions[i] = murmur3_128(seed); } return new ConcatenatedHashFunction(hashFunctions); } /** * Returns a hash function implementing the * <a href="http://smhasher.googlecode.com/svn/trunk/MurmurHash3.cpp">32-bit murmur3 * algorithm</a> (little-endian variant), using the given seed value. */ public static HashFunction murmur3_32(int seed) { return new Murmur3_32HashFunction(seed); } /** * Returns a hash function implementing the * <a href="http://smhasher.googlecode.com/svn/trunk/MurmurHash3.cpp">32-bit murmur3 * algorithm</a> (little-endian variant), using a seed value of zero. 
*/ public static HashFunction murmur3_32() { return MURMUR3_32; } private static final Murmur3_32HashFunction MURMUR3_32 = new Murmur3_32HashFunction(0); /** * Returns a hash function implementing the * <a href="http://smhasher.googlecode.com/svn/trunk/MurmurHash3.cpp"> * 128-bit murmur3 algorithm, x64 variant</a> (little-endian variant), using the given seed * value. */ public static HashFunction murmur3_128(int seed) { return new Murmur3_128HashFunction(seed); } /** * Returns a hash function implementing the * <a href="http://smhasher.googlecode.com/svn/trunk/MurmurHash3.cpp"> * 128-bit murmur3 algorithm, x64 variant</a> (little-endian variant), using a seed value * of zero. */ public static HashFunction murmur3_128() { return MURMUR3_128; } private static final Murmur3_128HashFunction MURMUR3_128 = new Murmur3_128HashFunction(0); /** * Returns a hash function implementing the MD5 hash algorithm (128 hash bits) by delegating to * the MD5 {@link MessageDigest}. */ public static HashFunction md5() { return MD5; } private static final HashFunction MD5 = new MessageDigestHashFunction("MD5"); /** * Returns a hash function implementing the SHA-1 algorithm (160 hash bits) by delegating to the * SHA-1 {@link MessageDigest}. */ public static HashFunction sha1() { return SHA_1; } private static final HashFunction SHA_1 = new MessageDigestHashFunction("SHA-1"); /** * Returns a hash function implementing the SHA-256 algorithm (256 hash bits) by delegating to * the SHA-256 {@link MessageDigest}. */ public static HashFunction sha256() { return SHA_256; } private static final HashFunction SHA_256 = new MessageDigestHashFunction("SHA-256"); /** * Returns a hash function implementing the SHA-512 algorithm (512 hash bits) by delegating to the * SHA-512 {@link MessageDigest}. */ public static HashFunction sha512() { return SHA_512; } private static final HashFunction SHA_512 = new MessageDigestHashFunction("SHA-512"); // Lazy initiliazation holder class idiom. 
/** * If {@code hashCode} has enough bits, returns {@code hashCode.asLong()}, otherwise * returns a {@code long} value with {@code hashCode.asInt()} as the least-significant * four bytes and {@code 0x00} as each of the most-significant four bytes. */ public static long padToLong(HashCode hashCode) { return (hashCode.bits() < 64) ? UnsignedInts.toLong(hashCode.asInt()) : hashCode.asLong(); } /** * Assigns to {@code hashCode} a "bucket" in the range {@code [0, buckets)}, in a uniform * manner that minimizes the need for remapping as {@code buckets} grows. That is, * {@code consistentHash(h, n)} equals: * * <ul> * <li>{@code n - 1}, with approximate probability {@code 1/n} * <li>{@code consistentHash(h, n - 1)}, otherwise (probability {@code 1 - 1/n}) * </ul> * * <p>See the <a href="http://en.wikipedia.org/wiki/Consistent_hashing">wikipedia * article on consistent hashing</a> for more information. * <p> * If you might want to have weights for the buckets in the future, take a look at * {@code weightedConsistentHash}. */ public static int consistentHash(HashCode hashCode, int buckets) { return consistentHash(padToLong(hashCode), buckets); } /** * Assigns to {@code input} a "bucket" in the range {@code [0, buckets)}, in a uniform * manner that minimizes the need for remapping as {@code buckets} grows. That is, * {@code consistentHash(h, n)} equals: * * <ul> * <li>{@code n - 1}, with approximate probability {@code 1/n} * <li>{@code consistentHash(h, n - 1)}, otherwise (probability {@code 1 - 1/n}) * </ul> * * <p>See the <a href="http://en.wikipedia.org/wiki/Consistent_hashing">wikipedia * article on consistent hashing</a> for more information. * <p> * If you might want to have weights for the buckets in the future, take a look at * {@code weightedConsistentHash}. 
*/ public static int consistentHash(long input, int buckets) { checkArgument(buckets > 0, "buckets must be positive: %s", buckets); LinearCongruentialGenerator generator = new LinearCongruentialGenerator(input); int candidate = 0; int next; // Jump from bucket to bucket until we go out of range while (true) { next = (int) ((candidate + 1) / generator.nextDouble()); if (next >= 0 && next < buckets) { candidate = next; } else { return candidate; } } } /** * Returns a hash code, having the same bit length as each of the input hash codes, * that combines the information of these hash codes in an ordered fashion. That * is, whenever two equal hash codes are produced by two calls to this method, it * is <i>as likely as possible</i> that each was computed from the <i>same</i> * input hash codes in the <i>same</i> order. * * @throws IllegalArgumentException if {@code hashCodes} is empty, or the hash codes * do not all have the same bit length */ public static HashCode combineOrdered(Iterable<HashCode> hashCodes) { Iterator<HashCode> iterator = hashCodes.iterator(); checkArgument(iterator.hasNext(), "Must be at least 1 hash code to combine."); int bits = iterator.next().bits(); byte[] resultBytes = new byte[bits / 8]; for (HashCode hashCode : hashCodes) { byte[] nextBytes = hashCode.asBytes(); checkArgument(nextBytes.length == resultBytes.length, "All hashcodes must have the same bit length."); for (int i = 0; i < nextBytes.length; i++) { resultBytes[i] = (byte) (resultBytes[i] * 37 ^ nextBytes[i]); } } return HashCodes.fromBytesNoCopy(resultBytes); } /** * Returns a hash code, having the same bit length as each of the input hash codes, * that combines the information of these hash codes in an unordered fashion. That * is, whenever two equal hash codes are produced by two calls to this method, it * is <i>as likely as possible</i> that each was computed from the <i>same</i> * input hash codes in <i>some</i> order. 
* * @throws IllegalArgumentException if {@code hashCodes} is empty, or the hash codes * do not all have the same bit length */ public static HashCode combineUnordered(Iterable<HashCode> hashCodes) { Iterator<HashCode> iterator = hashCodes.iterator(); checkArgument(iterator.hasNext(), "Must be at least 1 hash code to combine."); byte[] resultBytes = new byte[iterator.next().bits() / 8]; for (HashCode hashCode : hashCodes) { byte[] nextBytes = hashCode.asBytes(); checkArgument(nextBytes.length == resultBytes.length, "All hashcodes must have the same bit length."); for (int i = 0; i < nextBytes.length; i++) { resultBytes[i] += nextBytes[i]; } } return HashCodes.fromBytesNoCopy(resultBytes); } /** * Checks that the passed argument is positive, and ceils it to a multiple of 32. */ static int checkPositiveAndMakeMultipleOf32(int bits) { checkArgument(bits > 0, "Number of bits must be positive"); return (bits + 31) & ~31; } // TODO(kevinb): Maybe expose this class via a static Hashing method? @VisibleForTesting static final class ConcatenatedHashFunction extends AbstractCompositeHashFunction { private final int bits; ConcatenatedHashFunction(HashFunction... functions) { super(functions); int bitSum = 0; for (HashFunction function : functions) { bitSum += function.bits(); } this.bits = bitSum; } @Override HashCode makeHash(Hasher[] hashers) { // TODO(user): Get rid of the ByteBuffer here? byte[] bytes = new byte[bits / 8]; ByteBuffer buffer = ByteBuffer.wrap(bytes); for (Hasher hasher : hashers) { buffer.put(hasher.hash().asBytes()); } return HashCodes.fromBytesNoCopy(bytes); } @Override public int bits() { return bits; } } private static final class LinearCongruentialGenerator { private long state; public LinearCongruentialGenerator(long seed) { this.state = seed; } public double nextDouble() { state = 2862933555777941757L * state + 1; return ((double) ((int) (state >>> 33) + 1)) / (0x1.0p31); } } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.services.resources.account; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.NotFoundException; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.jboss.resteasy.annotations.cache.NoCache; import org.jboss.resteasy.spi.HttpRequest; import org.keycloak.common.ClientConnection; import org.keycloak.common.Profile; import org.keycloak.common.enums.AccountRestApiVersion; import org.keycloak.common.util.StringPropertyReplacer; import org.keycloak.events.Details; import org.keycloak.events.EventBuilder; import org.keycloak.events.EventStoreProvider; import org.keycloak.events.EventType; import 
org.keycloak.models.AccountRoles; import org.keycloak.models.AuthenticatedClientSessionModel; import org.keycloak.models.ClientModel; import org.keycloak.models.ClientScopeModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.UserConsentModel; import org.keycloak.models.UserModel; import org.keycloak.provider.ConfiguredProvider; import org.keycloak.representations.account.ClientRepresentation; import org.keycloak.representations.account.ConsentRepresentation; import org.keycloak.representations.account.ConsentScopeRepresentation; import org.keycloak.representations.account.UserProfileAttributeMetadata; import org.keycloak.representations.account.UserProfileMetadata; import org.keycloak.representations.account.UserRepresentation; import org.keycloak.representations.idm.ErrorRepresentation; import org.keycloak.services.ErrorResponse; import org.keycloak.services.managers.Auth; import org.keycloak.services.managers.UserConsentManager; import org.keycloak.services.messages.Messages; import org.keycloak.services.resources.account.resources.ResourcesService; import org.keycloak.services.util.ResolveRelative; import org.keycloak.storage.ReadOnlyException; import org.keycloak.theme.Theme; import org.keycloak.userprofile.AttributeMetadata; import org.keycloak.userprofile.AttributeValidatorMetadata; import org.keycloak.userprofile.Attributes; import org.keycloak.userprofile.UserProfile; import org.keycloak.userprofile.UserProfileContext; import org.keycloak.userprofile.UserProfileProvider; import org.keycloak.userprofile.EventAuditingAttributeChangeListener; import org.keycloak.userprofile.ValidationException; import org.keycloak.userprofile.ValidationException.Error; import org.keycloak.validate.Validators; /** * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a> */ public class AccountRestService { @Context private HttpRequest request; @Context protected HttpHeaders headers; @Context protected 
ClientConnection clientConnection; private final KeycloakSession session; private final ClientModel client; private final EventBuilder event; private EventStoreProvider eventStore; private Auth auth; private final RealmModel realm; private final UserModel user; private final Locale locale; private final AccountRestApiVersion version; public AccountRestService(KeycloakSession session, Auth auth, ClientModel client, EventBuilder event, AccountRestApiVersion version) { this.session = session; this.auth = auth; this.realm = auth.getRealm(); this.user = auth.getUser(); this.client = client; this.event = event; this.locale = session.getContext().resolveLocale(user); this.version = version; } public void init() { eventStore = session.getProvider(EventStoreProvider.class); } /** * Get account information. * * @return */ @Path("/") @GET @Produces(MediaType.APPLICATION_JSON) @NoCache public UserRepresentation account(final @PathParam("userProfileMetadata") Boolean userProfileMetadata) { auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.VIEW_PROFILE); UserModel user = auth.getUser(); UserRepresentation rep = new UserRepresentation(); rep.setId(user.getId()); rep.setUsername(user.getUsername()); rep.setFirstName(user.getFirstName()); rep.setLastName(user.getLastName()); rep.setEmail(user.getEmail()); rep.setEmailVerified(user.isEmailVerified()); UserProfileProvider provider = session.getProvider(UserProfileProvider.class); UserProfile profile = provider.create(UserProfileContext.ACCOUNT, user); rep.setAttributes(profile.getAttributes().getReadable(false)); if(userProfileMetadata == null || userProfileMetadata.booleanValue()) rep.setUserProfileMetadata(createUserProfileMetadata(profile)); return rep; } private UserProfileMetadata createUserProfileMetadata(final UserProfile profile) { Map<String, List<String>> am = profile.getAttributes().getReadable(); if(am == null) return null; List<UserProfileAttributeMetadata> attributes = am.keySet().stream() .map(name -> 
profile.getAttributes().getMetadata(name)) .filter(Objects::nonNull) .sorted((a,b) -> Integer.compare(a.getGuiOrder(), b.getGuiOrder())) .map(sam -> toRestMetadata(sam, profile)) .collect(Collectors.toList()); return new UserProfileMetadata(attributes); } private UserProfileAttributeMetadata toRestMetadata(AttributeMetadata am, UserProfile profile) { return new UserProfileAttributeMetadata(am.getName(), am.getAttributeDisplayName(), profile.getAttributes().isRequired(am.getName()), profile.getAttributes().isReadOnly(am.getName()), am.getAnnotations(), toValidatorMetadata(am)); } private Map<String, Map<String, Object>> toValidatorMetadata(AttributeMetadata am){ // we return only validators which are instance of ConfiguredProvider. Others are expected as internal. return am.getValidators() == null ? null : am.getValidators().stream() .filter(avm -> (Validators.validator(session, avm.getValidatorId()) instanceof ConfiguredProvider)) .collect(Collectors.toMap(AttributeValidatorMetadata::getValidatorId, AttributeValidatorMetadata::getValidatorConfig)); } @Path("/") @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @NoCache public Response updateAccount(UserRepresentation rep) { auth.require(AccountRoles.MANAGE_ACCOUNT); event.event(EventType.UPDATE_PROFILE).client(auth.getClient()).user(auth.getUser()).detail(Details.CONTEXT, UserProfileContext.ACCOUNT.name()); UserProfileProvider profileProvider = session.getProvider(UserProfileProvider.class); UserProfile profile = profileProvider.create(UserProfileContext.ACCOUNT, rep.toAttributes(), auth.getUser()); try { profile.update(new EventAuditingAttributeChangeListener(profile, event)); event.success(); return Response.noContent().build(); } catch (ValidationException pve) { List<ErrorRepresentation> errors = new ArrayList<>(); for(Error err: pve.getErrors()) { errors.add(new ErrorRepresentation(err.getAttribute(), err.getMessage(), validationErrorParamsToString(err.getMessageParameters(), 
profile.getAttributes()))); } return ErrorResponse.errors(errors, pve.getStatusCode(), false); } catch (ReadOnlyException e) { return ErrorResponse.error(Messages.READ_ONLY_USER, Response.Status.BAD_REQUEST); } } private String[] validationErrorParamsToString(Object[] messageParameters, Attributes userProfileAttributes) { if(messageParameters == null) return null; String[] ret = new String[messageParameters.length]; int i = 0; for(Object p: messageParameters) { if(p != null) { //first parameter is user profile attribute name, we have to take Display Name for it if(i==0) { AttributeMetadata am = userProfileAttributes.getMetadata(p.toString()); if(am != null) ret[i++] = am.getAttributeDisplayName(); else ret[i++] = p.toString(); } else { ret[i++] = p.toString(); } } else { i++; } } return ret; } /** * Get session information. * * @return */ @Path("/sessions") public SessionResource sessions() { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.VIEW_PROFILE); return new SessionResource(session, auth, request); } @Path("/credentials") public AccountCredentialResource credentials() { checkAccountApiEnabled(); return new AccountCredentialResource(session, user, auth); } @Path("/resources") public ResourcesService resources() { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.VIEW_PROFILE); return new ResourcesService(session, user, auth, request); } private ClientRepresentation modelToRepresentation(ClientModel model, List<String> inUseClients, List<String> offlineClients, Map<String, UserConsentModel> consents) { ClientRepresentation representation = new ClientRepresentation(); representation.setClientId(model.getClientId()); representation.setClientName(StringPropertyReplacer.replaceProperties(model.getName(), getProperties())); representation.setDescription(model.getDescription()); representation.setUserConsentRequired(model.isConsentRequired()); 
representation.setInUse(inUseClients.contains(model.getClientId())); representation.setOfflineAccess(offlineClients.contains(model.getClientId())); representation.setRootUrl(model.getRootUrl()); representation.setBaseUrl(model.getBaseUrl()); representation.setEffectiveUrl(ResolveRelative.resolveRelativeUri(session, model.getRootUrl(), model.getBaseUrl())); UserConsentModel consentModel = consents.get(model.getClientId()); if(consentModel != null) { representation.setConsent(modelToRepresentation(consentModel)); representation.setLogoUri(model.getAttribute(ClientModel.LOGO_URI)); representation.setPolicyUri(model.getAttribute(ClientModel.POLICY_URI)); representation.setTosUri(model.getAttribute(ClientModel.TOS_URI)); } return representation; } private ConsentRepresentation modelToRepresentation(UserConsentModel model) { List<ConsentScopeRepresentation> grantedScopes = model.getGrantedClientScopes().stream() .map(m -> new ConsentScopeRepresentation(m.getId(), m.getName(), StringPropertyReplacer.replaceProperties(m.getConsentScreenText(), getProperties()))) .collect(Collectors.toList()); return new ConsentRepresentation(grantedScopes, model.getCreatedDate(), model.getLastUpdatedDate()); } private Properties getProperties() { try { return session.theme().getTheme(Theme.Type.ACCOUNT).getMessages(locale); } catch (IOException e) { return null; } } /** * Returns the consent for the client with the given client id. 
* * @param clientId client id to return the consent for * @return consent of the client */ @Path("/applications/{clientId}/consent") @GET @Produces(MediaType.APPLICATION_JSON) public Response getConsent(final @PathParam("clientId") String clientId) { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.VIEW_CONSENT, AccountRoles.MANAGE_CONSENT); ClientModel client = realm.getClientByClientId(clientId); if (client == null) { return ErrorResponse.error("No client with clientId: " + clientId + " found.", Response.Status.NOT_FOUND); } UserConsentModel consent = session.users().getConsentByClient(realm, user.getId(), client.getId()); if (consent == null) { return Response.noContent().build(); } return Response.ok(modelToRepresentation(consent)).build(); } /** * Deletes the consent for the client with the given client id. * * @param clientId client id to delete a consent for * @return returns 202 if deleted */ @Path("/applications/{clientId}/consent") @DELETE public Response revokeConsent(final @PathParam("clientId") String clientId) { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.MANAGE_CONSENT); event.event(EventType.REVOKE_GRANT); ClientModel client = realm.getClientByClientId(clientId); if (client == null) { event.event(EventType.REVOKE_GRANT_ERROR); String msg = String.format("No client with clientId: %s found.", clientId); event.error(msg); return ErrorResponse.error(msg, Response.Status.NOT_FOUND); } UserConsentManager.revokeConsentToClient(session, client, user); event.success(); return Response.noContent().build(); } /** * Creates or updates the consent of the given, requested consent for * the client with the given client id. Returns the appropriate REST response. 
* * @param clientId client id to set a consent for * @param consent requested consent for the client * @return the created or updated consent */ @Path("/applications/{clientId}/consent") @POST @Produces(MediaType.APPLICATION_JSON) public Response grantConsent(final @PathParam("clientId") String clientId, final ConsentRepresentation consent) { return upsert(clientId, consent); } /** * Creates or updates the consent of the given, requested consent for * the client with the given client id. Returns the appropriate REST response. * * @param clientId client id to set a consent for * @param consent requested consent for the client * @return the created or updated consent */ @Path("/applications/{clientId}/consent") @PUT @Produces(MediaType.APPLICATION_JSON) public Response updateConsent(final @PathParam("clientId") String clientId, final ConsentRepresentation consent) { return upsert(clientId, consent); } /** * Creates or updates the consent of the given, requested consent for * the client with the given client id. Returns the appropriate REST response. 
* * @param clientId client id to set a consent for * @param consent requested consent for the client * @return response to return to the caller */ private Response upsert(String clientId, ConsentRepresentation consent) { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.MANAGE_CONSENT); event.event(EventType.GRANT_CONSENT); ClientModel client = realm.getClientByClientId(clientId); if (client == null) { event.event(EventType.GRANT_CONSENT_ERROR); String msg = String.format("No client with clientId: %s found.", clientId); event.error(msg); return ErrorResponse.error(msg, Response.Status.NOT_FOUND); } try { UserConsentModel grantedConsent = createConsent(client, consent); if (session.users().getConsentByClient(realm, user.getId(), client.getId()) == null) { session.users().addConsent(realm, user.getId(), grantedConsent); } else { session.users().updateConsent(realm, user.getId(), grantedConsent); } event.success(); grantedConsent = session.users().getConsentByClient(realm, user.getId(), client.getId()); return Response.ok(modelToRepresentation(grantedConsent)).build(); } catch (IllegalArgumentException e) { return ErrorResponse.error(e.getMessage(), Response.Status.BAD_REQUEST); } } /** * Create a new consent model object from the requested consent object * for the given client model. 
* * @param client client to create a consent for * @param requested list of client scopes that the new consent should contain * @return newly created consent model * @throws IllegalArgumentException throws an exception if the scope id is not available */ private UserConsentModel createConsent(ClientModel client, ConsentRepresentation requested) throws IllegalArgumentException { UserConsentModel consent = new UserConsentModel(client); Map<String, ClientScopeModel> availableGrants = realm.getClientScopesStream() .collect(Collectors.toMap(ClientScopeModel::getId, Function.identity())); if (client.isConsentRequired()) { availableGrants.put(client.getId(), client); } for (ConsentScopeRepresentation scopeRepresentation : requested.getGrantedScopes()) { ClientScopeModel scopeModel = availableGrants.get(scopeRepresentation.getId()); if (scopeModel == null) { String msg = String.format("Scope id %s does not exist for client %s.", scopeRepresentation, consent.getClient().getName()); event.error(msg); throw new IllegalArgumentException(msg); } else { consent.addGrantedClientScope(scopeModel); } } return consent; } @Path("/linked-accounts") public LinkedAccountsResource linkedAccounts() { return new LinkedAccountsResource(session, request, client, auth, event, user); } @Path("/applications") @GET @Produces(MediaType.APPLICATION_JSON) @NoCache public Stream<ClientRepresentation> applications(@QueryParam("name") String name) { checkAccountApiEnabled(); auth.requireOneOf(AccountRoles.MANAGE_ACCOUNT, AccountRoles.VIEW_APPLICATIONS); Set<ClientModel> clients = new HashSet<>(); List<String> inUseClients = new LinkedList<>(); clients.addAll(session.sessions().getUserSessionsStream(realm, user) .flatMap(s -> s.getAuthenticatedClientSessions().values().stream()) .map(AuthenticatedClientSessionModel::getClient) .peek(client -> inUseClients.add(client.getClientId())) .collect(Collectors.toSet())); List<String> offlineClients = new LinkedList<>(); 
clients.addAll(session.sessions().getOfflineUserSessionsStream(realm, user) .flatMap(s -> s.getAuthenticatedClientSessions().values().stream()) .map(AuthenticatedClientSessionModel::getClient) .peek(client -> offlineClients.add(client.getClientId())) .collect(Collectors.toSet())); Map<String, UserConsentModel> consentModels = new HashMap<>(); clients.addAll(session.users().getConsentsStream(realm, user.getId()) .peek(consent -> consentModels.put(consent.getClient().getClientId(), consent)) .map(UserConsentModel::getClient) .collect(Collectors.toSet())); realm.getAlwaysDisplayInConsoleClientsStream().forEach(clients::add); return clients.stream().filter(client -> !client.isBearerOnly() && !client.getClientId().isEmpty()) .filter(client -> matches(client, name)) .map(client -> modelToRepresentation(client, inUseClients, offlineClients, consentModels)); } private boolean matches(ClientModel client, String name) { if(name == null) return true; else if(client.getName() == null) return false; else return client.getName().toLowerCase().contains(name.toLowerCase()); } // TODO Logs private static void checkAccountApiEnabled() { if (!Profile.isFeatureEnabled(Profile.Feature.ACCOUNT_API)) { throw new NotFoundException(); } } }
/** * Licensed to Cloudera, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Cloudera, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudera.flume.conf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.Map; import java.util.Map.Entry; import org.apache.log4j.Logger; import com.cloudera.flume.ExampleData; import com.cloudera.flume.core.Event; import com.cloudera.flume.core.EventImpl; import com.cloudera.flume.core.EventSink; import com.cloudera.flume.core.EventSource; import com.cloudera.flume.core.EventUtil; import com.cloudera.flume.core.connector.DirectDriver; import com.cloudera.flume.handlers.debug.MemorySinkSource; import com.cloudera.flume.reporter.ReportManager; import com.cloudera.flume.reporter.aggregator.AccumulatorSink; import com.cloudera.util.Pair; import org.junit.Test; /** * These are essentially the same tests as found in TestFactories, but use the * parser and builder infrastructure. * * TODO (jon) eventually build code generator so we just test * parse/generate/parse. 
 */
public class TestFlumeBuilderFunctional implements ExampleData {
  final static Logger LOG = Logger.getLogger(TestFlumeBuilderFunctional.class
      .getName());

  // Spec for a synthetic source: emits LINES events of 100 ascii bytes each.
  final String SOURCE = "asciisynth(25,100)";
  final static int LINES = 25;

  /** Builds a console sink from its spec and pushes one event through it. */
  @Test
  public void testBuildConsole() throws IOException, FlumeSpecException {
    EventSink snk = FlumeBuilder.buildSink(new Context(), "console");
    snk.open();
    snk.append(new EventImpl("test".getBytes()));
    snk.close();
  }

  /** Builds the synthetic text source and drains it, expecting LINES events. */
  @Test
  public void testBuildTextSource() throws IOException, FlumeSpecException {
    LOG.info("Working Dir path: " + new File(".").getAbsolutePath());
    EventSource src = FlumeBuilder.buildSource(SOURCE);
    src.open();
    Event e = null;
    int cnt = 0;
    while ((e = src.next()) != null) {
      LOG.info(e);
      cnt++;
    }
    src.close();
    // The source is specified to produce exactly LINES events before EOF (null).
    assertEquals(LINES, cnt);
  }

  /** Drives source -> sink through a DirectDriver and checks it finished cleanly. */
  @Test
  public void testConnector() throws IOException, InterruptedException,
      FlumeSpecException {
    EventSink snk = FlumeBuilder.buildSink(new Context(), "console");
    snk.open();
    EventSource src = FlumeBuilder.buildSource(SOURCE);
    src.open();
    DirectDriver conn = new DirectDriver(src, snk);
    conn.start();
    conn.join();
    snk.close();
    src.close();
    // A null error means the driver completed without raising.
    assertTrue(conn.getError() == null);
  }

  /** Fan-out sink: every event goes to both console and the "count" accumulator. */
  @Test
  public void testMultiSink() throws IOException, FlumeSpecException {
    LOG.info("== multi test start");
    String multi = "[ console , accumulator(\"count\") ]";
    EventSource src = FlumeBuilder.buildSource(SOURCE);
    EventSink snk = FlumeBuilder.buildSink(new ReportTestingContext(), multi);
    src.open();
    snk.open();
    EventUtil.dumpAll(src, snk);
    src.close();
    snk.close();
    AccumulatorSink cnt = (AccumulatorSink) ReportManager.get().getReportable(
        "count");
    assertEquals(LINES, cnt.getCount());
    LOG.info("== multi test stop");
  }

  /** Decorator sink: intervalSampler(5) passes every 5th event to the accumulator. */
  @Test
  public void testDecorated() throws IOException, FlumeSpecException {
    LOG.info("== Decorated start");
    String decorated = "{ intervalSampler(5) => accumulator(\"count\")}";
    // String decorated = "{ intervalSampler(5) => console }";
    EventSource src = FlumeBuilder.buildSource(SOURCE);
    EventSink snk = FlumeBuilder.buildSink(new ReportTestingContext(), decorated);
    src.open();
    snk.open();
    EventUtil.dumpAll(src, snk);
    src.close();
    snk.close();
    AccumulatorSink cnt = (AccumulatorSink) ReportManager.get().getReportable(
        "count");
    // Only one in every 5 events survives the sampler.
    assertEquals(LINES / 5, cnt.getCount());
    LOG.info("== Decorated stop");
  }

  /**
   * Failover sink: the flakey primary drops ~90% of appends; the accumulator
   * backup must still receive every event, so the total count is LINES.
   */
  @Test
  public void testFailover() throws IOException, FlumeSpecException {
    LOG.info("== failover start");
    // the primary is 90% flakey
    String multi = "< { flakeyAppend(.9,1337) => console } ? accumulator(\"count\") >";
    EventSource src = FlumeBuilder.buildSource(SOURCE);
    EventSink snk = FlumeBuilder.buildSink(new ReportTestingContext(), multi);
    src.open();
    snk.open();
    EventUtil.dumpAll(src, snk);
    src.close();
    snk.close();
    AccumulatorSink cnt = (AccumulatorSink) ReportManager.get().getReportable(
        "count");
    assertEquals(LINES, cnt.getCount());
    LOG.info("== failover stop");
  }

  /**
   * Functionally tests a let expression. First we use let to create a counter
   * called count. Then we have a failover sink -- the primary is 50% flakey,
   * but goes to count and and the backup is reliable and the same instance of
   * count. 100 messages are sent and the count should count all 100 regardless
   * of the path the message took (primary or backup).
   *
   * This also checks that open and close is handled correctly. Open should open
   * the let part of the expression, and not attempt to reopen them in the body
   * section.
   *
   * An accumulator must be used in these tests. Here's why: When a failure is
   * detected on the primary of a failover sink, it periodically attempts to
   * *reopen* the primary sink in order to use it again. The behavior of counter
   * when opened is to reset which is a problem.
   *
   * How we discovered this: When all tests are run in the same jvm, the backoff
   * timing of the failover gets set to 0 (no backoff) by another test and these
   * tests end up reopening the primary and resetting which restarts the counter
   * at 0. An accumulator just keeps counting.
   */
  @Test
  public void testLet() throws IOException, FlumeSpecException {
    LOG.info("== let and failover start");
    // the primary is 50% flakey but the accumulator should get all the
    // messages.
    String letcount = "let count := accumulator(\"count\") in < { flakeyAppend(.5,1337) => count} ? count >";
    EventSource src = MemorySinkSource.cannedData("canned data ", 100);
    EventSink snk = FlumeBuilder.buildSink(new ReportTestingContext(), letcount);
    src.open();
    snk.open();
    EventUtil.dumpAll(src, snk);
    src.close();
    snk.close();
    AccumulatorSink ctr = (AccumulatorSink) ReportManager.get().getReportable(
        "count");
    assertEquals(100, ctr.getCount());
    LOG.info("== let and failover stop");
  }

  /**
   * Lets allow for shadowing, sane semantics dictate the inner scope wins:
   * the inner "bar" accumulator receives all 100 events, the shadowed
   * outer "foo" accumulator none.
   */
  @Test
  public void testLetShadow() throws IOException, FlumeSpecException {
    LOG.info("== let shadowing start");
    String let = "let foo := accumulator(\"foo\") in let foo := accumulator(\"bar\") in foo";
    EventSource src = MemorySinkSource.cannedData("canned data ", 100);
    EventSink snk = FlumeBuilder.buildSink(new ReportTestingContext(), let);
    src.open();
    snk.open();
    EventUtil.dumpAll(src, snk);
    src.close();
    snk.close();
    AccumulatorSink fooctr = (AccumulatorSink) ReportManager.get()
        .getReportable("foo");
    AccumulatorSink barctr = (AccumulatorSink) ReportManager.get()
        .getReportable("bar");
    assertEquals(0, fooctr.getCount());
    assertEquals(100, barctr.getCount());
    // NOTE(review): log message below looks copy-pasted from testLet
    // ("let and failover stop"); left untouched since it is a runtime string.
    LOG.info("== let and failover stop");
  }

  /** Full node spec (source | sink): builds the map of drivers and runs each. */
  @Test
  public void testNode() throws IOException, FlumeSpecException {
    LOG.info("== node start");
    String multi = "localhost : " + SOURCE
        + " | < { flakeyAppend(.9,1337) => console } ? accumulator(\"count\") > ;";
    Map<String, Pair<EventSource, EventSink>> cfg = FlumeBuilder.build(
        new ReportTestingContext(), multi);
    for (Entry<String, Pair<EventSource, EventSink>> e : cfg.entrySet()) {
      // String name = e.getKey();
      EventSource src = e.getValue().getLeft();
      EventSink snk = e.getValue().getRight();
      src.open();
      snk.open();
      EventUtil.dumpAll(src, snk);
      src.close();
      snk.close();
    }
    AccumulatorSink cnt = (AccumulatorSink) ReportManager.get().getReportable(
        "count");
    assertEquals(LINES, cnt.getCount());
    LOG.info("== node stop");
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.runtime.evaluators.functions.temporal; import java.io.DataOutput; import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer; import org.apache.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer; import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider; import org.apache.asterix.om.base.ADuration; import org.apache.asterix.om.base.AMutableDuration; import org.apache.asterix.om.base.ANull; import org.apache.asterix.om.base.temporal.DurationArithmeticOperations; import org.apache.asterix.om.base.temporal.GregorianCalendarSystem; import org.apache.asterix.om.functions.AsterixBuiltinFunctions; import org.apache.asterix.om.functions.IFunctionDescriptor; import org.apache.asterix.om.functions.IFunctionDescriptorFactory; import org.apache.asterix.om.types.ATypeTag; import org.apache.asterix.om.types.BuiltinType; import org.apache.asterix.om.types.EnumDeserializer; import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier; 
import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluator;
import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IDataOutputProvider;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

/**
 * Descriptor for the calendar-duration-from-date builtin: given a DATE anchor
 * and a DURATION, it re-expresses the duration as calendar components
 * (years/months plus days/hours/mins/secs/ms) relative to that anchor date.
 *
 * NOTE(review): the class name misspells "Duration" ("Duartion"); it is kept
 * as-is because the name is referenced by the FACTORY and function registry.
 */
public class CalendarDuartionFromDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {

    private final static long serialVersionUID = 1L;
    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.CALENDAR_DURATION_FROM_DATE;

    // allowed input types (serialized type tags, compared against byte 0 of each arg)
    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();

    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        @Override
        public IFunctionDescriptor createFunctionDescriptor() {
            return new CalendarDuartionFromDateDescriptor();
        }
    };

    /* (non-Javadoc)
     * @see org.apache.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
     */
    @Override
    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
        return new ICopyEvaluatorFactory() {
            private static final long serialVersionUID = 1L;

            @Override
            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
                return new ICopyEvaluator() {

                    private DataOutput out = output.getDataOutput();
                    // scratch buffers for the two evaluated arguments
                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);

                    // possible output types
                    @SuppressWarnings("unchecked")
                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
                            .getSerializerDeserializer(BuiltinType.ANULL);
                    @SuppressWarnings("unchecked")
                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
                            .getSerializerDeserializer(BuiltinType.ADURATION);

                    // reusable result holder to avoid per-tuple allocation
                    private AMutableDuration aDuration = new AMutableDuration(0, 0);

                    private GregorianCalendarSystem calInstanct = GregorianCalendarSystem.getInstance();

                    @Override
                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
                        argOut0.reset();
                        eval0.evaluate(tuple);
                        argOut1.reset();
                        eval1.evaluate(tuple);
                        try {
                            // NULL in either argument yields NULL.
                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
                                nullSerde.serialize(ANull.NULL, out);
                                return;
                            }

                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
                                throw new AlgebricksException(FID.getName()
                                        + ": expects type DATE/NULL for parameter 0 but got "
                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
                            }

                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
                                throw new AlgebricksException(FID.getName()
                                        + ": expects type DURATION/NULL for parameter 1 but got "
                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
                            }

                            // Payload starts at offset 1 (byte 0 is the type tag).
                            int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
                                    argOut1.getByteArray(), 1);
                            long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
                                    argOut1.getByteArray(), 1);

                            // DATE stores a day count; scale to millisecond chronon.
                            long startingTimePoint = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
                                    * GregorianCalendarSystem.CHRONON_OF_DAY;

                            long endingTimePoint = DurationArithmeticOperations.addDuration(startingTimePoint,
                                    yearMonthDurationInMonths, dayTimeDurationInMs, false);

                            if (startingTimePoint == endingTimePoint) {
                                aDuration.setValue(0, 0);
                            } else {
                                boolean negative = false;

                                if (endingTimePoint < startingTimePoint) {
                                    negative = true;
                                    // swap the starting and ending time, so that ending time is always larger than the starting time.
                                    long tmpTime = endingTimePoint;
                                    endingTimePoint = startingTimePoint;
                                    startingTimePoint = tmpTime;
                                }

                                // Component-wise differences between the two time points; each
                                // may come out negative and is normalized by "borrowing" from
                                // the next-larger unit below.
                                int year0 = calInstanct.getYear(startingTimePoint);
                                int month0 = calInstanct.getMonthOfYear(startingTimePoint, year0);

                                int year1 = calInstanct.getYear(endingTimePoint);
                                int month1 = calInstanct.getMonthOfYear(endingTimePoint, year1);

                                int year = year1 - year0;
                                int month = month1 - month0;
                                int day = calInstanct.getDayOfMonthYear(endingTimePoint, year1, month1)
                                        - calInstanct.getDayOfMonthYear(startingTimePoint, year0, month0);
                                int hour = calInstanct.getHourOfDay(endingTimePoint)
                                        - calInstanct.getHourOfDay(startingTimePoint);
                                int min = calInstanct.getMinOfHour(endingTimePoint)
                                        - calInstanct.getMinOfHour(startingTimePoint);
                                int sec = calInstanct.getSecOfMin(endingTimePoint)
                                        - calInstanct.getSecOfMin(startingTimePoint);
                                int ms = calInstanct.getMillisOfSec(endingTimePoint)
                                        - calInstanct.getMillisOfSec(startingTimePoint);

                                // Borrow chain: ms -> sec -> min -> hour -> day -> month -> year.
                                if (ms < 0) {
                                    ms += GregorianCalendarSystem.CHRONON_OF_SECOND;
                                    sec -= 1;
                                }

                                if (sec < 0) {
                                    sec += GregorianCalendarSystem.CHRONON_OF_MINUTE
                                            / GregorianCalendarSystem.CHRONON_OF_SECOND;
                                    min -= 1;
                                }

                                if (min < 0) {
                                    min += GregorianCalendarSystem.CHRONON_OF_HOUR
                                            / GregorianCalendarSystem.CHRONON_OF_MINUTE;
                                    hour -= 1;
                                }

                                if (hour < 0) {
                                    hour += GregorianCalendarSystem.CHRONON_OF_DAY
                                            / GregorianCalendarSystem.CHRONON_OF_HOUR;
                                    day -= 1;
                                }

                                if (day < 0) {
                                    boolean isLeapYear = calInstanct.isLeapYear(year1);
                                    // need to "borrow" the days in previous month to make the day positive; when month is 1 (Jan), Dec will be borrowed
                                    day += (isLeapYear)
                                            ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
                                            : (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[(12 + month1 - 2) % 12]);
                                    month -= 1;
                                }

                                if (month < 0) {
                                    month += GregorianCalendarSystem.MONTHS_IN_A_YEAR;
                                    year -= 1;
                                }

                                // Re-apply the sign that was stripped by the swap above.
                                if (negative) {
                                    aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
                                            -1 * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
                                                    * GregorianCalendarSystem.CHRONON_OF_HOUR + min
                                                    * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
                                                    * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
                                } else {
                                    aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
                                            * GregorianCalendarSystem.CHRONON_OF_DAY + hour
                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
                                }
                            }

                            durationSerde.serialize(aDuration, out);

                        } catch (HyracksDataException hex) {
                            throw new AlgebricksException(hex);
                        }
                    }
                };
            }
        };
    }

    /* (non-Javadoc)
     * @see org.apache.asterix.om.functions.IFunctionDescriptor#getIdentifier()
     */
    @Override
    public FunctionIdentifier getIdentifier() {
        return FID;
    }

}
/*
 * Copyright 2011 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.base.evaluators;

import org.drools.base.BaseEvaluator;
import org.drools.base.ValueType;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.factmodel.traits.*;
import org.drools.rule.VariableRestriction.VariableContextEntry;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldValue;
import org.drools.spi.InternalReadAccessor;
import org.kie.runtime.ObjectFilter;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collection;
import java.util.Iterator;

/**
 * <p>The implementation of the 'isA' evaluator definition.</p>
 *
 * <p>The <b><code>isA</code></b> operator checks whether a traitable object
 * (a {@link Thing} proxy, a {@link TraitableBean}, or a plain object wrapped
 * by a trait proxy in working memory) currently dons a given trait type.</p>
 *
 * <p>It supports negation (<code>not isA</code>) and only applies to
 * {@link ValueType#TRAIT_TYPE} values.</p>
 *
 * <p>(This javadoc previously described the unrelated 'str' evaluator; it was
 * a copy-paste leftover.)</p>
 */
public class IsAEvaluatorDefinition implements EvaluatorDefinition {
    // Both operators share the id "isA"; the boolean flag marks the negated form.
    public static final Operator ISA = Operator.addOperatorToRegistry(
            "isA", false);
    public static final Operator NOT_ISA = Operator
            .addOperatorToRegistry("isA", true);
    protected static final String[] SUPPORTED_IDS = { ISA
            .getOperatorString() };

    private Evaluator[] evaluator;

    /**
     * @inheritDoc
     */
    public Evaluator getEvaluator(ValueType type, Operator operator) {
        return this.getEvaluator(type, operator.getOperatorString(), operator
                .isNegated(), null);
    }

    /**
     * @inheritDoc
     */
    public Evaluator getEvaluator(ValueType type, Operator operator,
            String parameterText) {
        return this.getEvaluator(type, operator.getOperatorString(), operator
                .isNegated(), parameterText);
    }

    /**
     * @inheritDoc
     */
    public Evaluator getEvaluator(ValueType type, String operatorId,
            boolean isNegated, String parameterText) {
        return getEvaluator(type, operatorId, isNegated, parameterText,
                Target.FACT, Target.FACT);
    }

    /**
     * @inheritDoc
     */
    public Evaluator getEvaluator(ValueType type, String operatorId,
            boolean isNegated, String parameterText, Target leftTarget,
            Target rightTarget) {
        // A fresh evaluator per request; parameterText is currently ignored
        // (see IsAEvaluator.setParameterText).
        IsAEvaluator evaluator = new IsAEvaluator(type, isNegated);
        evaluator.setParameterText(parameterText);
        return evaluator;
    }

    /**
     * @inheritDoc
     */
    public String[] getEvaluatorIds() {
        return SUPPORTED_IDS;
    }

    /**
     * @inheritDoc
     */
    public Target getTarget() {
        return Target.FACT;
    }

    /**
     * @inheritDoc
     */
    public boolean isNegatable() {
        return true;
    }

    /**
     * @inheritDoc
     */
    public boolean supportsType(ValueType type) {
        return (type.equals( ValueType.TRAIT_TYPE ) );
    }

    /**
     * @inheritDoc
     */
    public void readExternal(ObjectInput in) throws IOException,
            ClassNotFoundException {
        evaluator = (Evaluator[]) in.readObject();
    }

    /**
     * @inheritDoc
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(evaluator);
    }

    /** The actual runtime evaluator for the isA operator. */
    public static class IsAEvaluator extends BaseEvaluator {

        // parameterText is accepted but unused by this evaluator.
        public void setParameterText(String parameterText) {

        }

        public IsAEvaluator(final ValueType type, final boolean isNegated) {
            super(type, isNegated ? NOT_ISA : ISA );
        }

        /**
         * @inheritDoc
         */
        public boolean evaluate(InternalWorkingMemory workingMemory,
                InternalReadAccessor extractor, InternalFactHandle handle, FieldValue value) {
            final Object objectValue = extractor.getValue(workingMemory, handle.getObject());

            Object typeName = value.getValue();
            if ( typeName instanceof Class ) {
                typeName = ((Class) typeName).getName();
            }

            TraitableBean core = null;
            if ( objectValue instanceof Thing ) {
                // A trait proxy: test its core bean for the trait.
                Thing thing = (Thing) objectValue;
                core = (TraitableBean) thing.getCore();
                return this.getOperator().isNegated() ^ core.hasTrait(typeName.toString() );
            } else if ( objectValue instanceof TraitableBean ) {
                core = (TraitableBean) objectValue;
                return this.getOperator().isNegated() ^ core.hasTrait( typeName.toString() );
            } else {
                // Plain object: search working memory for a trait proxy wrapping it.
                core = lookForWrapper( objectValue, workingMemory );
                // No wrapper means the object has no traits: true only for the negated form.
                return ( core == null && this.getOperator().isNegated() )
                        || ( core != null && this.getOperator().isNegated() ^ core.hasTrait( typeName.toString() ) );
            }
        }

        /**
         * Scans working memory for a TraitProxy whose core is the given object
         * (unwrapping CoreWrapper if needed); returns its TraitableBean or null.
         */
        protected TraitableBean lookForWrapper( final Object objectValue, InternalWorkingMemory workingMemory) {
            Iterator iter = workingMemory.getObjectStore().iterateObjects( new ObjectFilter() {
                public boolean accept(Object object) {
                    if ( object instanceof TraitProxy ) {
                        Object core = ((TraitProxy) object).getObject();
                        if ( core instanceof CoreWrapper ) {
                            core = ((CoreWrapper) core).getCore();
                        }
                        return core == objectValue;
                    } else {
                        return false;
                    }
                }
            });
            if ( iter.hasNext() ) {
                return (TraitableBean) ((TraitProxy) iter.next()).getObject();
            } else {
                return null;
                // throw new RuntimeException(" Error : the isA operator must be used on a trait-type, was applied to  " + objectValue );
            }
        }

        public boolean evaluate(InternalWorkingMemory workingMemory,
                InternalReadAccessor leftExtractor, InternalFactHandle left,
                InternalReadAccessor rightExtractor, InternalFactHandle right) {
            final Object value1 = leftExtractor.getValue(workingMemory, left);
            final Object value2 = rightExtractor.getValue(workingMemory, right);

            Object target = value1;
            Object source = value2;

            return compare( source, target, workingMemory );
        }

        public boolean evaluateCachedLeft(InternalWorkingMemory workingMemory,
                VariableContextEntry context, InternalFactHandle right) {
            Object target = right.getObject();
            Object source = context.getObject();
            return compare( source, target, workingMemory );
        }

        public boolean evaluateCachedRight(InternalWorkingMemory workingMemory,
                VariableContextEntry context, InternalFactHandle left) {
            Object target = left.getObject();
            Object source = context.getObject();
            return compare( source, target, workingMemory );
        }

        /**
         * True when the source's trait set is a superset of the target's
         * (XOR'ed with negation). Either side may be a Thing, a TraitableBean,
         * or a plain object located via lookForWrapper.
         */
        private boolean compare(Object source, Object target, InternalWorkingMemory workingMemory ) {
            Collection sourceTraits = null;
            Collection targetTraits = null;
            if ( source instanceof Thing) {
                sourceTraits = ((TraitableBean) ((Thing) source).getCore()).getTraits();
            } else if ( source instanceof TraitableBean ) {
                sourceTraits = ((TraitableBean) source).getTraits();
            } else {
                TraitableBean tbean = lookForWrapper( source, workingMemory);
                if ( tbean != null ) {
                    sourceTraits = tbean.getTraits();
                }
            }

            if ( target instanceof Thing) {
                targetTraits = ((TraitableBean) ((Thing) target).getCore()).getTraits();
            } else if ( target instanceof TraitableBean ) {
                targetTraits = ((TraitableBean) target).getTraits();
            } else {
                TraitableBean tbean = lookForWrapper( target, workingMemory);
                if ( tbean != null ) {
                    targetTraits = tbean.getTraits();
                }
            }

            return ( targetTraits != null && sourceTraits != null
                        && ( this.getOperator().isNegated() ^ sourceTraits.containsAll( targetTraits ) ) )
                    || ( sourceTraits == null && this.getOperator().isNegated() ) ;
        }

        @Override
        public String toString() {
            return "IsAEvaluatorDefinition isA";
        }

    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Collection;
import java.util.Iterator;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.junit.Test;
import org.junit.Assert;

/** Test if a datanode can correctly upgrade itself */
public class TestDatanodeRestart {
  // test finalized replicas persist across DataNode restarts
  @Test public void testFinalizedReplicas() throws Exception {
    // bring up a cluster of 3
    Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024L);
    conf.setInt(DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, 512);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();
    try {
      // test finalized replicas: files written and fully replicated before the
      // restart must still verify after all datanodes come back.
      final String TopDir = "/test";
      DFSTestUtil util = new DFSTestUtil("TestCrcCorruption", 2, 3, 8*1024);
      util.createFiles(fs, TopDir, (short)3);
      util.waitReplication(fs, TopDir, (short)3);
      util.checkFiles(fs, TopDir);
      cluster.restartDataNodes();
      cluster.waitActive();
      util.checkFiles(fs, TopDir);
    } finally {
      cluster.shutdown();
    }
  }

  // test rbw replicas persist across DataNode restarts
  // NOTE(review): not annotated with @Test, so JUnit 4 will not run this
  // directly — confirm whether that is intentional (e.g. disabled) or an
  // omission before changing it.
  public void testRbwReplicas() throws IOException {
    Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024L);
    conf.setInt(DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, 512);
    conf.setBoolean("dfs.support.append", true);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    cluster.waitActive();
    try {
      // exercise both the intact and the corrupted rbw-file cases
      testRbwReplicas(cluster, false);
      testRbwReplicas(cluster, true);
    } finally {
      cluster.shutdown();
    }
  }

  // Writes an unfinished (rbw) file, optionally truncates the on-disk block
  // file to simulate corruption, restarts the datanode, and checks the replica
  // comes back in RWR state with the expected length.
  private void testRbwReplicas(MiniDFSCluster cluster, boolean isCorrupt)
  throws IOException {
    FSDataOutputStream out = null;
    FileSystem fs = cluster.getFileSystem();
    final Path src = new Path("/test.txt");
    try {
      final int fileLen = 515;
      // create some rbw replicas on disk
      byte[] writeBuf = new byte[fileLen];
      new Random().nextBytes(writeBuf);
      out = fs.create(src);
      out.write(writeBuf);
      out.hflush();
      DataNode dn = cluster.getDataNodes().get(0);
      for (FSVolume volume : ((FSDataset)dn.data).volumes.volumes) {
        File currentDir = volume.getDir().getParentFile();
        File rbwDir = new File(currentDir, "rbw");
        for (File file : rbwDir.listFiles()) {
          if (isCorrupt && Block.isBlockFilename(file)) {
            new RandomAccessFile(file, "rw").setLength(fileLen-1); // corrupt
          }
        }
      }
      cluster.restartDataNodes();
      cluster.waitActive();
      dn = cluster.getDataNodes().get(0);

      // check volumeMap: one rwr replica
      ReplicasMap replicas = ((FSDataset)(dn.data)).volumeMap;
      Assert.assertEquals(1, replicas.size());
      ReplicaInfo replica = replicas.replicas().iterator().next();
      Assert.assertEquals(ReplicaState.RWR, replica.getState());
      if (isCorrupt) {
        // truncated replica is rounded down to a whole 512-byte chunk
        Assert.assertEquals((fileLen-1)/512*512, replica.getNumBytes());
      } else {
        Assert.assertEquals(fileLen, replica.getNumBytes());
      }
      dn.data.invalidate(new Block[]{replica});
    } finally {
      IOUtils.closeStream(out);
      if (fs.exists(src)) {
        fs.delete(src, false);
      }
      fs.close();
    }
  }

  // test recovering unlinked tmp replicas
  @Test public void testRecoverReplicas() throws IOException {
    Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024L);
    conf.setInt(DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, 512);
    conf.setBoolean("dfs.support.append", true);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    cluster.waitActive();
    try {
      FileSystem fs = cluster.getFileSystem();
      for (int i=0; i<4; i++) {
        Path fileName = new Path("/test"+i);
        DFSTestUtil.createFile(fs, fileName, 1, (short)1, 0L);
        DFSTestUtil.waitReplication(fs, fileName, (short)1);
      }
      DataNode dn = cluster.getDataNodes().get(0);
      Iterator<ReplicaInfo> replicasItor =
        ((FSDataset)dn.data).volumeMap.replicas().iterator();
      ReplicaInfo replica = replicasItor.next();
      // leave unlinkTmp leftovers in every combination of block/meta file and
      // rename/copy; restart must clean them up and finalize all replicas
      createUnlinkTmpFile(replica, true, true); // rename block file
      createUnlinkTmpFile(replica, false, true); // rename meta file
      replica = replicasItor.next();
      createUnlinkTmpFile(replica, true, false); // copy block file
      createUnlinkTmpFile(replica, false, false); // copy meta file
      replica = replicasItor.next();
      createUnlinkTmpFile(replica, true, true); // rename block file
      createUnlinkTmpFile(replica, false, false); // copy meta file

      cluster.restartDataNodes();
      cluster.waitActive();
      dn = cluster.getDataNodes().get(0);

      // check volumeMap: 4 finalized replica
      Collection<ReplicaInfo> replicas = ((FSDataset)(dn.data)).volumeMap.replicas();
      Assert.assertEquals(4, replicas.size());
      replicasItor = replicas.iterator();
      while (replicasItor.hasNext()) {
        Assert.assertEquals(ReplicaState.FINALIZED,
            replicasItor.next().getState());
      }
    } finally {
      cluster.shutdown();
    }
  }

  // Creates an unlinkTmp sibling of the replica's block or meta file, either
  // by renaming the original (isRename) or by copying it.
  private static void createUnlinkTmpFile(ReplicaInfo replicaInfo,
      boolean changeBlockFile,
      boolean isRename) throws IOException {
    File src;
    if (changeBlockFile) {
      src = replicaInfo.getBlockFile();
    } else {
      src = replicaInfo.getMetaFile();
    }
    File dst = FSDataset.getUnlinkTmpFile(src);
    if (isRename) {
      src.renameTo(dst);
    } else {
      FileInputStream in = new FileInputStream(src);
      try {
        FileOutputStream out = new FileOutputStream(dst);
        try {
          IOUtils.copyBytes(in, out, 1);
        } finally {
          out.close();
        }
      } finally {
        in.close();
      }
    }
  }
}
package io.github.mthli.Bitocle.Repo; import android.content.Context; import android.database.SQLException; import android.os.AsyncTask; import android.widget.ListView; import android.widget.SimpleAdapter; import com.github.johnpersano.supertoasts.SuperToast; import com.github.johnpersano.supertoasts.util.Style; import io.github.mthli.Bitocle.Database.Repo.RAction; import io.github.mthli.Bitocle.Database.Repo.Repo; import io.github.mthli.Bitocle.Main.MainFragment; import io.github.mthli.Bitocle.R; import org.eclipse.egit.github.core.Repository; import org.eclipse.egit.github.core.client.GitHubClient; import org.eclipse.egit.github.core.service.RepositoryService; import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.*; public class AddTask extends AsyncTask<Void, Integer, Boolean> { private MainFragment fragment; private Context context; private String query; private RepositoryService service; private PullToRefreshLayout pull; private ListView listView; private RepoItemAdapter adapter; private List<RepoItem> list; private String git; public AddTask(MainFragment fragment, String query) { this.fragment = fragment; this.query = query; } @Override protected void onPreExecute() { context = fragment.getContentView().getContext(); GitHubClient client = fragment.getClient(); service = new RepositoryService(client); pull = fragment.getPull(); listView = fragment.getListView(); adapter = fragment.getRepoItemAdapter(); list = fragment.getRepoItemList(); pull.setRefreshing(true); } @Override protected Boolean doInBackground(Void... 
params) { String[] arr = query.split("/"); if (arr.length < 2) { return false; } String owner = arr[0].toLowerCase(); String name = arr[1].toLowerCase(); Repository r; try { r = service.getRepository(owner, name); } catch (IOException i) { return false; } if (isCancelled()) { return false; } RAction action = new RAction(context); try { action.openDatabase(true); } catch (SQLException s) { action.closeDatabase(); return false; } SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); if (!action.checkRepo(r.getGitUrl())) { Repo repo = new Repo(); repo.setName(r.getName()); repo.setDate(format.format(r.getCreatedAt())); repo.setDescription(r.getDescription()); repo.setLang(r.getLanguage()); repo.setStar(r.getWatchers()); repo.setFork(r.getForks()); repo.setOwner(r.getOwner().getLogin()); repo.setGit(r.getGitUrl()); git = r.getGitUrl(); action.addRepo(repo); } action.closeDatabase(); if (isCancelled()) { return false; } return true; } @Override protected void onCancelled() { pull.setRefreshing(false); SuperToast.create( fragment.getActivity(), context.getString(R.string.repo_add_successful), SuperToast.Duration.VERY_SHORT, Style.getStyle(Style.BLUE) ).show(); } @Override protected void onProgressUpdate(Integer... 
values) { /* Do nothing */ } @Override protected void onPostExecute(Boolean result) { pull.setRefreshing(false); if (result) { RAction action = new RAction(context); try { action.openDatabase(true); } catch (SQLException s) { SuperToast.create( fragment.getActivity(), context.getString(R.string.repo_add_failed), SuperToast.Duration.VERY_SHORT, Style.getStyle(Style.RED) ).show(); return; } List<Repo> repos = action.listRepos(); Collections.sort(repos); List<Map<String, String>> autoList = new ArrayList<Map<String, String>>(); list.clear(); autoList.clear(); for (Repo r : repos) { list.add( new RepoItem( r.getName(), r.getDate(), r.getDescription(), r.getLang(), r.getStar(), r.getFork(), r.getOwner(), r.getGit() ) ); Map<String, String> map = new HashMap<String, String>(); map.put("owner", r.getOwner()); map.put("name", r.getName()); autoList.add(map); } action.closeDatabase(); SimpleAdapter autoAdapter = new SimpleAdapter( context, autoList, R.layout.auto_item, new String[] {"owner", "name"}, new int[] {R.id.auto_item_owner, R.id.auto_item_name} ); autoAdapter.notifyDataSetChanged(); fragment.getSearch().setAdapter(autoAdapter); if (list.size() <= 0) { fragment.setContentEmpty(true); fragment.setEmptyText(R.string.repo_empty_list); fragment.setContentShown(true); } else { int position = 0; for (RepoItem r : list) { if (r.getGit().equals(git)) { break; } position++; } fragment.setContentEmpty(false); adapter.notifyDataSetChanged(); fragment.setContentShown(true); listView.smoothScrollToPosition(position); SuperToast.create( fragment.getActivity(), context.getString(R.string.repo_add_successful), SuperToast.Duration.VERY_SHORT, Style.getStyle(Style.BLUE) ).show(); } } else { SuperToast.create( fragment.getActivity(), context.getString(R.string.repo_add_failed), SuperToast.Duration.VERY_SHORT, Style.getStyle(Style.RED) ).show(); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Autogenerated by Thrift Compiler (0.13.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.zeppelin.interpreter.thrift;

// NOTE(review): Thrift-generated struct. Do not hand-modify behavior here —
// change the .thrift IDL and regenerate instead.
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-03-09")
public class AppOutputAppendEvent implements org.apache.thrift.TBase<AppOutputAppendEvent, AppOutputAppendEvent._Fields>, java.io.Serializable, Cloneable, Comparable<AppOutputAppendEvent> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AppOutputAppendEvent");

  private static final org.apache.thrift.protocol.TField NOTE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("noteId", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField APP_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("appId", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField INDEX_FIELD_DESC = new org.apache.thrift.protocol.TField("index", org.apache.thrift.protocol.TType.I32, (short)4);
  private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRING, (short)5);

  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new AppOutputAppendEventStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new AppOutputAppendEventTupleSchemeFactory();

  public @org.apache.thrift.annotation.Nullable java.lang.String noteId; // required
  public @org.apache.thrift.annotation.Nullable java.lang.String paragraphId; // required
  public @org.apache.thrift.annotation.Nullable java.lang.String appId; // required
  public int index; // required
  public @org.apache.thrift.annotation.Nullable java.lang.String data; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    NOTE_ID((short)1, "noteId"),
    PARAGRAPH_ID((short)2, "paragraphId"),
    APP_ID((short)3, "appId"),
    INDEX((short)4, "index"),
    DATA((short)5, "data");

    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // NOTE_ID
          return NOTE_ID;
        case 2: // PARAGRAPH_ID
          return PARAGRAPH_ID;
        case 3: // APP_ID
          return APP_ID;
        case 4: // INDEX
          return INDEX;
        case 5: // DATA
          return DATA;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // Bit 0 of __isset_bitfield records whether the primitive field 'index'
  // has been assigned (primitives cannot be null-checked like the Strings).
  private static final int __INDEX_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.NOTE_ID, new org.apache.thrift.meta_data.FieldMetaData("noteId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.APP_ID, new org.apache.thrift.meta_data.FieldMetaData("appId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.INDEX, new org.apache.thrift.meta_data.FieldMetaData("index", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AppOutputAppendEvent.class, metaDataMap);
  }

  public AppOutputAppendEvent() {
  }

  public AppOutputAppendEvent(
    java.lang.String noteId,
    java.lang.String paragraphId,
    java.lang.String appId,
    int index,
    java.lang.String data)
  {
    this();
    this.noteId = noteId;
    this.paragraphId = paragraphId;
    this.appId = appId;
    this.index = index;
    setIndexIsSet(true);
    this.data = data;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public AppOutputAppendEvent(AppOutputAppendEvent other) {
    __isset_bitfield = other.__isset_bitfield;
    if (other.isSetNoteId()) {
      this.noteId = other.noteId;
    }
    if (other.isSetParagraphId()) {
      this.paragraphId = other.paragraphId;
    }
    if (other.isSetAppId()) {
      this.appId = other.appId;
    }
    this.index = other.index;
    if (other.isSetData()) {
      this.data = other.data;
    }
  }

  public AppOutputAppendEvent deepCopy() {
    return new AppOutputAppendEvent(this);
  }

  @Override
  public void clear() {
    this.noteId = null;
    this.paragraphId = null;
    this.appId = null;
    setIndexIsSet(false);
    this.index = 0;
    this.data = null;
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getNoteId() {
    return this.noteId;
  }

  public AppOutputAppendEvent setNoteId(@org.apache.thrift.annotation.Nullable java.lang.String noteId) {
    this.noteId = noteId;
    return this;
  }

  public void unsetNoteId() {
    this.noteId = null;
  }

  /** Returns true if field noteId is set (has been assigned a value) and false otherwise */
  public boolean isSetNoteId() {
    return this.noteId != null;
  }

  public void setNoteIdIsSet(boolean value) {
    if (!value) {
      this.noteId = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getParagraphId() {
    return this.paragraphId;
  }

  public AppOutputAppendEvent setParagraphId(@org.apache.thrift.annotation.Nullable java.lang.String paragraphId) {
    this.paragraphId = paragraphId;
    return this;
  }

  public void unsetParagraphId() {
    this.paragraphId = null;
  }

  /** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */
  public boolean isSetParagraphId() {
    return this.paragraphId != null;
  }

  public void setParagraphIdIsSet(boolean value) {
    if (!value) {
      this.paragraphId = null;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getAppId() {
    return this.appId;
  }

  public AppOutputAppendEvent setAppId(@org.apache.thrift.annotation.Nullable java.lang.String appId) {
    this.appId = appId;
    return this;
  }

  public void unsetAppId() {
    this.appId = null;
  }

  /** Returns true if field appId is set (has been assigned a value) and false otherwise */
  public boolean isSetAppId() {
    return this.appId != null;
  }

  public void setAppIdIsSet(boolean value) {
    if (!value) {
      this.appId = null;
    }
  }

  public int getIndex() {
    return this.index;
  }

  public AppOutputAppendEvent setIndex(int index) {
    this.index = index;
    setIndexIsSet(true);
    return this;
  }

  public void unsetIndex() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __INDEX_ISSET_ID);
  }

  /** Returns true if field index is set (has been assigned a value) and false otherwise */
  public boolean isSetIndex() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __INDEX_ISSET_ID);
  }

  public void setIndexIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __INDEX_ISSET_ID, value);
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getData() {
    return this.data;
  }

  public AppOutputAppendEvent setData(@org.apache.thrift.annotation.Nullable java.lang.String data) {
    this.data = data;
    return this;
  }

  public void unsetData() {
    this.data = null;
  }

  /** Returns true if field data is set (has been assigned a value) and false otherwise */
  public boolean isSetData() {
    return this.data != null;
  }

  public void setDataIsSet(boolean value) {
    if (!value) {
      this.data = null;
    }
  }

  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case NOTE_ID:
      if (value == null) {
        unsetNoteId();
      } else {
        setNoteId((java.lang.String)value);
      }
      break;

    case PARAGRAPH_ID:
      if (value == null) {
        unsetParagraphId();
      } else {
        setParagraphId((java.lang.String)value);
      }
      break;

    case APP_ID:
      if (value == null) {
        unsetAppId();
      } else {
        setAppId((java.lang.String)value);
      }
      break;

    case INDEX:
      if (value == null) {
        unsetIndex();
      } else {
        setIndex((java.lang.Integer)value);
      }
      break;

    case DATA:
      if (value == null) {
        unsetData();
      } else {
        setData((java.lang.String)value);
      }
      break;

    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case NOTE_ID:
      return getNoteId();

    case PARAGRAPH_ID:
      return getParagraphId();

    case APP_ID:
      return getAppId();

    case INDEX:
      return getIndex();

    case DATA:
      return getData();

    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case NOTE_ID:
      return isSetNoteId();
    case PARAGRAPH_ID:
      return isSetParagraphId();
    case APP_ID:
      return isSetAppId();
    case INDEX:
      return isSetIndex();
    case DATA:
      return isSetData();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof AppOutputAppendEvent)
      return this.equals((AppOutputAppendEvent)that);
    return false;
  }

  public boolean equals(AppOutputAppendEvent that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_noteId = true && this.isSetNoteId();
    boolean that_present_noteId = true && that.isSetNoteId();
    if (this_present_noteId || that_present_noteId) {
      if (!(this_present_noteId && that_present_noteId))
        return false;
      if (!this.noteId.equals(that.noteId))
        return false;
    }

    boolean this_present_paragraphId = true && this.isSetParagraphId();
    boolean that_present_paragraphId = true && that.isSetParagraphId();
    if (this_present_paragraphId || that_present_paragraphId) {
      if (!(this_present_paragraphId && that_present_paragraphId))
        return false;
      if (!this.paragraphId.equals(that.paragraphId))
        return false;
    }

    boolean this_present_appId = true && this.isSetAppId();
    boolean that_present_appId = true && that.isSetAppId();
    if (this_present_appId || that_present_appId) {
      if (!(this_present_appId && that_present_appId))
        return false;
      if (!this.appId.equals(that.appId))
        return false;
    }

    boolean this_present_index = true;
    boolean that_present_index = true;
    if (this_present_index || that_present_index) {
      if (!(this_present_index && that_present_index))
        return false;
      if (this.index != that.index)
        return false;
    }

    boolean this_present_data = true && this.isSetData();
    boolean that_present_data = true && that.isSetData();
    if (this_present_data || that_present_data) {
      if (!(this_present_data && that_present_data))
        return false;
      if (!this.data.equals(that.data))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetNoteId()) ? 131071 : 524287);
    if (isSetNoteId())
      hashCode = hashCode * 8191 + noteId.hashCode();

    hashCode = hashCode * 8191 + ((isSetParagraphId()) ? 131071 : 524287);
    if (isSetParagraphId())
      hashCode = hashCode * 8191 + paragraphId.hashCode();

    hashCode = hashCode * 8191 + ((isSetAppId()) ? 131071 : 524287);
    if (isSetAppId())
      hashCode = hashCode * 8191 + appId.hashCode();

    hashCode = hashCode * 8191 + index;

    hashCode = hashCode * 8191 + ((isSetData()) ? 131071 : 524287);
    if (isSetData())
      hashCode = hashCode * 8191 + data.hashCode();

    return hashCode;
  }

  @Override
  public int compareTo(AppOutputAppendEvent other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.valueOf(isSetNoteId()).compareTo(other.isSetNoteId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetNoteId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.noteId, other.noteId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetParagraphId()).compareTo(other.isSetParagraphId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetParagraphId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, other.paragraphId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetAppId()).compareTo(other.isSetAppId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetAppId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.appId, other.appId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetIndex()).compareTo(other.isSetIndex());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetIndex()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.index, other.index);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetData()).compareTo(other.isSetData());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetData()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("AppOutputAppendEvent(");
    boolean first = true;

    sb.append("noteId:");
    if (this.noteId == null) {
      sb.append("null");
    } else {
      sb.append(this.noteId);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("paragraphId:");
    if (this.paragraphId == null) {
      sb.append("null");
    } else {
      sb.append(this.paragraphId);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("appId:");
    if (this.appId == null) {
      sb.append("null");
    } else {
      sb.append(this.appId);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("index:");
    sb.append(this.index);
    first = false;
    if (!first) sb.append(", ");
    sb.append("data:");
    if (this.data == null) {
      sb.append("null");
    } else {
      sb.append(this.data);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class AppOutputAppendEventStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public AppOutputAppendEventStandardScheme getScheme() {
      return new AppOutputAppendEventStandardScheme();
    }
  }

  private static class AppOutputAppendEventStandardScheme extends org.apache.thrift.scheme.StandardScheme<AppOutputAppendEvent> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // NOTE_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.noteId = iprot.readString();
              struct.setNoteIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // PARAGRAPH_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.paragraphId = iprot.readString();
              struct.setParagraphIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // APP_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.appId = iprot.readString();
              struct.setAppIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // INDEX
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.index = iprot.readI32();
              struct.setIndexIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // DATA
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.data = iprot.readString();
              struct.setDataIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.noteId != null) {
        oprot.writeFieldBegin(NOTE_ID_FIELD_DESC);
        oprot.writeString(struct.noteId);
        oprot.writeFieldEnd();
      }
      if (struct.paragraphId != null) {
        oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC);
        oprot.writeString(struct.paragraphId);
        oprot.writeFieldEnd();
      }
      if (struct.appId != null) {
        oprot.writeFieldBegin(APP_ID_FIELD_DESC);
        oprot.writeString(struct.appId);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldBegin(INDEX_FIELD_DESC);
      oprot.writeI32(struct.index);
      oprot.writeFieldEnd();
      if (struct.data != null) {
        oprot.writeFieldBegin(DATA_FIELD_DESC);
        oprot.writeString(struct.data);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class AppOutputAppendEventTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public AppOutputAppendEventTupleScheme getScheme() {
      return new AppOutputAppendEventTupleScheme();
    }
  }

  private static class AppOutputAppendEventTupleScheme extends org.apache.thrift.scheme.TupleScheme<AppOutputAppendEvent> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetNoteId()) {
        optionals.set(0);
      }
      if (struct.isSetParagraphId()) {
        optionals.set(1);
      }
      if (struct.isSetAppId()) {
        optionals.set(2);
      }
      if (struct.isSetIndex()) {
        optionals.set(3);
      }
      if (struct.isSetData()) {
        optionals.set(4);
      }
      oprot.writeBitSet(optionals, 5);
      if (struct.isSetNoteId()) {
        oprot.writeString(struct.noteId);
      }
      if (struct.isSetParagraphId()) {
        oprot.writeString(struct.paragraphId);
      }
      if (struct.isSetAppId()) {
        oprot.writeString(struct.appId);
      }
      if (struct.isSetIndex()) {
        oprot.writeI32(struct.index);
      }
      if (struct.isSetData()) {
        oprot.writeString(struct.data);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(5);
      if (incoming.get(0)) {
        struct.noteId = iprot.readString();
        struct.setNoteIdIsSet(true);
      }
      if (incoming.get(1)) {
        struct.paragraphId = iprot.readString();
        struct.setParagraphIdIsSet(true);
      }
      if (incoming.get(2)) {
        struct.appId = iprot.readString();
        struct.setAppIdIsSet(true);
      }
      if (incoming.get(3)) {
        struct.index = iprot.readI32();
        struct.setIndexIsSet(true);
      }
      if (incoming.get(4)) {
        struct.data = iprot.readString();
        struct.setDataIsSet(true);
      }
    }
  }

  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
/* * SparkBit * * Copyright 2011-2014 multibit.org * Copyright 2014 Coin Sciences Ltd * * Licensed under the MIT license (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/mit-license.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.multibit.model.bitcoin; import com.google.bitcoin.core.*; import com.google.bitcoin.core.Wallet.BalanceType; import com.google.bitcoin.script.Script; import com.google.bitcoin.store.BlockStoreException; //import org.coinsparks.CSAssetDatabase; import org.sparkbit.ApplicationDataDirectoryLocator; import org.multibit.controller.Controller; import org.multibit.controller.bitcoin.BitcoinController; import org.multibit.model.AbstractModel; import org.multibit.model.ModelEnum; import org.multibit.model.core.CoreModel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.math.BigInteger; import java.util.*; import org.multibit.utils.CSMiscUtils; import org.sparkbit.SparkBitMapDB; /** * Model containing the MultiBit data. * * Most of the methods act on the single, active wallet in the model. * * @author jim * */ public class BitcoinModel extends AbstractModel<CoreModel> { private static final Logger log = LoggerFactory.getLogger(BitcoinModel.class); // Constants used in the multibit.properties. // MultiBit start up. 
// Bitcoin network selection - which chain this model instance talks to.
public static final String TEST_OR_PRODUCTION_NETWORK = "testOrProductionNetwork";
public static final String TEST_NETWORK_VALUE = "test";
public static final String TESTNET3_VALUE = "testnet3";
public static final String PRODUCTION_NETWORK_VALUE = "production";

public static final String WALLET_FILENAME = "walletFilename";

// Wallets, open wallet and save wallet as dialog.
public static final String GRAB_FOCUS_FOR_ACTIVE_WALLET = "grabFocusForActiveWallet";
public static final String ACTIVE_WALLET_FILENAME = "selectedWalletFilename";
public static final String WALLET_DESCRIPTION_PREFIX = "walletDescription.";
public static final String SHOW_DELETE_WALLET = "showDeleteWallet";

// The number of serialised and protobuf2 wallets in the multibit.properties.
public static final String EARLY_WALLET_FILENAME_PREFIX = "walletFilename.";
public static final String NUMBER_OF_EARLY_WALLETS = "numberOfWallets";

public static final String PROTOBUF3_WALLET_FILENAME_PREFIX = "protobuf3WalletFilename.";
// NOTE(review): "numberfProtobuf3Wallets" looks like a typo for "numberOfProtobuf3Wallets",
// but it is a persisted properties key - do NOT change the value without a migration.
public static final String NUMBER_OF_PROTOBUF3_WALLETS = "numberfProtobuf3Wallets";
public static final String WALLET_ORDER_TOTAL = "walletOrderTotal";
public static final String WALLET_ORDER_PREFIX = "walletOrder.";
public static final String WALLET_CLEANED_OF_SPAM = "walletCleanedOfSpam";

/* CoinSpark START */
// CoinSpark-specific UI / behaviour preference keys.
public static final String SHOW_MULTIPLE_WALLETS = "showMultipleWallets";
public static final String SHOW_DEVELOPER_TOOLS = "showDeveloperTools";
public static final String SHOW_PRIVATE_KEY_MENU = "showPrivateKeyMenu";
public static final String CAN_SEND_INVALID_ASSETS = "canSendInvalidAssets";
public static final String CAN_DELETE_INVALID_ASSETS = "canDeleteInvalidAssets";
public static final String HTTPS_TRUST_ALL_CERTS = "httpsTrustAllCerts";

// Send bitcoin and send bitcoin confirm.
public static final String SEND_MESSAGE = "sendMessage";
public static final String SEND_SPARK_ADDRESS = "sendSparkAddress";
public static final String SEND_ADDRESS = "sendAddress";
public static final String SEND_LABEL = "sendLabel";
public static final String SEND_AMOUNT = "sendAmount";
public static final String SEND_ASSET_AMOUNT = "sendAssetAmount";
public static final String SEND_FEE = "sendFee";
public static final String SEND_PERFORM_PASTE_NOW = "sendPerformPasteNow";
public static final String SHOW_SIDE_PANEL = "showSidePanel";
public static final String DISPLAY_AS_SWATCH = "displayAsSwatch";
public static final String DISPLAY_AS_QR_CODE = "displayAsQRcode";

public static final int MINIMUM_NUMBER_OF_CONNECTED_PEERS_BEFORE_SEND_IS_ENABLED = 2;

// Open bitcoin URI.
public static final String OPEN_URI_SHOW_DIALOG = "openUriShowDialog";
public static final String OPEN_URI_USE_URI = "openUriUseUri";
public static final String OPEN_URI_ADDRESS = "openUriAddress";
public static final String OPEN_URI_LABEL = "openUriLabel";
public static final String OPEN_URI_AMOUNT = "openUriAmount";
public static final String BRING_TO_FRONT = "bringToFront";

// Default fee and feePerKB (values are in satoshi, held as BigInteger).
public static final BigInteger SEND_FEE_DEFAULT = new BigInteger("50000");
public static final BigInteger SEND_FEE_PER_KB_DEFAULT = new BigInteger("10000");

// CoinSpark START
public static final BigInteger COINSPARK_SEND_MINIMUM_AMOUNT = new BigInteger("10000");
// CoinSpark END

// Minimum fee.
public static final BigInteger SEND_MINIMUM_FEE = new BigInteger("10000");

// Maximum fee.
public static final BigInteger SEND_MAXIMUM_FEE = new BigInteger("100000000"); // 1 BTC.

// Receive bitcoin.
public static final String IS_RECEIVE_BITCOIN = "isReceiveBitcoin";
public static final String RECEIVE_ADDRESS = "receiveAddress";
public static final String RECEIVE_LABEL = "receiveLabel";
public static final String RECEIVE_AMOUNT = "receiveAmount";
public static final String RECEIVE_NEW_KEY = "receiveNewKey"; // to delete

// Validation.
public static final String VALIDATION_ADDRESS_IS_INVALID = "validationAddressIsInvalid";
public static final String VALIDATION_AMOUNT_IS_INVALID = "validationAmountIsInvalid";
public static final String VALIDATION_AMOUNT_IS_MISSING = "validationAmountIsMissing";
public static final String VALIDATION_AMOUNT_IS_NEGATIVE_OR_ZERO = "validationAmountIsNegativeOrZero";
public static final String VALIDATION_AMOUNT_IS_TOO_SMALL = "validationAmountIsTooSmall";
public static final String VALIDATION_NOT_ENOUGH_FUNDS = "validationNotEnoughFunds";
public static final String VALIDATION_ADDRESS_VALUE = "validationAddressValue";
public static final String VALIDATION_AMOUNT_VALUE = "validationAmountValue";
public static final String VALIDATION_AMOUNT_IS_TOO_BIG_FOR_MIGRATION = "validationAmountIsTooBigForMigration";

// File extensions.
public static final String WALLET_FILE_EXTENSION = "sparkwallet";
public static final String CSV_FILE_EXTENSION = "csv";

// Private key import and export.
public static final String PRIVATE_KEY_FILE_EXTENSION = "key";
public static final String PRIVATE_KEY_FILENAME = "privateKeyFilename";

// Blockchain.info support.
public static final String BLOCKCHAIN_WALLET_ENCRYPTED_SUFFIX = "aes.json";
public static final String BLOCKCHAIN_WALLET_PLAIN_SUFFIX = "json";

// Connect to nodes.
@Deprecated
public static final String SINGLE_NODE_CONNECTION = "singleNodeConnection";
public static final String PEERS = "peers";

// Sizes and last modified dates of files.
public static final String WALLET_FILE_SIZE = "walletFileSize";
public static final String WALLET_FILE_LAST_MODIFIED = "walletFileLastModified";
public static final String WALLET_INFO_FILE_SIZE = "walletInfoFileSize";
public static final String WALLET_INFO_FILE_LAST_MODIFIED = "walletInfoFileLastModified";

// User preferences undo.
public static final String PREVIOUS_OPEN_URI_SHOW_DIALOG = "previousOpenUriShowDialog";
public static final String PREVIOUS_OPEN_URI_USE_URI = "previousOpenUriUseUri";
public static final String PREVIOUS_SEND_FEE = "previousSendFee";

// Wallet backup.
public static final String WALLET_BACKUP_FILE = "walletBackupFile";

// AlertManager and versions.
public static final String ALERT_MANAGER_NEW_VERSION_VALUE = "alertManagerNewVersionValue";
public static final String ALERT_MANAGER_NEW_VERSION_SEEN_COUNT = "alertManagerNewVersionSeenCount";

/**
 * List of each wallet's total model data.
 */
private List<WalletData> perWalletModelDataList;

/**
 * The current active wallet.
 */
private WalletData activeWalletModelData;

// Sentinel meaning "peer count not known yet".
// NOTE(review): constant name is missing an 'E' ("CONNECTD") - renaming would be
// source-compatible only if all usages are updated together; left as-is here.
public static final int UNKNOWN_NUMBER_OF_CONNECTD_PEERS = -1;

/**
 * The number of peers connected.
 */
private int numberOfConnectedPeers = UNKNOWN_NUMBER_OF_CONNECTD_PEERS;

/**
 * Used to enable/ disable blinking of the SingleWalletPanels when language changes etc.
 */
private boolean blinkEnabled = true;

/**
 * Creates the model with a single, empty, placeholder active wallet.
 * The placeholder is replaced by the first real wallet added via addWallet().
 */
@SuppressWarnings("deprecation")
public BitcoinModel(CoreModel coreModel) {
    super(coreModel);

    perWalletModelDataList = new LinkedList<WalletData>();
    activeWalletModelData = new WalletData();
    perWalletModelDataList.add(activeWalletModelData);
}

/**
 * @return the model data of the currently active wallet (never null).
 */
public WalletData getActivePerWalletModelData() {
    return activeWalletModelData;
}

/**
 * Get a wallet preference from the active wallet.
 *
 * @param key String key of property
 * @return String property value, or null if there is no wallet info
 */
public String getActiveWalletPreference(String key) {
    if (activeWalletModelData.getWalletInfo() != null) {
        return activeWalletModelData.getWalletInfo().getProperty(key);
    } else {
        return null;
    }
}

/**
 * Set a wallet preference from the active wallet.
 * Marks the active wallet dirty when a non-null value is stored.
 */
public void setActiveWalletPreference(String key, String value) {
    // NOTE(review): this SEND_AMOUNT block is leftover debug scaffolding - 'bad' is
    // computed and discarded. It also dereferences 'value' BEFORE the null check in
    // the if below, so a null value with a SEND_AMOUNT key would NPE here. Confirm
    // intent and remove.
    if (BitcoinModel.SEND_AMOUNT.equals(key)) {
        if (value.contains(",")) {
            boolean bad = true;
            bad = !bad;
        }
    }
    if (activeWalletModelData.getWalletInfo() != null && value != null) {
        activeWalletModelData.getWalletInfo().put(key, value);
        activeWalletModelData.setDirty(true);
    }
}

/**
 * Get the estimated balance of the active wallet.
 *
 * @return The estimated balance (BigInteger.ZERO when there is no wallet)
 */
public BigInteger getActiveWalletEstimatedBalance() {
    if (activeWalletModelData.getWallet() == null) {
        return BigInteger.ZERO;
    } else {
        return activeWalletModelData.getWallet().getBalance(BalanceType.ESTIMATED);
    }
}

/**
 * Get the available balance (plus boomeranged change) of the active wallet.
 *
 * @return the available balance (BigInteger.ZERO when there is no wallet)
 */
public BigInteger getActiveWalletAvailableBalance() {
    if (activeWalletModelData.getWallet() == null) {
        return BigInteger.ZERO;
    } else {
        return activeWalletModelData.getWallet().getBalance(BalanceType.AVAILABLE);
    }
}

/**
 * Get the wallet data for the active wallet.
 *
 * @return the table data list
 */
public List<WalletTableData> getActiveWalletWalletData() {
    return activeWalletModelData.getWalletTableDataList();
}

/**
 * @return the wallet info for the active wallet
 */
public WalletInfoData getActiveWalletWalletInfo() {
    return activeWalletModelData.getWalletInfo();
}

/**
 * @return the active wallet
 */
public Wallet getActiveWallet() {
    return activeWalletModelData.getWallet();
}

/**
 * Set the active wallet, given a wallet filename.
 * No-op when the filename is null or not in the managed list.
 *
 * @param walletFilename the wallet filename
 */
public void setActiveWalletByFilename(String walletFilename) {
    if (walletFilename == null) {
        return;
    }
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (walletFilename.equals(loopPerWalletModelData.getWalletFilename())) {
                activeWalletModelData = loopPerWalletModelData;
                break;
            }
        }
    }
}

/**
 * Remove the specified perWalletModelData. Note that this does not remove
 * any backing wallet or wallet info files.
 *
 * Removal is determined by matching the wallet filename. Use FileHandler to
 * do that.
 *
 * @param perWalletModelDataToRemove The wallet data
 */
public void remove(WalletData perWalletModelDataToRemove) {
    if (perWalletModelDataToRemove == null) {
        return;
    }
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (perWalletModelDataToRemove.getWalletFilename().equals(loopPerWalletModelData.getWalletFilename())) {
                perWalletModelDataList.remove(loopPerWalletModelData);
                break;
            }
        }
    }

    // If there are no wallets, clear the activeWalletModelData.
    // NOTE(review): despite the comment, the active wallet is reset UNCONDITIONALLY -
    // even when the removed wallet was not the active one and other wallets remain.
    // Confirm whether this should be guarded (e.g. only when the removed wallet was
    // active, or when the list is empty).
    activeWalletModelData = new WalletData();
}

/**
 * Set a wallet description, given a wallet filename.
 * Marks the matching wallet dirty so the change is persisted.
 *
 * @param walletFilename The wallet file name
 * @param walletDescription The wallet description
 */
public void setWalletDescriptionByFilename(String walletFilename, String walletDescription) {
    if (walletFilename == null) {
        return;
    }
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (walletFilename.equals(loopPerWalletModelData.getWalletFilename())) {
                loopPerWalletModelData.setWalletDescription(walletDescription);
                loopPerWalletModelData.setDirty(true);
                break;
            }
        }
    }
}

/**
 * Add a new wallet to the list of managed wallets.
 * If a wallet with the same filename is already managed, the existing entry is
 * returned unchanged. The first real wallet added replaces the empty placeholder
 * created in the constructor and becomes the active wallet.
 *
 * @param bitcoinController controller, wired up as the wallet's event listener
 * @param wallet the bitcoinj wallet (may be null)
 * @param walletFilename backing file name; null returns null
 * @return the managed WalletData for this wallet (existing or newly created)
 */
public WalletData addWallet(final BitcoinController bitcoinController, Wallet wallet, String walletFilename) {
    if (walletFilename == null) {
        return null;
    }

    // Check to see if it is already in the managed list - no need to add it
    // again if so.
    for (WalletData loopModelData : perWalletModelDataList) {
        if (walletFilename.equals(loopModelData.getWalletFilename())) {
            return loopModelData;
        }
    }

    WalletData newPerWalletModelData = new WalletData();
    newPerWalletModelData.setWallet(wallet);
    newPerWalletModelData.setWalletFilename(walletFilename);

    // Table row data used in displaying transactions - initially empty
    newPerWalletModelData.setWalletTableDataList(new ArrayList<WalletTableData>());

    // If it is the initial empty activeWalletModelData remove it.
    if (thereIsNoActiveWallet()) {
        perWalletModelDataList.remove(activeWalletModelData);
        activeWalletModelData = newPerWalletModelData;
    }
    perWalletModelDataList.add(newPerWalletModelData);

    // Wire up the controller as a wallet event listener.
    if (wallet != null) {
        wallet.addEventListener(bitcoinController);
    }

    createWalletTableData(bitcoinController, walletFilename);
    createAddressBookReceivingAddresses(walletFilename);

    return newPerWalletModelData;
}

/**
 * Get the active wallet filename.
 *
 * @return the filename of the active wallet (may be null/empty for the placeholder)
 */
public String getActiveWalletFilename() {
    return activeWalletModelData.getWalletFilename();
}

/**
 * Convert the active wallet info into walletdata records as they are easier
 * to show to the user in tabular form.
 */
public ArrayList<WalletTableData> createActiveWalletData(final BitcoinController bitcoinController) {
    return createWalletTableData(bitcoinController, this.getActivePerWalletModelData());
}

/**
 * Convert the wallet info into walletdata records as they are easier
 * to show to the user in tabular form.
 */
public ArrayList<WalletTableData> createWalletTableData(final BitcoinController bitcoinController, String walletFilename) {
    ArrayList<WalletTableData> walletData = new ArrayList<WalletTableData>();
    if (walletFilename == null) {
        return walletData;
    }

    // Resolve the filename to the managed WalletData, then delegate.
    WalletData perWalletModelData = null;
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (walletFilename.equals(loopPerWalletModelData.getWalletFilename())) {
                perWalletModelData = loopPerWalletModelData;
                break;
            }
        }
    }
    return createWalletTableData(bitcoinController, perWalletModelData);
}

/**
 * Build one WalletTableData row per transaction in the given wallet, with
 * credit/debit, description, date and block height filled in.
 *
 * @return the rows; empty (never null) when there is no wallet
 */
public ArrayList<WalletTableData> createWalletTableData(final BitcoinController bitcoinController, WalletData perWalletModelData) {
    ArrayList<WalletTableData> walletData = new ArrayList<WalletTableData>();
    if (perWalletModelData == null || perWalletModelData.getWallet() == null) {
        return walletData;
    }

    Set<Transaction> transactions = perWalletModelData.getWallet().getTransactions(false);
    if (transactions != null) {
        for (Transaction loopTransaction : transactions) {
            WalletTableData walletDataRow = new WalletTableData(loopTransaction);
            walletData.add(walletDataRow);
            walletDataRow.setCredit(loopTransaction.getValueSentToMe(perWalletModelData.getWallet()));
            try {
                walletDataRow.setDebit(loopTransaction.getValueSentFromMe(perWalletModelData.getWallet()));
            } catch (ScriptException e) {
                log.error(e.getMessage(), e);
            }
            List<TransactionInput> transactionInputs = loopTransaction.getInputs();
            List<TransactionOutput> transactionOutputs = loopTransaction.getOutputs();
            if (transactionInputs != null) {
                TransactionInput firstInput = transactionInputs.get(0);
                if (firstInput != null) {
                    walletDataRow.setDescription(createDescription(bitcoinController, perWalletModelData.getWallet(),
                            loopTransaction, transactionInputs, transactionOutputs, walletDataRow.getCredit(),
                            walletDataRow.getDebit()));
                }
            }
            walletDataRow.setDate(createDate(bitcoinController, loopTransaction));
            walletDataRow.setHeight(workOutHeight(loopTransaction));
        }
    }

    // Run through all the walletdata to see if both credit and debit are
    // set (this means change was received).
    for (WalletTableData walletDataRow : walletData) {
        if (walletDataRow.getCredit() != null && (walletDataRow.getCredit().compareTo(BigInteger.ZERO) > 0)
                && (walletDataRow.getDebit() != null) && walletDataRow.getDebit().compareTo(BigInteger.ZERO) > 0) {
            BigInteger net = walletDataRow.getCredit().subtract(walletDataRow.getDebit());
            if (net.compareTo(BigInteger.ZERO) >= 0) {
                walletDataRow.setCredit(net);
                walletDataRow.setDebit(BigInteger.ZERO);
            } else {
                walletDataRow.setCredit(BigInteger.ZERO);
                walletDataRow.setDebit(net.negate());
            }
        }
    }

    return walletData;
}

/**
 * Add the receiving addresses of all the keys of the specified wallet.
 * Labels of previously known addresses are recycled onto the rebuilt list.
 */
public void createAddressBookReceivingAddresses(String walletFilename) {
    if (walletFilename == null) {
        return;
    }

    WalletData perWalletModelData = null;
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (walletFilename.equals(loopPerWalletModelData.getWalletFilename())) {
                perWalletModelData = loopPerWalletModelData;
                break;
            }
        }
    }

    if (!(perWalletModelData == null)) {
        List<ECKey> keyChain = perWalletModelData.getWallet().getKeychain();
        if (keyChain != null) {
            NetworkParameters networkParameters = getNetworkParameters();
            if (networkParameters != null) {
                if (perWalletModelData.getWalletInfo() != null) {
                    // Keep a copy of the existing receiving addresses - labels will be recycled.
                    List<WalletAddressBookData> currentReceivingAddresses = perWalletModelData.getWalletInfo().getReceivingAddresses();

                    // Clear the existing receiving addresses.
                    ArrayList<WalletAddressBookData> newReceivingAddresses = new ArrayList<WalletAddressBookData>();
                    perWalletModelData.getWalletInfo().setReceivingAddresses(newReceivingAddresses);

                    // Add the new receiving addresses from the keys, checking if there is an old label.
                    for (ECKey key : keyChain) {
                        Address address = key.toAddress(getNetworkParameters());
                        String addressString = address.toString();
                        WalletAddressBookData addressBookData = new WalletAddressBookData(null, addressString);
                        for (WalletAddressBookData loopAddressBookData : currentReceivingAddresses) {
                            if (loopAddressBookData.getAddress().equals(addressString)) {
                                // Recycle label.
                                addressBookData.setLabel(loopAddressBookData.getLabel());
                                break;
                            }
                        }
                        perWalletModelData.getWalletInfo().addReceivingAddress(addressBookData, false);
                    }
                }
            }
        }
    }
}

/**
 * Create a description for a transaction.
 *
 * @param transactionInputs
 * @param transactionOutputs
 * @param credit
 * @param debit
 * @return A description of the transaction
 */
public String createDescription(final BitcoinController controller, Wallet wallet, Transaction tx,
        List<TransactionInput> transactionInputs, List<TransactionOutput> transactionOutputs, BigInteger credit,
        BigInteger debit) {
    String toReturn = "";

    // Find the managed WalletData for this wallet instance.
    WalletData perWalletModelData = null;
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (wallet.equals(loopPerWalletModelData.getWallet())) {
                perWalletModelData = loopPerWalletModelData;
                break;
            }
        }
    }
    if (perWalletModelData == null) {
        return toReturn;
    }

    // Pick the output paying us (for credits) and the output paying the
    // counterparty (for debits).
    TransactionOutput myOutput = null;
    TransactionOutput theirOutput = null;
    if (transactionOutputs != null) {
        for (TransactionOutput transactionOutput : transactionOutputs) {
            if (transactionOutput != null && transactionOutput.isMine(perWalletModelData.getWallet())) {
                myOutput = transactionOutput;
            }
            if (transactionOutput != null && !transactionOutput.isMine(perWalletModelData.getWallet())) {
                /* CoinSpark START */
                // We have to skip the OP_RETURN output as there is no address and it results in
                // an exception when trying to get the destination address
                Script script = transactionOutput.getScriptPubKey();
                if (script != null) {
                    if (script.isSentToAddress() || script.isSentToP2SH()) {
                        theirOutput = transactionOutput;
                    }
                }
                // NOTE(review): reconstructed as commented-out from a whitespace-mangled
                // original - the unconditional assignment was superseded by the script
                // filter above; confirm against upstream SparkBit source.
                // theirOutput = transactionOutput;
                /* CoinSpark END */
            }
        }
    }

    if (credit != null && credit.compareTo(BigInteger.ZERO) > 0) {
        // Credit.
        try {
            String addressString = "";
            if (myOutput != null) {
                Address toAddress = new Address(getNetworkParameters(), myOutput.getScriptPubKey().getPubKeyHash());
                addressString = toAddress.toString();
            }
            String label = null;
            if (perWalletModelData.getWalletInfo() != null) {
                label = perWalletModelData.getWalletInfo().lookupLabelForReceivingAddress(addressString);
            }

            // If this transaction has assets, convert bitcoin address to a coinspark address.
            // Fudge: balanceDB might be null when this is invoked, so do nothing
            boolean hasAssets = false;
            if (wallet.CS.getBalanceDB() != null) {
                Map<Integer, BigInteger> receiveMap = wallet.CS.getAssetsSentToMe(tx);
                for (Integer assetID : receiveMap.keySet()) {
                    if (assetID == null || assetID == 0) {
                        continue; // skip bitcoin
                    }
                    hasAssets = true;
                    break;
                }
            }
            String txid = tx.getHashAsString();
            String message = CSMiscUtils.getShortTextMessage(wallet, txid);
            boolean hasMessage = (message != null);
            boolean showCoinSparkAddress = hasAssets || hasMessage;
            if (showCoinSparkAddress) {
                // Use address from sent txid map if it exists, else convert BTC to Spark address.
                String s = SparkBitMapDB.INSTANCE.getSendCoinSparkAddressForTxid(txid);
                if (s != null) {
                    addressString = s;
                } else {
                    addressString = CSMiscUtils.convertBitcoinAddressToCoinSparkAddress(addressString);
                }
            }

            if (label != null && !label.equals("")) {
                toReturn = controller.getLocaliser().getString("multiBitModel.creditDescriptionWithLabel",
                        new Object[]{addressString, label});
            } else {
                toReturn = controller.getLocaliser().getString("multiBitModel.creditDescription",
                        new Object[]{addressString});
            }
        } catch (ScriptException e) {
            log.error(e.getMessage(), e);
        }
    }

    if (debit != null && debit.compareTo(BigInteger.ZERO) > 0) {
        // Debit.
        try {
            // See if the address is a known sending address.
            if (theirOutput != null) {
                /* CoinSpark START */
                // Catch error "com.google.bitcoin.core.ScriptException: Cannot cast this script to a pay-to-address type"
                // String addressString = theirOutput.getScriptPubKey().getToAddress(getNetworkParameters()).toString();
                String addressString = null;

                // First let's see if we have stored the recipient in our map
                // NOTE(review): empty catch - a map lookup failure is silently treated
                // as "not found"; deliberate best-effort, left unchanged.
                try {
                    addressString = SparkBitMapDB.INSTANCE.getSendCoinSparkAddressForTxid(tx.getHashAsString());
                } catch (Exception e) {
                }

                if (addressString == null) {
                    // get bitcoin address
                    // NOTE(review): empty catch - non-address scripts (e.g. OP_RETURN)
                    // leave addressString null; deliberate best-effort, left unchanged.
                    try {
                        addressString = theirOutput.getScriptPubKey().getToAddress(getNetworkParameters()).toString();
                    } catch (ScriptException se) {
                    }

                    // We must ignore implicit transfer of assets to work out if assets were sent
                    boolean hasAssets = false;
                    if (wallet.CS.getBalanceDB() != null) {
                        Map<Integer, BigInteger> receiveMap = wallet.CS.getAssetsSentToMe(tx);
                        Map<Integer, BigInteger> sendMap = wallet.CS.getAssetsSentFromMe(tx);
                        for (Integer assetID : sendMap.keySet()) {
                            if (assetID == null || assetID == 0) {
                                continue; // skip bitcoin
                            }
                            BigInteger receivedAmount = receiveMap.get(assetID); // should be number of raw units
                            BigInteger sentAmount = sendMap.get(assetID);
                            boolean isReceivedAmountMissing = (receivedAmount == null);
                            boolean isSentAmountMissing = (sentAmount == null);
                            BigInteger netAmount = BigInteger.ZERO;
                            if (!isReceivedAmountMissing) {
                                netAmount = netAmount.add(receivedAmount);
                            }
                            if (!isSentAmountMissing) {
                                netAmount = netAmount.subtract(sentAmount);
                            }
                            if (netAmount.equals(BigInteger.ZERO)) {
                                continue;
                            }
                            if (sentAmount != null && sentAmount.equals(BigInteger.ZERO)) {
                                continue; // not confirmed yet
                            }
                            hasAssets = true;
                            break;
                        }
                    }
                    if (hasAssets) {
                        addressString = CSMiscUtils.convertBitcoinAddressToCoinSparkAddress(addressString);
                    }
                    if (addressString == null) {
                        addressString = "";
                    }
                }
                /* CoinSpark END */

                String label = null;
                if (perWalletModelData.getWalletInfo() != null) {
                    //String csa = perWalletModelData.getWalletInfo().lookupCoinSparkAddressForSendingAddress(addressString);
                    //if (csa != null) addressString = csa;
                    // NOTE: Check to make sure addressString not used elsewhere, except below for string generation.
                    label = perWalletModelData.getWalletInfo().lookupLabelForSendingAddress(addressString);
                }
                if (label != null && !label.equals("")) {
                    toReturn = controller.getLocaliser().getString("multiBitModel.debitDescriptionWithLabel",
                            new Object[]{addressString, label});
                } else {
                    toReturn = controller.getLocaliser().getString("multiBitModel.debitDescription",
                            new Object[]{addressString});
                }
            }
        } catch (ScriptException e) {
            log.error(e.getMessage(), e);
        }
    }
    return toReturn;
}

/**
 * Work out the transaction date.
 *
 * @param transaction
 * @return Date date of transaction, or null when it cannot be determined
 */
private Date createDate(final BitcoinController bitcoinController, Transaction transaction) {
    // If transaction has altered date - return that.
    if (transaction.getUpdateTime() != null) {
        return transaction.getUpdateTime();
    }
    // Other wise return the date of the block it first appeared in.
    Map<Sha256Hash, Integer> appearsIn = transaction.getAppearsInHashes();
    if (appearsIn != null) {
        if (!appearsIn.isEmpty()) {
            Iterator<Sha256Hash> iterator = appearsIn.keySet().iterator();
            // just take the first i.e. ignore impact of side chains
            if (iterator.hasNext()) {
                Sha256Hash appearsInHash = iterator.next();
                StoredBlock appearsInStoredBlock;
                try {
                    if (bitcoinController != null && bitcoinController.getMultiBitService() != null
                            && bitcoinController.getMultiBitService().getBlockStore() != null) {
                        appearsInStoredBlock = bitcoinController.getMultiBitService().getBlockStore().get(appearsInHash);
                        Block appearsInBlock = appearsInStoredBlock.getHeader();
                        // Set the time of the block to be the time of the
                        // transaction - TODO get transaction time.
                        return new Date(appearsInBlock.getTimeSeconds() * 1000);
                    }
                } catch (BlockStoreException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    return null;
}

/**
 * Work out the height of the block chain in which the transaction appears.
 *
 * @param transaction
 * @return
 */
private int workOutHeight(Transaction transaction) {
    return -1; // -1 = we do not know. TODO probably needs replacing by height on TransactionConfidence.
}

public void setActiveWalletInfo(WalletInfoData walletInfo) {
    activeWalletModelData.setWalletInfo(walletInfo);
}

public List<WalletData> getPerWalletModelDataList() {
    return perWalletModelDataList;
}

/**
 * @return the managed WalletData for the given filename, or null if not managed
 */
public WalletData getPerWalletModelDataByWalletFilename(String walletFilename) {
    if (walletFilename == null) {
        return null;
    }
    if (perWalletModelDataList != null) {
        for (WalletData loopPerWalletModelData : perWalletModelDataList) {
            if (walletFilename.equals(loopPerWalletModelData.getWalletFilename())) {
                return loopPerWalletModelData;
            }
        }
    }
    return null;
}

/**
 * @return the bitcoinj NetworkParameters selected by the user preference
 *         (production when unset; the preference is then persisted).
 */
public NetworkParameters getNetworkParameters() {
    // If test or production is not specified, default to production.
    String testOrProduction = super.getUserPreference(BitcoinModel.TEST_OR_PRODUCTION_NETWORK);
    if (testOrProduction == null) {
        testOrProduction = BitcoinModel.PRODUCTION_NETWORK_VALUE;
        super.setUserPreference(BitcoinModel.TEST_OR_PRODUCTION_NETWORK, testOrProduction);
    }
    // NOTE(review): "test" maps to testNet2() and "testnet3" to testNet() - the
    // method names do not match the preference names; confirm against the bitcoinj
    // version in use before touching this mapping.
    if (BitcoinModel.TEST_NETWORK_VALUE.equalsIgnoreCase(testOrProduction)) {
        return NetworkParameters.testNet2();
    } else if (BitcoinModel.TESTNET3_VALUE.equalsIgnoreCase(testOrProduction)) {
        return NetworkParameters.testNet();
    } else {
        return NetworkParameters.prodNet();
    }
}

/**
 * @return true when the active wallet is only the empty placeholder (no filename)
 */
public boolean thereIsNoActiveWallet() {
    return activeWalletModelData == null || "".equals(activeWalletModelData.getWalletFilename())
            || activeWalletModelData.getWalletFilename() == null;
}

public int getNumberOfConnectedPeers() {
    return numberOfConnectedPeers;
}

public void setNumberOfConnectedPeers(int numberOfConnectedPeers) {
    this.numberOfConnectedPeers = numberOfConnectedPeers;
}

public boolean isBlinkEnabled() {
    return blinkEnabled;
}

public void setBlinkEnabled(boolean blinkEnabled) {
    this.blinkEnabled = blinkEnabled;
}

@Override
public ModelEnum getModelEnum() {
    return ModelEnum.BITCOIN;
}

public Date getDateOfTransaction(BitcoinController controller, Transaction t) {
    return createDate(controller, t);
}
}
/* * Licensed to GraphHopper GmbH under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper GmbH licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.routing; import com.graphhopper.routing.ev.*; import com.graphhopper.routing.util.AccessFilter; import com.graphhopper.routing.util.FlagEncoder; import com.graphhopper.routing.weighting.Weighting; import com.graphhopper.storage.Graph; import com.graphhopper.storage.NodeAccess; import com.graphhopper.util.*; import com.graphhopper.util.shapes.GHPoint; import static com.graphhopper.routing.util.EncodingManager.getKey; /** * This class calculates instructions from the edges in a Path. 
 *
 * @author Peter Karich
 * @author Robin Boldt
 * @author jan soe
 */
public class InstructionsFromEdges implements Path.EdgeVisitor {

    private final Weighting weighting;
    private final FlagEncoder encoder;
    private final NodeAccess nodeAccess;
    private final InstructionList ways;
    // Explorer over edges accessible in the travel direction (used for exit counting).
    private final EdgeExplorer outEdgeExplorer;
    // Explorer over all accessible edges (used for classifying crossings).
    private final EdgeExplorer crossingExplorer;
    private final BooleanEncodedValue roundaboutEnc;
    private final BooleanEncodedValue accessEnc;
    private final BooleanEncodedValue roadClassLinkEnc;
    private final EnumEncodedValue<RoadClass> roadClassEnc;
    private final DecimalEncodedValue maxSpeedEnc;

    /*
     * We need three points to make directions
     *
     *        (1)----(2)
     *       /
     *      /
     *    (0)
     *
     * 0 is the node visited at t-2, 1 is the node visited
     * at t-1 and 2 is the node being visited at instant t.
     * orientation is the angle of the vector(1->2) expressed
     * as atan2, while previousOrientation is the angle of the
     * vector(0->1)
     * Intuitively, if orientation is smaller than
     * previousOrientation, then we have to turn right, while
     * if it is greater we have to turn left. To make this
     * algorithm work, we need to make the comparison by
     * considering orientation belonging to the interval
     * [ - pi + previousOrientation , + pi + previousOrientation ]
     */
    private EdgeIteratorState prevEdge;
    private double prevLat;
    private double prevLon;
    private double doublePrevLat, doublePrevLon; // Lat and Lon of node t-2
    private int prevNode;
    private double prevOrientation;
    private double prevInstructionPrevOrientation = Double.NaN;
    private Instruction prevInstruction;
    private boolean prevInRoundabout;
    private String prevName;
    private String prevInstructionName;

    private static final int MAX_U_TURN_DISTANCE = 35;

    public InstructionsFromEdges(Graph graph, Weighting weighting, EncodedValueLookup evLookup, InstructionList ways) {
        this.encoder = weighting.getFlagEncoder();
        this.weighting = weighting;
        this.accessEnc = evLookup.getBooleanEncodedValue(getKey(encoder.toString(), "access"));
        this.roundaboutEnc = evLookup.getBooleanEncodedValue(Roundabout.KEY);
        this.roadClassEnc = evLookup.getEnumEncodedValue(RoadClass.KEY, RoadClass.class);
        this.roadClassLinkEnc = evLookup.getBooleanEncodedValue(RoadClassLink.KEY);
        this.maxSpeedEnc = evLookup.getDecimalEncodedValue(MaxSpeed.KEY);
        this.nodeAccess = graph.getNodeAccess();
        this.ways = ways;
        prevNode = -1;
        prevInRoundabout = false;
        prevName = null;
        outEdgeExplorer = graph.createEdgeExplorer(AccessFilter.outEdges(encoder.getAccessEnc()));
        crossingExplorer = graph.createEdgeExplorer(AccessFilter.allEdges(encoder.getAccessEnc()));
    }

    /**
     * @return the list of instructions for this path.
     */
    public static InstructionList calcInstructions(Path path, Graph graph, Weighting weighting, EncodedValueLookup evLookup, final Translation tr) {
        final InstructionList ways = new InstructionList(tr);
        if (path.isFound()) {
            if (path.getEdgeCount() == 0) {
                // An empty path with a found route is just the end point.
                ways.add(new FinishInstruction(graph.getNodeAccess(), path.getEndNode()));
            } else {
                path.forEveryEdge(new InstructionsFromEdges(graph, weighting, evLookup, ways));
            }
        }
        return ways;
    }

    @Override
    public void next(EdgeIteratorState edge, int index, int prevEdgeId) {
        // baseNode is the current node and adjNode is the next
        int adjNode = edge.getAdjNode();
        int baseNode = edge.getBaseNode();

        if (prevNode == -1) {
            // First edge of the path: seed the previous point with the start node.
            prevLat = this.nodeAccess.getLat(baseNode);
            prevLon = this.nodeAccess.getLon(baseNode);
        }

        double adjLat = nodeAccess.getLat(adjNode);
        double adjLon = nodeAccess.getLon(adjNode);
        double latitude, longitude;

        PointList wayGeo = edge.fetchWayGeometry(FetchMode.ALL);
        boolean isRoundabout = edge.get(roundaboutEnc);

        if (wayGeo.size() <= 2) {
            latitude = adjLat;
            longitude = adjLon;
        } else {
            // Use the first pillar node for the turn-angle calculation.
            latitude = wayGeo.getLat(1);
            longitude = wayGeo.getLon(1);
            assert Double.compare(prevLat, nodeAccess.getLat(baseNode)) == 0;
            assert Double.compare(prevLon, nodeAccess.getLon(baseNode)) == 0;
        }

        String name = edge.getName();
        if ((prevName == null) && (!isRoundabout)) // very first instruction (if not in Roundabout)
        {
            int sign = Instruction.CONTINUE_ON_STREET;
            prevInstruction = new Instruction(sign, name, new PointList(10, nodeAccess.is3D()));
            double startLat = nodeAccess.getLat(baseNode);
            double startLon = nodeAccess.getLon(baseNode);
            double heading = AngleCalc.ANGLE_CALC.calcAzimuth(startLat, startLon, latitude, longitude);
            prevInstruction.setExtraInfo("heading", Helper.round(heading, 2));
            ways.add(prevInstruction);
            prevName = name;
        } else if (isRoundabout) {
            // remark: names and annotations within roundabout are ignored
            if (!prevInRoundabout) //just entered roundabout
            {
                int sign = Instruction.USE_ROUNDABOUT;
                RoundaboutInstruction roundaboutInstruction = new RoundaboutInstruction(sign, name,
                        new PointList(10, nodeAccess.is3D()));
                prevInstructionPrevOrientation = prevOrientation;
                if (prevName != null) {
                    // check if there is an exit at the same node the roundabout was entered
                    EdgeIterator edgeIter = outEdgeExplorer.setBaseNode(baseNode);
                    while (edgeIter.next()) {
                        if ((edgeIter.getAdjNode() != prevNode)
                                && !edgeIter.get(roundaboutEnc)) {
                            roundaboutInstruction.increaseExitNumber();
                            break;
                        }
                    }

                    // previous orientation is last orientation before entering roundabout
                    prevOrientation = AngleCalc.ANGLE_CALC.calcOrientation(doublePrevLat, doublePrevLon, prevLat, prevLon);

                    // calculate direction of entrance turn to determine direction of rotation
                    // right turn == counterclockwise and vice versa
                    double orientation = AngleCalc.ANGLE_CALC.calcOrientation(prevLat, prevLon, latitude, longitude);
                    orientation = AngleCalc.ANGLE_CALC.alignOrientation(prevOrientation, orientation);
                    double delta = (orientation - prevOrientation);
                    roundaboutInstruction.setDirOfRotation(delta);

                } else // first instructions is roundabout instruction
                {
                    prevOrientation = AngleCalc.ANGLE_CALC.calcOrientation(prevLat, prevLon, latitude, longitude);
                    prevName = name;
                }
                prevInstruction = roundaboutInstruction;
                ways.add(prevInstruction);
            }

            // Add passed exits to instruction. A node is counted if there is at least one outgoing edge
            // out of the roundabout
            EdgeIterator edgeIter = outEdgeExplorer.setBaseNode(edge.getAdjNode());
            while (edgeIter.next()) {
                if (!edgeIter.get(roundaboutEnc)) {
                    ((RoundaboutInstruction) prevInstruction).increaseExitNumber();
                    break;
                }
            }

        } else if (prevInRoundabout) //previously in roundabout but not anymore
        {
            prevInstruction.setName(name);

            // calc angle between roundabout entrance and exit
            double orientation = AngleCalc.ANGLE_CALC.calcOrientation(prevLat, prevLon, latitude, longitude);
            orientation = AngleCalc.ANGLE_CALC.alignOrientation(prevOrientation, orientation);
            double deltaInOut = (orientation - prevOrientation);

            // calculate direction of exit turn to determine direction of rotation
            // right turn == counterclockwise and vice versa
            double recentOrientation = AngleCalc.ANGLE_CALC.calcOrientation(doublePrevLat, doublePrevLon, prevLat, prevLon);
            orientation = AngleCalc.ANGLE_CALC.alignOrientation(recentOrientation, orientation);
            double deltaOut = (orientation - recentOrientation);

            prevInstruction = ((RoundaboutInstruction) prevInstruction)
                    .setRadian(deltaInOut)
                    .setDirOfRotation(deltaOut)
                    .setExited();

            prevInstructionName = prevName;
            prevName = name;

        } else {
            int sign = getTurn(edge, baseNode, prevNode, adjNode, name);

            if (sign != Instruction.IGNORE) {
                /*
                    Check if the next instruction is likely to only be a short connector to execute a u-turn
                    --A->--
                           |    <-- This is the short connector
                    --B-<--
                    Road A and Road B have to have the same name and roughly the same, but opposite orientation, otherwise we are assuming this is no u-turn.

                    Note: This approach only works if there a turn instruction for A->Connector and Connector->B.
                    Currently we don't create a turn instruction if there is no other possible turn
                    We only create a u-turn if edge B is a one-way, see #1073 for more details.
                */
                boolean isUTurn = false;
                int uTurnType = Instruction.U_TURN_UNKNOWN;
                if (!Double.isNaN(prevInstructionPrevOrientation)
                        && prevInstruction.getDistance() < MAX_U_TURN_DISTANCE
                        && (sign < 0) == (prevInstruction.getSign() < 0)
                        && (Math.abs(sign) == Instruction.TURN_SLIGHT_RIGHT
                            || Math.abs(sign) == Instruction.TURN_RIGHT
                            || Math.abs(sign) == Instruction.TURN_SHARP_RIGHT)
                        && (Math.abs(prevInstruction.getSign()) == Instruction.TURN_SLIGHT_RIGHT
                            || Math.abs(prevInstruction.getSign()) == Instruction.TURN_RIGHT
                            || Math.abs(prevInstruction.getSign()) == Instruction.TURN_SHARP_RIGHT)
                        && edge.get(accessEnc) != edge.getReverse(accessEnc)
                        && InstructionsHelper.isNameSimilar(prevInstructionName, name)) {
                    // Chances are good that this is a u-turn, we only need to check if the orientation matches
                    GHPoint point = InstructionsHelper.getPointForOrientationCalculation(edge, nodeAccess);
                    double lat = point.getLat();
                    double lon = point.getLon();
                    double currentOrientation = AngleCalc.ANGLE_CALC.calcOrientation(prevLat, prevLon, lat, lon, false);

                    double diff = Math.abs(prevInstructionPrevOrientation - currentOrientation);
                    if (diff > (Math.PI * .9) && diff < (Math.PI * 1.1)) {
                        isUTurn = true;
                        if (sign < 0) {
                            uTurnType = Instruction.U_TURN_LEFT;
                        } else {
                            uTurnType = Instruction.U_TURN_RIGHT;
                        }
                    }
                }

                if (isUTurn) {
                    // Merge the connector into the previous instruction as a u-turn.
                    prevInstruction.setSign(uTurnType);
                    prevInstruction.setName(name);
                } else {
                    prevInstruction = new Instruction(sign, name, new PointList(10, nodeAccess.is3D()));
                    // Remember the Orientation and name of the road, before doing this maneuver
                    prevInstructionPrevOrientation = prevOrientation;
                    prevInstructionName = prevName;
                    ways.add(prevInstruction);
                }
            }
            // Update the prevName, since we don't always create an instruction on name changes the previous
            // name can be an old name. This leads to incorrect turn instructions due to name changes
            prevName = name;
        }

        updatePointsAndInstruction(edge, wayGeo);

        if (wayGeo.size() <= 2) {
            doublePrevLat = prevLat;
            doublePrevLon = prevLon;
        } else {
            // Use the last pillar node before adjNode as the t-2 point.
            int beforeLast = wayGeo.size() - 2;
            doublePrevLat = wayGeo.getLat(beforeLast);
            doublePrevLon = wayGeo.getLon(beforeLast);
        }

        prevInRoundabout = isRoundabout;
        prevNode = baseNode;
        prevLat = adjLat;
        prevLon = adjLon;
        prevEdge = edge;
    }

    @Override
    public void finish() {
        if (prevInRoundabout) {
            // calc angle between roundabout entrance and finish
            double orientation = AngleCalc.ANGLE_CALC.calcOrientation(doublePrevLat, doublePrevLon, prevLat, prevLon);
            orientation = AngleCalc.ANGLE_CALC.alignOrientation(prevOrientation, orientation);
            double delta = (orientation - prevOrientation);
            ((RoundaboutInstruction) prevInstruction).setRadian(delta);
        }

        Instruction finishInstruction = new FinishInstruction(nodeAccess, prevEdge.getAdjNode());
        // This is the heading how the edge ended
        finishInstruction.setExtraInfo("last_heading",
                AngleCalc.ANGLE_CALC.calcAzimuth(doublePrevLat, doublePrevLon, prevLat, prevLon));
        ways.add(finishInstruction);
    }

    private int getTurn(EdgeIteratorState edge, int baseNode, int prevNode, int adjNode, String name) {
        GHPoint point = InstructionsHelper.getPointForOrientationCalculation(edge, nodeAccess);
        double lat = point.getLat();
        double lon = point.getLon();
        prevOrientation = AngleCalc.ANGLE_CALC.calcOrientation(doublePrevLat, doublePrevLon, prevLat, prevLon);
        int sign = InstructionsHelper.calculateSign(prevLat, prevLon, lat, lon, prevOrientation);

        InstructionsOutgoingEdges outgoingEdges = new InstructionsOutgoingEdges(prevEdge, edge, encoder, maxSpeedEnc,
                roadClassEnc, roadClassLinkEnc, crossingExplorer, nodeAccess, prevNode, baseNode, adjNode);
        int nrOfPossibleTurns = outgoingEdges.getAllowedTurns();

        // there is no other turn possible
        if (nrOfPossibleTurns <= 1) {
            if (Math.abs(sign) > 1 && outgoingEdges.getVisibleTurns() > 1) {
                // This is an actual turn
because |sign| > 1 // There could be some confusion, if we would not create a turn instruction, even though it is the only // possible turn, also see #1048 // TODO if we see issue with this approach we could consider checking if the edge is a oneway return sign; } return Instruction.IGNORE; } // Very certain, this is a turn if (Math.abs(sign) > 1) { /* * Don't show an instruction if the user is following a street, even though the street is * bending. We should only do this, if following the street is the obvious choice. */ if (InstructionsHelper.isNameSimilar(name, prevName) && outgoingEdges.outgoingEdgesAreSlowerByFactor(2)) { return Instruction.IGNORE; } return sign; } /* The current state is a bit uncertain. So we are going more or less straight sign < 2 So it really depends on the surrounding street if we need a turn instruction or not In most cases this will be a simple follow the current street and we don't necessarily need a turn instruction */ if (prevEdge == null) { // TODO Should we log this case? return sign; } boolean outgoingEdgesAreSlower = outgoingEdges.outgoingEdgesAreSlowerByFactor(1); // There is at least one other possibility to turn, and we are almost going straight // Check the other turns if one of them is also going almost straight // If not, we don't need a turn instruction EdgeIteratorState otherContinue = outgoingEdges.getOtherContinue(prevLat, prevLon, prevOrientation); // Signs provide too less detail, so we use the delta for a precise comparison double delta = InstructionsHelper.calculateOrientationDelta(prevLat, prevLon, lat, lon, prevOrientation); // This state is bad! 
Two streets are going more or less straight // Happens a lot for trunk_links // For _links, comparing flags works quite good, as links usually have different speeds => different flags if (otherContinue != null) { // We are at a fork if (!InstructionsHelper.isNameSimilar(name, prevName) || InstructionsHelper.isNameSimilar(otherContinue.getName(), prevName) || !outgoingEdgesAreSlower) { final RoadClass roadClass = edge.get(roadClassEnc); final RoadClass prevRoadClass = prevEdge.get(roadClassEnc); final RoadClass otherRoadClass = otherContinue.get(roadClassEnc); final boolean link = edge.get(roadClassLinkEnc); final boolean prevLink = prevEdge.get(roadClassLinkEnc); final boolean otherLink = otherContinue.get(roadClassLinkEnc); // We know this is a fork, but we only need an instruction if highways are actually changing, // this approach only works for major roads, for minor roads it can be hard to differentiate easily in real life if (roadClass == RoadClass.MOTORWAY || roadClass == RoadClass.TRUNK || roadClass == RoadClass.PRIMARY || roadClass == RoadClass.SECONDARY || roadClass == RoadClass.TERTIARY) { if ((roadClass == prevRoadClass && link == prevLink) && (otherRoadClass != prevRoadClass || otherLink != prevLink)) { return Instruction.IGNORE; } } GHPoint tmpPoint = InstructionsHelper.getPointForOrientationCalculation(otherContinue, nodeAccess); double otherDelta = InstructionsHelper.calculateOrientationDelta(prevLat, prevLon, tmpPoint.getLat(), tmpPoint.getLon(), prevOrientation); // This is required to avoid keep left/right on the motorway at off-ramps/motorway_links if (Math.abs(delta) < .1 && Math.abs(otherDelta) > .15 && InstructionsHelper.isNameSimilar(name, prevName)) { return Instruction.CONTINUE_ON_STREET; } if (otherDelta < delta) { return Instruction.KEEP_LEFT; } else { return Instruction.KEEP_RIGHT; } } } if (!outgoingEdgesAreSlower) { if (Math.abs(delta) > .6 || outgoingEdges.isLeavingCurrentStreet(prevName, name)) { // Leave the current road -> create 
instruction return sign; } } return Instruction.IGNORE; } private void updatePointsAndInstruction(EdgeIteratorState edge, PointList pl) { // skip adjNode int len = pl.size() - 1; for (int i = 0; i < len; i++) { prevInstruction.getPoints().add(pl, i); } double newDist = edge.getDistance(); prevInstruction.setDistance(newDist + prevInstruction.getDistance()); // todo: why do we not account for turn times here ? prevInstruction.setTime(weighting.calcEdgeMillis(edge, false) + prevInstruction.getTime()); } }
package com.indeed.proctor.common;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.indeed.proctor.common.model.Allocation;
import com.indeed.proctor.common.model.ConsumableTestDefinition;
import com.indeed.proctor.common.model.Range;
import com.indeed.proctor.common.model.TestBucket;
import com.indeed.proctor.common.model.TestDefinition;
import com.indeed.proctor.common.model.TestDependency;
import com.indeed.proctor.common.model.TestType;
import org.apache.el.ExpressionFactoryImpl;
import org.easymock.classextension.EasyMock;
import org.junit.Before;
import org.junit.Test;

import javax.el.ExpressionFactory;
import javax.el.FunctionMapper;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

/**
 * Regression tests for {@link StandardTestChooser}.
 *
 * <p>Several tests here are distribution-pinning tests: they hash 10 million identifiers
 * through the chooser and assert exact bucket counts and rolling hashes. Those magic
 * constants are NOT arbitrary — if one of these assertions fails, the bucketing function
 * has changed and existing users would be silently re-bucketed. Investigate before
 * updating the constants.
 *
 * @author rboyer
 */
public class TestStandardTestChooser {
    private ExpressionFactory expressionFactory;
    private FunctionMapper functionMapper;
    private String testName;
    private ConsumableTestDefinition testDefinition;
    // counts[i] / hashes[i] accumulate per-bucket statistics in exerciseChooser();
    // index is the bucket value (0 = control, 1 = test; the inactive bucket is excluded)
    private int[] counts;
    private int[] hashes;

    // 0% inactive / 50% control / 50% test
    private static final ImmutableList<Range> RANGES_50_50 = ImmutableList.of(
            new Range(-1, 0.0), new Range(0, 0.5), new Range(1, 0.5)
    );
    // 0% inactive / 0% control / 100% test
    private static final ImmutableList<Range> RANGES_100_0 = ImmutableList.of(
            new Range(-1, 0.0), new Range(0, 0.0), new Range(1, 1.0)
    );
    private static final List<TestBucket> INACTIVE_CONTROL_TEST_BUCKETS = ImmutableList.of(
            new TestBucket("inactive", -1, "zoot", null),
            new TestBucket("control", 0, "zoot", null),
            new TestBucket("test", 1, "zoot", null)
    );

    /**
     * Builds a fresh 50/50 test definition before each test; individual tests may
     * override the salt or allocations afterwards.
     */
    @Before
    public void setupMocks() throws Exception {
        expressionFactory = new ExpressionFactoryImpl();
        functionMapper = RuleEvaluator.FUNCTION_MAPPER;
        testName = "testName";
        testDefinition = new ConsumableTestDefinition();
        testDefinition.setConstants(Collections.<String, Object>emptyMap());
        testDefinition.setTestType(TestType.AUTHENTICATED_USER);
        // most tests just set the salt to be the same as the test name
        testDefinition.setSalt(testName);
        testDefinition.setBuckets(INACTIVE_CONTROL_TEST_BUCKETS);

        updateAllocations(RANGES_50_50);

        // only control (0) and test (1) are counted; the inactive bucket (-1) is excluded
        final int effBuckets = INACTIVE_CONTROL_TEST_BUCKETS.size() - 1;
        counts = new int[effBuckets];
        hashes = new int[effBuckets];
    }

    /** With a 100/0 allocation every identifier must land in the 'test' bucket of allocation #A1. */
    @Test
    public void testSimple_100_0() {
        updateAllocations(RANGES_100_0);

        final StandardTestChooser rtc = newChooser();
        final Map<String, Object> values = Collections.emptyMap();
        for (int i = 0; i < 100; i++) {
            final TestChooser.Result chosen = rtc.choose(String.valueOf(i), values, Collections.emptyMap());
            assertNotNull(chosen);
            assertNotNull(chosen.getTestBucket());
            assertNotNull(chosen.getAllocation());
            assertEquals(1, chosen.getTestBucket().getValue());
            assertEquals("#A1", chosen.getAllocation().getId());
        }
    }

    /** Distribution-pinning test: exact counts/hashes for the default salt (== test name). */
    @Test
    public void testSimple_50_50() {
        testDefinition.setSalt(testName);

        final StandardTestChooser chooser = newChooser();
        exerciseChooser(chooser);

        // uncomment this if you need to recompute these values
        // for (int i = 0; i < counts.length; i++) System.err.println(i + ": " + counts[i] + " / " + hashes[i]);

        // if this ever fails, it means that something is broken about how tests are split
        // and you should investigate why!
        assertEquals("bucket0 counts", 4999412, counts[0]);
        assertEquals("bucket1 counts", 5000587, counts[1]);
        assertEquals("bucket0 hash", 1863060514, hashes[0]);
        assertEquals("bucket1 hash", 765061458, hashes[1]);
    }

    // constants shared between the next two tests
    private static final int COUNTS_BUCKET0_SALT_AMP_TESTNAME = 4999049;
    private static final int COUNTS_BUCKET1_SALT_AMP_TESTNAME = 5000950;
    private static final int HASH_BUCKET0_SALT_AMP_TESTNAME = 1209398320;
    private static final int HASH_BUCKET1_SALT_AMP_TESTNAME = 494965600;

    /** Distribution-pinning test for a salt prefixed with the magic '&amp;' character. */
    @Test
    public void test_50_50_withMagicTestSalt() {
        // Now change the spec version and reevaluate
        testDefinition.setSalt("&" + testName);

        final StandardTestChooser chooser = newChooser();
        exerciseChooser(chooser);

        // uncomment this if you need to recompute these values
        // for (int i = 0; i < counts.length; i++) System.err.println(i + ": " + counts[i] + " / " + hashes[i]);

        // if this ever fails, it means that something is broken about how tests are split
        // and you should investigate why!
        assertEquals("bucket0 counts", COUNTS_BUCKET0_SALT_AMP_TESTNAME, counts[0]);
        assertEquals("bucket1 counts", COUNTS_BUCKET1_SALT_AMP_TESTNAME, counts[1]);
        assertEquals("bucket0 hash", HASH_BUCKET0_SALT_AMP_TESTNAME, hashes[0]);
        assertEquals("bucket1 hash", HASH_BUCKET1_SALT_AMP_TESTNAME, hashes[1]);
    }

    /**
     * With an '&amp;'-prefixed salt the bucketing must depend only on the salt, not the
     * test name: this test changes the test name but expects the exact same constants
     * as {@link #test_50_50_withMagicTestSalt()}.
     */
    @Test
    public void test50_50_withMagicTestSalt_and_unrelatedTestName() {
        final String originalTestName = testName;
        testName = "someOtherTestName";
        testDefinition.setSalt("&" + originalTestName);

        final StandardTestChooser chooser = newChooser();
        exerciseChooser(chooser);

        // uncomment this if you need to recompute these values
        // for (int i = 0; i < counts.length; i++) System.err.println(i + ": " + counts[i] + " / " + hashes[i]);

        // if this ever fails, it means that something is broken about how tests are split
        // and you should investigate why!
        // These values should be the same as in the preceding test
        assertEquals("bucket0 counts", COUNTS_BUCKET0_SALT_AMP_TESTNAME, counts[0]);
        assertEquals("bucket1 counts", COUNTS_BUCKET1_SALT_AMP_TESTNAME, counts[1]);
        assertEquals("bucket0 hash", HASH_BUCKET0_SALT_AMP_TESTNAME, hashes[0]);
        assertEquals("bucket1 hash", HASH_BUCKET1_SALT_AMP_TESTNAME, hashes[1]);
    }

    /**
     * A runtime exception thrown by rule evaluation must not propagate out of
     * {@code choose()}; the chooser should return an empty result (no bucket, no
     * allocation) instead.
     */
    @Test
    public void testExceptionsDealtWith() {
        final String testName = "test";
        final ConsumableTestDefinition testDefinition = new ConsumableTestDefinition();
        testDefinition.setConstants(Collections.<String, Object>emptyMap());
        testDefinition.setRule("${lang == 'en'}");
        testDefinition.setTestType(TestType.ANONYMOUS_USER);
        // most tests just set the salt to be the same as the test name
        testDefinition.setSalt(testName);
        testDefinition.setBuckets(Collections.<TestBucket>emptyList());

        final RuleEvaluator ruleEvaluator = EasyMock.createMock(RuleEvaluator.class);
        EasyMock.expect(ruleEvaluator.evaluateBooleanRule(
                EasyMock.<String>anyObject(),
                EasyMock.<Map<String,Object>>anyObject()
        ))
                // throw an unexpected type of runtime exception
                .andThrow(new RuntimeException() {})
                // Must be evaluated, or this was not a valid test
                .once();
        EasyMock.replay(ruleEvaluator);

        final TestRangeSelector selector = new TestRangeSelector(
                ruleEvaluator,
                testName,
                testDefinition
        );

        // Ensure no exceptions thrown.
        final TestChooser.Result chooseResult = new StandardTestChooser(selector)
                .choose("identifier", Collections.<String, Object>emptyMap(), Collections.emptyMap());

        assertNotNull(chooseResult);
        assertNull(
                "Expected no bucket to be found ",
                chooseResult.getTestBucket());
        assertNull(
                "Expected no allocation to be found ",
                chooseResult.getAllocation());

        EasyMock.verify(ruleEvaluator);
    }

    /**
     * When no allocation rule matches (rule evaluator always returns false) and there
     * is no unconditional fallback allocation, the chooser must return an empty result.
     */
    @Test
    public void testDefaultAllocationWithNonEmptyRule_fallback() {
        final String testName = "test";
        final ConsumableTestDefinition testDefinition = new ConsumableTestDefinition();
        testDefinition.setConstants(Collections.<String, Object>emptyMap());
        testDefinition.setTestType(TestType.ANONYMOUS_USER);
        testDefinition.setSalt(testName);
        testDefinition.setBuckets(INACTIVE_CONTROL_TEST_BUCKETS);

        final List<Allocation> allocations = Lists.newArrayList();
        allocations.add(new Allocation("${country == 'US'}", RANGES_100_0, "#B1"));
        allocations.add(new Allocation("${country == 'GB'}", RANGES_100_0, "#C1"));
        testDefinition.setAllocations(allocations);

        final RuleEvaluator ruleEvaluator = newRuleEvaluator(false);
        final TestRangeSelector selector = new TestRangeSelector(
                ruleEvaluator,
                testName,
                testDefinition
        );
        final TestChooser.Result chooseResult = new StandardTestChooser(selector)
                .choose("identifier", Collections.<String, Object>emptyMap(), Collections.emptyMap());

        assertNotNull(chooseResult);
        assertNull("Expected no bucket to be found", chooseResult.getTestBucket());
        assertNull("Expected no allocation to be found", chooseResult.getAllocation());

        EasyMock.verify(ruleEvaluator);
    }

    /**
     * When the allocation rule matches (rule evaluator always returns true), the 100/0
     * allocation must select the 'test' bucket (value 1) of allocation #B1.
     */
    @Test
    public void testDefaultAllocationWithNonEmptyRule_match() {
        final String testName = "test";
        final ConsumableTestDefinition testDefinition = new ConsumableTestDefinition();
        testDefinition.setConstants(Collections.<String, Object>emptyMap());
        testDefinition.setRule("${lang == 'en'}");
        testDefinition.setTestType(TestType.ANONYMOUS_USER);
        testDefinition.setSalt(testName);
        testDefinition.setBuckets(INACTIVE_CONTROL_TEST_BUCKETS);

        final List<Allocation> allocations = Lists.newArrayList();
        allocations.add(new Allocation("${country == 'GB'}", RANGES_100_0, "#B1"));
        testDefinition.setAllocations(allocations);

        final RuleEvaluator ruleEvaluator = newRuleEvaluator(true);
        final TestRangeSelector selector = new TestRangeSelector(
                ruleEvaluator,
                testName,
                testDefinition
        );
        final TestChooser.Result chooseResult = new StandardTestChooser(selector)
                .choose("identifier", Collections.<String, Object>emptyMap(), Collections.emptyMap());

        assertEquals("Test bucket with value 1 expected", 1, chooseResult.getTestBucket().getValue());
        assertEquals("Test allocation with id #B1 expected", "#B1", chooseResult.getAllocation().getId());

        EasyMock.verify(ruleEvaluator);
    }

    /**
     * A test that depends on 'par_test' being in bucket 10: when the supplied parent
     * bucket map satisfies the dependency, a normal bucket/allocation is chosen.
     */
    @Test
    public void testDependency_match() {
        final String testName = "test";
        final ConsumableTestDefinition testDefinition = ConsumableTestDefinition.fromTestDefinition(
                TestDefinition.builder()
                        .setTestType(TestType.ANONYMOUS_USER)
                        .setSalt(testName)
                        .setBuckets(INACTIVE_CONTROL_TEST_BUCKETS)
                        .addAllocations(new Allocation("", RANGES_100_0, "#B1"))
                        .setDependsOn(new TestDependency("par_test", 10))
                        .build()
        );

        final RuleEvaluator ruleEvaluator = newRuleEvaluator(true);
        final TestRangeSelector selector = new TestRangeSelector(
                ruleEvaluator,
                testName,
                testDefinition
        );
        final TestChooser.Result chooseResult = new StandardTestChooser(selector)
                .choose(
                        "identifier",
                        Collections.emptyMap(),
                        ImmutableMap.of("par_test", new TestBucket("", 10, ""))
                );

        assertThat(chooseResult.getTestBucket().getValue()).isEqualTo(1);
        assertThat(chooseResult.getAllocation().getId()).isEqualTo("#B1");
    }

    /**
     * Same dependency as {@link #testDependency_match()}, but the parent is in bucket 1
     * instead of the required bucket 10 — the chooser must return an empty result.
     */
    @Test
    public void testDependency_fallback() {
        final String testName = "test";
        final ConsumableTestDefinition testDefinition = ConsumableTestDefinition.fromTestDefinition(
                TestDefinition.builder()
                        .setTestType(TestType.ANONYMOUS_USER)
                        .setSalt(testName)
                        .setBuckets(INACTIVE_CONTROL_TEST_BUCKETS)
                        .addAllocations(new Allocation("", RANGES_100_0, "#B1"))
                        .setDependsOn(new TestDependency("par_test", 10))
                        .build()
        );

        final RuleEvaluator ruleEvaluator = newRuleEvaluator(true);
        final TestRangeSelector selector = new TestRangeSelector(
                ruleEvaluator,
                testName,
                testDefinition
        );
        final TestChooser.Result chooseResult = new StandardTestChooser(selector)
                .choose(
                        "identifier",
                        Collections.emptyMap(),
                        ImmutableMap.of("par_test", new TestBucket("", 1, ""))
                );

        assertThat(chooseResult.getTestBucket()).isNull();
        assertThat(chooseResult.getAllocation()).isNull();
    }

    /** Builds a chooser from the fixture fields ({@code testName}, {@code testDefinition}, ...). */
    private StandardTestChooser newChooser() {
        return new StandardTestChooser(
                expressionFactory,
                functionMapper,
                testName,
                testDefinition
        );
    }

    /** Returns a mocked RuleEvaluator whose boolean rule evaluation always yields {@code result}. */
    private RuleEvaluator newRuleEvaluator(final boolean result) {
        final RuleEvaluator ruleEvaluator = EasyMock.createMock(RuleEvaluator.class);
        EasyMock.expect(ruleEvaluator.evaluateBooleanRule(
                EasyMock.<String>anyObject(),
                EasyMock.<Map<String,Object>>anyObject()
        ))
                .andReturn(result)
                .anyTimes();
        EasyMock.replay(ruleEvaluator);
        return ruleEvaluator;
    }

    /**
     * Runs ~10M account ids through the chooser, tallying per-bucket counts and a
     * rolling 31-based hash of the account ids that fell into each bucket. The hash
     * detects not just count drift but membership drift (the same count with different
     * members would change the hash).
     */
    private void exerciseChooser(final StandardTestChooser rtc) {
        final int num = 10000000;

        final Map<String, Object> values = Collections.emptyMap();
        for (int accountId = 1; accountId < num; accountId++) { // deliberately skipping 0
            final TestChooser.Result chosen = rtc.choose(String.valueOf(accountId), values, Collections.emptyMap());
            assertNotNull(chosen);
            assertNotNull(chosen.getTestBucket());
            assertNotNull(chosen.getAllocation());
            counts[chosen.getTestBucket().getValue()]++;
            hashes[chosen.getTestBucket().getValue()] = 31 * hashes[chosen.getTestBucket().getValue()] + accountId;
        }
    }

    /** Replaces the definition's allocations with a single unconditional allocation "#A1" over {@code ranges}. */
    private void updateAllocations(final ImmutableList<Range> ranges) {
        final List<Allocation> allocations = Lists.newArrayList();
        allocations.add(new Allocation("${}", ranges, "#A1"));
        testDefinition.setAllocations(allocations);
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.miscGenerics;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInspection.*;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.util.InlineUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.ig.callMatcher.CallMatcher;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Arrays;

import static com.siyeh.ig.callMatcher.CallMatcher.exactInstanceCall;

/**
 * Inspection that reports an explicit array creation passed to a varargs parameter
 * (e.g. {@code foo(new Object[]{a, b})}) when the array can be safely flattened into
 * individual arguments ({@code foo(a, b)}), and offers a quick fix that performs the
 * flattening.
 *
 * @author ven
 */
public class RedundantArrayForVarargsCallInspection extends AbstractBaseJavaLocalInspectionTool implements CleanupLocalInspectionTool {
  @Override
  public @NotNull PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, final boolean isOnTheFly) {
    return new RedundantArrayForVarargVisitor(holder);
  }

  @Override
  @NotNull
  public String getGroupDisplayName() {
    return InspectionsBundle.message("group.names.verbose.or.redundant.code.constructs");
  }

  @Override
  @NotNull
  @NonNls
  public String getShortName() {
    return "RedundantArrayCreation";
  }

  /** Visitor that inspects every call expression and enum constant for a redundant varargs array. */
  private static final class RedundantArrayForVarargVisitor extends JavaElementVisitor {
    private static final Logger LOG = Logger.getInstance(RedundantArrayForVarargVisitor.class);

    private static final String[] LOGGER_NAMES = new String[]{"debug", "error", "info", "trace", "warn"};
    // SLF4J logging calls with a (String, Object...) signature are always considered safe to flatten,
    // see the shortcut at the top of isSafeToFlatten()
    private static final CallMatcher LOGGER_MESSAGE_CALL = exactInstanceCall("org.slf4j.Logger", LOGGER_NAMES)
      .parameterTypes(String.class.getName(), "java.lang.Object...");

    private static final LocalQuickFix myQuickFixAction = new MyQuickFix();

    private @NotNull final ProblemsHolder myHolder;

    private RedundantArrayForVarargVisitor(@NotNull final ProblemsHolder holder) {
      myHolder = holder;
    }

    @Override
    public void visitCallExpression(PsiCallExpression expression) {
      super.visitCallExpression(expression);
      checkCall(expression);
    }

    @Override
    public void visitEnumConstant(PsiEnumConstant expression) {
      super.visitEnumConstant(expression);
      checkCall(expression);
    }

    /**
     * Registers a problem on the last argument of {@code expression} if it is a
     * {@code new T[]{...}} (or zero-length {@code new T[0]}) passed to a varargs
     * parameter and flattening it would not change resolution or semantics.
     * Returns silently as soon as any precondition fails.
     */
    private void checkCall(PsiCall expression) {
      final JavaResolveResult resolveResult = expression.resolveMethodGenerics();
      PsiElement element = resolveResult.getElement();
      final PsiSubstitutor substitutor = resolveResult.getSubstitutor();
      if (!(element instanceof PsiMethod)) {
        return;
      }
      PsiMethod method = (PsiMethod)element;
      // polymorphic-signature methods (MethodHandle.invoke & co.) treat the array specially
      if (!method.isVarArgs() || AnnotationUtil.isAnnotated(method, CommonClassNames.JAVA_LANG_INVOKE_MH_POLYMORPHIC, 0)) {
        return;
      }
      PsiParameter[] parameters = method.getParameterList().getParameters();
      PsiExpressionList argumentList = expression.getArgumentList();
      if (argumentList == null) {
        return;
      }
      PsiExpression[] args = argumentList.getExpressions();
      // arg count must equal param count, i.e. exactly one expression fills the varargs slot
      if (parameters.length != args.length) {
        return;
      }
      PsiExpression lastArg = PsiUtil.skipParenthesizedExprDown(args[args.length - 1]);
      PsiParameter lastParameter = parameters[args.length - 1];
      if (!lastParameter.isVarArgs()) {
        return;
      }
      PsiType lastParamType = lastParameter.getType();
      LOG.assertTrue(lastParamType instanceof PsiEllipsisType, lastParamType);
      if (!(lastArg instanceof PsiNewExpression)) {
        return;
      }
      // the created array's type must match the (substituted or erased) varargs array type
      final PsiType substitutedLastParamType = substitutor.substitute(((PsiEllipsisType)lastParamType).toArrayType());
      final PsiType lastArgType = lastArg.getType();
      if (lastArgType == null || !lastArgType.equals(substitutedLastParamType) &&
                                 !lastArgType.equals(TypeConversionUtil.erasure(substitutedLastParamType))) {
        return;
      }
      PsiExpression[] initializers = getInitializers((PsiNewExpression)lastArg);
      if (initializers == null) {
        return;
      }
      // array-initializer elements ({...}) are only valid inside a new-expression; they cannot be flattened
      if (Arrays.stream(initializers).anyMatch(expr -> expr instanceof PsiArrayInitializerExpression)) {
        return;
      }
      if (!isSafeToFlatten(expression, method, initializers)) {
        return;
      }
      final String message = JavaBundle.message("inspection.redundant.array.creation.for.varargs.call.descriptor");
      myHolder.registerProblem(lastArg, message, myQuickFixAction);
    }

    /**
     * Checks whether replacing the array argument by {@code arrayElements} keeps the
     * call resolving to {@code oldRefMethod} with a compatible expression type.
     * Works on a copy of the call: deletes the array argument, splices in the elements,
     * and re-resolves.
     */
    private static boolean isSafeToFlatten(@NotNull final PsiCall callExpression,
                                           @NotNull final PsiMethod oldRefMethod,
                                           @NotNull final PsiExpression @NotNull [] arrayElements) {
      // SLF4J logger message calls are unambiguous; skip the expensive copy-and-resolve check
      if (callExpression instanceof PsiExpression && LOGGER_MESSAGE_CALL.matches((PsiExpression)callExpression)) {
        return true;
      }
      if (arrayElements.length == 1) {
        PsiType type = arrayElements[0].getType();
        // change foo(new Object[]{array}) to foo(array) is not safe
        if (PsiType.NULL.equals(type) || type instanceof PsiArrayType) return false;
      }
      PsiCall copy = (PsiCall)callExpression.copy();
      PsiExpressionList copyArgumentList = copy.getArgumentList();
      LOG.assertTrue(copyArgumentList != null);
      PsiExpression[] args = copyArgumentList.getExpressions();
      try {
        // simulate the quick fix on the copy: drop the array, add its elements as plain arguments
        args[args.length - 1].delete();
        if (arrayElements.length > 0) {
          copyArgumentList.addRange(arrayElements[0], arrayElements[arrayElements.length - 1]);
        }
        final Project project = callExpression.getProject();
        final JavaResolveResult resolveResult;
        if (callExpression instanceof PsiEnumConstant) {
          // enum constants resolve through the enum's constructor, not resolveMethodGenerics()
          final PsiEnumConstant enumConstant = (PsiEnumConstant)callExpression;
          final PsiClass containingClass = enumConstant.getContainingClass();
          if (containingClass == null) return false;
          final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
          final PsiClassType classType = facade.getElementFactory().createType(containingClass);
          resolveResult = facade.getResolveHelper().resolveConstructor(classType, copyArgumentList, enumConstant);
          return resolveResult.isValidResult() && resolveResult.getElement() == oldRefMethod;
        }
        else {
          resolveResult = copy.resolveMethodGenerics();
          // flattening must not redirect the call to a different overload
          if (!resolveResult.isValidResult() || resolveResult.getElement() != oldRefMethod) {
            return false;
          }
          // a statement-level call has no expected type to violate
          if (callExpression.getParent() instanceof PsiExpressionStatement) return true;
          // otherwise the flattened call's type must still satisfy at least one expected type
          final ExpectedTypeInfo[] expectedTypes = ExpectedTypesProvider.getExpectedTypes((PsiCallExpression)callExpression, false);
          if (expectedTypes.length == 0) return false;
          final PsiType expressionType = ((PsiCallExpression)copy).getType();
          if (expressionType == null) return false;
          for (ExpectedTypeInfo expectedType : expectedTypes) {
            if (expectedType.getType().isAssignableFrom(expressionType)) {
              return true;
            }
          }
          return false;
        }
      }
      catch (IncorrectOperationException e) {
        // the copy could not be edited into a valid call; treat as unsafe
        return false;
      }
    }

    /**
     * Extracts the element expressions of the array creation: the initializer list of
     * {@code new T[]{...}}, an empty array for a constant zero dimension
     * ({@code new T[0]}), or {@code null} when the elements cannot be determined.
     */
    private static PsiExpression @Nullable [] getInitializers(@NotNull final PsiNewExpression newExpression) {
      PsiArrayInitializerExpression initializer = newExpression.getArrayInitializer();
      if (initializer != null) {
        return initializer.getInitializers();
      }
      PsiExpression[] dims = newExpression.getArrayDimensions();
      if (dims.length > 0) {
        PsiExpression firstDimension = dims[0];
        Object value = JavaPsiFacade.getInstance(newExpression.getProject()).getConstantEvaluationHelper().computeConstantExpression(firstDimension);
        if (value instanceof Integer && ((Integer)value).intValue() == 0) return PsiExpression.EMPTY_ARRAY;
      }
      return null;
    }

    /** Quick fix that inlines the redundant array creation into individual varargs arguments. */
    private static final class MyQuickFix implements LocalQuickFix {
      @Override
      public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
        PsiNewExpression arrayCreation = (PsiNewExpression)descriptor.getPsiElement();
        if (arrayCreation == null) return;
        InlineUtil.inlineArrayCreationForVarargs(arrayCreation);
      }

      @Override
      @NotNull
      public String getFamilyName() {
        return JavaBundle.message("inspection.redundant.array.creation.quickfix");
      }
    }
  }
}
package galvin;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
 * Static string utilities layered on top of Commons Lang's
 * {@link org.apache.commons.lang3.StringUtils}: concatenation, padding,
 * simple table layout, tokenizing, and toy obfuscation (ROT13).
 *
 * Fixes in this revision:
 * - {@code replaceAll(String, String, String, boolean)} previously ignored its
 *   ignore-case flag and always replaced case-sensitively.
 * - {@code getOrdinal} previously returned "st"/"nd"/"rd" for 111/112/113
 *   (only exactly 11/12/13 were special-cased).
 */
public final class StringUtils extends org.apache.commons.lang3.StringUtils {
    private StringUtils() {
        // utility class; no instances
    }

    /**
     * Prefixes a single zero when the value is below ten, e.g. {@code 7 -> "07"}.
     * Note: negative values also receive the prefix (e.g. {@code -5 -> "0-5"}),
     * preserving historical behavior.
     */
    public static String addLeadingZeroIfNecessary( int integer ) {
        return integer < 10 ? "0" + integer : "" + integer;
    }

    /** Appends {@code append} to {@code target} unless target already ends with it. */
    public static String appendIfNecessary( String target, String append ) {
        return target.endsWith( append ) ? target : target + append;
    }

    /** Upper-cases the first character; returns "" when the word is empty. */
    public static String capitalize( String word ) {
        if( word.length() > 0 ) {
            return word.substring( 0, 1 ).toUpperCase() + word.substring( 1 );
        }
        return "";
    }

    /** Joins the strings with newlines; returns "" for null input. */
    public static String cat( List<String> strings ) {
        StringBuilder result = new StringBuilder();
        if( strings != null ) {
            for( int i = 0; i < strings.size(); i++ ) {
                result.append( strings.get( i ) );
                if( i + 1 < strings.size() ) {
                    result.append( "\n" );
                }
            }
        }
        return result.toString();
    }

    /** Joins the strings with newlines; returns "" for null input. */
    public static String cat( String[] strings ) {
        return cat( strings, "\n" );
    }

    /** Joins the strings with the given delimiter; returns "" for null input. */
    public static String cat( String[] strings, String delimiter ) {
        StringBuilder result = new StringBuilder();
        if( strings != null ) {
            for( int i = 0; i < strings.length; i++ ) {
                result.append( strings[i] );
                if( i + 1 < strings.length ) {
                    result.append( delimiter );
                }
            }
        }
        return result.toString();
    }

    /**
     * Lower-cases the input, then upper-cases the first letter of each
     * whitespace-separated word; whitespace is preserved as-is.
     * Throws NullPointerException for null input (historical behavior).
     */
    public static String camelCase( String string ) {
        StringBuilder builder = new StringBuilder( string.length() );
        string = string.toLowerCase();
        boolean whitespaceMode = true;
        for( char c : string.toCharArray() ) {
            if( whitespaceMode ) {
                if( !Character.isWhitespace( c ) ) {
                    c = Character.toUpperCase( c );
                    whitespaceMode = false;
                }
            }
            else if( Character.isWhitespace( c ) ) {
                whitespaceMode = true;
            }
            builder.append( c );
        }
        return builder.toString();
    }

    /** True when the array contains a string equal to {@code target}. */
    public static boolean contains( String[] array, String target ) {
        for( String string : array ) {
            if( target.equals( string ) ) {
                return true;
            }
        }
        return false;
    }

    /** Joins the strings with ", ". */
    public static String csv( Collection<String> strings ) {
        StringBuilder builder = new StringBuilder();
        Iterator<String> iterator = strings.iterator();
        while( iterator.hasNext() ) {
            builder.append( iterator.next() );
            if( iterator.hasNext() ) {
                builder.append( ", " );
            }
        }
        return builder.toString();
    }

    /** Alias for {@link #empty(String)}. */
    public static boolean isEmpty( String string ) {
        return empty( string );
    }

    /** True when the string is null, empty, or only whitespace. */
    public static boolean empty( String string ) {
        return string == null || string.trim().length() == 0;
    }

    /**
     * Returns the English ordinal suffix ("st", "nd", "rd", or "th") for a number.
     * Fixed: any value ending in 11, 12, or 13 (e.g. 111, 212) now yields "th".
     */
    public static String getOrdinal( int number ) {
        int mod100 = Math.abs( number ) % 100;
        if( mod100 >= 11 && mod100 <= 13 ) {
            return "th";
        }
        switch( mod100 % 10 ) {
            case 1: return "st";
            case 2: return "nd";
            case 3: return "rd";
            default: return "th";
        }
    }

    /** Length of the longest non-null string, or 0 when there is none. */
    public static int longestLength( String ... strings ) {
        int result = 0;
        for( String string : strings ) {
            if( string != null ) {
                result = Math.max( result, string.length() );
            }
        }
        return result;
    }

    /** Wraps {@code text} in the same markup on both sides. */
    public static String markup( String text, String markup ) {
        return markup( text, markup, markup );
    }

    /** Wraps {@code text} between {@code startMarkup} and {@code endMarkup}. */
    public static String markup( String text, String startMarkup, String endMarkup ) {
        return startMarkup + text + endMarkup;
    }

    /** Returns "" in place of null; otherwise the string unchanged. */
    public static String neverNull( String string ) {
        return ( string == null ? "" : string );
    }

    /**
     * Pads a string with ' ' until it is the given minimum length.
     * A string that is already long enough is returned unchanged; null is treated as "".
     *
     * @param text the string to pad; may be null
     * @param minLength the minimum length of the result
     * @return the padded string
     */
    public static String padTo( String text, int minLength ) {
        StringBuilder result = new StringBuilder( minLength );
        int remainder = minLength;
        if( text != null ) {
            result.append( text );
            remainder = Math.max( minLength - text.length(), 0 );
        }
        for( int i = 0; i < remainder; i++ ) {
            result.append( ' ' );
        }
        return result.toString();
    }

    /** Pads every entry to the length of the longest non-null entry. */
    public static List<String> padTo( List<String> strings ) {
        List<String> result = new ArrayList<>( strings.size() );
        int length = 0;
        for( String string : strings ) {
            if( string != null ) {
                length = Math.max( length, string.length() );
            }
        }
        for( String string : strings ) {
            result.add( padTo( string, length ) );
        }
        return result;
    }

    /**
     * Lays out the columns as in {@link #paddedLayout(List...)} and inserts a
     * separator row of {@code tableHeaderSeparator} characters after the first row.
     */
    @SafeVarargs
    public static String paddedLayout( char tableHeaderSeparator, List<String> ... lists ) {
        String padded = paddedLayout( lists );
        String[] lines = padded.split( "\n" );
        if( lines.length > 0 ) {
            // Separator row is exactly as wide as the header row.
            StringBuilder separator = new StringBuilder();
            for( int i = 0; i < lines[0].length(); i++ ) {
                separator.append( tableHeaderSeparator );
            }
            StringBuilder result = new StringBuilder();
            result.append( lines[0] );
            result.append( "\n" );
            result.append( separator );
            result.append( "\n" );
            for( int i = 1; i < lines.length; i++ ) {
                result.append( lines[i] );
                if( i + 1 < lines.length ) {
                    result.append( "\n" );
                }
            }
            return result.toString();
        }
        return padded;
    }

    /**
     * Formats the given columns as a left-aligned table: each column is padded
     * to its widest entry, shorter columns are filled with blank cells.
     */
    @SafeVarargs
    public static String paddedLayout( List<String> ... lists ) {
        // Work on copies so the caller's lists are never modified.
        List<String>[] clone = new List[lists.length];
        for( int i = 0; i < clone.length; i++ ) {
            clone[i] = new ArrayList<>( lists[i] );
        }

        int columns = clone.length;
        int rows = 0;
        for( List<String> list : clone ) {
            rows = Math.max( rows, list.size() );
        }

        // Make every column the same height by padding with blank cells.
        for( List<String> list : clone ) {
            while( list.size() < rows ) {
                list.add( " " );
            }
        }

        // Pad each column to its own widest entry.
        List<String>[] padded = new List[columns];
        for( int i = 0; i < columns; i++ ) {
            padded[i] = padTo( clone[i] );
        }

        // Emit the table row by row.
        StringBuilder result = new StringBuilder();
        for( int row = 0; row < rows; row++ ) {
            for( int column = 0; column < columns; column++ ) {
                result.append( padded[column].get( row ) );
                if( column + 1 < columns ) {
                    result.append( "    " );
                }
                else if( row + 1 < rows ) {
                    result.append( "\n" );
                }
            }
        }
        return result.toString();
    }

    /** Replaces every occurrence of {@code oldText} with {@code newText} (case-sensitive). */
    public static String replaceAll( String target, String oldText, String newText ) {
        return replaceAll( target, oldText, newText, false );
    }

    /**
     * Replaces every occurrence of {@code oldText} with {@code newText}.
     * Fixed: the ignore-case flag is now honored (it was previously discarded).
     * Returns {@code target} unchanged when any argument is null.
     */
    public static String replaceAll( String target, String oldText, String newText, boolean ignoreCase ) {
        if( target != null && oldText != null && newText != null ) {
            StringBuilder result = new StringBuilder( target );
            replaceAll( result, oldText, newText, ignoreCase );
            return result.toString();
        }
        return target;
    }

    /** Replaces every occurrence of {@code oldText} in place (case-sensitive). */
    public static String replaceAll( StringBuilder target, String oldText, String newText ) {
        replaceAll( target, oldText, newText, false );
        return target.toString();
    }

    /**
     * Replaces every occurrence of {@code oldText} with {@code newText} in place.
     *
     * @return the number of replacements performed
     */
    public static int replaceAll( StringBuilder target, String oldText, String newText, boolean ignoreCase ) {
        int count = 0;
        if( target != null && oldText != null && newText != null ) {
            if( ignoreCase ) {
                // Search a lower-cased shadow copy so matching ignores case,
                // while each replacement is applied to the real buffer.
                StringBuilder noCaseTarget = new StringBuilder( target.toString().toLowerCase() );
                String noCaseOldText = oldText.toLowerCase();
                int index = noCaseTarget.indexOf( noCaseOldText );
                while( index != -1 ) {
                    count++;
                    int endIndex = index + oldText.length();
                    target.replace( index, endIndex, newText );
                    noCaseTarget.replace( index, endIndex, newText );
                    // Resume the search after the inserted replacement text.
                    endIndex = index + newText.length();
                    index = noCaseTarget.indexOf( noCaseOldText, endIndex );
                }
            }
            else {
                int index = target.indexOf( oldText );
                while( index != -1 ) {
                    count++;
                    int endIndex = index + oldText.length();
                    target.replace( index, endIndex, newText );
                    endIndex = index + newText.length();
                    index = target.indexOf( oldText, endIndex );
                }
            }
        }
        return count;
    }

    /**
     * Reverses the string byte-by-byte using the platform default charset.
     * NOTE(review): multi-byte characters are mangled by a byte-wise reversal;
     * this is only safe for single-byte encodings — confirm callers.
     */
    public static String reverseChars( String target ) {
        byte[] bytes = target.getBytes();
        byte[] result = new byte[ bytes.length ];
        for( int i = 0; i < bytes.length; i++ ) {
            result[ bytes.length - i - 1 ] = bytes[i];
        }
        return new String( result );
    }

    /** Applies the ROT13 substitution to ASCII letters; other characters pass through. */
    public static String rot13( String s ) {
        //'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        //'nopqrstuvwxyzabcdefghijklmNOPQRSTUVWXYZABCDEFGHIJKLM'
        if( s == null || s.length() == 0 ) {
            return s;
        }
        int length = s.length();
        StringBuilder buffer = new StringBuilder( length );
        for( int i = 0; i < length; i++ ) {
            char c = s.charAt( i );
            if( c >= 'a' && c <= 'z' ) {
                c += 13;
                if( c > 'z' ) {
                    c -= 26;
                }
            }
            else if( c >= 'A' && c <= 'Z' ) {
                c += 13;
                if( c > 'Z' ) {
                    c -= 26;
                }
            }
            buffer.append( c );
        }
        return buffer.toString();
    }

    /**
     * Like ROT13, but it also encodes numeric characters:
     * 1234567890 becomes 6789012345.
     */
    public static String rot135( String s ) {
        if( s == null ) {
            return null;
        }
        s = rot13( s );
        int length = s.length();
        StringBuilder buffer = new StringBuilder( length );
        for( int i = 0; i < length; i++ ) {
            char c = s.charAt( i );
            if( c >= '0' && c <= '4' ) {
                c += 5;
            }
            else if( c >= '5' && c <= '9' ) {
                c -= 5;
            }
            buffer.append( c );
        }
        return buffer.toString();
    }

    /**
     * Renders two multi-line strings next to each other, separated by " | ",
     * with the left side padded to the longest line of either input.
     */
    public static String sideBySide( String left, String right ) {
        final String separator = " | ";
        String[] one = left.split( "\n" );
        String[] two = right.split( "\n" );

        int lineCount = Math.max( one.length, two.length );
        int longestLine = 0;
        for( String line : one ) {
            longestLine = Math.max( longestLine, line.length() );
        }
        for( String line : two ) {
            longestLine = Math.max( longestLine, line.length() );
        }

        // Rough capacity estimate to avoid buffer growth.
        int length = longestLine * lineCount * 2 + ( separator.length() + 1 ) * lineCount;
        StringBuilder result = new StringBuilder( length );
        for( int i = 0; i < lineCount; i++ ) {
            result.append( padTo( i < one.length ? one[i] : "", longestLine ) );
            result.append( separator );
            result.append( padTo( i < two.length ? two[i] : "", longestLine ) );
            result.append( "\n" );
        }
        return result.toString();
    }

    /** Replaces each run of {@code spacesPerTab} spaces with a tab character. */
    public static String spacesToTabs( String text, int spacesPerTab ) {
        if( text != null && text.length() > 0 && spacesPerTab > 0 ) {
            return replaceAll( text, spaces( spacesPerTab ), "\t" );
        }
        return text;
    }

    /** Replaces each tab character with {@code spacesPerTab} spaces. */
    public static String tabsToSpaces( String text, int spacesPerTab ) {
        if( text != null && text.length() > 0 && spacesPerTab > 0 ) {
            return replaceAll( text, "\t", spaces( spacesPerTab ) );
        }
        return text;
    }

    // Builds a run of count spaces without repeated string concatenation.
    private static String spaces( int count ) {
        char[] result = new char[count];
        Arrays.fill( result, ' ' );
        return new String( result );
    }

    /**
     * Splits {@code target} on {@code delimiter}. Interior empty tokens are kept,
     * but a trailing empty token (delimiter at end of input) is dropped.
     */
    public static String[] tokenize( String target, String delimiter ) {
        List<String> tokens = new ArrayList<>();
        int lastIndex = 0;
        int index = 0;
        while( index > -1 ) {
            index = target.indexOf( delimiter, lastIndex );
            if( index != -1 ) {
                tokens.add( target.substring( lastIndex, index ) );
                lastIndex = index + delimiter.length();
            }
            else {
                String token = target.substring( lastIndex );
                if( token.length() != 0 ) {
                    tokens.add( token );
                }
            }
        }
        return tokens.toArray( new String[ tokens.size() ] );
    }

    /** Same as {@link #tokenize} but returns a List. */
    public static List<String> tokenizeToList( String target, String delimiter ) {
        List<String> result = new ArrayList<>();
        result.addAll( Arrays.asList( tokenize( target, delimiter ) ) );
        return result;
    }

    /** Alias for {@link #toStringArray(List)}. */
    public static String[] toArray( List target ) {
        return toStringArray( target );
    }

    /** Converts each element to its {@code toString()} representation. */
    public static String[] toStringArray( List target ) {
        int size = target.size();
        String[] result = new String[ size ];
        for( int i = 0; i < size; i++ ) {
            result[i] = target.get( i ).toString();
        }
        return result;
    }
}
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.change;

import static com.google.gerrit.common.data.SubmitRecord.Status.OK;

import com.google.common.base.MoreObjects;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Table;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.ParameterizedString;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.extensions.restapi.UnprocessableEntityException;
import com.google.gerrit.extensions.webui.UiAction;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.LabelId;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.GerritPersonIdent;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.ProjectUtil;
import com.google.gerrit.server.account.AccountsCollection;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.LabelNormalizer;
import com.google.gerrit.server.git.MergeQueue;
import com.google.gerrit.server.git.VersionedMetaData.BatchMetaDataUpdate;
import com.google.gerrit.server.index.ChangeIndexer;
import com.google.gerrit.server.notedb.ChangeUpdate;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.SubmitRuleEvaluator;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.InternalChangeQuery;
import com.google.gwtorm.server.AtomicUpdate;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.OrmRuntimeException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;

import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * REST endpoint that submits a revision of a change, either individually or —
 * when {@code change.submitWholeTopic} is enabled — together with all open
 * changes that share the same topic. Also supplies the "Submit" button
 * description for the UI via {@link UiAction}.
 */
@Singleton
public class Submit implements RestModifyView<RevisionResource, SubmitInput>,
    UiAction<RevisionResource> {
  private static final Logger log = LoggerFactory.getLogger(Submit.class);

  // Default UI tooltips; ${...} placeholders are filled via ParameterizedString.
  private static final String DEFAULT_TOOLTIP =
      "Submit patch set ${patchSet} into ${branch}";
  private static final String DEFAULT_TOPIC_TOOLTIP =
      "Submit all ${topicSize} changes of the same topic";
  private static final String BLOCKED_TOPIC_TOOLTIP =
      "Other changes in this topic are not ready";
  private static final String BLOCKED_HIDDEN_TOPIC_TOOLTIP =
      "Other hidden changes in this topic are not ready";

  public enum Status {
    SUBMITTED, MERGED
  }

  /** Result of a submit call: the resulting status plus the updated change. */
  public static class Output {
    public Status status;
    transient Change change;

    private Output(Status s, Change c) {
      status = s;
      change = c;
    }
  }

  private final PersonIdent serverIdent;
  private final Provider<ReviewDb> dbProvider;
  private final GitRepositoryManager repoManager;
  private final IdentifiedUser.GenericFactory userFactory;
  private final ChangeData.Factory changeDataFactory;
  private final ChangeUpdate.Factory updateFactory;
  private final ApprovalsUtil approvalsUtil;
  private final ChangeMessagesUtil cmUtil;
  private final MergeQueue mergeQueue;
  private final ChangeIndexer indexer;
  private final LabelNormalizer labelNormalizer;
  private final AccountsCollection accounts;
  private final ChangesCollection changes;
  private final String label;
  private final ParameterizedString titlePattern;
  private final String submitTopicLabel;
  private final ParameterizedString submitTopicTooltip;
  private final boolean submitWholeTopic;
  private final Provider<InternalChangeQuery> queryProvider;
  private final Provider<Mergeable> mergeableProvider;

  @Inject
  Submit(@GerritPersonIdent PersonIdent serverIdent,
      Provider<ReviewDb> dbProvider,
      GitRepositoryManager repoManager,
      IdentifiedUser.GenericFactory userFactory,
      ChangeData.Factory changeDataFactory,
      ChangeUpdate.Factory updateFactory,
      ApprovalsUtil approvalsUtil,
      ChangeMessagesUtil cmUtil,
      MergeQueue mergeQueue,
      AccountsCollection accounts,
      ChangesCollection changes,
      ChangeIndexer indexer,
      LabelNormalizer labelNormalizer,
      @GerritServerConfig Config cfg,
      Provider<InternalChangeQuery> queryProvider,
      Provider<Mergeable> mergeableProvider) {
    this.serverIdent = serverIdent;
    this.dbProvider = dbProvider;
    this.repoManager = repoManager;
    this.userFactory = userFactory;
    this.changeDataFactory = changeDataFactory;
    this.updateFactory = updateFactory;
    this.approvalsUtil = approvalsUtil;
    this.cmUtil = cmUtil;
    this.mergeQueue = mergeQueue;
    this.accounts = accounts;
    this.changes = changes;
    this.indexer = indexer;
    this.labelNormalizer = labelNormalizer;
    // Button label and tooltip are configurable; fall back to defaults.
    this.label = MoreObjects.firstNonNull(
        Strings.emptyToNull(cfg.getString("change", null, "submitLabel")),
        "Submit");
    this.titlePattern = new ParameterizedString(MoreObjects.firstNonNull(
        cfg.getString("change", null, "submitTooltip"),
        DEFAULT_TOOLTIP));
    submitWholeTopic = wholeTopicEnabled(cfg);
    this.submitTopicLabel = MoreObjects.firstNonNull(
        Strings.emptyToNull(cfg.getString("change", null, "submitTopicLabel")),
        "Submit whole topic");
    this.submitTopicTooltip = new ParameterizedString(MoreObjects.firstNonNull(
        cfg.getString("change", null, "submitTopicTooltip"),
        DEFAULT_TOPIC_TOOLTIP));
    this.queryProvider = queryProvider;
    this.mergeableProvider = mergeableProvider;
  }

  /**
   * Validates permissions and change state, submits the change (and, when
   * enabled, its whole topic), then schedules or waits for the merge.
   */
  @Override
  public Output apply(RevisionResource rsrc, SubmitInput input)
      throws AuthException, ResourceConflictException,
      RepositoryNotFoundException, IOException, OrmException,
      UnprocessableEntityException {
    input.onBehalfOf = Strings.emptyToNull(input.onBehalfOf);
    if (input.onBehalfOf != null) {
      // Re-target the resource at the impersonated user; permission checks
      // for the impersonation itself happen in onBehalfOf().
      rsrc = onBehalfOf(rsrc, input);
    }
    ChangeControl control = rsrc.getControl();
    IdentifiedUser caller = (IdentifiedUser) control.getCurrentUser();
    Change change = rsrc.getChange();
    if (input.onBehalfOf == null && !control.canSubmit()) {
      throw new AuthException("submit not permitted");
    } else if (!change.getStatus().isOpen()) {
      throw new ResourceConflictException("change is " + status(change));
    } else if (!ProjectUtil.branchExists(repoManager, change.getDest())) {
      throw new ResourceConflictException(String.format(
          "destination branch \"%s\" not found.",
          change.getDest().get()));
    } else if (!rsrc.getPatchSet().getId().equals(change.currentPatchSetId())) {
      // TODO Allow submitting non-current revision by changing the current.
      throw new ResourceConflictException(String.format(
          "revision %s is not current revision",
          rsrc.getPatchSet().getRevision().get()));
    }

    List<Change> submittedChanges = submit(rsrc, caller, false);
    if (input.waitForMerge) {
      for (Change c : submittedChanges) {
        // TODO(sbeller): We should make schedule return a Future, then we
        // could do these all in parallel and still block until they're done.
        mergeQueue.merge(c.getDest());
      }
      // Re-read: the merge may have updated the change's status.
      change = dbProvider.get().changes().get(change.getId());
    } else {
      for (Change c : submittedChanges) {
        mergeQueue.schedule(c.getDest());
      }
    }

    if (change == null) {
      throw new ResourceConflictException("change is deleted");
    }
    switch (change.getStatus()) {
      case SUBMITTED:
        return new Output(Status.SUBMITTED, change);
      case MERGED:
        return new Output(Status.MERGED, change);
      case NEW:
        // A failed merge resets status to NEW and records the reason as a
        // ChangeMessage; surface that message if present.
        ChangeMessage msg = getConflictMessage(rsrc);
        if (msg != null) {
          throw new ResourceConflictException(msg.getMessage());
        }
        //$FALL-THROUGH$
      default:
        throw new ResourceConflictException("change is "
            + status(change));
    }
  }

  /**
   * @param changes list of changes to be submitted at once
   * @param identifiedUser the user who is checking to submit
   * @return a reason why any of the changes is not submittable or null
   */
  private String problemsForSubmittingChanges(List<ChangeData> changes,
      IdentifiedUser identifiedUser) {
    for (ChangeData c : changes) {
      try {
        ChangeControl changeControl = c.changeControl().forUser(
            identifiedUser);
        if (!changeControl.isVisible(dbProvider.get())) {
          return BLOCKED_HIDDEN_TOPIC_TOOLTIP;
        }
        if (!changeControl.canSubmit()) {
          return BLOCKED_TOPIC_TOOLTIP;
        }
        checkSubmitRule(c, c.currentPatchSet(), false);
      } catch (OrmException e) {
        log.error("Error checking if change is submittable", e);
        throw new OrmRuntimeException(e);
      } catch (ResourceConflictException e) {
        // A submit-rule rejection on any change blocks the whole topic.
        return BLOCKED_TOPIC_TOOLTIP;
      }
    }
    return null;
  }

  /** Builds the Submit button state (label, tooltip, visibility, enabled). */
  @Override
  public UiAction.Description getDescription(RevisionResource resource) {
    PatchSet.Id current = resource.getChange().currentPatchSetId();
    String topic = resource.getChange().getTopic();
    boolean visible = !resource.getPatchSet().isDraft()
        && resource.getChange().getStatus().isOpen()
        && resource.getPatchSet().getId().equals(current)
        && resource.getControl().canSubmit();
    ReviewDb db = dbProvider.get();
    ChangeData cd = changeDataFactory.create(db, resource.getControl());

    if (problemsForSubmittingChanges(Arrays.asList(cd), resource.getUser()) != null) {
      visible = false;
    }

    if (!visible) {
      return new UiAction.Description()
        .setLabel("")
        .setTitle("")
        .setVisible(false);
    }

    boolean enabled;
    try {
      enabled = mergeableProvider.get().apply(resource).mergeable;
    } catch (RestApiException | OrmException | IOException e) {
      throw new OrmRuntimeException("Could not determine mergeability", e);
    }

    List<ChangeData> changesByTopic = null;
    if (submitWholeTopic && !Strings.isNullOrEmpty(topic)) {
      changesByTopic = getChangesByTopic(topic);
    }
    if (submitWholeTopic
        && !Strings.isNullOrEmpty(topic)
        && changesByTopic.size() > 1) {
      // Whole-topic mode: button is shown but disabled when any topic member
      // is not ready; the tooltip carries the reason.
      Map<String, String> params = ImmutableMap.of(
          "topicSize", String.valueOf(changesByTopic.size()));
      String topicProblems = problemsForSubmittingChanges(changesByTopic,
          resource.getUser());
      if (topicProblems != null) {
        return new UiAction.Description()
            .setLabel(submitTopicLabel)
            .setTitle(topicProblems)
            .setVisible(true)
            .setEnabled(false);
      } else {
        return new UiAction.Description()
            .setLabel(submitTopicLabel)
            .setTitle(Strings.emptyToNull(
                submitTopicTooltip.replace(params)))
            .setVisible(true)
            .setEnabled(enabled);
      }
    } else {
      RevId revId = resource.getPatchSet().getRevision();
      Map<String, String> params = ImmutableMap.of(
          "patchSet", String.valueOf(resource.getPatchSet().getPatchSetId()),
          "branch", resource.getChange().getDest().getShortName(),
          "commit", ObjectId.fromString(revId.get()).abbreviate(7).name());
      return new UiAction.Description()
          .setLabel(label)
          .setTitle(Strings.emptyToNull(titlePattern.replace(params)))
          .setVisible(true)
          .setEnabled(enabled);
    }
  }

  /**
   * If the merge was attempted and it failed the system usually writes a
   * comment as a ChangeMessage and sets status to NEW. Find the relevant
   * message and return it.
   */
  public ChangeMessage getConflictMessage(RevisionResource rsrc)
      throws OrmException {
    // System-generated messages have no author; take the most recent one.
    return FluentIterable.from(cmUtil.byPatchSet(dbProvider.get(),
          rsrc.getNotes(),
          rsrc.getPatchSet().getId()))
        .filter(new Predicate<ChangeMessage>() {
          @Override
          public boolean apply(ChangeMessage input) {
            return input.getAuthor() == null;
          }
        })
        .last()
        .orNull();
  }

  /**
   * Atomically flips an open change to SUBMITTED; throws when the change is
   * no longer open (reporting its current state).
   */
  private Change submitToDatabase(final ReviewDb db, final Change.Id changeId,
      final Timestamp timestamp) throws OrmException,
      ResourceConflictException {
    Change ret = db.changes().atomicUpdate(changeId,
      new AtomicUpdate<Change>() {
        @Override
        public Change update(Change change) {
          if (change.getStatus().isOpen()) {
            change.setStatus(Change.Status.SUBMITTED);
            change.setLastUpdatedOn(timestamp);
            return change;
          }
          return null;
        }
      });
    if (ret != null) {
      return ret;
    } else {
      throw new ResourceConflictException("change " + changeId + " is "
          + status(db.changes().get(changeId)));
    }
  }

  /** Submits a single change: checks submit rules, records approvals, flips status, reindexes. */
  private Change submitThisChange(RevisionResource rsrc, IdentifiedUser caller,
      boolean force) throws ResourceConflictException, OrmException,
      IOException {
    ReviewDb db = dbProvider.get();
    ChangeData cd = changeDataFactory.create(db, rsrc.getControl());
    List<SubmitRecord> submitRecords = checkSubmitRule(cd,
        rsrc.getPatchSet(), force);
    final Timestamp timestamp = TimeUtil.nowTs();
    Change change = rsrc.getChange();
    ChangeUpdate update = updateFactory.create(rsrc.getControl(), timestamp);
    update.submit(submitRecords);

    db.changes().beginTransaction(change.getId());
    try {
      BatchMetaDataUpdate batch = approve(rsrc.getPatchSet().getId(),
          cd.changeControl(), update, caller, timestamp);
      // Write update commit after all normalized label commits.
      batch.write(update, new CommitBuilder());
      change = submitToDatabase(db, change.getId(), timestamp);
      db.commit();
    } finally {
      // NOTE(review): rollback after a successful commit appears to be a
      // no-op here and only undoes work when commit was not reached — the
      // usual gwtorm transaction pattern; confirm against ReviewDb semantics.
      db.rollback();
    }
    indexer.index(db, change);
    return change;
  }

  /** Submits every open change sharing the topic inside one transaction. */
  private List<Change> submitWholeTopic(RevisionResource rsrc, IdentifiedUser caller,
      boolean force, String topic) throws ResourceConflictException,
      OrmException, IOException {
    Preconditions.checkNotNull(topic);
    final Timestamp timestamp = TimeUtil.nowTs();

    ReviewDb db = dbProvider.get();
    ChangeData cd = changeDataFactory.create(db, rsrc.getControl());

    List<ChangeData> changesByTopic = queryProvider.get().byTopicOpen(topic);
    // Re-validate all topic members; any problem aborts the whole submit.
    String problems = problemsForSubmittingChanges(changesByTopic, caller);
    if (problems != null) {
      throw new ResourceConflictException(problems);
    }

    Change change = rsrc.getChange();
    ChangeUpdate update = updateFactory.create(rsrc.getControl(), timestamp);

    List<SubmitRecord> submitRecords = checkSubmitRule(cd,
        rsrc.getPatchSet(), force);
    update.submit(submitRecords);

    db.changes().beginTransaction(change.getId());
    try {
      for (ChangeData c : changesByTopic) {
        BatchMetaDataUpdate batch = approve(c.currentPatchSet().getId(),
            c.changeControl(), update, caller, timestamp);
        // Write update commit after all normalized label commits.
        batch.write(update, new CommitBuilder());
        submitToDatabase(db, c.getId(), timestamp);
      }
      db.commit();
    } finally {
      db.rollback();
    }
    List<Change.Id> ids = new ArrayList<>(changesByTopic.size());
    List<Change> ret = new ArrayList<>(changesByTopic.size());
    for (ChangeData c : changesByTopic) {
      ids.add(c.getId());
      ret.add(c.change());
    }
    indexer.indexAsync(ids).checkedGet();
    return ret;
  }

  /** Dispatches to whole-topic or single-change submission. */
  public List<Change> submit(RevisionResource rsrc, IdentifiedUser caller,
      boolean force) throws ResourceConflictException, OrmException,
      IOException {
    String topic = rsrc.getChange().getTopic();
    if (submitWholeTopic && !Strings.isNullOrEmpty(topic)) {
      return submitWholeTopic(rsrc, caller, force, topic);
    } else {
      return Arrays.asList(submitThisChange(rsrc, caller, force));
    }
  }

  /**
   * Ensures a Submit approval by the caller exists on the patch set and
   * normalizes all approvals against current permissions, persisting the
   * result to both the database and notedb.
   */
  private BatchMetaDataUpdate approve(PatchSet.Id psId, ChangeControl control,
      ChangeUpdate update, IdentifiedUser caller, Timestamp timestamp)
      throws OrmException {
    Map<PatchSetApproval.Key, PatchSetApproval> byKey = Maps.newHashMap();
    for (PatchSetApproval psa :
        approvalsUtil.byPatchSet(dbProvider.get(), control, psId)) {
      if (!byKey.containsKey(psa.getKey())) {
        byKey.put(psa.getKey(), psa);
      }
    }

    PatchSetApproval submit = ApprovalsUtil.getSubmitter(psId, byKey.values());
    if (submit == null
        || !submit.getAccountId().equals(caller.getAccountId())) {
      submit = new PatchSetApproval(
          new PatchSetApproval.Key(
              psId,
              caller.getAccountId(),
              LabelId.SUBMIT),
          (short) 1, TimeUtil.nowTs());
      byKey.put(submit.getKey(), submit);
    }
    submit.setValue((short) 1);
    submit.setGranted(timestamp);

    // Flatten out existing approvals for this patch set based upon the current
    // permissions. Once the change is closed the approvals are not updated at
    // presentation view time, except for zero votes used to indicate a reviewer
    // was added. So we need to make sure votes are accurate now. This way if
    // permissions get modified in the future, historical records stay accurate.
    LabelNormalizer.Result normalized =
        labelNormalizer.normalize(control, byKey.values());

    // TODO(dborowitz): Don't use a label in notedb; just check when status
    // change happened.
    update.putApproval(submit.getLabel(), submit.getValue());

    dbProvider.get().patchSetApprovals().upsert(normalized.getNormalized());
    dbProvider.get().patchSetApprovals().delete(normalized.deleted());

    try {
      return saveToBatch(control, update, normalized, timestamp);
    } catch (IOException e) {
      throw new OrmException(e);
    }
  }

  /**
   * Writes the normalized approval deltas to notedb: one commit per affected
   * user other than the caller, then folds the caller's own deltas into the
   * caller's update.
   */
  private BatchMetaDataUpdate saveToBatch(ChangeControl ctl,
      ChangeUpdate callerUpdate, LabelNormalizer.Result normalized,
      Timestamp timestamp) throws IOException {
    Table<Account.Id, String, Optional<Short>> byUser = HashBasedTable.create();
    for (PatchSetApproval psa : normalized.updated()) {
      byUser.put(psa.getAccountId(), psa.getLabel(),
          Optional.of(psa.getValue()));
    }
    for (PatchSetApproval psa : normalized.deleted()) {
      // Absent value means "remove this approval".
      byUser.put(psa.getAccountId(), psa.getLabel(),
          Optional.<Short> absent());
    }

    BatchMetaDataUpdate batch = callerUpdate.openUpdate();
    for (Account.Id accountId : byUser.rowKeySet()) {
      if (!accountId.equals(callerUpdate.getUser().getAccountId())) {
        ChangeUpdate update = updateFactory.create(
            ctl.forUser(userFactory.create(dbProvider, accountId)), timestamp);
        update.setSubject("Finalize approvals at submit");
        putApprovals(update, byUser.row(accountId));

        CommitBuilder commit = new CommitBuilder();
        commit.setCommitter(new PersonIdent(serverIdent, timestamp));
        batch.write(update, commit);
      }
    }

    putApprovals(callerUpdate,
        byUser.row(callerUpdate.getUser().getAccountId()));
    return batch;
  }

  /** Applies a per-user approval map to an update; absent values delete the label. */
  private static void putApprovals(ChangeUpdate update,
      Map<String, Optional<Short>> approvals) {
    for (Map.Entry<String, Optional<Short>> e : approvals.entrySet()) {
      if (e.getValue().isPresent()) {
        update.putApproval(e.getKey(), e.getValue().get());
      } else {
        update.removeApproval(e.getKey());
      }
    }
  }

  /**
   * Evaluates the Prolog submit rules for the patch set. Returns the records
   * when submittable (or when {@code force} is set); otherwise throws a
   * ResourceConflictException describing every blocking label.
   */
  private List<SubmitRecord> checkSubmitRule(ChangeData cd,
      PatchSet patchSet, boolean force)
      throws ResourceConflictException, OrmException {
    List<SubmitRecord> results = new SubmitRuleEvaluator(cd)
        .setPatchSet(patchSet)
        .canSubmit();
    Optional<SubmitRecord> ok = findOkRecord(results);
    if (ok.isPresent()) {
      // Rules supplied a valid solution.
      return ImmutableList.of(ok.get());
    } else if (force) {
      return results;
    } else if (results.isEmpty()) {
      throw new IllegalStateException(String.format(
          "ChangeControl.canSubmit returned empty list for %s in %s",
          patchSet.getId(), cd.change().getProject().get()));
    }

    for (SubmitRecord record : results) {
      switch (record.status) {
        case CLOSED:
          throw new ResourceConflictException("change is closed");

        case RULE_ERROR:
          throw new ResourceConflictException(String.format(
              "rule error: %s",
              record.errorMessage));

        case NOT_READY:
          // Collect all blocking labels into one "; "-separated message.
          StringBuilder msg = new StringBuilder();
          for (SubmitRecord.Label lbl : record.labels) {
            switch (lbl.status) {
              case OK:
              case MAY:
                continue;

              case REJECT:
                if (msg.length() > 0) {
                  msg.append("; ");
                }
                msg.append("blocked by ").append(lbl.label);
                continue;

              case NEED:
                if (msg.length() > 0) {
                  msg.append("; ");
                }
                msg.append("needs ").append(lbl.label);
                continue;

              case IMPOSSIBLE:
                if (msg.length() > 0) {
                  msg.append("; ");
                }
                msg.append("needs ").append(lbl.label)
                   .append(" (check project access)");
                continue;

              default:
                throw new IllegalStateException(String.format(
                    "Unsupported SubmitRecord.Label %s for %s in %s",
                    lbl.toString(),
                    patchSet.getId(),
                    cd.change().getProject().get()));
            }
          }
          throw new ResourceConflictException(msg.toString());

        default:
          throw new IllegalStateException(String.format(
              "Unsupported SubmitRecord %s for %s in %s",
              record,
              patchSet.getId().getId(),
              cd.change().getProject().get()));
      }
    }
    throw new IllegalStateException();
  }

  /** Returns the first submit record whose status is OK, if any. */
  private static Optional<SubmitRecord> findOkRecord(Collection<SubmitRecord> in) {
    return Iterables.tryFind(in, new Predicate<SubmitRecord>() {
      @Override
      public boolean apply(SubmitRecord input) {
        return input.status == OK;
      }
    });
  }

  /** Human-readable change status for error messages; "deleted" when null. */
  static String status(Change change) {
    return change != null ? change.getStatus().name().toLowerCase() : "deleted";
  }

  /**
   * Validates an on-behalf-of submit and returns the resource re-targeted at
   * the impersonated user.
   */
  private RevisionResource onBehalfOf(RevisionResource rsrc, SubmitInput in)
      throws AuthException, UnprocessableEntityException, OrmException {
    ChangeControl caller = rsrc.getControl();
    if (!caller.canSubmit()) {
      throw new AuthException("submit not permitted");
    }
    if (!caller.canSubmitAs()) {
      throw new AuthException("submit on behalf of not permitted");
    }
    IdentifiedUser targetUser = accounts.parseId(in.onBehalfOf);
    if (targetUser == null) {
      throw new UnprocessableEntityException(String.format(
          "Account Not Found: %s", in.onBehalfOf));
    }
    ChangeControl target = caller.forUser(targetUser);
    if (!target.getRefControl().isVisible()) {
      throw new UnprocessableEntityException(String.format(
          "on_behalf_of account %s cannot see destination ref",
          targetUser.getAccountId()));
    }
    return new RevisionResource(changes.parse(target), rsrc.getPatchSet());
  }

  /** Reads the change.submitWholeTopic toggle (defaults to false). */
  static boolean wholeTopicEnabled(Config config) {
    return config.getBoolean("change", null, "submitWholeTopic" , false);
  }

  private List<ChangeData> getChangesByTopic(String topic) {
    try {
      return queryProvider.get().byTopicOpen(topic);
    } catch (OrmException e) {
      throw new OrmRuntimeException(e);
    }
  }

  /** Variant of Submit addressed by change (always acts on the current revision). */
  public static class CurrentRevision implements
      RestModifyView<ChangeResource, SubmitInput> {
    private final Provider<ReviewDb> dbProvider;
    private final Submit submit;
    private final ChangeJson json;

    @Inject
    CurrentRevision(Provider<ReviewDb> dbProvider,
        Submit submit,
        ChangeJson json) {
      this.dbProvider = dbProvider;
      this.submit = submit;
      this.json = json;
    }

    @Override
    public ChangeInfo apply(ChangeResource rsrc, SubmitInput input)
        throws AuthException, ResourceConflictException,
        RepositoryNotFoundException, IOException, OrmException,
        UnprocessableEntityException {
      PatchSet ps = dbProvider.get().patchSets()
        .get(rsrc.getChange().currentPatchSetId());
      if (ps == null) {
        throw new ResourceConflictException("current revision is missing");
      } else if (!rsrc.getControl().isPatchVisible(ps, dbProvider.get())) {
        throw new AuthException("current revision not accessible");
      }
      Output out = submit.apply(new RevisionResource(rsrc, ps), input);
      return json.format(out.change);
    }
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.configurations;

import com.intellij.execution.CommandLineUtil;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.IllegalEnvVarException;
import com.intellij.execution.Platform;
import com.intellij.execution.process.ProcessNotCreatedException;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.EnvironmentUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CaseInsensitiveStringHashingStrategy;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.*;

/**
 * OS-independent way of executing external processes with complex parameters.
 * <p>
 * Main idea of the class is to accept parameters "as-is", just as they should look to an external process, and quote/escape them
 * as required by the underlying platform - so to run some program with a "parameter with space" all that's needed is
 * {@code new GeneralCommandLine("some program", "parameter with space").createProcess()}.
 * <p>
 * Consider the following things when using this class.
 *
 * <h3>Working directory</h3>
 * By default, a current directory of the IDE process is used (usually a "bin/" directory of IDE installation).
 * If a child process may create files in it, this choice is unwelcome. On the other hand, informational commands (e.g. "git --version")
 * are safe. When unsure, set it to something neutral - like user's home or a temp directory.
 *
 * <h3>Parent Environment</h3>
 * {@link ParentEnvironmentType Three options here}.
 * For commands designed from the ground up for typing into a terminal, use {@link ParentEnvironmentType#CONSOLE CONSOLE}
 * (typical cases: version controls, Node.js and all the stuff around it, Python and Ruby interpreters and utilities, etc).
 * For GUI apps and CLI tools which aren't primarily intended to be launched by humans, use {@link ParentEnvironmentType#SYSTEM SYSTEM}
 * (examples: UI builders, browsers, XCode components). For the empty environment, there is {@link ParentEnvironmentType#NONE NONE}.
 * According to an extensive research conducted by British scientists (tm) on a diverse population of both wild and domesticated tools
 * (no one was harmed), most of them are either insensitive to an environment or fall into the first category,
 * thus backing up the choice of CONSOLE as the default value.
 *
 * <h3>Encoding/Charset</h3>
 * The {@link #getCharset()} method is used by classes like {@link com.intellij.execution.process.OSProcessHandler OSProcessHandler}
 * or {@link com.intellij.execution.util.ExecUtil ExecUtil} to decode bytes of a child's output stream. For proper conversion,
 * the same value should be used on another side of the pipe. Chances are you don't have to mess with the setting -
 * because a platform-dependent guessing behind {@link Charset#defaultCharset()} is used by default and a child process
 * may happen to use a similar heuristic.
 * If the above automagic fails or more control is needed, the charset may be set explicitly. Again, do not forget the other side -
 * call {@code addParameter("-Dfile.encoding=...")} for Java-based tools, or use {@code withEnvironment("HGENCODING", "...")}
 * for Mercurial, etc.
 *
 * @see com.intellij.execution.util.ExecUtil
 * @see com.intellij.execution.process.OSProcessHandler
 */
public class GeneralCommandLine implements UserDataHolder {
  private static final Logger LOG = Logger.getInstance("#com.intellij.execution.configurations.GeneralCommandLine");

  /**
   * Determines the scope of a parent environment passed to a child process.
   * <p>
   * {@code NONE} means a child process will receive an empty environment. <br/>
   * {@code SYSTEM} will provide it with the same environment as an IDE. <br/>
   * {@code CONSOLE} provides the child with a similar environment as if it was launched from, well, a console.
   * On OS X, a console environment is simulated (see {@link EnvironmentUtil#getEnvironmentMap()} for reasons it's needed
   * and details on how it works). On Windows and Unix hosts, this option is no different from {@code SYSTEM}
   * since there is no drastic distinction in environment between GUI and console apps.
   */
  public enum ParentEnvironmentType {NONE, SYSTEM, CONSOLE}

  private String myExePath;
  private File myWorkDirectory;
  private final Map<String, String> myEnvParams = new MyTHashMap();
  private ParentEnvironmentType myParentEnvironmentType = ParentEnvironmentType.CONSOLE;
  private final ParametersList myProgramParams = new ParametersList();
  private Charset myCharset = CharsetToolkit.getDefaultSystemCharset();
  private boolean myRedirectErrorStream = false;
  private File myInputFile;
  private Map<Object, Object> myUserData;

  public GeneralCommandLine() { }

  public GeneralCommandLine(@NotNull String... command) {
    this(Arrays.asList(command));
  }

  public GeneralCommandLine(@NotNull List<String> command) {
    int size = command.size();
    if (size > 0) {
      setExePath(command.get(0));
      if (size > 1) {
        addParameters(command.subList(1, size));
      }
    }
  }

  /** Copy constructor; deliberately does NOT copy user data (see comment below). */
  protected GeneralCommandLine(@NotNull GeneralCommandLine original) {
    myExePath = original.myExePath;
    myWorkDirectory = original.myWorkDirectory;
    myEnvParams.putAll(original.myEnvParams);
    myParentEnvironmentType = original.myParentEnvironmentType;
    original.myProgramParams.copyTo(myProgramParams);
    myCharset = original.myCharset;
    myRedirectErrorStream = original.myRedirectErrorStream;
    myInputFile = original.myInputFile;
    // this is intentional memory waste, to avoid warning suppression. We should not copy UserData, but can't suppress a warning for a single field
    myUserData = ContainerUtil.newHashMap();
  }

  // NOTE(review): annotated @NotNull but myExePath is null until set — pre-existing contract, kept.
  @NotNull
  public String getExePath() {
    return myExePath;
  }

  @NotNull
  public GeneralCommandLine withExePath(@NotNull String exePath) {
    myExePath = exePath.trim();
    return this;
  }

  public void setExePath(@NotNull String exePath) {
    withExePath(exePath);
  }

  public File getWorkDirectory() {
    return myWorkDirectory;
  }

  @NotNull
  public GeneralCommandLine withWorkDirectory(@Nullable String path) {
    return withWorkDirectory(path != null ? new File(path) : null);
  }

  @NotNull
  public GeneralCommandLine withWorkDirectory(@Nullable File workDirectory) {
    myWorkDirectory = workDirectory;
    return this;
  }

  public void setWorkDirectory(@Nullable String path) {
    withWorkDirectory(path);
  }

  public void setWorkDirectory(@Nullable File workDirectory) {
    withWorkDirectory(workDirectory);
  }

  /**
   * Note: the map returned is forgiving to passing null values into putAll().
   */
  @NotNull
  public Map<String, String> getEnvironment() {
    return myEnvParams;
  }

  @NotNull
  public GeneralCommandLine withEnvironment(@Nullable Map<String, String> environment) {
    if (environment != null) {
      getEnvironment().putAll(environment);
    }
    return this;
  }

  @NotNull
  public GeneralCommandLine withEnvironment(@NotNull String key, @NotNull String value) {
    getEnvironment().put(key, value);
    return this;
  }

  public boolean isPassParentEnvironment() {
    return myParentEnvironmentType != ParentEnvironmentType.NONE;
  }

  /** @deprecated use {@link #withParentEnvironmentType(ParentEnvironmentType)} (to be removed in IDEA 2018.*) */
  @Deprecated
  public void setPassParentEnvironment(boolean passParentEnvironment) {
    withParentEnvironmentType(passParentEnvironment ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE);
  }

  @NotNull
  public ParentEnvironmentType getParentEnvironmentType() {
    return myParentEnvironmentType;
  }

  @NotNull
  public GeneralCommandLine withParentEnvironmentType(@NotNull ParentEnvironmentType type) {
    myParentEnvironmentType = type;
    return this;
  }

  /**
   * Returns an environment that will be inherited by a child process.
   * @see #getEffectiveEnvironment()
   */
  @NotNull
  public Map<String, String> getParentEnvironment() {
    switch (myParentEnvironmentType) {
      case SYSTEM:
        return System.getenv();
      case CONSOLE:
        return EnvironmentUtil.getEnvironmentMap();
      default:
        return Collections.emptyMap();
    }
  }

  /**
   * Returns an environment as seen by a child process,
   * that is the {@link #getEnvironment() environment} merged with the {@link #getParentEnvironment() parent} one.
   */
  @NotNull
  public Map<String, String> getEffectiveEnvironment() {
    MyTHashMap env = new MyTHashMap();
    setupEnvironment(env);
    return env;
  }

  public void addParameters(@NotNull String... parameters) {
    withParameters(parameters);
  }

  public void addParameters(@NotNull List<String> parameters) {
    withParameters(parameters);
  }

  @NotNull
  public GeneralCommandLine withParameters(@NotNull String... parameters) {
    for (String parameter : parameters) addParameter(parameter);
    return this;
  }

  @NotNull
  public GeneralCommandLine withParameters(@NotNull List<String> parameters) {
    for (String parameter : parameters) addParameter(parameter);
    return this;
  }

  public void addParameter(@NotNull String parameter) {
    myProgramParams.add(parameter);
  }

  @NotNull
  public ParametersList getParametersList() {
    return myProgramParams;
  }

  @NotNull
  public Charset getCharset() {
    return myCharset;
  }

  @NotNull
  public GeneralCommandLine withCharset(@NotNull Charset charset) {
    myCharset = charset;
    return this;
  }

  public void setCharset(@NotNull Charset charset) {
    withCharset(charset);
  }

  public boolean isRedirectErrorStream() {
    return myRedirectErrorStream;
  }

  @NotNull
  public GeneralCommandLine withRedirectErrorStream(boolean redirectErrorStream) {
    myRedirectErrorStream = redirectErrorStream;
    return this;
  }

  public void setRedirectErrorStream(boolean redirectErrorStream) {
    withRedirectErrorStream(redirectErrorStream);
  }

  @NotNull
  public GeneralCommandLine withInput(@Nullable File file) {
    myInputFile = file;
    return this;
  }

  /**
   * Returns string representation of this command line.<br/>
   * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line.
   *
   * @return single-string representation of this command line.
   */
  @NotNull
  public String getCommandLineString() {
    return getCommandLineString(null);
  }

  /**
   * Returns string representation of this command line.<br/>
   * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line.
   *
   * @param exeName use this executable name instead of given by {@link #setExePath(String)}
   * @return single-string representation of this command line.
   */
  @NotNull
  public String getCommandLineString(@Nullable String exeName) {
    return ParametersList.join(getCommandLineList(exeName));
  }

  @NotNull
  public List<String> getCommandLineList(@Nullable String exeName) {
    List<String> commands = new ArrayList<>();
    if (exeName != null) {
      commands.add(exeName);
    }
    else if (myExePath != null) {
      commands.add(myExePath);
    }
    else {
      commands.add("<null>");
    }
    commands.addAll(myProgramParams.getList());
    return commands;
  }

  /**
   * Prepares command (quotes and escapes all arguments) and returns it as a newline-separated list.
   *
   * @return command as a newline-separated list.
   * @see #getPreparedCommandLine(Platform)
   */
  @NotNull
  public String getPreparedCommandLine() {
    return getPreparedCommandLine(Platform.current());
  }

  /**
   * Prepares command (quotes and escapes all arguments) and returns it as a newline-separated list
   * (suitable e.g. for passing in an environment variable).
   *
   * @param platform a target platform
   * @return command as a newline-separated list.
   */
  @NotNull
  public String getPreparedCommandLine(@NotNull Platform platform) {
    String exePath = myExePath != null ? myExePath : "";
    return StringUtil.join(prepareCommandLine(exePath, myProgramParams.getList(), platform), "\n");
  }

  @NotNull
  protected List<String> prepareCommandLine(@NotNull String command, @NotNull List<String> parameters, @NotNull Platform platform) {
    return CommandLineUtil.toCommandLine(command, parameters, platform);
  }

  /**
   * Validates the configuration (work directory, exe path, env var names/values),
   * escapes the command for the current platform and launches the child process.
   *
   * @throws ExecutionException on invalid configuration or when process creation fails
   *         (the latter wrapped as {@link ProcessNotCreatedException}).
   */
  @NotNull
  public Process createProcess() throws ExecutionException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Executing [" + getCommandLineString() + "]");
      LOG.debug("  environment: " + myEnvParams + " (+" + myParentEnvironmentType + ")");
      LOG.debug("  charset: " + myCharset);
    }

    try {
      if (myWorkDirectory != null) {
        if (!myWorkDirectory.exists()) {
          throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.does.not.exist", myWorkDirectory));
        }
        if (!myWorkDirectory.isDirectory()) {
          throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.not.directory", myWorkDirectory));
        }
      }

      if (StringUtil.isEmptyOrSpaces(myExePath)) {
        throw new ExecutionException(IdeBundle.message("run.configuration.error.executable.not.specified"));
      }
    }
    catch (ExecutionException e) {
      LOG.debug(e);
      throw e;
    }

    // Reject env entries the OS would refuse (or silently mangle) before spawning.
    for (Map.Entry<String, String> entry : myEnvParams.entrySet()) {
      String name = entry.getKey(), value = entry.getValue();
      if (!EnvironmentUtil.isValidName(name)) throw new IllegalEnvVarException(IdeBundle.message("run.configuration.invalid.env.name", name));
      if (!EnvironmentUtil.isValidValue(value)) throw new IllegalEnvVarException(IdeBundle.message("run.configuration.invalid.env.value", name, value));
    }

    String exePath = myExePath;
    // On macOS with a CONSOLE environment, a bare command name (one containing no file
    // separator) may resolve differently under the IDE's PATH vs. the shell's PATH;
    // if the IDE's PATH cannot find it but the shell's can, use the shell's resolution.
    // FIX: the guard used File.pathSeparatorChar (':'), which is absent from almost any
    // path - including absolute ones - so the fallback ran for non-bare paths too.
    // The intended check for "bare name" is the absence of File.separatorChar ('/').
    if (SystemInfo.isMac && myParentEnvironmentType == ParentEnvironmentType.CONSOLE && exePath.indexOf(File.separatorChar) == -1) {
      String systemPath = System.getenv("PATH");
      String shellPath = EnvironmentUtil.getValue("PATH");
      if (!Objects.equals(systemPath, shellPath)) {
        File exeFile = PathEnvironmentVariableUtil.findInPath(myExePath, systemPath, null);
        if (exeFile == null) {
          exeFile = PathEnvironmentVariableUtil.findInPath(myExePath, shellPath, null);
          if (exeFile != null) {
            LOG.debug(exePath + " => " + exeFile);
            exePath = exeFile.getPath();
          }
        }
      }
    }

    List<String> commands = prepareCommandLine(exePath, myProgramParams.getList(), Platform.current());

    try {
      return startProcess(commands);
    }
    catch (IOException e) {
      LOG.debug(e);
      throw new ProcessNotCreatedException(e.getMessage(), e, this);
    }
  }

  /**
   * @implNote for subclasses:
   * On Windows the escapedCommands argument must never be modified or augmented in any way.
   * Windows command line handling is extremely fragile and vague, and the exact escaping of a particular argument may vary
   * depending on values of the preceding arguments.
   *
   *    [foo] [^] -> [foo] [^^]
   *
   * but:
   *
   *    [foo] ["] [^] -> [foo] [\"] ["^"]
   *
   * Notice how the last parameter escaping changes after prepending another argument.
   *
   * If you need to alter the command line passed in, override the {@link #prepareCommandLine(String, List, Platform)} method instead.
   */
  @NotNull
  protected Process startProcess(@NotNull List<String> escapedCommands) throws IOException {
    ProcessBuilder builder = new ProcessBuilder(escapedCommands);
    setupEnvironment(builder.environment());
    builder.directory(myWorkDirectory);
    builder.redirectErrorStream(myRedirectErrorStream);
    if (myInputFile != null) {
      builder.redirectInput(ProcessBuilder.Redirect.from(myInputFile));
    }
    return builder.start();
  }

  /** Fills {@code environment} with the parent environment (per type) overlaid by user entries. */
  protected void setupEnvironment(@NotNull Map<String, String> environment) {
    environment.clear();

    if (myParentEnvironmentType != ParentEnvironmentType.NONE) {
      environment.putAll(getParentEnvironment());
    }

    if (SystemInfo.isUnix) {
      File workDirectory = getWorkDirectory();
      if (workDirectory != null) {
        // Shells export PWD; some tools rely on it matching the working directory.
        environment.put("PWD", FileUtil.toSystemDependentName(workDirectory.getAbsolutePath()));
      }
    }

    if (!myEnvParams.isEmpty()) {
      if (SystemInfo.isWindows) {
        // Windows env var names are case-insensitive: merge through a case-insensitive
        // map so a user-supplied "Path" overrides an inherited "PATH".
        THashMap<String, String> envVars = new THashMap<>(CaseInsensitiveStringHashingStrategy.INSTANCE);
        envVars.putAll(environment);
        envVars.putAll(myEnvParams);
        environment.clear();
        environment.putAll(envVars);
      }
      else {
        environment.putAll(myEnvParams);
      }
    }
  }

  /**
   * Normally, double quotes in parameters are escaped so they arrive to a called program as-is.
   * But some commands (e.g. {@code 'cmd /c start "title" ...'}) should get they quotes non-escaped.
   * Wrapping a parameter by this method (instead of using quotes) will do exactly this.
   *
   * @see com.intellij.execution.util.ExecUtil#getTerminalCommand(String, String)
   */
  @NotNull
  public static String inescapableQuote(@NotNull String parameter) {
    return CommandLineUtil.specialQuote(parameter);
  }

  @Override
  public String toString() {
    return myExePath + " " + myProgramParams;
  }

  @Nullable
  @Override
  public <T> T getUserData(@NotNull Key<T> key) {
    if (myUserData != null) {
      @SuppressWarnings({"UnnecessaryLocalVariable", "unchecked"}) T t = (T)myUserData.get(key);
      return t;
    }
    return null;
  }

  @Override
  public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) {
    if (myUserData == null) {
      if (value == null) return;
      myUserData = ContainerUtil.newHashMap();
    }
    myUserData.put(key, value);
  }

  /** Env-param map: case-insensitive keys on Windows; tolerates null maps in putAll(). */
  private static class MyTHashMap extends THashMap<String, String> {
    private MyTHashMap() {
      super(SystemInfo.isWindows ? CaseInsensitiveStringHashingStrategy.INSTANCE : ContainerUtil.canonicalStrategy());
    }

    @Override
    public void putAll(Map<? extends String, ? extends String> map) {
      if (map != null) {
        super.putAll(map);
      }
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.termvectors;

import com.carrotsearch.hppc.ObjectIntHashMap;
import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.FieldMapper;
import org.hamcrest.Matcher;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

// Integration tests for the _termvectors API against a live test cluster.
public class GetTermVectorsIT extends AbstractTermVectorsTestCase {

    // Requesting term vectors for documents that don't exist must report
    // isExists() == false and still serialize cleanly to JSON.
    public void testNoSuchDoc() throws Exception {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("field")
                            .field("type", "string")
                            .field("term_vector", "with_positions_offsets_payloads")
                        .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping));

        ensureYellow();

        client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet();
        refresh();
        for (int i = 0; i < 20; i++) {
            // Doc ids 0..19 were never indexed (only "666" exists).
            ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "" + i));
            TermVectorsResponse actionGet = termVector.actionGet();
            assertThat(actionGet, notNullValue());
            assertThat(actionGet.getIndex(), equalTo("test"));
            assertThat(actionGet.isExists(), equalTo(false));
            // check response is nevertheless serializable to json
            actionGet.toXContent(jsonBuilder().startObject(), ToXContent.EMPTY_PARAMS);
        }
    }

    // A field whose analyzed value produced no term vectors must yield a null
    // Terms object rather than an NPE.
    public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("existingfield")
                            .field("type", "string")
                            .field("term_vector", "with_positions_offsets_payloads")
                        .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping));

        ensureYellow();
        // when indexing a field that simply has a question mark, the term vectors will be null
        client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet();
        refresh();
        ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "0")
                .selectedFields(new String[]{"existingfield"}));

        // lets see if the null term vectors are caught...
        TermVectorsResponse actionGet = termVector.actionGet();
        assertThat(actionGet, notNullValue());
        assertThat(actionGet.isExists(), equalTo(true));
        assertThat(actionGet.getIndex(), equalTo("test"));
        assertThat(actionGet.getFields().terms("existingfield"), nullValue());
    }

    // Requesting term vectors for a mapped field that is absent from the
    // document must not throw; terms for that field are null.
    public void testExistingFieldButNotInDocNPE() throws Exception {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("existingfield")
                            .field("type", "string")
                            .field("term_vector", "with_positions_offsets_payloads")
                        .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping));

        ensureYellow();
        // when indexing a field that simply has a question mark, the term vectors will be null
        client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet();
        refresh();
        ActionFuture<TermVectorsResponse> termVectors = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "0")
                .selectedFields(randomBoolean() ? new String[]{"existingfield"} : null)
                .termStatistics(true)
                .fieldStatistics(true)
                .dfs(true));

        // lets see if the null term vectors are caught...
        TermVectorsResponse actionGet = termVectors.actionGet();
        assertThat(actionGet, notNullValue());
        assertThat(actionGet.isExists(), equalTo(true));
        assertThat(actionGet.getIndex(), equalTo("test"));
        assertThat(actionGet.getFields().terms("existingfield"), nullValue());
    }

    // Only analyzed/not_analyzed string fields can carry term vectors; the
    // first four field configurations below must come back without them.
    public void testNotIndexedField() throws Exception {
        // must be of type string and indexed.
        assertAcked(prepareCreate("test")
                .addAlias(new Alias("alias"))
                .addMapping("type1",
                        "field0", "type=integer,", // no tvs
                        "field1", "type=string,index=no", // no tvs
                        "field2", "type=string,index=no,store=yes",  // no tvs
                        "field3", "type=string,index=no,term_vector=yes", // no tvs
                        "field4", "type=string,index=not_analyzed", // yes tvs
                        "field5", "type=string,index=analyzed")); // yes tvs

        ensureYellow();

        List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
        for (int i = 0; i < 6; i++) {
            indexBuilders.add(client().prepareIndex()
                    .setIndex("test")
                    .setType("type1")
                    .setId(String.valueOf(i))
                    .setSource("field" + i, i));
        }
        indexRandom(true, indexBuilders);

        for (int i = 0; i < 4; i++) {
            TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
                    .setSelectedFields("field" + i)
                    .get();
            assertThat(resp, notNullValue());
            assertThat(resp.isExists(), equalTo(true));
            assertThat(resp.getIndex(), equalTo("test"));
            assertThat("field" + i + " :", resp.getFields().terms("field" + i), nullValue());
        }

        for (int i = 4; i < 6; i++) {
            TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
                    .setSelectedFields("field" + i).get();
            assertThat(resp.getIndex(), equalTo("test"));
            assertThat("field" + i + " :", resp.getFields().terms("field" + i), notNullValue());
        }
    }

    // Happy path: index the "quick brown fox" sentence ten times and verify
    // positions/offsets/payloads via the shared checkBrownFoxTermVector helper.
    public void testSimpleTermVectors() throws IOException {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("field")
                            .field("type", "string")
                            .field("term_vector", "with_positions_offsets_payloads")
                            .field("analyzer", "tv_test")
                        .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", mapping)
                .addAlias(new Alias("alias"))
                .setSettings(settingsBuilder()
                        .put(indexSettings())
                        .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
                        .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
        ensureYellow();
        for (int i = 0; i < 10; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i))
                    .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
                            // 0the3 4quick9 10brown15 16fox19 20jumps25 26over30
                            // 31the34 35lazy39 40dog43
                            .endObject()).execute().actionGet();
            refresh();
        }
        for (int i = 0; i < 10; i++) {
            TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), "type1", Integer.toString(i)).setPayloads(true)
                    .setOffsets(true).setPositions(true).setSelectedFields();
            TermVectorsResponse response = resp.execute().actionGet();
            assertThat(response.getIndex(), equalTo("test"));
            assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
            Fields fields = response.getFields();
            assertThat(fields.size(), equalTo(1));
            checkBrownFoxTermVector(fields, "field", true);
        }
    }

    // Randomizes which of positions/offsets/payloads are stored in the mapping
    // and which are requested, then checks that only stored+requested data is
    // returned (everything else reported as -1 / null).
    public void testRandomSingleTermVectors() throws IOException {
        FieldType ft = new FieldType();
        int config = randomInt(6);
        boolean storePositions = false;
        boolean storeOffsets = false;
        boolean storePayloads = false;
        boolean storeTermVectors = false;
        // config enumerates the 7 legal combinations of term-vector storage flags.
        switch (config) {
        case 0: {
            // do nothing
            break;
        }
        case 1: {
            storeTermVectors = true;
            break;
        }
        case 2: {
            storeTermVectors = true;
            storePositions = true;
            break;
        }
        case 3: {
            storeTermVectors = true;
            storeOffsets = true;
            break;
        }
        case 4: {
            storeTermVectors = true;
            storePositions = true;
            storeOffsets = true;
            break;
        }
        case 5: {
            storeTermVectors = true;
            storePositions = true;
            storePayloads = true;
            break;
        }
        case 6: {
            storeTermVectors = true;
            storePositions = true;
            storeOffsets = true;
            storePayloads = true;
            break;
        }
        }
        ft.setStoreTermVectors(storeTermVectors);
        ft.setStoreTermVectorOffsets(storeOffsets);
        ft.setStoreTermVectorPayloads(storePayloads);
        ft.setStoreTermVectorPositions(storePositions);

        String optionString = FieldMapper.termVectorOptionsToString(ft);
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("field")
                            .field("type", "string")
                            .field("term_vector", optionString)
                            .field("analyzer", "tv_test")
                        .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", mapping)
                .setSettings(settingsBuilder()
                        .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
                        .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase")));
        ensureYellow();
        for (int i = 0; i < 10; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i))
                    .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
                            // 0the3 4quick9 10brown15 16fox19 20jumps25 26over30
                            // 31the34 35lazy39 40dog43
                            .endObject()).execute().actionGet();
            refresh();
        }
        // Expected terms (sorted), frequencies, positions and character offsets
        // for "the quick brown fox jumps over the lazy dog".
        String[] values = {"brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the"};
        int[] freq = {1, 1, 1, 1, 1, 1, 1, 2};
        int[][] pos = {{2}, {8}, {3}, {4}, {7}, {5}, {1}, {0, 6}};
        int[][] startOffset = {{10}, {40}, {16}, {20}, {35}, {26}, {4}, {0, 31}};
        int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}};

        boolean isPayloadRequested = randomBoolean();
        boolean isOffsetRequested = randomBoolean();
        boolean isPositionsRequested = randomBoolean();
        String infoString = createInfoString(isPositionsRequested, isOffsetRequested, isPayloadRequested, optionString);
        for (int i = 0; i < 10; i++) {
            TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i))
                    .setPayloads(isPayloadRequested).setOffsets(isOffsetRequested).setPositions(isPositionsRequested).setSelectedFields();
            TermVectorsResponse response = resp.execute().actionGet();
            assertThat(infoString + "doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
            Fields fields = response.getFields();
            assertThat(fields.size(), equalTo(ft.storeTermVectors() ? 1 : 0));
            if (ft.storeTermVectors()) {
                Terms terms = fields.terms("field");
                assertThat(terms.size(), equalTo(8l));
                TermsEnum iterator = terms.iterator();
                for (int j = 0; j < values.length; j++) {
                    String string = values[j];
                    BytesRef next = iterator.next();
                    assertThat(infoString, next, notNullValue());
                    assertThat(infoString + "expected " + string, string, equalTo(next.utf8ToString()));
                    assertThat(infoString, next, notNullValue());
                    // do not test ttf or doc frequency, because here we have
                    // many shards and do not know how documents are distributed
                    PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL);
                    // docs and pos only returns something if positions or
                    // payloads or offsets are stored / requestd Otherwise use
                    // DocsEnum?
                    assertThat(infoString, docsAndPositions.nextDoc(), equalTo(0));
                    assertThat(infoString, freq[j], equalTo(docsAndPositions.freq()));
                    int[] termPos = pos[j];
                    int[] termStartOffset = startOffset[j];
                    int[] termEndOffset = endOffset[j];
                    if (isPositionsRequested && storePositions) {
                        assertThat(infoString, termPos.length, equalTo(freq[j]));
                    }
                    if (isOffsetRequested && storeOffsets) {
                        assertThat(termStartOffset.length, equalTo(freq[j]));
                        assertThat(termEndOffset.length, equalTo(freq[j]));
                    }
                    for (int k = 0; k < freq[j]; k++) {
                        int nextPosition = docsAndPositions.nextPosition();
                        // only return something useful if requested and stored
                        if (isPositionsRequested && storePositions) {
                            assertThat(infoString + "positions for term: " + string, nextPosition, equalTo(termPos[k]));
                        } else {
                            assertThat(infoString + "positions for term: ", nextPosition, equalTo(-1));
                        }

                        // only return something useful if requested and stored
                        if (isPayloadRequested && storePayloads) {
                            assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef(
                                    "word")));
                        } else {
                            assertThat(infoString + "payloads for term: " + string, docsAndPositions.getPayload(), equalTo(null));
                        }
                        // only return something useful if requested and stored
                        if (isOffsetRequested && storeOffsets) {

                            assertThat(infoString + "startOffsets term: " + string, docsAndPositions.startOffset(),
                                    equalTo(termStartOffset[k]));
                            assertThat(infoString + "endOffsets term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k]));
                        } else {
                            assertThat(infoString + "startOffsets term: " + string, docsAndPositions.startOffset(), equalTo(-1));
                            assertThat(infoString + "endOffsets term: " + string, docsAndPositions.endOffset(), equalTo(-1));
                        }
                    }
                }
                assertThat(iterator.next(), nullValue());
            }
        }
    }

    // Human-readable summary of the store/request configuration for failure messages.
    private String createInfoString(boolean isPositionsRequested, boolean isOffsetRequested, boolean isPayloadRequested,
            String optionString) {
        String ret = "Store config: " + optionString + "\n" + "Requested: pos-"
                + (isPositionsRequested ? "yes" : "no") + ", offsets-" + (isOffsetRequested ? "yes" : "no") + ", payload- "
                + (isPayloadRequested ? "yes" : "no") + "\n";
        return ret;
    }

    // Cross-checks ES term-vector responses against the same docs indexed
    // directly with Lucene, over randomized field settings and request configs.
    public void testDuelESLucene() throws Exception {
        TestFieldSetting[] testFieldSettings = getFieldSettings();
        createIndexBasedOnFieldSettings("test", "alias", testFieldSettings);
        //we generate as many docs as many shards we have
        TestDoc[] testDocs = generateTestDocs("test", testFieldSettings);

        DirectoryReader directoryReader = indexDocsWithLucene(testDocs);
        TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);

        for (TestConfig test : testConfigs) {
            try {
                TermVectorsRequestBuilder request = getRequestForConfig(test);
                if (test.expectedException != null) {
                    assertThrows(request, test.expectedException);
                    continue;
                }

                TermVectorsResponse response = request.get();
                Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
                validateResponse(response, luceneTermVectors, test);
            } catch (Throwable t) {
                throw new Exception("Test exception while running " + test.toString(), t);
            }
        }
    }

    // Verifies payloads produced by the delimited_payload_filter round-trip
    // through term vectors for float/int/identity encodings.
    // NOTE(review): this method is truncated at the end of this chunk; its
    // closing braces (and the rest of the class) lie outside the visible range.
    public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOException {
        //create the test document
        int encoding = randomIntBetween(0, 2);
        String encodingString = "";
        if (encoding == 0) {
            encodingString = "float";
        }
        if (encoding == 1) {
            encodingString = "int";
        }
        if (encoding == 2) {
            encodingString = "identity";
        }
        String[] tokens = crateRandomTokens();
        Map<String, List<BytesRef>> payloads = createPayloads(tokens, encoding);
        String delimiter = createRandomDelimiter(tokens);
        String queryString = createString(tokens, payloads, encoding, delimiter.charAt(0));
        //create the mapping
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("field").field("type", "string").field("term_vector", "with_positions_offsets_payloads")
                .field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
                settingsBuilder()
                        .put(indexSettings())
                        .put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
                        .putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
                        .put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
                        .put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
                        .put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter")));
        ensureYellow();
        client().prepareIndex("test", "type1", Integer.toString(1))
                .setSource(jsonBuilder().startObject().field("field", queryString).endObject()).execute().actionGet();
        refresh();
        TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(1)).setPayloads(true).setOffsets(true)
                .setPositions(true).setSelectedFields();
        TermVectorsResponse response = resp.execute().actionGet();
        assertThat("doc id 1 doesn't exists but should", response.isExists(), equalTo(true));
        Fields fields = response.getFields();
        assertThat(fields.size(), equalTo(1));
        Terms terms = fields.terms("field");
        TermsEnum iterator = terms.iterator();
        while (iterator.next() != null) {
            String term = iterator.term().utf8ToString();
            PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL);
            assertThat(docsAndPositions.nextDoc(), equalTo(0));
            List<BytesRef> curPayloads = payloads.get(term);
            assertThat(term, curPayloads, notNullValue());
            assertNotNull(docsAndPositions);
            for (int k = 0; k < docsAndPositions.freq(); k++) {
                docsAndPositions.nextPosition();
                if (docsAndPositions.getPayload()!=null){
                    String infoString = "\nterm: " + term + " has payload \n"+ docsAndPositions.getPayload().toString() + "\n but should have payload \n"+curPayloads.get(k).toString();
                    assertThat(infoString, docsAndPositions.getPayload(), equalTo(curPayloads.get(k)));
                } else {
                    String infoString = "\nterm: " + term + " has no payload but should have payload \n"+curPayloads.get(k).toString();
                    assertThat(infoString, curPayloads.get(k).length, equalTo(0));
                }
            }
        }
assertThat(iterator.next(), nullValue()); } private String createRandomDelimiter(String[] tokens) { String delimiter = ""; boolean isTokenOrWhitespace = true; while(isTokenOrWhitespace) { isTokenOrWhitespace = false; delimiter = randomUnicodeOfLength(1); for(String token:tokens) { if(token.contains(delimiter)) { isTokenOrWhitespace = true; } } if(Character.isWhitespace(delimiter.charAt(0))) { isTokenOrWhitespace = true; } } return delimiter; } private String createString(String[] tokens, Map<String, List<BytesRef>> payloads, int encoding, char delimiter) { String resultString = ""; ObjectIntHashMap<String> payloadCounter = new ObjectIntHashMap<>(); for (String token : tokens) { if (!payloadCounter.containsKey(token)) { payloadCounter.putIfAbsent(token, 0); } else { payloadCounter.put(token, payloadCounter.get(token) + 1); } resultString = resultString + token; BytesRef payload = payloads.get(token).get(payloadCounter.get(token)); if (payload.length > 0) { resultString = resultString + delimiter; switch (encoding) { case 0: { resultString = resultString + Float.toString(PayloadHelper.decodeFloat(payload.bytes, payload.offset)); break; } case 1: { resultString = resultString + Integer.toString(PayloadHelper.decodeInt(payload.bytes, payload.offset)); break; } case 2: { resultString = resultString + payload.utf8ToString(); break; } default: { throw new ElasticsearchException("unsupported encoding type"); } } } resultString = resultString + " "; } return resultString; } private Map<String, List<BytesRef>> createPayloads(String[] tokens, int encoding) { Map<String, List<BytesRef>> payloads = new HashMap<>(); for (String token : tokens) { if (payloads.get(token) == null) { payloads.put(token, new ArrayList<BytesRef>()); } boolean createPayload = randomBoolean(); if (createPayload) { switch (encoding) { case 0: { float theFloat = randomFloat(); payloads.get(token).add(new BytesRef(PayloadHelper.encodeFloat(theFloat))); break; } case 1: { payloads.get(token).add(new 
BytesRef(PayloadHelper.encodeInt(randomInt()))); break; } case 2: { String payload = randomUnicodeOfLengthBetween(50, 100); for (int c = 0; c < payload.length(); c++) { if (Character.isWhitespace(payload.charAt(c))) { payload = payload.replace(payload.charAt(c), 'w'); } } payloads.get(token).add(new BytesRef(payload)); break; } default: { throw new ElasticsearchException("unsupported encoding type"); } } } else { payloads.get(token).add(new BytesRef()); } } return payloads; } private String[] crateRandomTokens() { String[] tokens = { "the", "quick", "brown", "fox" }; int numTokensWithDuplicates = randomIntBetween(3, 15); String[] finalTokens = new String[numTokensWithDuplicates]; for (int i = 0; i < numTokensWithDuplicates; i++) { finalTokens[i] = tokens[randomIntBetween(0, tokens.length - 1)]; } return finalTokens; } // like testSimpleTermVectors but we create fields with no term vectors public void testSimpleTermVectorsWithGenerate() throws IOException { String[] fieldNames = new String[10]; for (int i = 0; i < fieldNames.length; i++) { fieldNames[i] = "field" + String.valueOf(i); } XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (String field : fieldNames) { mapping.startObject(field) .field("type", "string") .field("term_vector", randomBoolean() ? 
"with_positions_offsets_payloads" : "no") .field("analyzer", "tv_test") .endObject(); source.field(field, "the quick brown fox jumps over the lazy dog"); } mapping.endObject().endObject().endObject(); source.endObject(); assertAcked(prepareCreate("test") .addMapping("type1", mapping) .setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase"))); ensureGreen(); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(source) .execute().actionGet(); refresh(); } for (int i = 0; i < 10; i++) { TermVectorsResponse response = client().prepareTermVectors("test", "type1", Integer.toString(i)) .setPayloads(true) .setOffsets(true) .setPositions(true) .setSelectedFields(fieldNames) .execute().actionGet(); assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true)); Fields fields = response.getFields(); assertThat(fields.size(), equalTo(fieldNames.length)); for (String fieldName : fieldNames) { // MemoryIndex does not support payloads checkBrownFoxTermVector(fields, fieldName, false); } } } private void checkBrownFoxTermVector(Fields fields, String fieldName, boolean withPayloads) throws IOException { String[] values = {"brown", "dog", "fox", "jumps", "lazy", "over", "quick", "the"}; int[] freq = {1, 1, 1, 1, 1, 1, 1, 2}; int[][] pos = {{2}, {8}, {3}, {4}, {7}, {5}, {1}, {0, 6}}; int[][] startOffset = {{10}, {40}, {16}, {20}, {35}, {26}, {4}, {0, 31}}; int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}}; Terms terms = fields.terms(fieldName); assertThat(terms.size(), equalTo(8l)); TermsEnum iterator = terms.iterator(); for (int j = 0; j < values.length; j++) { String string = values[j]; BytesRef next = iterator.next(); assertThat(next, notNullValue()); assertThat("expected " + string, string, equalTo(next.utf8ToString())); assertThat(next, 
notNullValue()); // do not test ttf or doc frequency, because here we have many // shards and do not know how documents are distributed PostingsEnum docsAndPositions = iterator.postings(null, PostingsEnum.ALL); assertThat(docsAndPositions.nextDoc(), equalTo(0)); assertThat(freq[j], equalTo(docsAndPositions.freq())); int[] termPos = pos[j]; int[] termStartOffset = startOffset[j]; int[] termEndOffset = endOffset[j]; assertThat(termPos.length, equalTo(freq[j])); assertThat(termStartOffset.length, equalTo(freq[j])); assertThat(termEndOffset.length, equalTo(freq[j])); for (int k = 0; k < freq[j]; k++) { int nextPosition = docsAndPositions.nextPosition(); assertThat("term: " + string, nextPosition, equalTo(termPos[k])); assertThat("term: " + string, docsAndPositions.startOffset(), equalTo(termStartOffset[k])); assertThat("term: " + string, docsAndPositions.endOffset(), equalTo(termEndOffset[k])); if (withPayloads) { assertThat("term: " + string, docsAndPositions.getPayload(), equalTo(new BytesRef("word"))); } } } assertThat(iterator.next(), nullValue()); } public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionException, InterruptedException { // setup indices String[] indexNames = new String[] {"with_tv", "without_tv"}; assertAcked(prepareCreate(indexNames[0]) .addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets,analyzer=keyword")); assertAcked(prepareCreate(indexNames[1]) .addMapping("type1", "field1", "type=string,term_vector=no,analyzer=keyword")); ensureGreen(); // index documents with and without term vectors String[] content = new String[]{ "Generating a random permutation of a sequence (such as when shuffling cards).", "Selecting a random sample of a population (important in statistical sampling).", "Allocating experimental units via random assignment to a treatment or control condition.", "Generating random numbers: see Random number generation.", "Selecting a random sample of a population (important in statistical 
sampling).", "Allocating experimental units via random assignment to a treatment or control condition.", "Transforming a data stream (such as when using a scrambler in telecommunications)."}; List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); for (String indexName : indexNames) { for (int id = 0; id < content.length; id++) { indexBuilders.add(client().prepareIndex() .setIndex(indexName) .setType("type1") .setId(String.valueOf(id)) .setSource("field1", content[id])); } } indexRandom(true, indexBuilders); // request tvs and compare from each index for (int id = 0; id < content.length; id++) { Fields[] fields = new Fields[2]; for (int j = 0; j < indexNames.length; j++) { TermVectorsResponse resp = client().prepareTermVector(indexNames[j], "type1", String.valueOf(id)) .setOffsets(true) .setPositions(true) .setSelectedFields("field1") .get(); assertThat("doc with index: " + indexNames[j] + ", type1 and id: " + id, resp.isExists(), equalTo(true)); fields[j] = resp.getFields(); } compareTermVectors("field1", fields[0], fields[1]); } } private void compareTermVectors(String fieldName, Fields fields0, Fields fields1) throws IOException { Terms terms0 = fields0.terms(fieldName); Terms terms1 = fields1.terms(fieldName); assertThat(terms0, notNullValue()); assertThat(terms1, notNullValue()); assertThat(terms0.size(), equalTo(terms1.size())); TermsEnum iter0 = terms0.iterator(); TermsEnum iter1 = terms1.iterator(); for (int i = 0; i < terms0.size(); i++) { BytesRef next0 = iter0.next(); assertThat(next0, notNullValue()); BytesRef next1 = iter1.next(); assertThat(next1, notNullValue()); // compare field value String string0 = next0.utf8ToString(); String string1 = next1.utf8ToString(); assertThat("expected: " + string0, string0, equalTo(string1)); // compare df and ttf assertThat("term: " + string0, iter0.docFreq(), equalTo(iter1.docFreq())); assertThat("term: " + string0, iter0.totalTermFreq(), equalTo(iter1.totalTermFreq())); // compare freq and docs PostingsEnum 
docsAndPositions0 = iter0.postings(null, PostingsEnum.ALL); PostingsEnum docsAndPositions1 = iter1.postings(null, PostingsEnum.ALL); assertThat("term: " + string0, docsAndPositions0.nextDoc(), equalTo(docsAndPositions1.nextDoc())); assertThat("term: " + string0, docsAndPositions0.freq(), equalTo(docsAndPositions1.freq())); // compare position, start offsets and end offsets for (int j = 0; j < docsAndPositions0.freq(); j++) { assertThat("term: " + string0, docsAndPositions0.nextPosition(), equalTo(docsAndPositions1.nextPosition())); assertThat("term: " + string0, docsAndPositions0.startOffset(), equalTo(docsAndPositions1.startOffset())); assertThat("term: " + string0, docsAndPositions0.endOffset(), equalTo(docsAndPositions1.endOffset())); } } assertThat(iter0.next(), nullValue()); assertThat(iter1.next(), nullValue()); } public void testSimpleWildCards() throws IOException { int numFields = 25; XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (int i = 0; i < numFields; i++) { mapping.startObject("field" + i) .field("type", "string") .field("term_vector", randomBoolean() ? 
"yes" : "no") .endObject(); source.field("field" + i, "some text here"); } source.endObject(); mapping.endObject().endObject().endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); ensureGreen(); client().prepareIndex("test", "type1", "0").setSource(source).get(); refresh(); TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields("field*").get(); assertThat("Doc doesn't exists but should", response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat("All term vectors should have been generated", response.getFields().size(), equalTo(numFields)); } public void testArtificialVsExisting() throws ExecutionException, InterruptedException, IOException { // setup indices Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "standard"); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets")); ensureGreen(); // index documents existing document String[] content = new String[]{ "Generating a random permutation of a sequence (such as when shuffling cards).", "Selecting a random sample of a population (important in statistical sampling).", "Allocating experimental units via random assignment to a treatment or control condition.", "Generating random numbers: see Random number generation."}; List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); for (int i = 0; i < content.length; i++) { indexBuilders.add(client().prepareIndex() .setIndex("test") .setType("type1") .setId(String.valueOf(i)) .setSource("field1", content[i])); } indexRandom(true, indexBuilders); for (int i = 0; i < content.length; i++) { // request tvs from existing document TermVectorsResponse respExisting = client().prepareTermVectors("test", "type1", String.valueOf(i)) .setOffsets(true) .setPositions(true) .setFieldStatistics(true) 
.setTermStatistics(true) .get(); assertThat("doc with index: test, type1 and id: existing", respExisting.isExists(), equalTo(true)); // request tvs from artificial document TermVectorsResponse respArtificial = client().prepareTermVectors() .setIndex("test") .setType("type1") .setRouting(String.valueOf(i)) // ensure we get the stats from the same shard as existing doc .setDoc(jsonBuilder() .startObject() .field("field1", content[i]) .endObject()) .setOffsets(true) .setPositions(true) .setFieldStatistics(true) .setTermStatistics(true) .get(); assertThat("doc with index: test, type1 and id: " + String.valueOf(i), respArtificial.isExists(), equalTo(true)); // compare existing tvs with artificial compareTermVectors("field1", respExisting.getFields(), respArtificial.getFields()); } } public void testArtificialNoDoc() throws IOException { // setup indices Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "standard"); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "field1", "type=string")); ensureGreen(); // request tvs from artificial document String text = "the quick brown fox jumps over the lazy dog"; TermVectorsResponse resp = client().prepareTermVectors() .setIndex("test") .setType("type1") .setDoc(jsonBuilder() .startObject() .field("field1", text) .endObject()) .setOffsets(true) .setPositions(true) .setFieldStatistics(true) .setTermStatistics(true) .get(); assertThat(resp.isExists(), equalTo(true)); checkBrownFoxTermVector(resp.getFields(), "field1", false); } public void testArtificialNonExistingField() throws Exception { // setup indices Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "standard"); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "field1", "type=string")); ensureGreen(); // index just one doc List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); 
indexBuilders.add(client().prepareIndex() .setIndex("test") .setType("type1") .setId("1") .setRouting("1") .setSource("field1", "some text")); indexRandom(true, indexBuilders); // request tvs from artificial document XContentBuilder doc = jsonBuilder() .startObject() .field("field1", "the quick brown fox jumps over the lazy dog") .field("non_existing", "the quick brown fox jumps over the lazy dog") .endObject(); for (int i = 0; i < 2; i++) { TermVectorsResponse resp = client().prepareTermVectors() .setIndex("test") .setType("type1") .setDoc(doc) .setRouting("" + i) .setOffsets(true) .setPositions(true) .setFieldStatistics(true) .setTermStatistics(true) .get(); assertThat(resp.isExists(), equalTo(true)); checkBrownFoxTermVector(resp.getFields(), "field1", false); // we should have created a mapping for this field assertMappingOnMaster("test", "type1", "non_existing"); // and return the generated term vectors checkBrownFoxTermVector(resp.getFields(), "non_existing", false); } } public void testPerFieldAnalyzer() throws IOException { int numFields = 25; // setup mapping and document source Set<String> withTermVectors = new HashSet<>(); XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (int i = 0; i < numFields; i++) { String fieldName = "field" + i; if (randomBoolean()) { withTermVectors.add(fieldName); } mapping.startObject(fieldName) .field("type", "string") .field("term_vector", withTermVectors.contains(fieldName) ? 
"yes" : "no") .endObject(); source.field(fieldName, "some text here"); } source.endObject(); mapping.endObject().endObject().endObject(); // setup indices with mapping Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "standard"); assertAcked(prepareCreate("test") .addAlias(new Alias("alias")) .setSettings(settings) .addMapping("type1", mapping)); ensureGreen(); // index a single document with prepared source client().prepareIndex("test", "type1", "0").setSource(source).get(); refresh(); // create random per_field_analyzer and selected fields Map<String, String> perFieldAnalyzer = new HashMap<>(); Set<String> selectedFields = new HashSet<>(); for (int i = 0; i < numFields; i++) { if (randomBoolean()) { perFieldAnalyzer.put("field" + i, "keyword"); } if (randomBoolean()) { perFieldAnalyzer.put("non_existing" + i, "keyword"); } if (randomBoolean()) { selectedFields.add("field" + i); } if (randomBoolean()) { selectedFields.add("non_existing" + i); } } // selected fields not specified TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0") .setPerFieldAnalyzer(perFieldAnalyzer) .get(); // should return all fields that have terms vectors, some with overridden analyzer checkAnalyzedFields(response.getFields(), withTermVectors, perFieldAnalyzer); // selected fields specified including some not in the mapping response = client().prepareTermVectors(indexOrAlias(), "type1", "0") .setSelectedFields(selectedFields.toArray(Strings.EMPTY_ARRAY)) .setPerFieldAnalyzer(perFieldAnalyzer) .get(); // should return only the specified valid fields, with some with overridden analyzer checkAnalyzedFields(response.getFields(), selectedFields, perFieldAnalyzer); } private void checkAnalyzedFields(Fields fieldsObject, Set<String> fieldNames, Map<String, String> perFieldAnalyzer) throws IOException { Set<String> validFields = new HashSet<>(); for (String fieldName : fieldNames){ if 
(fieldName.startsWith("non_existing")) { assertThat("Non existing field\"" + fieldName + "\" should not be returned!", fieldsObject.terms(fieldName), nullValue()); continue; } Terms terms = fieldsObject.terms(fieldName); assertThat("Existing field " + fieldName + "should have been returned", terms, notNullValue()); // check overridden by keyword analyzer ... if (perFieldAnalyzer.containsKey(fieldName)) { TermsEnum iterator = terms.iterator(); assertThat("Analyzer for " + fieldName + " should have been overridden!", iterator.next().utf8ToString(), equalTo("some text here")); assertThat(iterator.next(), nullValue()); } validFields.add(fieldName); } // ensure no other fields are returned assertThat("More fields than expected are returned!", fieldsObject.size(), equalTo(validFields.size())); } private static String indexOrAlias() { return randomBoolean() ? "test" : "alias"; } public void testDfs() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "standard") .put("index.number_of_shards", randomIntBetween(2, 10)); // we need at least 2 shards assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "text", "type=string")); ensureGreen(); int numDocs = scaledRandomIntBetween(25, 100); logger.info("Indexing {} documents...", numDocs); List<IndexRequestBuilder> builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { builders.add(client().prepareIndex("test", "type1", i + "").setSource("text", "cat")); } indexRandom(true, builders); XContentBuilder expectedStats = jsonBuilder() .startObject() .startObject("text") .startObject("field_statistics") .field("sum_doc_freq", numDocs) .field("doc_count", numDocs) .field("sum_ttf", numDocs) .endObject() .startObject("terms") .startObject("cat") .field("doc_freq", numDocs) .field("ttf", numDocs) .endObject() .endObject() .endObject() .endObject(); 
logger.info("Without dfs 'cat' should appear strictly less than {} times.", numDocs); TermVectorsResponse response = client().prepareTermVectors("test", "type1", randomIntBetween(0, numDocs - 1) + "") .setSelectedFields("text") .setFieldStatistics(true) .setTermStatistics(true) .get(); checkStats(response.getFields(), expectedStats, false); logger.info("With dfs 'cat' should appear exactly {} times.", numDocs); response = client().prepareTermVectors("test", "type1", randomIntBetween(0, numDocs - 1) + "") .setSelectedFields("text") .setFieldStatistics(true) .setTermStatistics(true) .setDfs(true) .get(); checkStats(response.getFields(), expectedStats, true); } private void checkStats(Fields fields, XContentBuilder xContentBuilder, boolean isEqual) throws IOException { Map<String, Object> stats = JsonXContent.jsonXContent.createParser(xContentBuilder.bytes()).map(); assertThat("number of fields expected:", fields.size(), equalTo(stats.size())); for (String fieldName : fields) { logger.info("Checking field statistics for field: {}", fieldName); Terms terms = fields.terms(fieldName); Map<String, Integer> fieldStatistics = getFieldStatistics(stats, fieldName); String msg = "field: " + fieldName + " "; assertThat(msg + "sum_doc_freq:", (int) terms.getSumDocFreq(), equalOrLessThanTo(fieldStatistics.get("sum_doc_freq"), isEqual)); assertThat(msg + "doc_count:", terms.getDocCount(), equalOrLessThanTo(fieldStatistics.get("doc_count"), isEqual)); assertThat(msg + "sum_ttf:", (int) terms.getSumTotalTermFreq(), equalOrLessThanTo(fieldStatistics.get("sum_ttf"), isEqual)); final TermsEnum termsEnum = terms.iterator(); BytesRef text; while((text = termsEnum.next()) != null) { String term = text.utf8ToString(); logger.info("Checking term statistics for term: ({}, {})", fieldName, term); Map<String, Integer> termStatistics = getTermStatistics(stats, fieldName, term); msg = "term: (" + fieldName + "," + term + ") "; assertThat(msg + "doc_freq:", termsEnum.docFreq(), 
equalOrLessThanTo(termStatistics.get("doc_freq"), isEqual));
            assertThat(msg + "ttf:", (int) termsEnum.totalTermFreq(), equalOrLessThanTo(termStatistics.get("ttf"), isEqual));
        }
    }
}

// extracts the "field_statistics" sub-map for a field from the parsed expected stats
private Map<String, Integer> getFieldStatistics(Map<String, Object> stats, String fieldName) throws IOException {
    return (Map<String, Integer>) ((Map<String, Object>) stats.get(fieldName)).get("field_statistics");
}

// extracts the per-term statistics sub-map for (field, term) from the parsed expected stats
private Map<String, Integer> getTermStatistics(Map<String, Object> stats, String fieldName, String term) {
    return (Map<String, Integer>) ((Map<String, Object>) ((Map<String, Object>) stats.get(fieldName)).get("terms")).get(term);
}

// matcher factory: exact equality for the dfs case, strictly-less otherwise
private Matcher<Integer> equalOrLessThanTo(Integer value, boolean isEqual) {
    if (isEqual) {
        return equalTo(value);
    }
    return lessThan(value);
}

/**
 * Exercises versioned term-vector requests from both the translog (realtime)
 * and the Lucene index (realtime=false), including version-conflict cases.
 * Refresh is disabled so translog vs. index behavior can be controlled.
 */
public void testTermVectorsWithVersion() {
    assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
            .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
    ensureGreen();
    TermVectorsResponse response = client().prepareTermVectors("test", "type1", "1").get();
    assertThat(response.isExists(), equalTo(false));
    logger.info("--> index doc 1");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
    // From translog:
    // version 0 means ignore version, which is the default
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1l));
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getVersion(), equalTo(1l));
    try {
        // NOTE(review): this conflict check uses prepareGet rather than
        // prepareTermVectors — looks copied from the get tests; confirm intent.
        client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
        fail();
    } catch (VersionConflictEngineException e) {
        //all good
    }
    // From Lucene index:
    refresh();
    // version 0 means ignore version, which is the default
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getIndex(), equalTo("test"));
    assertThat(response.getVersion(), equalTo(1l));
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getIndex(), equalTo("test"));
    assertThat(response.getVersion(), equalTo(1l));
    try {
        client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
        fail();
    } catch (VersionConflictEngineException e) {
        //all good
    }
    logger.info("--> index doc 1 again, so increasing the version");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
    // From translog:
    // version 0 means ignore version, which is the default
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getIndex(), equalTo("test"));
    assertThat(response.getVersion(), equalTo(2l));
    try {
        client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
        fail();
    } catch (VersionConflictEngineException e) {
        //all good
    }
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).get();
    assertThat(response.isExists(), equalTo(true));
    assertThat(response.getId(), equalTo("1"));
    assertThat(response.getIndex(), equalTo("test"));
    assertThat(response.getVersion(), equalTo(2l));
    // From Lucene index:
    refresh();
    // version 0 means ignore version, which is the default
    response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(2l)); try { client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get(); fail(); } catch (VersionConflictEngineException e) { //all good } response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(2l)); } public void testFilterLength() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "keyword"); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=string")); ensureYellow(); int numTerms = scaledRandomIntBetween(10, 50); logger.info("Indexing one document with tags of increasing length ..."); List<String> tags = new ArrayList<>(); for (int i = 0; i < numTerms; i++) { String tag = "a"; for (int j = 0; j < i; j++) { tag += "a"; } tags.add(tag); } indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); logger.info("Checking best tags by longest to shortest size ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); filterSettings.maxNumTerms = numTerms; TermVectorsResponse response; for (int i = 0; i < numTerms; i++) { filterSettings.minWordLength = numTerms - i; response = client().prepareTermVectors("test", "type1", "1") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) .setFilterSettings(filterSettings) .get(); checkBestTerms(response.getFields().terms("tags"), tags.subList((numTerms - i - 1), numTerms)); } } public void 
testFilterTermFreq() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "keyword"); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=string")); ensureYellow(); logger.info("Indexing one document with tags of increasing frequencies ..."); int numTerms = scaledRandomIntBetween(10, 50); List<String> tags = new ArrayList<>(); List<String> uniqueTags = new ArrayList<>(); String tag; for (int i = 0; i < numTerms; i++) { tag = "tag_" + i; tags.add(tag); for (int j = 0; j < i; j++) { tags.add(tag); } uniqueTags.add(tag); } indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); logger.info("Checking best tags by highest to lowest term freq ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); TermVectorsResponse response; for (int i = 0; i < numTerms; i++) { filterSettings.maxNumTerms = i + 1; response = client().prepareTermVectors("test", "type1", "1") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) .setFilterSettings(filterSettings) .get(); checkBestTerms(response.getFields().terms("tags"), uniqueTags.subList((numTerms - i - 1), numTerms)); } } public void testFilterDocFreq() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer", "keyword") .put("index.number_of_shards", 1); // no dfs assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=string")); ensureYellow(); int numDocs = scaledRandomIntBetween(10, 50); // as many terms as there are docs logger.info("Indexing {} documents with tags of increasing dfs ...", numDocs); List<IndexRequestBuilder> builders = new ArrayList<>(); List<String> 
tags = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { tags.add("tag_" + i); builders.add(client().prepareIndex("test", "type1", i + "").setSource("tags", tags)); } indexRandom(true, builders); logger.info("Checking best terms by highest to lowest idf ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); TermVectorsResponse response; for (int i = 0; i < numDocs; i++) { filterSettings.maxNumTerms = i + 1; response = client().prepareTermVectors("test", "type1", (numDocs - 1) + "") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) .setFilterSettings(filterSettings) .get(); checkBestTerms(response.getFields().terms("tags"), tags.subList((numDocs - i - 1), numDocs)); } } private void checkBestTerms(Terms terms, List<String> expectedTerms) throws IOException { final TermsEnum termsEnum = terms.iterator(); List<String> bestTerms = new ArrayList<>(); BytesRef text; while((text = termsEnum.next()) != null) { bestTerms.add(text.utf8ToString()); } Collections.sort(expectedTerms); Collections.sort(bestTerms); assertArrayEquals(expectedTerms.toArray(), bestTerms.toArray()); } }
package net.mediavrog.irr;

import android.content.Context;
import android.content.SharedPreferences;

import net.mediavrog.irr.IrrLayout.OnUserDecisionListener;
import net.mediavrog.ruli.Rule;
import net.mediavrog.ruli.RuleEngine;
import net.mediavrog.ruli.RuleSet;
import net.mediavrog.ruli.SimpleRule;
import net.mediavrog.ruli.Value;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;

import static net.mediavrog.ruli.SimpleRule.Comparator.EQ;
import static net.mediavrog.ruli.SimpleRule.Comparator.GT_EQ;
import static net.mediavrog.ruli.SimpleRule.Comparator.LT;
import static net.mediavrog.ruli.SimpleRule.Comparator.LT_EQ;

/**
 * Rule engine deciding whether an in-app rating prompt should be shown, based on
 * usage counters (app starts, distinct days of use, dismissals, last dismissal date)
 * persisted in a dedicated {@link SharedPreferences} file.
 *
 * Created by maikvlcek on 1/27/16.
 */
public class DefaultRuleEngine extends RuleEngine {
    public static final String TAG = DefaultRuleEngine.class.getSimpleName();

    /**
     * Start nudging the user after this amount of app starts (== limit to engaged users)
     */
    public static final int DEFAULT_APP_START_COUNT = 10;

    /**
     * Start nudging the user after this amount of days (== engaged user in combination with app starts)
     */
    public static final int DEFAULT_DISTINCT_DAYS = 3;

    /**
     * Postpone next nudge by this amount of days.
     */
    public static final int DEFAULT_POSTPONE_DAYS = 6;

    /**
     * Stop nudging after with amount of dismissals. At one point you gotta give up ^^.
     */
    public static final int DEFAULT_MAX_DISMISS_COUNT = 3;

    // Appended to the package name to form the private preference file name.
    private static final String PREF_FILE_NAME_SUFFIX = ".irr_default_rule_engine";

    // Preference keys for the persisted usage counters.
    public static final String PREF_KEY_DID_RATE = "didRate";
    public static final String PREF_KEY_APP_STARTS = "appStarts";
    public static final String PREF_KEY_LAST_APP_START = "lastAppStart";
    public static final String PREF_KEY_DAYS_USED = "daysUsedApp";
    public static final String PREF_KEY_DISMISSAL_COUNT = "dismissCount";
    public static final String PREF_KEY_LAST_DISMISSED_AT = "lastDismissedAt";

    // Dates are stored as strings in this pattern; yyyy-MM-dd sorts lexicographically
    // in chronological order, which the LT_EQ string rule below relies on.
    public static final String DEFAULT_DATE_FORMAT = "yyyy-MM-dd";

    /**
     * Default decision listener: records a rating or feedback acceptance, and counts
     * dismissals of the final steps so nudging eventually stops.
     */
    public static class DefaultOnUserDecisionListener implements OnUserDecisionListener {
        @Override
        public void onAccept(Context ctx, IrrLayout.State s) {
            switch (s) {
                case RATE:
                    trackRated(ctx);
                    break;
                case FEEDBACK:
                    trackFeedback(ctx);
                    break;
            }
        }

        @Override
        public void onDismiss(Context ctx, IrrLayout.State s) {
            switch (s) {
                // we don't track the first no thanks as dismissal, only from the last steps
                case RATE:
                case FEEDBACK:
                    trackDismissal(ctx);
                    break;
            }
        }
    }

    // Cached preferences instance; SharedPreferences for a given file is a
    // process-wide singleton, so caching it statically is safe.
    private static SharedPreferences sPrefs;

    private DefaultOnUserDecisionListener mListener;
    private Context mContext;

    /**
     * Builds an engine whose single rule set requires: user has not rated yet,
     * enough app starts, enough distinct days of use, fewer than the maximum
     * dismissals, and the last dismissal lying at least {@code postponeDays} in
     * the past (string comparison against a cutoff date, see below).
     */
    public static DefaultRuleEngine newInstance(final Context ctx, int appStartCount, int distinctDays, final int postponeDays, int maxDismissCount) {
        // Lazily resolves the engine's preference file on first rule evaluation.
        PreferenceValue.PreferenceProvider pp = new PreferenceValue.PreferenceProvider() {
            SharedPreferences prefs;

            @Override
            public SharedPreferences getPreferences() {
                if (prefs == null) prefs = DefaultRuleEngine.getPreferences(ctx);
                return prefs;
            }
        };

        RuleSet rule = new RuleSet.Builder()
                .addRule(new SimpleRule<>(PreferenceValue.b(pp, PREF_KEY_DID_RATE), EQ, false))
                .addRule(new SimpleRule<>(PreferenceValue.i(pp, PREF_KEY_APP_STARTS), GT_EQ, appStartCount))
                .addRule(new SimpleRule<>(PreferenceValue.i(pp, PREF_KEY_DAYS_USED), GT_EQ, distinctDays))
                .addRule(new SimpleRule<>(PreferenceValue.i(pp, PREF_KEY_DISMISSAL_COUNT), LT, maxDismissCount))
                // Last dismissal date must be <= (today - postponeDays); valid because
                // yyyy-MM-dd strings order chronologically.
                // NOTE(review): behavior when the key was never written depends on
                // PreferenceValue.s's default — verify it still satisfies LT_EQ.
                .addRule(new SimpleRule<>(PreferenceValue.s(pp, PREF_KEY_LAST_DISMISSED_AT), LT_EQ, new Value<String>() {
                    @Override
                    public String get() {
                        // compare to postpone days before today; current value should be smaller than that
                        Calendar c = Calendar.getInstance();
                        c.add(Calendar.DATE, -1 * postponeDays);
                        return new SimpleDateFormat(DEFAULT_DATE_FORMAT, Locale.getDefault()).format(c.getTime());
                    }
                })).build();

        ArrayList<Rule> rules = new ArrayList<>();
        rules.add(rule);
        return new DefaultRuleEngine(ctx, rules);
    }

    public DefaultRuleEngine(Context ctx, List<Rule> rules) {
        super(rules);
        mContext = ctx;
    }

    public void setListener(DefaultOnUserDecisionListener l) {
        mListener = l;
    }

    /** Returns the configured listener, lazily creating the default one. */
    public DefaultOnUserDecisionListener getListener() {
        if (mListener == null) mListener = new DefaultOnUserDecisionListener();
        return mListener;
    }

    @Override
    public String toString(boolean evaluate) {
        StringBuilder s = new StringBuilder();
        // meta info
        s.append("DefaultRuleEngine").append("\n");
        // dump rules
        s.append(super.toString(evaluate));
        return s.toString();
    }

    public void reset() {
        reset(mContext);
    }

    /** Clears every tracked counter, restarting the nudge cycle from scratch. */
    public static void reset(Context ctx) {
        getPreferences(ctx).edit().clear().apply();
    }

    public void trackAppStart() {
        trackAppStart(mContext);
    }

    /**
     * Increments the app-start counter and, when the last recorded start was on a
     * different calendar day, the distinct-days counter (defaults to 1 so the very
     * first start already counts as one day of use).
     */
    public static void trackAppStart(Context ctx) {
        SharedPreferences s = getPreferences(ctx);
        int appStarts = s.getInt(PREF_KEY_APP_STARTS, 0) + 1;
        int daysUsed = s.getInt(PREF_KEY_DAYS_USED, 1);

        String today = new SimpleDateFormat(DEFAULT_DATE_FORMAT, Locale.getDefault()).format(new Date());
        String lastAppStart = s.getString(PREF_KEY_LAST_APP_START, today);
        if (!lastAppStart.equals(today)) daysUsed++;

        s.edit()
                .putInt(PREF_KEY_APP_STARTS, appStarts)
                .putString(PREF_KEY_LAST_APP_START, today)
                .putInt(PREF_KEY_DAYS_USED, daysUsed)
                .apply();
    }

    public void trackDismissal() {
        trackDismissal(mContext);
    }

    /** Counts a dismissal and stamps today's date, postponing the next nudge. */
    public static void trackDismissal(Context ctx) {
        SharedPreferences s = getPreferences(ctx);
        int dismissalCount = s.getInt(PREF_KEY_DISMISSAL_COUNT, 0) + 1;
        String today = new SimpleDateFormat(DEFAULT_DATE_FORMAT, Locale.getDefault()).format(new Date());

        s.edit()
                .putInt(PREF_KEY_DISMISSAL_COUNT, dismissalCount)
                .putString(PREF_KEY_LAST_DISMISSED_AT, today)
                .apply();
    }

    public void trackRated() {
        trackRated(mContext);
    }

    /** Marks the user as having rated; the didRate rule then blocks all future nudges. */
    public static void trackRated(Context ctx) {
        SharedPreferences s = getPreferences(ctx);
        s.edit()
                .putBoolean(PREF_KEY_DID_RATE, true)
                .apply();
    }

    public void trackFeedback() {
        trackFeedback(mContext);
    }

    /** Feedback is treated like a dismissal: it postpones and counts toward the max. */
    public static void trackFeedback(Context ctx) {
        trackDismissal(ctx);
    }

    public static SharedPreferences getPreferences(Context ctx) {
        if (sPrefs == null)
            sPrefs = ctx.getSharedPreferences(getPrefFileName(ctx), Context.MODE_PRIVATE);
        return sPrefs;
    }

    public static String getPrefFileName(Context ctx) {
        return ctx.getPackageName() + PREF_FILE_NAME_SUFFIX;
    }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.python; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import com.facebook.buck.cxx.CxxGenrule; import com.facebook.buck.cxx.CxxGenruleBuilder; import com.facebook.buck.cxx.CxxPlatformUtils; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.rules.AbstractNodeBuilder; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.DefaultBuildTargetSourcePath; import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.FakeSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.TargetGraphAndBuildTargets; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.coercer.VersionMatchedCollection; import com.facebook.buck.shell.GenruleBuilder; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.testutil.TargetGraphFactory; import com.facebook.buck.util.RichStream; import com.facebook.buck.versions.FixedVersionSelector; import com.facebook.buck.versions.Version; import com.facebook.buck.versions.VersionedAliasBuilder; 
import com.facebook.buck.versions.VersionedTargetGraphBuilder;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;

import org.hamcrest.Matchers;
import org.junit.Test;

import java.nio.file.Paths;
import java.util.concurrent.ForkJoinPool;
import java.util.regex.Pattern;

/**
 * Tests for how {@code python_library} rules assemble their package components:
 * base-module handling, platform-/version-matched sources and resources,
 * cxx_genrule-generated sources, and platform-matched deps.
 */
public class PythonLibraryDescriptionTest {

  /** Modules default to the target's base path; an explicit base_module overrides it. */
  @Test
  public void baseModule() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    String sourceName = "main.py";
    SourcePath source = new FakeSourcePath("foo/" + sourceName);

    // Run without a base module set and verify it defaults to using the build target
    // base name.
    PythonLibraryBuilder normalBuilder =
        new PythonLibraryBuilder(target)
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(source)));
    TargetGraph normalTargetGraph = TargetGraphFactory.newInstance(normalBuilder.build());
    PythonLibrary normal =
        normalBuilder.build(
            new BuildRuleResolver(
                normalTargetGraph,
                new DefaultTargetNodeToBuildRuleTransformer()),
            filesystem,
            normalTargetGraph);
    assertEquals(
        ImmutableMap.of(
            target.getBasePath().resolve(sourceName),
            source),
        normal
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getModules());

    // Run *with* a base module set and verify it gets used to build the main module path.
    String baseModule = "blah";
    PythonLibraryBuilder withBaseModuleBuilder =
        new PythonLibraryBuilder(target)
            .setSrcs(SourceList.ofUnnamedSources(ImmutableSortedSet.of(source)))
            .setBaseModule(baseModule);
    TargetGraph withBaseModuleTargetGraph =
        TargetGraphFactory.newInstance(withBaseModuleBuilder.build());
    PythonLibrary withBaseModule =
        withBaseModuleBuilder.build(
            new BuildRuleResolver(
                withBaseModuleTargetGraph,
                new DefaultTargetNodeToBuildRuleTransformer()),
            filesystem,
            withBaseModuleTargetGraph);
    assertEquals(
        ImmutableMap.of(
            Paths.get(baseModule).resolve(sourceName),
            source),
        withBaseModule
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getModules());
  }

  /** Only platform_srcs whose pattern matches the python platform flavor are included. */
  @Test
  public void platformSrcs() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.py");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
    PythonLibraryBuilder builder =
        new PythonLibraryBuilder(target)
            .setPlatformSrcs(
                PatternMatchedCollection.<SourceList>builder()
                    .add(
                        Pattern.compile(PythonTestUtils.PYTHON_PLATFORM.getFlavor().toString()),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        Pattern.compile("won't match anything"),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build());
    TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
    PythonLibrary library =
        builder.build(
            new BuildRuleResolver(
                targetGraph,
                new DefaultTargetNodeToBuildRuleTransformer()),
            filesystem,
            targetGraph);
    assertThat(
        library
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getModules()
            .values(),
        Matchers.contains(matchedSource));
  }

  /** Same platform-pattern selection as platformSrcs, applied to resources. */
  @Test
  public void platformResources() throws Exception {
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.dat");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.dat");
    PythonLibraryBuilder builder =
        new PythonLibraryBuilder(target)
            .setPlatformResources(
                PatternMatchedCollection.<SourceList>builder()
                    .add(
                        Pattern.compile(PythonTestUtils.PYTHON_PLATFORM.getFlavor().toString()),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        Pattern.compile("won't match anything"),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build());
    TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
    PythonLibrary library =
        builder.build(
            new BuildRuleResolver(
                targetGraph,
                new DefaultTargetNodeToBuildRuleTransformer()),
            filesystem,
            targetGraph);
    assertThat(
        library
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getResources()
            .values(),
        Matchers.contains(matchedSource));
  }

  /**
   * With a versioned dep pinned to "1.0" via FixedVersionSelector, only the
   * versioned_srcs entry keyed to "1.0" survives version resolution.
   */
  @Test
  public void versionedSrcs() throws Exception {
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.py");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
    GenruleBuilder transitiveDepBuilder =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:tdep"))
            .setOut("out");
    VersionedAliasBuilder depBuilder =
        new VersionedAliasBuilder(BuildTargetFactory.newInstance("//:dep"))
            .setVersions(
                ImmutableMap.of(
                    Version.of("1.0"), transitiveDepBuilder.getTarget(),
                    Version.of("2.0"), transitiveDepBuilder.getTarget()));
    AbstractNodeBuilder<?, ?, ?> builder =
        new PythonLibraryBuilder(target)
            .setVersionedSrcs(
                VersionMatchedCollection.<SourceList>builder()
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("2.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build());
    TargetGraph targetGraph =
        VersionedTargetGraphBuilder.transform(
            new FixedVersionSelector(
                ImmutableMap.of(
                    builder.getTarget(),
                    ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")))),
            TargetGraphAndBuildTargets.of(
                TargetGraphFactory.newInstance(
                    transitiveDepBuilder.build(),
                    depBuilder.build(),
                    builder.build()),
                ImmutableSet.of(builder.getTarget())),
            new ForkJoinPool())
            .getTargetGraph();
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PythonLibrary library = (PythonLibrary) resolver.requireRule(builder.getTarget());
    assertThat(
        library
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getModules()
            .values(),
        Matchers.contains(matchedSource));
  }

  /** Mirror of versionedSrcs for versioned_resources. */
  @Test
  public void versionedResources() throws Exception {
    BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
    SourcePath matchedSource = new FakeSourcePath("foo/a.py");
    SourcePath unmatchedSource = new FakeSourcePath("foo/b.py");
    GenruleBuilder transitiveDepBuilder =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:tdep"))
            .setOut("out");
    VersionedAliasBuilder depBuilder =
        new VersionedAliasBuilder(BuildTargetFactory.newInstance("//:dep"))
            .setVersions(
                ImmutableMap.of(
                    Version.of("1.0"), transitiveDepBuilder.getTarget(),
                    Version.of("2.0"), transitiveDepBuilder.getTarget()));
    AbstractNodeBuilder<?, ?, ?> builder =
        new PythonLibraryBuilder(target)
            .setVersionedResources(
                VersionMatchedCollection.<SourceList>builder()
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(matchedSource)))
                    .add(
                        ImmutableMap.of(depBuilder.getTarget(), Version.of("2.0")),
                        SourceList.ofUnnamedSources(ImmutableSortedSet.of(unmatchedSource)))
                    .build());
    TargetGraph targetGraph =
        VersionedTargetGraphBuilder.transform(
            new FixedVersionSelector(
                ImmutableMap.of(
                    builder.getTarget(),
                    ImmutableMap.of(depBuilder.getTarget(), Version.of("1.0")))),
            TargetGraphAndBuildTargets.of(
                TargetGraphFactory.newInstance(
                    transitiveDepBuilder.build(),
                    depBuilder.build(),
                    builder.build()),
                ImmutableSet.of(builder.getTarget())),
            new ForkJoinPool())
            .getTargetGraph();
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PythonLibrary library = (PythonLibrary) resolver.requireRule(builder.getTarget());
    assertThat(
        library
            .getPythonPackageComponents(
                PythonTestUtils.PYTHON_PLATFORM,
                CxxPlatformUtils.DEFAULT_PLATFORM)
            .getResources()
            .values(),
        Matchers.contains(matchedSource));
  }

  /** A cxx_genrule output used as a src resolves to its platform-specific genrule. */
  @Test
  public void cxxGenruleSrcs() throws Exception {
    CxxGenruleBuilder srcBuilder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:src"))
            .setOut("out.py");
    PythonLibraryBuilder libraryBuilder =
        new PythonLibraryBuilder(BuildTargetFactory.newInstance("//:lib"))
            .setSrcs(
                SourceList.ofUnnamedSources(
                    ImmutableSortedSet.of(
                        new DefaultBuildTargetSourcePath(srcBuilder.getTarget()))));
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(srcBuilder.build(), libraryBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    CxxGenrule src = (CxxGenrule) resolver.requireRule(srcBuilder.getTarget());
    PythonLibrary library = (PythonLibrary) resolver.requireRule(libraryBuilder.getTarget());
    PythonPackageComponents components =
        library.getPythonPackageComponents(
            PythonTestUtils.PYTHON_PLATFORM,
            CxxPlatformUtils.DEFAULT_PLATFORM);
    assertThat(
        components.getModules().values(),
        Matchers.contains(src.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM)));
  }

  /** platform_deps keeps only the deps whose pattern matches the cxx platform flavor. */
  @Test
  public void platformDeps() throws Exception {
    PythonLibraryBuilder libraryABuilder =
        PythonLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:libA"));
    PythonLibraryBuilder libraryBBuilder =
        PythonLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:libB"));
    PythonLibraryBuilder ruleBuilder =
        PythonLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setPlatformDeps(
                PatternMatchedCollection.<ImmutableSortedSet<BuildTarget>>builder()
                    .add(
                        Pattern.compile(
                            CxxPlatformUtils.DEFAULT_PLATFORM.getFlavor().toString(),
                            Pattern.LITERAL),
                        ImmutableSortedSet.of(libraryABuilder.getTarget()))
                    .add(
                        Pattern.compile("matches nothing", Pattern.LITERAL),
                        ImmutableSortedSet.of(libraryBBuilder.getTarget()))
                    .build());
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            libraryABuilder.build(),
            libraryBBuilder.build(),
            ruleBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PythonLibrary rule = (PythonLibrary) resolver.requireRule(ruleBuilder.getTarget());
    assertThat(
        RichStream
            .from(
                rule.getPythonPackageDeps(
                    PythonTestUtils.PYTHON_PLATFORM,
                    CxxPlatformUtils.DEFAULT_PLATFORM))
            .map(BuildRule::getBuildTarget)
            .toImmutableSet(),
        Matchers.allOf(
            Matchers.hasItem(libraryABuilder.getTarget()),
            Matchers.not(Matchers.hasItem(libraryBBuilder.getTarget()))));
  }

}
package com.ait.lienzo.shared.core.types; import static com.ait.lienzo.shared.core.types.Color.fromColorString; import static com.ait.lienzo.shared.core.types.ColorName.ANTIQUEWHITE; import static com.ait.lienzo.shared.core.types.ColorName.BEIGE; import static com.ait.lienzo.shared.core.types.ColorName.BISQUE; import static com.ait.lienzo.shared.core.types.ColorName.BLACK; import static com.ait.lienzo.shared.core.types.ColorName.BLUE; import static com.ait.lienzo.shared.core.types.ColorName.BLUEVIOLET; import static com.ait.lienzo.shared.core.types.ColorName.BROWN; import static com.ait.lienzo.shared.core.types.ColorName.CHARTREUSE; import static com.ait.lienzo.shared.core.types.ColorName.LIME; import static com.ait.lienzo.shared.core.types.ColorName.PALEVIOLETRED; import static com.ait.lienzo.shared.core.types.ColorName.RED; import static com.ait.lienzo.shared.core.types.ColorName.SALMON; import static com.ait.lienzo.shared.core.types.ColorName.SIENNA; import static com.ait.lienzo.shared.core.types.ColorName.WHITE; import static org.junit.Assert.*; import org.junit.Test; public class ColorTest { @Test public void testHex2RGB() { Color white = Color.hex2RGB("#fFFffF"); assertEquals(WHITE.getColor(), white); Color black = Color.hex2RGB("#000000"); assertEquals(BLACK.getColor(), black); Color maximumRed = Color.hex2RGB("#ff0000"); assertEquals(RED.getColor(), maximumRed); Color maximumGreen = Color.hex2RGB("#00FF00"); assertEquals(LIME.getColor(), maximumGreen); maximumGreen = Color.hex2RGB("#0f0"); assertEquals(LIME.getColor(), maximumGreen); Color maximumBlue = Color.hex2RGB("#0000fF"); assertEquals(BLUE.getColor(), maximumBlue); maximumBlue = Color.hex2RGB("#00f"); assertEquals(BLUE.getColor(), maximumBlue); assertEquals(BEIGE.getColor(), Color.hex2RGB(BEIGE.getHexColor())); assertNull(Color.hex2RGB("#XYZ")); } @Test public void testGetRGB() { Color color = ANTIQUEWHITE.getColor(); final String ANTIQUEWHITE_RGB = String.format("rgb(%s,%s,%s)", ANTIQUEWHITE.getR(), 
ANTIQUEWHITE.getG(), ANTIQUEWHITE.getB()); assertEquals(ANTIQUEWHITE_RGB, color.getRGB()); } @Test public void testGetRGBA() { Color color = BISQUE.getColor(); final String BISQUE_RGBA = String.format("rgba(%s,%s,%s,%s)", BISQUE.getR(), BISQUE.getG(), BISQUE.getB(), BISQUE.getA()); assertEquals(BISQUE_RGBA, color.getRGBA()); } @Test public void testGetRandomHexColor() { for(int i = 0; i < 1000; i++) { String hex = Color.getRandomHexColor(); assertEquals(7, hex.length()); assertTrue(hex.startsWith("#")); assertFalse(hex.contains("-")); String hexWithoutPound = hex.substring(1, hex.length()); Integer.parseInt(hexWithoutPound, 16); } } @Test public void testBrightness() { Color color = BROWN.getColor(); assertEquals("rgb(165,42,42)", color.getRGB()); assertEquals("rgb(216,93,93)", color.brightness(0.2).getRGB()); assertEquals("rgb(114,0,0)", color.brightness(-0.2).getRGB()); assertEquals(WHITE.getColor(), color.brightness(5)); assertEquals(BLACK.getColor(), color.brightness(-5)); } @Test public void testToBrowserRGB() { assertEquals(BISQUE.getColor().getRGB(), Color.toBrowserRGB(BISQUE.getR(), BISQUE.getG(), BISQUE.getB())); } @Test public void testFromColorString() { assertEquals(ColorName.TRANSPARENT.getColor(), fromColorString("transparent")); assertNull(fromColorString("#1234567890")); assertNull(fromColorString("#aWaaaa")); assertEquals(BISQUE.getColor(), fromColorString("#ffe4c4")); assertEquals(LIME.getColor(), fromColorString("#0f0")); assertEquals(ANTIQUEWHITE.getColor(), fromColorString("antiquewhite")); assertEquals(BEIGE.getColor(), fromColorString("rgb(245, 245, 220)")); assertEquals(SIENNA.getColor(), fromColorString("rgb(62.7%, 32.2%, 17.6%)")); assertEquals(BEIGE.getColor().setA(0.23), fromColorString("rgba(245, 245, 220, 0.23)")); assertEquals(SIENNA.getColor().setA(0.67), fromColorString("rgba(62.7%, 32.2%, 17.6%, 0.67)")); assertEquals(SIENNA.getColor().setA(0.99), fromColorString("rgba(62.7%, 32.2%, 17.6%, 99%)")); assertEquals(SIENNA.getColor(), 
fromColorString("hsl(19.3, 56.1%, 40.2%)")); String hslWithCalculatedPercent = String.format("hsl(%.6f%%, 56.1%%, 40.2%%)", 19.3/360.0*100); assertEquals(SIENNA.getColor(), fromColorString(hslWithCalculatedPercent)); assertEquals(SIENNA.getColor().setA(0.42), fromColorString("hsla(19.3, 56.1%, 40.2%, 0.42)")); assertNull(fromColorString("asdf7ya_!+_)_")); } @Test(expected = IllegalArgumentException.class) public void testFromColorStringWithIllegalArgumentException() { assertEquals(SIENNA.getColor(), fromColorString("hsl(19.3, 56.1, 40.2)")); } @Test public void testFromHSL() { assertEquals(SIENNA.getColor(), Color.fromHSL(19.3, 56.1, 40.2)); assertEquals(BLACK.getColor(), Color.fromHSL(0, 0, 0)); assertEquals(WHITE.getColor(), Color.fromHSL(0.0, 0.0, 100.0)); } @Test public void test() { Color color = SIENNA.getColor(); final String SIENNA_RGB = String.format("rgb(%s,%s,%s)", SIENNA.getR(), SIENNA.getG(), SIENNA.getB()); assertEquals(color.getColorString(), SIENNA_RGB); color = BEIGE.getColor().setA(0.2); final String BEIGE_RGBA = String.format("rgba(%s,%s,%s,0.2)", BEIGE.getR(), BEIGE.getG(), BEIGE.getB()); assertEquals(color.getColorString(), BEIGE_RGBA); } @Test public void testEquals() { Color color = BEIGE.getColor(); assertTrue(color.equals(color)); assertTrue(color.equals(BEIGE.getColor())); assertFalse(color.equals(null)); assertFalse(color.equals(SALMON.getColor())); } @Test public void testGetHSLFromRGB() { final String SIENNA_HSL = "hsl(19.3, 56.1%, 40.2%)"; Color sienna = fromColorString(SIENNA_HSL); assertEquals(SIENNA.getColor(), sienna); Color.HSL siennaHSLFromRGB = Color.getHSLFromRGB(sienna.getR(), sienna.getG(), sienna.getB()); assertEquals(SIENNA_HSL, siennaHSLFromRGB.toBrowserHSL()); final String PALEVIOLETRED_HSL = "hsl(340.4, 59.8%, 64.9%)"; Color palevioletred = fromColorString(PALEVIOLETRED_HSL); assertEquals(PALEVIOLETRED.getColor(), palevioletred); Color.HSL paleVioletRedHSLFromGRG = Color.getHSLFromRGB(palevioletred.getR(), 
palevioletred.getG(), palevioletred.getB()); assertEquals(PALEVIOLETRED_HSL, paleVioletRedHSLFromGRG.toBrowserHSL()); final String CHARTREUSE_HSL = "hsl(90.1, 100.0%, 50.0%)"; Color chartreuse = fromColorString(CHARTREUSE_HSL); assertEquals(CHARTREUSE.getColor(), chartreuse); Color.HSL chartreuseHSLFromGRG = Color.getHSLFromRGB(chartreuse.getR(), chartreuse.getG(), chartreuse.getB()); assertEquals(CHARTREUSE_HSL, chartreuseHSLFromGRG.toBrowserHSL()); final String BLUEVIOLET_HSL = "hsl(271.1, 75.9%, 52.7%)"; Color blueviolet = fromColorString(BLUEVIOLET_HSL); assertEquals(BLUEVIOLET.getColor(), blueviolet); Color.HSL bluevioletHSLFromRGB = Color.getHSLFromRGB(blueviolet.getR(), blueviolet.getG(), blueviolet.getB()); assertEquals(BLUEVIOLET_HSL, bluevioletHSLFromRGB.toBrowserHSL()); } @Test public void testFixRGB() { Color red = new Color(256, -1, 0); assertEquals(RED.getColor(), red); } @Test public void testFixAlpha() { Color lime = new Color(0, 10000, -1000, -0.1); assertEquals(LIME.getColor().setA(0), lime); Color blue = new Color(-256, 0, 10000000, 2); assertEquals(BLUE.getColor(), blue); } @Test public void testGetHSL() { assertEquals("hsl(271.1, 75.9%, 52.7%)", BLUEVIOLET.getColor().getHSL().toBrowserHSL()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.hydromatic.optiq.materialize;

import net.hydromatic.avatica.ColumnMetaData;
import net.hydromatic.linq4j.*;
import net.hydromatic.linq4j.expressions.Expression;
import net.hydromatic.linq4j.function.Function1;
import net.hydromatic.linq4j.function.Functions;
import net.hydromatic.optiq.*;
import net.hydromatic.optiq.Table;
import net.hydromatic.optiq.config.OptiqConnectionProperty;
import net.hydromatic.optiq.impl.clone.CloneSchema;
import net.hydromatic.optiq.impl.java.JavaTypeFactory;
import net.hydromatic.optiq.jdbc.*;
import net.hydromatic.optiq.prepare.Prepare;
import net.hydromatic.optiq.runtime.Hook;
import net.hydromatic.optiq.util.BitSets;

import org.eigenbase.reltype.RelDataType;
import org.eigenbase.reltype.RelDataTypeImpl;
import org.eigenbase.util.Pair;

import com.google.common.collect.*;

import java.lang.reflect.Type;
import java.util.*;

/**
 * Manages the collection of materialized tables known to the system,
 * and the process by which they become valid and invalid.
 */
public class MaterializationService {
  // Global singleton used unless a thread-local instance has been installed.
  private static final MaterializationService INSTANCE =
      new MaterializationService();

  /** For testing. */
  private static final ThreadLocal<MaterializationService> THREAD_INSTANCE =
      new ThreadLocal<MaterializationService>() {
        @Override
        protected MaterializationService initialValue() {
          return new MaterializationService();
        }
      };

  // Holds all mutable state: key maps and the set of known tile keys.
  private final MaterializationActor actor = new MaterializationActor();

  private MaterializationService() {
  }

  /** Defines a new materialization. Returns its key.
   *
   * <p>Idempotent on the SQL text: if a materialization with the same
   * (sql, schema, path) query key already exists, its key is returned.
   * If {@code create} is false and none exists, returns null. Otherwise the
   * backing table is looked up or (when absent) populated by preparing
   * {@code viewSql} and cloning its results into an in-memory table. */
  public MaterializationKey defineMaterialization(final OptiqSchema schema,
      TileKey tileKey, String viewSql, List<String> viewSchemaPath,
      String tableName, boolean create) {
    final MaterializationActor.QueryKey queryKey =
        new MaterializationActor.QueryKey(viewSql, schema, viewSchemaPath);
    final MaterializationKey existingKey = actor.keyBySql.get(queryKey);
    if (existingKey != null) {
      return existingKey;
    }
    if (!create) {
      return null;
    }

    final OptiqConnection connection =
        MetaImpl.connect(schema.root(), null);
    final MaterializationKey key = new MaterializationKey();
    Table materializedTable;
    RelDataType rowType = null;
    OptiqSchema.TableEntry tableEntry;
    if (tableName != null) {
      final Pair<String, Table> pair = schema.getTable(tableName, true);
      materializedTable = pair == null ? null : pair.right;
      if (materializedTable == null) {
        // Prepare the view SQL with materialization creation disabled, so a
        // materialization does not recursively try to create itself while
        // being populated.
        final ImmutableMap<OptiqConnectionProperty, String> map =
            ImmutableMap.of(OptiqConnectionProperty.CREATE_MATERIALIZATIONS,
                "false");
        final OptiqPrepare.PrepareResult<Object> prepareResult =
            Schemas.prepare(connection, schema, viewSchemaPath, viewSql, map);
        rowType = prepareResult.rowType;
        final JavaTypeFactory typeFactory = connection.getTypeFactory();
        // Clone the query result into an in-memory table; rows are produced
        // lazily when the table is first enumerated.
        materializedTable =
            CloneSchema.createCloneTable(typeFactory,
                RelDataTypeImpl.proto(prepareResult.rowType),
                Functions.adapt(prepareResult.structType.columns,
                    new Function1<ColumnMetaData, ColumnMetaData.Rep>() {
                      public ColumnMetaData.Rep apply(ColumnMetaData column) {
                        return column.type.representation;
                      }
                    }),
                new AbstractQueryable<Object>() {
                  public Enumerator<Object> enumerator() {
                    final DataContext dataContext =
                        Schemas.createDataContext(connection);
                    return prepareResult.enumerator(dataContext);
                  }

                  public Type getElementType() {
                    return Object.class;
                  }

                  public Expression getExpression() {
                    throw new UnsupportedOperationException();
                  }

                  public QueryProvider getProvider() {
                    return connection;
                  }

                  public Iterator<Object> iterator() {
                    final DataContext dataContext =
                        Schemas.createDataContext(connection);
                    return prepareResult.iterator(dataContext);
                  }
                });
        schema.add(tableName, materializedTable);
      }
      // NOTE(review): when the table was just created above, it is added to the
      // schema twice (once in the if-block, once here) — looks redundant; confirm
      // whether schema.add is idempotent before simplifying.
      tableEntry = schema.add(tableName, materializedTable);
      Hook.CREATE_MATERIALIZATION.run(tableName);
    } else {
      tableEntry = null;
    }
    if (rowType == null) {
      // If we didn't validate the SQL by populating a table, validate it now.
      final OptiqPrepare.ParseResult parse =
          Schemas.parse(connection, schema, viewSchemaPath, viewSql);
      rowType = parse.rowType;
    }
    final MaterializationActor.Materialization materialization =
        new MaterializationActor.Materialization(key, schema.root(),
            tableEntry, viewSql, rowType);
    actor.keyMap.put(materialization.key, materialization);
    actor.keyBySql.put(queryKey, materialization.key);
    if (tileKey != null) {
      actor.tileKeys.add(tileKey);
    }
    return key;
  }

  /** Checks whether a materialization is valid, and if so, returns the table
   * where the data are stored.
   *
   * <p>Currently "valid" only means the key is registered; no freshness check
   * is performed here. Returns null for unknown keys. */
  public OptiqSchema.TableEntry checkValid(MaterializationKey key) {
    final MaterializationActor.Materialization materialization =
        actor.keyMap.get(key);
    if (materialization != null) {
      return materialization.materializedTable;
    }
    return null;
  }

  /**
   * Defines a tile.
   *
   * <p>Setting the {@code create} flag to false prevents a materialization
   * from being created if one does not exist. Critically, it is set to false
   * during the recursive SQL that populates a materialization. Otherwise a
   * materialization would try to create itself to populate itself!
   */
  public Pair<OptiqSchema.TableEntry, TileKey> defineTile(Lattice lattice,
      BitSet groupSet, List<Lattice.Measure> measureList, OptiqSchema schema,
      boolean create) {
    // FIXME This is all upside down. We are looking for a materialization
    // first. But we should define a tile first, then find out whether an
    // exact materialization exists, then find out whether an acceptable
    // approximate materialization exists, and if it does not, then maybe
    // create a materialization.
    //
    // The SQL should not be part of the key of the materialization. There are
    // better, more concise keys. And especially, check that we are not using
    // that SQL to populate the materialization. There may be finer-grained
    // materializations that we can roll up. (Maybe the SQL on the fact table
    // gets optimized to use those materializations.)
    String sql = lattice.sql(groupSet, measureList);
    final TileKey tileKey =
        new TileKey(lattice, groupSet, ImmutableList.copyOf(measureList));
    // First try an exact materialization for this group set.
    MaterializationKey materializationKey =
        defineMaterialization(schema, tileKey, sql, schema.path(null),
            "m" + groupSet, create);
    if (materializationKey != null) {
      final OptiqSchema.TableEntry tableEntry = checkValid(materializationKey);
      if (tableEntry != null) {
        return Pair.of(tableEntry, tileKey);
      }
    }
    // No direct hit. Look for roll-ups: any known tile whose dimensions
    // contain groupSet and whose measures can satisfy measureList.
    for (TileKey tileKey2 : actor.tileKeys) {
      if (BitSets.contains(tileKey2.dimensions, groupSet)
          && allSatisfiable(measureList, tileKey2)) {
        sql = lattice.sql(tileKey2.dimensions, tileKey2.measures);
        materializationKey =
            defineMaterialization(schema, tileKey2, sql, schema.path(null),
                "m" + tileKey2.dimensions, create);
        final OptiqSchema.TableEntry tableEntry =
            checkValid(materializationKey);
        if (tableEntry != null) {
          return Pair.of(tableEntry, tileKey2);
        }
      }
    }
    return null;
  }

  /** Returns whether every measure in {@code measureList} can be computed
   * from the given tile. */
  private boolean allSatisfiable(List<Lattice.Measure> measureList,
      TileKey tileKey) {
    // A measure can be satisfied if it is contained in the measure list, or,
    // less obviously, if it is composed of grouping columns.
    for (Lattice.Measure measure : measureList) {
      if (!(tileKey.measures.contains(measure)
          || BitSets.contains(tileKey.dimensions, measure.argBitSet()))) {
        return false;
      }
    }
    return true;
  }

  /** Gathers a list of all materialized tables known within a given root
   * schema. (Each root schema defines a disconnected namespace, with no overlap
   * with the current schema. Especially in a test run, the contents of two
   * root schemas may look similar.) */
  public List<Prepare.Materialization> query(OptiqSchema rootSchema) {
    final List<Prepare.Materialization> list =
        new ArrayList<Prepare.Materialization>();
    for (MaterializationActor.Materialization materialization
        : actor.keyMap.values()) {
      // Only materializations belonging to this root schema and backed by a
      // real table are reported.
      if (materialization.rootSchema == rootSchema
          && materialization.materializedTable != null) {
        list.add(
            new Prepare.Materialization(materialization.materializedTable,
                materialization.sql));
      }
    }
    return list;
  }

  /** De-registers all materialized tables in the system. */
  public void clear() {
    actor.keyMap.clear();
  }

  /** Used by tests, to ensure that they see their own service. */
  public static void setThreadLocal() {
    THREAD_INSTANCE.set(new MaterializationService());
  }

  /** Returns the instance of the materialization service. Usually the global
   * one, but returns a thread-local one during testing (when
   * {@link #setThreadLocal()} has been called by the current thread). */
  public static MaterializationService instance() {
    // NOTE(review): THREAD_INSTANCE.initialValue() creates a fresh service, so
    // this get() never returns null and INSTANCE appears unreachable from here;
    // confirm whether the global fallback is intentional.
    MaterializationService materializationService = THREAD_INSTANCE.get();
    if (materializationService != null) {
      return materializationService;
    }
    return INSTANCE;
  }

  /** Definition of a particular combination of dimensions and measures of a
   * lattice that is the basis of a materialization.
   *
   * <p>Holds similar information to a {@link Lattice.Tile} but a lattice is
   * immutable and tiles are not added after their creation. */
  public static class TileKey {
    public final Lattice lattice;
    public final BitSet dimensions;
    public final ImmutableList<Lattice.Measure> measures;

    public TileKey(Lattice lattice, BitSet dimensions,
        ImmutableList<Lattice.Measure> measures) {
      this.lattice = lattice;
      this.dimensions = dimensions;
      this.measures = measures;
    }
  }
}

// End MaterializationService.java
package org.ektorp;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;

import org.ektorp.util.Assert;

/**
 * Result of a view query: an ordered list of {@link Row}s plus the optional
 * {@code total_rows}, {@code offset} and {@code update_seq} metadata fields.
 *
 * @author henrik lundgren
 *
 */
public class ViewResult implements Iterable<ViewResult.Row>, Serializable {

    private static final String OFFSET_FIELD_NAME = "offset";
    private static final String TOTAL_ROWS_FIELD_NAME = "total_rows";
    private static final String UPDATE_SEQ = "update_seq";
    private static final long serialVersionUID = 4750290767933801714L;
    // -1 means "field absent in the result"; see getTotalRows()/getOffset().
    private int totalRows = -1;
    private int offset = -1;
    // Kept as a String because some servers (e.g. Cloudant) use non-numeric
    // sequence values; see isUpdateSeqNumeric().
    private String updateSeq;
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SE_BAD_FIELD")
    private List<Row> rows;

    /**
     * Builds a result from the raw JSON response.
     *
     * @param resultNode the parsed response; must contain a "rows" array
     * @param ignoreNotFound when true, rows carrying an "error" field are
     *        silently skipped instead of raising ViewResultException
     */
    public ViewResult(JsonNode resultNode, boolean ignoreNotFound) {
        Assert.notNull(resultNode, "resultNode may not be null");
        Assert.isTrue(resultNode.findPath("rows").isArray(), "result must contain 'rows' field of array type");

        if (resultNode.get(TOTAL_ROWS_FIELD_NAME) != null) {
            totalRows = resultNode.get(TOTAL_ROWS_FIELD_NAME).intValue();
        }
        if (resultNode.get(OFFSET_FIELD_NAME) != null) {
            offset = resultNode.get(OFFSET_FIELD_NAME).intValue();
        }
        if (resultNode.get(UPDATE_SEQ) != null) {
            // update_seq may be textual or numeric; prefer the textual form
            // and fall back to rendering the number as a string.
            updateSeq = resultNode.get(UPDATE_SEQ).textValue();
            if(updateSeq == null) {
                updateSeq = Long.toString(resultNode.get(UPDATE_SEQ).intValue());
            }
        }
        JsonNode rowsNode = resultNode.get("rows");
        rows = new ArrayList<ViewResult.Row>(rowsNode.size());
        for (JsonNode n : rowsNode) {
            if (!(ignoreNotFound && n.has(Row.ERROR_FIELD_NAME))) {
                rows.add(new Row(n));
            }
        }
    }

    // NOTE(review): returns the internal list directly; callers can mutate
    // this result's state through it. Confirm before tightening.
    public List<Row> getRows() {
        return rows;
    }

    public int getSize() {
        return rows.size();
    }
    /**
     *
     * @return -1 if result did not contain an offset field
     */
    public int getOffset() {
        return offset;
    }

    @JsonProperty
    void setOffset(int offset) {
        this.offset = offset;
    }
    /**
     *
     * @return -1 if result did not contain a total_rows field
     */
    public int getTotalRows() {
        return totalRows;
    }

    @JsonProperty(TOTAL_ROWS_FIELD_NAME)
    void setTotalRows(int i) {
        this.totalRows = i;
    }

    /**
     * @return -1L if result did not contain an update_seq field
     * @throws NumberFormatException if the sequence is present but not
     *         numeric; check {@link #isUpdateSeqNumeric()} first
     */
    public long getUpdateSeq() {
        if(updateSeq != null) {
            return Long.parseLong(updateSeq);
        }
        return -1L;
    }

    /**
     * @return true if the update_seq value is purely numeric; false for
     *         example when the database is a Cloudant instance, which uses
     *         opaque string sequences
     */
    public boolean isUpdateSeqNumeric() {
        // NOTE(review): \d* also matches an empty string — presumably an
        // empty seq never occurs in practice; confirm against the server.
        return updateSeq != null && updateSeq.matches("^\\d*$");
    }

    /**
     *
     * @return null if result did not contain an update_seq field
     */
    public String getUpdateSeqAsString() {
        return updateSeq;
    }

    @JsonProperty(UPDATE_SEQ)
    public void setUpdateSeq(String updateSeq) {
        this.updateSeq = updateSeq;
    }

    public Iterator<ViewResult.Row> iterator() {
        return rows.iterator();
    }

    public boolean isEmpty() {
        return rows.isEmpty();
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{\n\"total_rows\":");
        builder.append(totalRows);
        builder.append(",\n\"offset\":");
        builder.append(offset);
        builder.append(",\n\"rows\":");
        builder.append(rows.toString());
        builder.append("\n}");
        return builder.toString();
    }

    /**
     * A single row of a view result. Thin wrapper over the row's JsonNode;
     * accessors read lazily from the underlying node.
     */
    public static class Row {

        static final String VALUE_FIELD_NAME = "value";
        static final String ID_FIELD_NAME = "id";
        static final String KEY_FIELD_NAME = "key";
        static final String DOC_FIELD_NAME = "doc";
        static final String ERROR_FIELD_NAME = "error";
        private final JsonNode rowNode;

        /**
         * @throws ViewResultException if the row carries an "error" field
         */
        @JsonCreator
        public Row(JsonNode rowNode) {
            Assert.notNull(rowNode, "row node may not be null");
            this.rowNode = rowNode;
            if (getError() != null) {
                throw new ViewResultException(getKeyAsNode(), getError());
            }
        }

        public String getId() {
            return rowNode.get(ID_FIELD_NAME).textValue();
        }

        public String getKey() {
            return nodeAsString(getKeyAsNode());
        }

        public JsonNode getKeyAsNode() {
            return rowNode.findPath(KEY_FIELD_NAME);
        }

        public String getValue() {
            return nodeAsString(getValueAsNode());
        }

        public int getValueAsInt() {
            // 0 when the value is absent or not numeric.
            return getValueAsNode().asInt(0);
        }

        public JsonNode getValueAsNode() {
            return rowNode.findPath(VALUE_FIELD_NAME);
        }

        public String getDoc() {
            return nodeAsString(rowNode.findValue(DOC_FIELD_NAME));
        }

        public JsonNode getDocAsNode() {
            return rowNode.findPath(DOC_FIELD_NAME);
        }

        private String getError() {
            return nodeAsString(rowNode.get(ERROR_FIELD_NAME));
        }

        // Containers render as JSON text; scalars via asText().
        private String nodeAsString(JsonNode node) {
            if (isNull(node)) return null;
            return node.isContainerNode() ? node.toString() : node.asText();
        }

        private boolean isNull(JsonNode node) {
            return node == null || node.isNull() || node.isMissingNode();
        }

        @Override
        public String toString() {
            return rowNode.toString();
        }
    }
}
package com.billybyte.commonlibstometeor;

import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.billybyte.commoncollections.Tuple;
import com.billybyte.dse.outputs.DeltaDerSen;
import com.billybyte.dse.outputs.DerivativeReturn;
import com.billybyte.dse.outputs.DerivativeSensitivityTypeInterface;
import com.billybyte.dse.outputs.GammaDerSen;
import com.billybyte.dse.outputs.RhoDerSen;
import com.billybyte.dse.outputs.ThetaDerSen;
import com.billybyte.dse.outputs.VegaDerSen;
import com.billybyte.marketdata.MarketDataComLib;
import com.billybyte.marketdata.SecDef;
import com.billybyte.meteorjava.MeteorColumnModel;

/**
 * Per-underlying option greeks (delta/gamma/vega/theta/rho) for one position,
 * published as a Meteor row item.
 *
 * <p>Review fixes applied:
 * <ul>
 *   <li>BUGFIX: rho was computed from {@code deltaDerSen} (copy-paste error);
 *       now uses {@code rhoDerSen}.</li>
 *   <li>BUGFIX: {@code getSense} NPE'd when the sensitivity map had no entry
 *       for the requested greek ({@code senseValue} stayed null).</li>
 *   <li>BUGFIX: the row id suffix accumulated across loop iterations
 *       ({@code _id = _id + INTDF.format(i)}); each row now gets the base id
 *       plus a single 4-digit suffix.</li>
 * </ul>
 */
public class GreeksData extends PositionBaseItem{
	// Sensitivity-type singletons used as keys into the DSE result map.
	static final DerivativeSensitivityTypeInterface deltaDerSen = new DeltaDerSen();
	static final DerivativeSensitivityTypeInterface gammaDerSen = new GammaDerSen();
	static final DerivativeSensitivityTypeInterface vegaDerSen = new VegaDerSen();
	static final DerivativeSensitivityTypeInterface thetaDerSen = new ThetaDerSen();
	static final DerivativeSensitivityTypeInterface rhoDerSen = new RhoDerSen();
	// Sentinel published when the DSE reports an invalid return.
	static final Double badRet = -1111111.0;
	// Zero-padded per-underlying suffix appended to the position id.
	static final DecimalFormat INTDF = new DecimalFormat("0000");

	private final String type;
	private final String exch;
	private final String underlying;
	private final String curr;
	private final Integer year;
	private final Integer month;
	private final Integer day;
	private final Double delta;
	private final Double gamma;
	private final Double vega;
	private final Double theta;
	private final Double rho;

	/** No-arg constructor (required by serialization/reflection frameworks). */
	public GreeksData(){
		this(
				null,null,null,null,null,null,
				null,null,null,null,null,null,
				null,null,null,null);
	}

	/**
	 * Full constructor.
	 *
	 * @param _id unique row id
	 * @param userId owning user
	 * @param account account the position belongs to
	 * @param strategy strategy bucket
	 * @param type security type of the underlying
	 * @param exch exchange of the underlying
	 * @param underlying underlying symbol
	 * @param curr currency
	 * @param year contract year
	 * @param month contract month
	 * @param day contract day (0 when not applicable)
	 * @param delta position delta
	 * @param gamma position gamma
	 * @param vega position vega
	 * @param theta position theta
	 * @param rho position rho
	 */
	public GreeksData(
			String _id,
			String userId,
			String account,
			String strategy,
			String type,
			String exch,
			String underlying,
			String curr,
			Integer year,
			Integer month,
			Integer day,
			Double delta,
			Double gamma,
			Double vega,
			Double theta,
			Double rho) {
		super(_id, userId,account,strategy);
		this.type = type;
		this.exch = exch;
		this.underlying = underlying;
		this.curr = curr;
		this.year = year;
		this.month = month;
		this.day = day;
		this.delta = delta;
		this.gamma = gamma;
		this.vega = vega;
		this.theta = theta;
		this.rho = rho;
	}

	public String getUnderlying() {
		return underlying;
	}

	public Integer getYear() {
		return year;
	}

	public Integer getMonth() {
		return month;
	}

	public Integer getDay() {
		return day;
	}

	public Double getDelta() {
		return delta;
	}

	public Double getGamma() {
		return gamma;
	}

	public Double getVega() {
		return vega;
	}

	public Double getTheta() {
		return theta;
	}

	public Double getRho() {
		return rho;
	}

	@Override
	public String toString() {
		return super.toString() + "," +
				type + "," +
				exch + "," +
				curr+ "," +
				year+ "," +
				month + "," +
				day + "," +
				delta + "," +
				gamma + "," +
				vega + "," +
				theta + "," +
				rho;
	}

	/**
	 * Builds one GreeksData row per underlying from the DSE sensitivity map.
	 *
	 * @param p the position (supplies id/user/account/strategy and quantity)
	 * @param sd security definition of the position itself (unused here)
	 * @param drSenseMap sensitivity type -> per-underlying DSE returns
	 * @param underlyingSds one SecDef per underlying leg
	 * @return (problems, rows); problems is currently always empty
	 */
	@SuppressWarnings("unchecked")
	@Override
	public <M extends PositionBaseItem> Tuple<List<String>, List<M>> positionBasedItemFromDerivativeReturn(
			Position p,
			SecDef sd,
			Map<DerivativeSensitivityTypeInterface, DerivativeReturn[]> drSenseMap,
			List<SecDef> underlyingSds) {
		List<String> problems = new ArrayList<String>();
		List<M> retList = new ArrayList<M>();
		double qty = p.getQty().doubleValue();
		String _id = p.get_id();
		String userId = p.getUserId();
		String account = p.getAccount();
		String strategy = p.getStrategy();
		Double[] delta = getSense(underlyingSds,drSenseMap, deltaDerSen,qty);
		Double[] gamma = getSense(underlyingSds,drSenseMap, gammaDerSen,qty);
		Double[] vega = getSense(underlyingSds,drSenseMap, vegaDerSen,qty);
		Double[] theta = getSense(underlyingSds,drSenseMap, thetaDerSen,qty);
		// BUGFIX: was deltaDerSen, so rho silently duplicated delta.
		Double[] rho = getSense(underlyingSds,drSenseMap, rhoDerSen,qty);
		for(int i = 0;i<underlyingSds.size();i++){
			SecDef sdUnder = underlyingSds.get(i);
			String type = sdUnder.getSymbolType().toString();
			String exch = sdUnder.getExchange().toString();
			String under = sdUnder.getSymbol();
			String curr = sdUnder.getCurrency().toString();
			int year = sdUnder.getContractYear();
			int month = sdUnder.getContractMonth();
			Integer day = sdUnder.getContractDay();
			day = day==null ? 0 : day;
			// BUGFIX: do not reassign _id — the old code appended a new
			// suffix to the already-suffixed id on every iteration.
			String rowId = _id + INTDF.format(i);
			M ret =
				(M)new GreeksData(rowId, userId, account, strategy, type,
					exch, under, curr,
					year, month, day,
					delta[i], gamma[i], vega[i], theta[i], rho[i]);
			retList.add(ret);
		}
		return new Tuple<List<String>, List<M>>(problems, retList);
	}

	/**
	 * Extracts one sensitivity per underlying, scaled by position quantity.
	 * Invalid DSE returns are replaced by the {@code badRet} sentinel.
	 *
	 * <p>If the DSE returned fewer values than there are underlyings (e.g.
	 * theta may come back as a single aggregate), the total is redistributed
	 * evenly to create a pseudo-sensitivity per underlying.
	 */
	private Double[] getSense(List<SecDef> underlyingSds,
			Map<DerivativeSensitivityTypeInterface, DerivativeReturn[]> drSenseMap,
			DerivativeSensitivityTypeInterface sense,
			double qty){
		DerivativeReturn[] drArr = drSenseMap.get(sense);
		Double[] senseValue;
		if(drArr!=null && drArr.length>0){
			senseValue = new Double[drArr.length];
			for(int i = 0;i<drArr.length;i++){
				senseValue[i] = badRet;
				if(drArr[i].isValidReturn()){
					senseValue[i] = drArr[i].getValue().doubleValue()*qty;
				}
			}
		} else {
			// BUGFIX: previously senseValue stayed null here and the
			// redistribution below threw a NullPointerException. An empty
			// array redistributes to 0.0 per underlying.
			senseValue = new Double[0];
		}
		int underLength = underlyingSds.size();
		if(senseValue.length<underLength){
			// Sum whatever came back, then spread it evenly.
			Double totalSenseValue = 0.0;
			for(Double partialSenseValue : senseValue){
				totalSenseValue += partialSenseValue;
			}
			senseValue = new Double[underLength];
			for(int i = 0;i<underLength;i++){
				senseValue[i] = totalSenseValue/underLength;
			}
		}
		return senseValue;
	}

	/** Sensitivities this item needs the DSE to compute. */
	@Override
	public DerivativeSensitivityTypeInterface[] getDseSenseArray(){
		DerivativeSensitivityTypeInterface[] ret =
			{
				deltaDerSen,gammaDerSen,vegaDerSen,thetaDerSen,rhoDerSen
			};
		return ret;
	}

	/** Column layout for the Meteor table that displays these rows. */
	@Override
	public MeteorColumnModel[] buildColumnModelArray() {
		MeteorColumnModel accountCm =
				new MeteorColumnModel("account","account","account",null);
		MeteorColumnModel strategyCm =
				new MeteorColumnModel("strategy","strategy","strategy",null);
		MeteorColumnModel underlyingCm =
				new MeteorColumnModel("underlying","underlying","underlying",null);
		MeteorColumnModel yearCm =
				new MeteorColumnModel("year","year","year",null);
		MeteorColumnModel monthCm =
				new MeteorColumnModel("month","month","month",null);
		MeteorColumnModel dayCm =
				new MeteorColumnModel("day","day","day",null);
		MeteorColumnModel deltaCm =
				new MeteorColumnModel("delta","delta","delta",new String[]{"delta"});
		MeteorColumnModel gammaCm =
				new MeteorColumnModel("gamma","gamma","gamma",new String[]{"gamma"});
		MeteorColumnModel vegaCm =
				new MeteorColumnModel("vega","vega","vega",new String[]{"vega"});
		MeteorColumnModel thetaCm =
				new MeteorColumnModel("theta","theta","theta",new String[]{"theta"});
		MeteorColumnModel rhoCm =
				new MeteorColumnModel("rho","rho","rho",new String[]{"rho"});
		MeteorColumnModel[] ret =
			{
				accountCm,strategyCm,
				underlyingCm,
				yearCm,monthCm,dayCm,
				deltaCm,gammaCm,vegaCm,thetaCm,rhoCm
			};
		return ret;
	}
}
/*L
 * Copyright HealthCare IT, Inc.
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/edct-formbuilder/LICENSE.txt for details.
 */
package com.healthcit.cacure.web.controller.question;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.http.HttpServletRequest;

import org.apache.log4j.Logger;
import org.directwebremoting.annotations.RemoteMethod;
import org.directwebremoting.annotations.RemoteProxy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.stereotype.Service;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.view.RedirectView;

import com.healthcit.cacure.businessdelegates.FormManager;
import com.healthcit.cacure.businessdelegates.QuestionAnswerManager;
import com.healthcit.cacure.businessdelegates.UserManager;
import com.healthcit.cacure.businessdelegates.beans.SkipAffecteesBean;
import com.healthcit.cacure.model.BaseForm;
import com.healthcit.cacure.model.Category;
import com.healthcit.cacure.model.FormElement;
import com.healthcit.cacure.model.Role.RoleCode;
import com.healthcit.cacure.model.breadcrumb.BreadCrumb;
import com.healthcit.cacure.model.breadcrumb.FormDetailsBreadCrumb;
import com.healthcit.cacure.security.UnauthorizedException;
import com.healthcit.cacure.utils.Constants;
import com.healthcit.cacure.web.controller.BreadCrumbsSupporter;

/**
 * Controller for view questionList page.
 * @author vetali
 *
 */
@Controller
@Service
@RemoteProxy
public class FormElementListController extends BaseFormElementController implements BreadCrumbsSupporter<BreadCrumb> {

	private static final String QUERY_PARAM = "query";
	private static final String CATEGORY_ID_PARAM = "categoryId";
	// Form names longer than this are truncated with "..." in the header.
	private static final int MAX_HEADER_LENGTH = 28;
	public static final String FORM_KEY = "form";

	private static final Logger log = Logger.getLogger(FormElementListController.class);

	@Autowired
	private QuestionAnswerManager qaManager;

	@Autowired
	private FormManager formManager;

	// NOTE(review): two UserManager injections of the same type; only
	// userService is referenced in this class. Confirm userManager is not
	// used via reflection/subclass before removing it.
	@Autowired
	private UserManager userManager;

	@Autowired
	private UserManager userService;

	/** Exposes whether a question-library form exists, for the view layer. */
	@ModelAttribute("questionLibraryFormExist")
	public Boolean isQuestionLibraryExist() {
		return this.formManager.getQuestionLibraryForm() != null;
	}

	/**
	 * Renders the question list for a form, optionally filtered by search
	 * text and/or library categories.
	 */
	@RequestMapping(value = Constants.QUESTION_LISTING_URI, method = RequestMethod.GET)
	public ModelAndView showFormElements(
			@RequestParam(value = FORM_ID_NAME, required = true) Long formId,
			@RequestParam(value = QUERY_PARAM, required = false) String searchText,
			@RequestParam(value = CATEGORY_ID_PARAM, required = false) long[] categoryIds) {
		return getModelAndView(formId, searchText, categoryIds);
	}

	/**
	 * Renders the "skip logic" question list: all elements that the given
	 * form element could affect, across the forms of the same module.
	 */
	@RequestMapping(value=Constants.QUESTION_LISTING_SKIP_URI)
	public ModelAndView showSkipQuestionList(
			@RequestParam(value = FORM_ID_NAME, required = true) Long formId,
			@RequestParam(value = "questionId", required = true) Long formElementId) {
		BaseForm form = formManager.getForm(formId);
		String viewName = "questionListSkip";
		List<BaseForm> forms = formManager.getModuleForms(form.getModule().getId());
		ModelAndView mav = new ModelAndView(viewName); // initialize with view name
		ModelMap model = mav.getModelMap();
		// NOTE(review): formElementId is declared required=true, so the null
		// check below looks redundant — confirm before simplifying.
		if(formElementId != null) {
			FormElement formElement = qaManager.getFormElement(formElementId);
			SkipAffecteesBean dependencies = qaManager.getAllPossibleSkipAffectees(formElement);
			model.addAttribute("formElementId", formElementId);
			model.addAttribute("dependencies", dependencies);
		}
		model.addAttribute("forms", forms);
		log.debug("in QuestionListController.showSkipQuestionList(): formId: " + formId + " formElementId: " + formElementId);
		return mav;
	}

	/**
	 * Delete question
	 * @param question
	 * @return
	 */
	// NOTE(review): the bound "delete" parameter is never read; only its
	// presence (params=...) routes the request here.
	@SuppressWarnings("deprecation")
	@RequestMapping(value = Constants.QUESTION_LISTING_URI, method = RequestMethod.GET, params=Constants.DELETE_CMD_PARAM)
	public View deleteFormElement(
			@RequestParam(value = "qId", required = true) Long elementId,
			@RequestParam(value = "formId", required = true) Long formId,
			@RequestParam(value = Constants.DELETE_CMD_PARAM, required = true) boolean delete) {
		// Throws if the current user may not edit this element.
		validateEditOperation(elementId);
		qaManager.deleteFormElementByID(elementId);
		return new RedirectView (Constants.QUESTION_LISTING_URI+ "?formId=" + formId, true);
	}

	/**
	 * Copies a question into the question library. Restricted to admin and
	 * librarian roles.
	 */
	@RequestMapping(value = Constants.ADD_QUESTION_TO_LIBRARY_URI, method = RequestMethod.GET, params = { Constants.QUESTION_ID, Constants.FORM_ID })
	public View addQuestionToLibrary(
			@RequestParam(Constants.QUESTION_ID) Long questionId,
			@RequestParam(Constants.FORM_ID) Long formId) {
		if(!this.userService.isCurrentUserInRole(RoleCode.ROLE_ADMIN)
				&& !this.userService.isCurrentUserInRole(RoleCode.ROLE_LIBRARIAN)) {
			throw new UnauthorizedException("You have no permissions to add question to the library.");
		}
		this.formManager.addQuestionToQuestionLibrary(questionId);
		return new RedirectView(Constants.QUESTION_LISTING_URI + "?"+Constants.FORM_ID+"=" + formId, true);
	}

	/**
	 * @param formId Long
	 * @param categoryIds
	 * @param searchText
	 * @return view with form entity that fetches list of Question items
	 */
	private ModelAndView getModelAndView(Long formId, String searchText, long[] categoryIds) {
		BaseForm form = null;
		// List<? extends FormElement> questions = qaManager.getAllFormElementsWithChildren(formId);
		List<FormElement> elements = qaManager.getFormElementsByTextWithinCategories(formId, searchText, categoryIds);
		//getting QuestionnaireForm entity
		// Reuse the form reference from the first element when available to
		// avoid a second lookup; otherwise load it directly.
		if (!elements.isEmpty()) {
			form = elements.get(0).getForm();
			// form.setElements(elements);
		} else {
			form = formManager.getForm(formId);
		}
		String formName = form.getName();
		String shortFormName = form.getName();
		if(shortFormName != null && shortFormName.length() > MAX_HEADER_LENGTH) {
			shortFormName = shortFormName.substring(0, MAX_HEADER_LENGTH) + "...";
		}
		ModelAndView mav = new ModelAndView("questionList"); // initialize with view name
		ModelMap model = mav.getModelMap();
		model.addAttribute(FORM_KEY, form);
		model.addAttribute("elements", elements);
		model.addAttribute("shortFormName", shortFormName);
		model.addAttribute("formName", formName);
		return mav;
	}

	/** Library question categories, exposed to every view of this controller. */
	@ModelAttribute("categories")
	public List<Category> getCategories() {
		return categoryManager.getLibraryQuestionsCategories();
	}

	/**
	 * This method must stay in this class to utilize validation of editing accesibility
	 * @param sourceQuestionId
	 * @param targetQuestionId
	 * @param before
	 * @throws IOException
	 * @throws InterruptedException
	 */
	/*	@RemoteMethod
	public void reorderQuestions(Long sourceQuestionId, Long targetQuestionId, boolean before) throws IOException, InterruptedException {
		validateEditOperation(sourceQuestionId);
		qaManager.reorderQuestions(sourceQuestionId, targetQuestionId, before);
	}
	*/
	@RemoteMethod
	public void reorderFormElements(Long sourceQuestionId, Long targetQuestionId, boolean before) throws IOException, InterruptedException {
		validateEditOperation(sourceQuestionId);
		qaManager.reorderFormElements(sourceQuestionId, targetQuestionId, before);
	}

	/** Builds the breadcrumb from the form already placed in the model. */
	@Override
	public BreadCrumb setBreadCrumb(ModelMap modelMap) {
		BaseForm form = (BaseForm) modelMap.get(FORM_KEY);
		if(form != null) {
			FormDetailsBreadCrumb breadCrumb = new FormDetailsBreadCrumb(form);
			modelMap.addAttribute(Constants.BREAD_CRUMB, breadCrumb);
			return breadCrumb;
		}
		return null;
	}

	@Override
	public List<BreadCrumb.Link> getAllLinks(HttpServletRequest req) {
		// TODO Need to filter out some links
		/*long formId = Long.parseLong(req.getParameter(FORM_ID_NAME));
		String query = req.getParameter(QUERY_PARAM);
		String categoryIdsStr = req.getParameter(CATEGORY_ID_PARAM);
		long[] categoryIds = null;
		if(StringUtils.isNotBlank(categoryIdsStr)) {
			String[] splitedCategoryIds = categoryIdsStr.split(" *, *");
			categoryIds = new long[splitedCategoryIds.length];
			for (int i = 0; i < splitedCategoryIds.length; i++) {
				categoryIds[i] = Long.parseLong(splitedCategoryIds[i]);
			}
		}
		List<FormElement> elements = qaManager.getFormElementsByTextWithinCategories(formId, query, categoryIds);
		ArrayList<Link> links = new ArrayList<BreadCrumb.Link>();
		for (FormElement fe : elements) {
			links.add(new Link(fe.getDescription(), Constants.QUESTION_LISTING_URI + "?" + Constants.FORM_ID + "=" + fe.getForm().getId() + "&" + Constants.MODULE_ID + "=" + fe.getForm().getModule().getId(), null));
		}
		return links;*/
		return new ArrayList<BreadCrumb.Link>();
	}
}
/*
 * Copyright (C) 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package benchmarks.regression;

import com.google.caliper.Param;
import com.google.caliper.Runner;
import com.google.caliper.SimpleBenchmark;
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.Arrays;
import java.util.Collection;

/**
 * Caliper benchmarks for ByteBuffer get/put throughput across byte order,
 * alignment, and backing-store (direct/heap/mapped) combinations.
 *
 * <p>Review fixes: the MAPPED case no longer leaks the RandomAccessFile
 * (a mapping stays valid after its channel is closed), and
 * {@link #timeByteBuffer_putByte} now honors the unaligned start offset like
 * every other benchmark in this class.
 */
public class ByteBufferBenchmark extends SimpleBenchmark {
    public enum MyByteOrder {
        BIG(ByteOrder.BIG_ENDIAN), LITTLE(ByteOrder.LITTLE_ENDIAN);
        final ByteOrder byteOrder;
        MyByteOrder(ByteOrder byteOrder) {
            this.byteOrder = byteOrder;
        }
    }

    @Param private MyByteOrder byteOrder;

    @Param({"true", "false"}) private boolean aligned;

    enum MyBufferType {
        DIRECT, HEAP, MAPPED;
    }
    @Param private MyBufferType bufferType;

    /**
     * Creates a buffer of the requested kind. Unaligned buffers are slightly
     * larger and positioned at offset 1 so every element access straddles the
     * natural alignment boundary.
     */
    public static ByteBuffer newBuffer(MyByteOrder byteOrder, boolean aligned, MyBufferType bufferType) throws IOException {
        int size = aligned ? 8192 : 8192 + 8 + 1;
        ByteBuffer result = null;
        switch (bufferType) {
        case DIRECT:
            result = ByteBuffer.allocateDirect(size);
            break;
        case HEAP:
            result = ByteBuffer.allocate(size);
            break;
        case MAPPED:
            File tmpFile = new File("/sdcard/bm.tmp");
            if (new File("/tmp").isDirectory()) {
                // We're running on the desktop.
                tmpFile = File.createTempFile("MappedByteBufferTest", ".tmp");
            }
            tmpFile.createNewFile();
            tmpFile.deleteOnExit();
            RandomAccessFile raf = new RandomAccessFile(tmpFile, "rw");
            try {
                raf.setLength(8192*8);
                FileChannel fc = raf.getChannel();
                result = fc.map(FileChannel.MapMode.READ_WRITE, 0, fc.size());
            } finally {
                // BUGFIX: the file was previously left open for the life of
                // the VM. A MappedByteBuffer is independent of its channel
                // once established, so the descriptor can be released here.
                raf.close();
            }
            break;
        }
        result.order(byteOrder.byteOrder);
        result.position(aligned ? 0 : 1);
        return result;
    }

    //
    // peeking
    //

    public void timeByteBuffer_getByte(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.get();
            }
        }
    }

    public void timeByteBuffer_getByteArray(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        byte[] dst = new byte[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(aligned ? 0 : 1);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getByte_indexed(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.get(i);
            }
        }
    }

    public void timeByteBuffer_getChar(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getChar();
            }
        }
    }

    public void timeCharBuffer_getCharArray(int reps) throws Exception {
        CharBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asCharBuffer();
        char[] dst = new char[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getChar_indexed(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getChar(i * 2);
            }
        }
    }

    public void timeByteBuffer_getDouble(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getDouble();
            }
        }
    }

    public void timeDoubleBuffer_getDoubleArray(int reps) throws Exception {
        DoubleBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asDoubleBuffer();
        double[] dst = new double[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getFloat(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getFloat();
            }
        }
    }

    public void timeFloatBuffer_getFloatArray(int reps) throws Exception {
        FloatBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asFloatBuffer();
        float[] dst = new float[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getInt(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getInt();
            }
        }
    }

    public void timeIntBuffer_getIntArray(int reps) throws Exception {
        IntBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asIntBuffer();
        int[] dst = new int[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getLong(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getLong();
            }
        }
    }

    public void timeLongBuffer_getLongArray(int reps) throws Exception {
        LongBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asLongBuffer();
        long[] dst = new long[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    public void timeByteBuffer_getShort(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.getShort();
            }
        }
    }

    public void timeShortBuffer_getShortArray(int reps) throws Exception {
        ShortBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asShortBuffer();
        short[] dst = new short[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                src.position(0);
                src.get(dst);
            }
        }
    }

    //
    // poking
    //

    public void timeByteBuffer_putByte(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            // BUGFIX: was src.position(0), inconsistent with every other
            // benchmark here — the unaligned variant should start at 1.
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.put((byte) 0);
            }
        }
    }

    public void timeByteBuffer_putByteArray(int reps) throws Exception {
        ByteBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        byte[] src = new byte[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(aligned ? 0 : 1);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putChar(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putChar(' ');
            }
        }
    }

    public void timeCharBuffer_putCharArray(int reps) throws Exception {
        CharBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asCharBuffer();
        char[] src = new char[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putDouble(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putDouble(0.0);
            }
        }
    }

    public void timeDoubleBuffer_putDoubleArray(int reps) throws Exception {
        DoubleBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asDoubleBuffer();
        double[] src = new double[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putFloat(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putFloat(0.0f);
            }
        }
    }

    public void timeFloatBuffer_putFloatArray(int reps) throws Exception {
        FloatBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asFloatBuffer();
        float[] src = new float[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putInt(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putInt(0);
            }
        }
    }

    public void timeIntBuffer_putIntArray(int reps) throws Exception {
        IntBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asIntBuffer();
        int[] src = new int[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putLong(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putLong(0L);
            }
        }
    }

    public void timeLongBuffer_putLongArray(int reps) throws Exception {
        LongBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asLongBuffer();
        long[] src = new long[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    public void timeByteBuffer_putShort(int reps) throws Exception {
        ByteBuffer src = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType);
        for (int rep = 0; rep < reps; ++rep) {
            src.position(aligned ? 0 : 1);
            for (int i = 0; i < 1024; ++i) {
                src.putShort((short) 0);
            }
        }
    }

    public void timeShortBuffer_putShortArray(int reps) throws Exception {
        ShortBuffer dst = ByteBufferBenchmark.newBuffer(byteOrder, aligned, bufferType).asShortBuffer();
        short[] src = new short[1024];
        for (int rep = 0; rep < reps; ++rep) {
            for (int i = 0; i < 1024; ++i) {
                dst.position(0);
                dst.put(src);
            }
        }
    }

    /*
    public void time_new_byteArray(int reps) throws Exception {
        for (int rep = 0; rep < reps; ++rep) {
            byte[] bs = new byte[8192];
        }
    }

    public void time_ByteBuffer_allocate(int reps) throws Exception {
        for (int rep = 0; rep < reps; ++rep) {
            ByteBuffer bs = ByteBuffer.allocate(8192);
        }
    }
    */
}
/** * This project is licensed under the Apache License, Version 2.0 * if the following condition is met: * (otherwise it cannot be used by anyone but the author, Kevin, only) * * The original JSON Statham project is owned by Lee, Seong Hyun (Kevin). * * -What does it mean to you? * Nothing, unless you want to take the ownership of * "the original project" (not yours or forked & modified one). * You are free to use it for both non-commercial and commercial projects * and free to modify it as the Apache License allows. * * -So why is this condition necessary? * It is only to protect the original project (See the case of Java). * * * Copyright 2009 Lee, Seong Hyun (Kevin) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package org.elixirian.jsonstatham.core.reflect.json2java;

import static org.elixirian.kommonlee.util.MessageFormatter.*;

import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.elixirian.jsonstatham.core.KnownTypeProcessorWithReflectionJsonToJavaConverter;
import org.elixirian.jsonstatham.core.KnownTypeProcessorWithReflectionJsonToJavaConverterDeciderForJsonToJava;
import org.elixirian.jsonstatham.core.convertible.JsonArray;
import org.elixirian.jsonstatham.core.convertible.JsonConvertible;
import org.elixirian.jsonstatham.core.convertible.JsonNameValuePair;
import org.elixirian.jsonstatham.core.convertible.JsonObject;
import org.elixirian.jsonstatham.core.convertible.OrderedJsonObject;
import org.elixirian.jsonstatham.core.convertible.UnorderedJsonObject;
import org.elixirian.jsonstatham.core.util.JsonUtil;
import org.elixirian.jsonstatham.exception.JsonStathamException;

/**
 * Decides which {@link KnownTypeProcessorWithReflectionJsonToJavaConverter} handles a given "known" object type
 * ({@link Date}, {@link Calendar}, the JsonObject family, {@link JsonNameValuePair} and {@link JsonConvertible})
 * during JSON-to-Java conversion. The default processors are registered once in the static initializer below.
 *
 * <pre>
 *     ___  _____  __________  _________ _____ _____  _________
 *    /   \/    / /      \   \/   /_    _//   \/   / /    /   /
 *   /        / /    ___/\      / /   / /        / /    /   /
 *  /        \ /    ___/  \    /_/   /_/        / /    /___/
 * /____/\____\\_____/     \__//______//____/\__\ /________/
 * </pre>
 *
 * @author Lee, SeongHyun (Kevin)
 * @version 0.0.1 (2010-10-04)
 */
public final class JsonToJavaKnownObjectTypeProcessorDecider implements
    KnownTypeProcessorWithReflectionJsonToJavaConverterDeciderForJsonToJava<Class<?>>
{
  // Immutable default registry. Insertion order matters: decide() walks entries in order and
  // uses isAssignableFrom(), so more specific types must be registered before their supertypes
  // (e.g. OrderedJsonObject/UnorderedJsonObject before JsonObject before JsonConvertible).
  public static final Map<Class<?>, KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>> DEFAULT_KNOWN_OBJECT_TYPE_PROCESSOR_MAP;

  static
  {
    // LinkedHashMap preserves the registration order relied upon by decide().
    final Map<Class<?>, KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>> map =
      new LinkedHashMap<Class<?>, KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>>();

    // Date: expects the JSON value to be a millisecond epoch timestamp (Long).
    map.put(Date.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(@SuppressWarnings("unused") final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        if (long.class.equals(value.getClass()) || Long.class.equals(value.getClass()))
        {
          return new Date(((Long) value).longValue());
        }
        throw new JsonStathamException(format("Unknown type [class: %s][object: %s]", valueType, value));
      }
    });

    // Calendar: same Long-epoch contract as Date, materialised via Calendar.getInstance().
    map.put(Calendar.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(@SuppressWarnings("unused") final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        if (long.class.equals(value.getClass()) || Long.class.equals(value.getClass()))
        {
          final Calendar calendar = Calendar.getInstance();
          calendar.setTimeInMillis(((Long) value).longValue());
          return calendar;
        }
        throw new JsonStathamException(format("Unknown type [class: %s][object: %s]", valueType, value));
      }
    });

    // Former org.json support, kept for reference:
    // map.put(JSONObject.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
    //   @Override
    //   public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
    //       final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
    //       JsonStathamException
    //   {
    //     return reflectionJsonToJavaConverter.createFromJsonObject(valueType, new OrgJsonJsonObject((JSONObject) value));
    //   }
    // });

    // OrderedJsonObject: null JSON maps to Java null; if the target type is itself a JsonObject,
    // hand the value back as-is, otherwise reflectively build the target object.
    map.put(OrderedJsonObject.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        final JsonObject castedValue = (JsonObject) value;
        if (castedValue.isNull())
        {
          return null;
        }
        final JsonObject result = getJsonObject(valueType, castedValue);
        if (null != result)
        {
          return result;
        }
        return reflectionJsonToJavaConverter.createFromJsonObject(valueType, (OrderedJsonObject) value);
      }
    });

    // UnorderedJsonObject: identical flow to OrderedJsonObject with the unordered cast.
    map.put(UnorderedJsonObject.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        final JsonObject castedValue = (JsonObject) value;
        if (castedValue.isNull())
        {
          return null;
        }
        final JsonObject result = getJsonObject(valueType, castedValue);
        if (null != result)
        {
          return result;
        }
        return reflectionJsonToJavaConverter.createFromJsonObject(valueType, (UnorderedJsonObject) value);
      }
    });

    // JsonObject (supertype entry): must come after the two concrete entries above.
    map.put(JsonObject.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        final JsonObject castedValue = (JsonObject) value;
        if (castedValue.isNull())
        {
          return null;
        }
        final JsonObject result = getJsonObject(valueType, castedValue);
        if (null != result)
        {
          return result;
        }
        return reflectionJsonToJavaConverter.createFromJsonObject(valueType, castedValue);
      }
    });

    // map.put(AbstractJsonObjectConvertiblePair.class,
    // JsonNameValuePair: a single-field JsonObject becomes a name/value pair; empty or null
    // objects map to Java null.
    map.put(JsonNameValuePair.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        final JsonObject castedValue = (JsonObject) value;
        if (castedValue.isNull() || 0 == castedValue.fieldLength())
        {
          return null;
        }
        if (JsonNameValuePair.class.isAssignableFrom(valueType))
        {
          // Only the first field is used as the pair's name/value.
          final String name = castedValue.getNames()[0];
          return JsonUtil.newJsonNameValuePair(name, castedValue.get(name));
        }
        throw new JsonStathamException(format("Unknown JsonNameValuePair (Class<?> valueType: %s) type! "
            + "[ReflectionJsonToJavaConverter reflectionJsonToJavaConverter: %s, Class<?> valueType: %s, Object value: %s] "
            + "It must be an instance of org.elixirian.jsonstatham.core.convertible.JsonNameValuePair.", valueType,
            reflectionJsonToJavaConverter, valueType, value));
      }
    });

    // JsonConvertible (catch-all): passes JsonObject/JsonArray values through unchanged
    // (null and empty objects become Java null); anything else is rejected.
    map.put(JsonConvertible.class, new KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>() {
      @Override
      public <T> Object process(final ReflectionJsonToJavaConverter reflectionJsonToJavaConverter,
          final Class<?> valueType, final Object value) throws IllegalArgumentException, IllegalAccessException,
          JsonStathamException
      {
        if (null == value)
        {
          return null;
        }
        if (value instanceof JsonObject)
        {
          final JsonObject castedValue = (JsonObject) value;
          if (castedValue.isNull())
          {
            return null;
          }
          if (0 == castedValue.fieldLength())
          {
            return null;
          }
          return value;
          // Former convertible-pair handling, kept for reference:
          // if (ImmutableJsonObjectConvertiblePair.class.isAssignableFrom(valueType))
          // {
          //   final String name = castedValue.getNames()[0];
          //   final ImmutableJsonObjectConvertiblePair<?, ?> immutableJsonObjectConvertiblePair =
          //     new ImmutableJsonObjectConvertiblePair<Object, Object>(name, castedValue.get(name));
          //   return immutableJsonObjectConvertiblePair;
          // }
          // else if (MutableJsonObjectConvertiblePair.class.isAssignableFrom(valueType))
          // {
          //   final String name = castedValue.getNames()[0];
          //   final MutableJsonObjectConvertiblePair<?, ?> mutableJsonObjectConvertiblePair =
          //     new MutableJsonObjectConvertiblePair<Object, Object>(name, castedValue.get(name));
          //   return mutableJsonObjectConvertiblePair;
          // }
          // else
          // {
          //   throw new JsonStathamException(
          //     format(
          //       "Unknown AbstractJsonObjectConvertiblePair (Class<?> valueType: %s) type! "
          //       + "[ReflectionJsonToJavaConverter reflectionJsonToJavaConverter: %s, Class<?> valueType: %s, Object value: %s] "
          //       + "It must be either org.elixirian.jsonstatham.core.convertible.ImmutableJsonObjectConvertiblePair or org.elixirian.jsonstatham.core.convertible.MutableJsonObjectConvertiblePair.",
          //       valueType, reflectionJsonToJavaConverter, valueType, value));
          // }
        }
        else if (value instanceof JsonArray)
        {
          return value;
        }
        else
        {
          throw new JsonStathamException(format("Unknown JsonConvertible (Class<?> valueType: %s) type! "
              + "[ReflectionJsonToJavaConverter reflectionJsonToJavaConverter: %s, Class<?> valueType: %s, Object value: %s] "
              + "It must be an instance of either org.elixirian.jsonstatham.core.convertible.JsonObject or org.elixirian.jsonstatham.core.convertible.JsonArray.",
              valueType, reflectionJsonToJavaConverter, valueType, value));
        }
      }
    });
    DEFAULT_KNOWN_OBJECT_TYPE_PROCESSOR_MAP = Collections.unmodifiableMap(map);
  }

  // Returns the value itself when the target type is a JsonObject (no conversion needed),
  // otherwise null to signal that reflective conversion should proceed.
  private static JsonObject getJsonObject(final Class<?> valueType, final JsonObject value)
  {
    return JsonObject.class.isAssignableFrom(valueType) ? value : null;
  }

  // NOTE(review): public field with non-standard UpperCamelCase name; renaming would break
  // the public API, so it is documented here instead. Always set to the unmodifiable default map.
  public final Map<Class<?>, KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>> KnownObjectTypeProcessorMap;

  /**
   * Creates a decider backed by {@link #DEFAULT_KNOWN_OBJECT_TYPE_PROCESSOR_MAP}.
   */
  public JsonToJavaKnownObjectTypeProcessorDecider()
  {
    this.KnownObjectTypeProcessorMap = DEFAULT_KNOWN_OBJECT_TYPE_PROCESSOR_MAP;
  }

  /**
   * Returns the first registered processor whose key is a supertype of (or equal to) the given
   * type, or {@code null} when no known processor applies. Registration order therefore decides
   * precedence between overlapping entries.
   */
  @Override
  public KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>> decide(final Class<?> type)
  {
    /* @formatter:off */
    for (final Entry<Class<?>, KnownTypeProcessorWithReflectionJsonToJavaConverter<Class<?>>> entry : KnownObjectTypeProcessorMap.entrySet())
    {
    /* @formatter:on */
      if (entry.getKey()
          .isAssignableFrom(type))
      {
        return entry.getValue();
      }
    }
    return null;
  }
}
/* * Copyright (c) 2009-2014 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.jme3.util;

import com.jme3.app.Application;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.asset.AssetInfo;
import com.jme3.asset.AssetKey;
import com.jme3.asset.AssetManager;
import com.jme3.asset.plugins.UrlAssetInfo;
import com.jme3.input.InputManager;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.Trigger;
import com.jme3.material.MatParam;
import com.jme3.material.Material;
import com.jme3.post.Filter;
import com.jme3.post.Filter.Pass;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.RendererException;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Box;
import com.jme3.shader.Shader;
import java.io.File;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * This appState is for debug purpose only, and was made to provide an easy way
 * to test shaders, with a live update capability.
 *
 * This class provides and easy way to reload a material and catches compilation
 * errors when needed and displays the error in the console.
 *
 * If no error occur on compilation, the material is reloaded in the scene.
 *
 * You can either trigger the reload when pressing a key (or whatever input is
 * supported by Triggers you can attach to the input manager), or trigger it
 * when a specific file (the shader source) has been changed on the hard drive.
 *
 * Usage :
 *
 * MaterialDebugAppState matDebug = new MaterialDebugAppState();
 * stateManager.attach(matDebug);
 * matDebug.registerBinding(new KeyTrigger(KeyInput.KEY_R), whateverGeometry);
 *
 * this will reload the material of whateverGeometry when pressing the R key.
 *
 * matDebug.registerBinding("Shaders/distort.frag", whateverGeometry);
 *
 * this will reload the material of whateverGeometry when the given file is
 * changed on the hard drive.
 *
 * you can also register bindings to the appState with a post process Filter
 *
 * @author Nehon
 */
public class MaterialDebugAppState extends AbstractAppState {

    private RenderManager renderManager;
    private AssetManager assetManager;
    private InputManager inputManager;
    // Bindings registered before initialize() are queued here and bound on attach.
    private List<Binding> bindings = new ArrayList<Binding>();
    // File-watch triggers to the bindings they fire. Keys are compared by identity
    // (FileChangedTrigger does not override equals/hashCode), which works because the
    // same trigger instance is used for registration and lookup.
    private Map<Trigger,List<Binding>> fileTriggers = new HashMap<Trigger,List<Binding>>();

    @Override
    public void initialize(AppStateManager stateManager, Application app) {
        renderManager = app.getRenderManager();
        assetManager = app.getAssetManager();
        inputManager = app.getInputManager();
        // Bind everything that was registered before the state was attached.
        for (Binding binding : bindings) {
            bind(binding);
        }
        super.initialize(stateManager, app);
    }

    /**
     * Will reload the spatial's materials whenever the trigger is fired
     * @param trigger the trigger
     * @param spat the spatial to reload
     */
    public void registerBinding(Trigger trigger, final Spatial spat) {
        if (spat instanceof Geometry) {
            GeometryBinding binding = new GeometryBinding(trigger, (Geometry) spat);
            bindings.add(binding);
            if (isInitialized()) {
                bind(binding);
            }
        } else if (spat instanceof Node) {
            // Recurse: a Node itself carries no material, only its Geometry leaves do.
            for (Spatial child : ((Node) spat).getChildren()) {
                registerBinding(trigger, child);
            }
        }
    }

    /**
     * Will reload the filter's materials whenever the trigger is fired.
     * @param trigger the trigger
     * @param filter the filter to reload
     */
    public void registerBinding(Trigger trigger, final Filter filter) {
        FilterBinding binding = new FilterBinding(trigger, filter);
        bindings.add(binding);
        if (isInitialized()) {
            bind(binding);
        }
    }

    /**
     * Will reload the filter's materials whenever the shader file is changed
     * on the hard drive
     * @param shaderName the shader name (relative path to the asset folder or
     * to a registered asset path)
     * @param filter the filter to reload
     */
    public void registerBinding(String shaderName, final Filter filter) {
        registerBinding(new FileChangedTrigger(shaderName), filter);
    }

    /**
     * Will reload the spatials's materials whenever the shader file is changed
     * on the hard drive
     * @param shaderName the shader name (relative path to the asset folder or
     * to a registered asset path)
     * @param spat the spatial to reload
     */
    public void registerBinding(String shaderName, final Spatial spat) {
        registerBinding(new FileChangedTrigger(shaderName), spat);
    }

    // Wires one binding: file-watch triggers are polled from update(), any other
    // trigger is routed through the InputManager as a named action.
    private void bind(final Binding binding) {
        if (binding.getTrigger() instanceof FileChangedTrigger) {
            FileChangedTrigger t = (FileChangedTrigger) binding.getTrigger();
            List<Binding> b = fileTriggers.get(t);
            if (b == null) {
                // First binding for this trigger: resolve the watched file now.
                t.init();
                b = new ArrayList<Binding>();
                fileTriggers.put(t, b);
            }
            b.add(binding);
        } else {
            final String actionName = binding.getActionName();
            inputManager.addListener(new ActionListener() {
                public void onAction(String name, boolean isPressed, float tpf) {
                    if (actionName.equals(name) && isPressed) {
                        //reloading the material
                        binding.reload();
                    }
                }
            }, actionName);
            inputManager.addMapping(actionName, binding.getTrigger());
        }
    }

    /**
     * Recompiles the given material's definition and returns a fresh Material with
     * the same parameters and render state, or {@code null} if shader compilation
     * failed (the error is logged).
     */
    public Material reloadMaterial(Material mat) {
        //clear the entire cache, there might be more clever things to do, like
        //clearing only the matdef, and the associated shaders.
        assetManager.clearCache();

        //creating a dummy mat with the mat def of the mat to reload
        // Force the reloading of the asset, otherwise the new shader code will not be applied.
        Material dummy = new Material(assetManager, mat.getMaterialDef().getAssetName());

        // Copy every parameter and the render state onto the fresh material.
        for (MatParam matParam : mat.getParams()) {
            dummy.setParam(matParam.getName(), matParam.getVarType(), matParam.getValue());
        }
        dummy.getAdditionalRenderState().set(mat.getAdditionalRenderState());

        //creating a dummy geom and assigning the dummy material to it
        Geometry dummyGeom = new Geometry("dummyGeom", new Box(1f, 1f, 1f));
        dummyGeom.setMaterial(dummy);

        try {
            //preloading the dummyGeom, this call will compile the shader again
            renderManager.preloadScene(dummyGeom);
        } catch (RendererException e) {
            //compilation error, the shader code will be output to the console
            //the following code will output the error
            //System.err.println(e.getMessage());
            Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, e.getMessage());
            return null;
        }

        // NOTE(review): "succesfully" is a typo in the runtime log message; left
        // untouched here because this pass changes no runtime strings.
        Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.INFO, "Material succesfully reloaded");
        //System.out.println("Material succesfully reloaded");
        return dummy;
    }

    @Override
    public void update(float tpf) {
        super.update(tpf); //To change body of generated methods, choose Tools | Templates.
        // Poll every file-watch trigger once per frame and reload its bindings on change.
        for (Trigger trigger : fileTriggers.keySet()) {
            if (trigger instanceof FileChangedTrigger) {
                FileChangedTrigger t = (FileChangedTrigger) trigger;
                if (t.shouldFire()) {
                    List<Binding> b = fileTriggers.get(t);
                    for (Binding binding : b) {
                        binding.reload();
                    }
                }
            }
        }
    }

    // A (trigger, reload-target) pair; implementations know how to reload their target.
    private interface Binding {

        public String getActionName();

        public void reload();

        public Trigger getTrigger();
    }

    // Reloads a single Geometry's material when its trigger fires.
    private class GeometryBinding implements Binding {

        Trigger trigger;
        Geometry geom;

        public GeometryBinding(Trigger trigger, Geometry geom) {
            this.trigger = trigger;
            this.geom = geom;
        }

        public void reload() {
            Material reloadedMat = reloadMaterial(geom.getMaterial());
            //if the reload is successful, we re setup the material with its params
            //and reassign it to the box
            if (reloadedMat != null) {
                // setupMaterial(reloadedMat);
                geom.setMaterial(reloadedMat);
            }
        }

        public String getActionName() {
            return geom.getName() + "Reload";
        }

        public Trigger getTrigger() {
            return trigger;
        }
    }

    // Reloads every Material held by a post-process Filter (declared fields of the
    // filter class and its direct superclass), including pass materials, via reflection.
    private class FilterBinding implements Binding {

        Trigger trigger;
        Filter filter;

        public FilterBinding(Trigger trigger, Filter filter) {
            this.trigger = trigger;
            this.filter = filter;
        }

        public void reload() {
            // Only the filter class and its immediate superclass are scanned;
            // deeper hierarchies would need a loop over getSuperclass().
            Field[] fields1 = filter.getClass().getDeclaredFields();
            Field[] fields2 = filter.getClass().getSuperclass().getDeclaredFields();
            List<Field> fields = new ArrayList<Field>();
            fields.addAll(Arrays.asList(fields1));
            fields.addAll(Arrays.asList(fields2));

            // Probe instances used only for isInstance() type checks below.
            Material m = new Material();
            Filter.Pass p = filter.new Pass();
            try {
                for (Field field : fields) {
                    // Material-typed field: reload it in place; abort on compile failure.
                    if (field.getType().isInstance(m)) {
                        field.setAccessible(true);
                        Material mat = reloadMaterial((Material) field.get(filter));
                        if (mat == null) {
                            return;
                        } else {
                            field.set(filter, mat);
                        }
                    }
                    // Pass-typed field: reload its pass material if present.
                    if (field.getType().isInstance(p)) {
                        field.setAccessible(true);
                        p = (Filter.Pass) field.get(filter);
                        if (p != null && p.getPassMaterial() != null) {
                            Material mat = reloadMaterial(p.getPassMaterial());
                            if (mat == null) {
                                return;
                            } else {
                                p.setPassMaterial(mat);
                            }
                        }
                    }
                    // The "postRenderPasses" list is matched by field name.
                    // NOTE(review): unchecked cast below — assumes the field is List<Pass>.
                    if (field.getName().equals("postRenderPasses")) {
                        field.setAccessible(true);
                        List<Pass> passes = new ArrayList<Pass>();
                        passes = (List<Pass>) field.get(filter);
                        if (passes != null) {
                            for (Pass pass : passes) {
                                Material mat = reloadMaterial(pass.getPassMaterial());
                                if (mat == null) {
                                    return;
                                } else {
                                    pass.setPassMaterial(mat);
                                }
                            }
                        }
                    }
                }
            } catch (IllegalArgumentException ex) {
                Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        public String getActionName() {
            return filter.getName() + "Reload";
        }

        public Trigger getTrigger() {
            return trigger;
        }
    }

    // Fires when the watched shader source file's last-modified timestamp changes.
    private class FileChangedTrigger implements Trigger {

        String fileName;
        File file;      // resolved in init(); stays null if the asset is not a UrlAssetInfo
        Long fileLastM; // last observed modification time

        public FileChangedTrigger(String fileName) {
            this.fileName = fileName;
        }

        // Resolves the on-disk file behind the asset by reading UrlAssetInfo's
        // private "url" field reflectively.
        public void init() {
            AssetInfo info = assetManager.locateAsset(new AssetKey<Shader>(fileName));
            if (info != null && info instanceof UrlAssetInfo) {
                try {
                    Field f = info.getClass().getDeclaredField("url");
                    f.setAccessible(true);
                    URL url = (URL) f.get(info);
                    file = new File(url.getFile());
                    fileLastM = file.lastModified();
                } catch (NoSuchFieldException ex) {
                    Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
                } catch (SecurityException ex) {
                    Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
                } catch (IllegalArgumentException ex) {
                    Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
                } catch (IllegalAccessException ex) {
                    Logger.getLogger(MaterialDebugAppState.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }

        // NOTE(review): throws NullPointerException if init() failed to resolve the
        // file (file/fileLastM stay null) — confirm callers only register real assets.
        public boolean shouldFire() {
            if (file.lastModified() != fileLastM) {
                fileLastM = file.lastModified();
                return true;
            }
            return false;
        }

        public String getName() {
            return fileName;
        }

        // NOTE(review): constant hash for all file triggers; acceptable here because
        // these triggers are never registered with the InputManager (see bind()).
        public int triggerHashCode() {
            return 0;
        }
    }
}
package com.xxmassdeveloper.mpchartexample.realm; import android.graphics.Color; import android.graphics.Typeface; import android.os.Bundle; import com.github.mikephil.charting.charts.BarLineChartBase; import com.github.mikephil.charting.charts.Chart; import com.github.mikephil.charting.components.XAxis; import com.github.mikephil.charting.components.YAxis; import com.github.mikephil.charting.data.ChartData; import com.github.mikephil.charting.formatter.PercentFormatter; import com.xxmassdeveloper.mpchartexample.custom.RealmDemoData; import com.xxmassdeveloper.mpchartexample.notimportant.DemoBase; import io.realm.Realm; import io.realm.RealmConfiguration; /** * Created by Philipp Jahoda on 05/11/15. */ public abstract class RealmBaseActivity extends DemoBase { protected Realm mRealm; protected Typeface mTf; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setTitle("Realm.io Examples"); } protected void setup(Chart<?> chart) { mTf = Typeface.createFromAsset(getAssets(), "OpenSans-Regular.ttf"); // no description text chart.getDescription().setEnabled(false); // enable touch gestures chart.setTouchEnabled(true); if (chart instanceof BarLineChartBase) { BarLineChartBase mChart = (BarLineChartBase) chart; mChart.setDrawGridBackground(false); // enable scaling and dragging mChart.setDragEnabled(true); mChart.setScaleEnabled(true); // if disabled, scaling can be done on x- and y-axis separately mChart.setPinchZoom(false); YAxis leftAxis = mChart.getAxisLeft(); leftAxis.removeAllLimitLines(); // reset all limit lines to avoid overlapping lines leftAxis.setTypeface(mTf); leftAxis.setTextSize(8f); leftAxis.setTextColor(Color.DKGRAY); leftAxis.setValueFormatter(new PercentFormatter()); XAxis xAxis = mChart.getXAxis(); xAxis.setTypeface(mTf); xAxis.setPosition(XAxis.XAxisPosition.BOTTOM); xAxis.setTextSize(8f); xAxis.setTextColor(Color.DKGRAY); mChart.getAxisRight().setEnabled(false); } } protected void styleData(ChartData 
data) { data.setValueTypeface(mTf); data.setValueTextSize(8f); data.setValueTextColor(Color.DKGRAY); data.setValueFormatter(new PercentFormatter()); } @Override protected void onResume() { super.onResume(); // Create a RealmConfiguration that saves the Realm file in the app's "files" directory. RealmConfiguration realmConfig = new RealmConfiguration.Builder().build(); Realm.setDefaultConfiguration(realmConfig); mRealm = Realm.getDefaultInstance(); } @Override protected void onPause() { super.onPause(); mRealm.close(); } protected void writeToDB(int objectCount) { mRealm.beginTransaction(); mRealm.delete(RealmDemoData.class); for (int i = 0; i < objectCount; i++) { float value = 40f + (float) (Math.random() * 60f); RealmDemoData d = new RealmDemoData(i, value); mRealm.copyToRealm(d); } mRealm.commitTransaction(); } protected void writeToDBStack(int objectCount) { mRealm.beginTransaction(); mRealm.delete(RealmDemoData.class); for (int i = 0; i < objectCount; i++) { float val1 = 34f + (float) (Math.random() * 12.0f); float val2 = 34f + (float) (Math.random() * 12.0f); float[] stack = new float[]{val1, val2, 100 - val1 - val2}; RealmDemoData d = new RealmDemoData(i, stack); mRealm.copyToRealm(d); } mRealm.commitTransaction(); } protected void writeToDBCandle(int objectCount) { mRealm.beginTransaction(); mRealm.delete(RealmDemoData.class); for (int i = 0; i < objectCount; i++) { float mult = 50; float val = (float) (Math.random() * 40) + mult; float high = (float) (Math.random() * 9) + 8f; float low = (float) (Math.random() * 9) + 8f; float open = (float) (Math.random() * 6) + 1f; float close = (float) (Math.random() * 6) + 1f; boolean even = i % 2 == 0; RealmDemoData d = new RealmDemoData(i, val + high, val - low, even ? val + open : val - open, even ? 
val - close : val + close); mRealm.copyToRealm(d); } mRealm.commitTransaction(); } protected void writeToDBBubble(int objectCount) { mRealm.beginTransaction(); mRealm.delete(RealmDemoData.class); for (int i = 0; i < objectCount; i++) { float value = 30f + (float) (Math.random() * 100.0); float size = 15f + (float) (Math.random() * 20.0); RealmDemoData d = new RealmDemoData(i, value, size); mRealm.copyToRealm(d); } mRealm.commitTransaction(); } protected void writeToDBPie() { mRealm.beginTransaction(); mRealm.delete(RealmDemoData.class); float value1 = 15f + (float) (Math.random() * 8f); float value2 = 15f + (float) (Math.random() * 8f); float value3 = 15f + (float) (Math.random() * 8f); float value4 = 15f + (float) (Math.random() * 8f); float value5 = 100f - value1 - value2 - value3 - value4; float[] values = new float[]{value1, value2, value3, value4, value5}; String[] labels = new String[]{"iOS", "Android", "WP 10", "BlackBerry", "Other"}; for (int i = 0; i < values.length; i++) { RealmDemoData d = new RealmDemoData(values[i], labels[i]); mRealm.copyToRealm(d); } mRealm.commitTransaction(); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.watcher.watch; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.ScriptQueryBuilder; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.core.watcher.actions.ActionRegistry; import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.actions.throttler.ActionThrottler; import org.elasticsearch.xpack.core.watcher.condition.ConditionFactory; import org.elasticsearch.xpack.core.watcher.condition.ConditionRegistry; import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; import 
org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.core.watcher.input.none.NoneInput; import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.transform.TransformRegistry; import org.elasticsearch.xpack.core.watcher.transform.chain.ChainTransform; import org.elasticsearch.xpack.core.watcher.transform.chain.ExecutableChainTransform; import org.elasticsearch.xpack.core.watcher.trigger.Trigger; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.core.watcher.watch.WatchField; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import org.elasticsearch.xpack.watcher.actions.email.EmailAction; import org.elasticsearch.xpack.watcher.actions.email.EmailActionFactory; import org.elasticsearch.xpack.watcher.actions.email.ExecutableEmailAction; import org.elasticsearch.xpack.watcher.actions.index.ExecutableIndexAction; import org.elasticsearch.xpack.watcher.actions.index.IndexAction; import org.elasticsearch.xpack.watcher.actions.index.IndexActionFactory; import org.elasticsearch.xpack.watcher.actions.logging.ExecutableLoggingAction; import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction; import org.elasticsearch.xpack.watcher.actions.logging.LoggingActionFactory; import org.elasticsearch.xpack.watcher.actions.webhook.ExecutableWebhookAction; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookActionFactory; import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpMethod; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import 
org.elasticsearch.xpack.watcher.condition.AlwaysConditionTests; import org.elasticsearch.xpack.watcher.condition.ArrayCompareCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; import org.elasticsearch.xpack.watcher.input.InputBuilders; import org.elasticsearch.xpack.watcher.input.InputFactory; import org.elasticsearch.xpack.watcher.input.InputRegistry; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.input.search.ExecutableSearchInput; import org.elasticsearch.xpack.watcher.input.search.SearchInput; import org.elasticsearch.xpack.watcher.input.search.SearchInputFactory; import org.elasticsearch.xpack.watcher.input.simple.ExecutableSimpleInput; import org.elasticsearch.xpack.watcher.input.simple.SimpleInput; import org.elasticsearch.xpack.watcher.input.simple.SimpleInputFactory; import org.elasticsearch.xpack.watcher.notification.email.DataAttachment; import org.elasticsearch.xpack.watcher.notification.email.EmailService; import org.elasticsearch.xpack.watcher.notification.email.EmailTemplate; import org.elasticsearch.xpack.watcher.notification.email.HtmlSanitizer; import org.elasticsearch.xpack.watcher.notification.email.Profile; import org.elasticsearch.xpack.watcher.notification.email.attachment.EmailAttachments; import org.elasticsearch.xpack.watcher.notification.email.attachment.EmailAttachmentsParser; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService; import org.elasticsearch.xpack.watcher.test.MockTextTemplateEngine; import org.elasticsearch.xpack.watcher.test.WatcherTestUtils; import 
org.elasticsearch.xpack.watcher.transform.script.ExecutableScriptTransform; import org.elasticsearch.xpack.watcher.transform.script.ScriptTransform; import org.elasticsearch.xpack.watcher.transform.script.ScriptTransformFactory; import org.elasticsearch.xpack.watcher.transform.search.ExecutableSearchTransform; import org.elasticsearch.xpack.watcher.transform.search.SearchTransform; import org.elasticsearch.xpack.watcher.transform.search.SearchTransformFactory; import org.elasticsearch.xpack.watcher.trigger.TriggerEngine; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.trigger.schedule.CronSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.DailySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.HourlySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.MonthlySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEngine; import org.elasticsearch.xpack.watcher.trigger.schedule.WeeklySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.YearlySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.support.DayOfWeek; import org.elasticsearch.xpack.watcher.trigger.schedule.support.Month; import org.elasticsearch.xpack.watcher.trigger.schedule.support.MonthTimes; import org.elasticsearch.xpack.watcher.trigger.schedule.support.WeekTimes; import org.elasticsearch.xpack.watcher.trigger.schedule.support.YearTimes; import org.junit.Before; import java.io.IOException; import java.time.Clock; import java.time.Instant; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.core.TimeValue.timeValueSeconds;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.searchInput;
import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.templateRequest;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;

/**
 * Unit tests for {@link WatchParser}: serializing randomly-built {@link Watch}
 * instances to JSON and parsing them back, plus parser behavior for defaults,
 * missing sections, and malformed input.
 *
 * <p>All external collaborators (client, script service, email service, ...)
 * are Mockito mocks created in {@link #init()}; the registries handed to the
 * parser are built from real parser/factory implementations via the private
 * {@code registry(...)} helpers below.
 */
public class WatchTests extends ESTestCase {

    // Collaborators injected into the registries/factories; all mocked in init().
    private ScriptService scriptService;
    private Client client;
    private HttpClient httpClient;
    private EmailService emailService;
    private TextTemplateEngine templateEngine;
    private HtmlSanitizer htmlSanitizer;
    private XPackLicenseState licenseState;
    private Logger logger;
    private Settings settings = Settings.EMPTY;
    private WatcherSearchTemplateService searchTemplateService;

    @Before
    public void init() throws Exception {
        scriptService = mock(ScriptService.class);
        client = mock(Client.class);
        httpClient = mock(HttpClient.class);
        emailService = mock(EmailService.class);
        templateEngine = mock(TextTemplateEngine.class);
        htmlSanitizer = mock(HtmlSanitizer.class);
        licenseState = mock(XPackLicenseState.class);
        logger = LogManager.getLogger(WatchTests.class);
        searchTemplateService = mock(WatcherSearchTemplateService.class);
    }

    /**
     * Round-trip: build a random watch, serialize it with jsonBuilder().value(watch),
     * re-parse it, and assert every component (trigger, input, condition, throttle
     * period, metadata, actions, seq no / primary term) survives the trip.
     */
    public void testParserSelfGenerated() throws Exception {
        Clock clock = Clock.fixed(Instant.now(), ZoneOffset.UTC);
        ZonedDateTime now = clock.instant().atZone(ZoneOffset.UTC);
        TransformRegistry transformRegistry = transformRegistry();
        boolean includeStatus = randomBoolean();
        Schedule schedule = randomSchedule();
        Trigger trigger = new ScheduleTrigger(schedule);
        ScheduleRegistry scheduleRegistry = registry(schedule);
        TriggerEngine<?, ?> triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, clock);
        TriggerService triggerService = new TriggerService(singleton(triggerEngine));
        ExecutableInput<?, ?> input = randomInput();
        InputRegistry inputRegistry = registry(input.type());
        ExecutableCondition condition = AlwaysConditionTests.randomCondition(scriptService);
        ConditionRegistry conditionRegistry = conditionRegistry();
        ExecutableTransform<?, ?> transform = randomTransform();
        List<ActionWrapper> actions = randomActions();
        ActionRegistry actionRegistry = registry(actions, conditionRegistry, transformRegistry);
        Map<String, Object> metadata = singletonMap("_key", "_val");
        // one ActionStatus per generated action, keyed by action id
        Map<String, ActionStatus> actionsStatuses = new HashMap<>();
        for (ActionWrapper action : actions) {
            actionsStatuses.put(action.id(), new ActionStatus(now));
        }
        WatchStatus watchStatus = new WatchStatus(now, unmodifiableMap(actionsStatuses));
        TimeValue throttlePeriod = randomBoolean() ? null : TimeValue.timeValueSeconds(randomIntBetween(5, 10000));
        final long sourceSeqNo = randomNonNegativeLong();
        final long sourcePrimaryTerm = randomLongBetween(1, 200);
        Watch watch = new Watch(
            "_name",
            trigger,
            input,
            condition,
            transform,
            throttlePeriod,
            actions,
            metadata,
            watchStatus,
            sourceSeqNo,
            sourcePrimaryTerm
        );
        BytesReference bytes = BytesReference.bytes(jsonBuilder().value(watch));
        logger.info("{}", bytes.utf8ToString());
        WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock);
        Watch parsedWatch = watchParser.parse("_name", includeStatus, bytes, XContentType.JSON, sourceSeqNo, sourcePrimaryTerm);
        if (includeStatus) {
            assertThat(parsedWatch.status(), equalTo(watchStatus));
        }
        assertThat(parsedWatch.trigger(), equalTo(trigger));
        assertThat(parsedWatch.input(), equalTo(input));
        assertThat(parsedWatch.condition(), equalTo(condition));
        if (throttlePeriod != null) {
            assertThat(parsedWatch.throttlePeriod().millis(), equalTo(throttlePeriod.millis()));
        }
        assertThat(parsedWatch.metadata(), equalTo(metadata));
        assertThat(parsedWatch.actions(), equalTo(actions));
        assertThat(parsedWatch.getSourceSeqNo(), equalTo(sourceSeqNo));
        assertThat(parsedWatch.getSourcePrimaryTerm(), equalTo(sourcePrimaryTerm));
    }

    /**
     * Parses a document containing both a "trigger" object and a "status" field
     * and checks the parsed status carries the frozen clock's timestamp and the
     * per-action statuses.
     */
    public void testThatBothStatusFieldsCanBeRead() throws Exception {
        InputRegistry inputRegistry = mock(InputRegistry.class);
        ActionRegistry actionRegistry = mock(ActionRegistry.class);
        // a fake trigger service that advances past the trigger end object, which cannot be done with mocking
        TriggerService triggerService = new TriggerService(Collections.emptySet()) {
            @Override
            public Trigger parseTrigger(String jobName, XContentParser parser) throws IOException {
                // skip over everything inside the trigger object
                while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                }
                return new ScheduleTrigger(randomSchedule());
            }
        };
        Clock fixedClock = Clock.fixed(Instant.now(), ZoneOffset.UTC);
        ClockMock clock = ClockMock.frozen();
        ZonedDateTime now = Instant.ofEpochMilli(fixedClock.millis()).atZone(ZoneOffset.UTC);
        clock.setTime(now);
        List<ActionWrapper> actions = randomActions();
        Map<String, ActionStatus> actionsStatuses = new HashMap<>();
        for (ActionWrapper action : actions) {
            actionsStatuses.put(action.id(), new ActionStatus(now));
        }
        WatchStatus watchStatus = new WatchStatus(clock.instant().atZone(ZoneOffset.UTC), unmodifiableMap(actionsStatuses));
        WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock);
        XContentBuilder builder = jsonBuilder().startObject().startObject("trigger").endObject().field("status", watchStatus).endObject();
        Watch watch = watchParser.parse("foo", true, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L);
        assertThat(watch.status().state().getTimestamp().toInstant().toEpochMilli(), is(clock.millis()));
        for (ActionWrapper action : actions) {
            assertThat(watch.status().actionStatus(action.id()), is(actionsStatuses.get(action.id())));
        }
    }

    /**
     * "actions" given as an array (instead of an object) must be rejected with an
     * ElasticsearchParseException naming the failing watch.
     */
    public void testParserBadActions() throws Exception {
        ClockMock clock = ClockMock.frozen();
        ScheduleRegistry scheduleRegistry = registry(randomSchedule());
        TriggerEngine<?, ?> triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, clock);
        TriggerService triggerService = new TriggerService(singleton(triggerEngine));
        ConditionRegistry conditionRegistry = conditionRegistry();
        ExecutableInput<?, ?> input = randomInput();
        InputRegistry inputRegistry = registry(input.type());
        TransformRegistry transformRegistry = transformRegistry();
        List<ActionWrapper> actions = randomActions();
        ActionRegistry actionRegistry = registry(actions, conditionRegistry, transformRegistry);
        XContentBuilder jsonBuilder = jsonBuilder().startObject().startArray("actions").endArray().endObject();
        WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock);
        try {
            watchParser.parse("failure", false, BytesReference.bytes(jsonBuilder), XContentType.JSON, 1L, 1L);
            fail("This watch should fail to parse as actions is an array");
        } catch (ElasticsearchParseException pe) {
            assertThat(pe.getMessage().contains("could not parse actions for watch [failure]"), is(true));
        }
    }

    /**
     * A watch containing only a trigger must parse with the documented defaults:
     * none input, always condition, no transform, no actions.
     */
    public void testParserDefaults() throws Exception {
        Schedule schedule = randomSchedule();
        ScheduleRegistry scheduleRegistry = registry(schedule);
        TriggerEngine<?, ?> triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC());
        TriggerService triggerService = new TriggerService(singleton(triggerEngine));
        ConditionRegistry conditionRegistry = conditionRegistry();
        InputRegistry inputRegistry = registry(new ExecutableNoneInput().type());
        TransformRegistry transformRegistry = transformRegistry();
        ActionRegistry actionRegistry = registry(Collections.emptyList(), conditionRegistry, transformRegistry);
        XContentBuilder builder = jsonBuilder();
        builder.startObject();
        builder.startObject(WatchField.TRIGGER.getPreferredName()).field(ScheduleTrigger.TYPE, schedule(schedule).build()).endObject();
        builder.endObject();
        WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC());
        Watch watch = watchParser.parse("failure", false, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L);
        assertThat(watch, notNullValue());
        assertThat(watch.trigger(), instanceOf(ScheduleTrigger.class));
        assertThat(watch.input(), instanceOf(ExecutableNoneInput.class));
        assertThat(watch.condition(), instanceOf(InternalAlwaysCondition.class));
        assertThat(watch.transform(), nullValue());
        assertThat(watch.actions(), notNullValue());
        assertThat(watch.actions().size(), is(0));
    }

    /**
     * Scripts given without an explicit lang (in both the search-input query and
     * the script condition) must end up with Script.DEFAULT_SCRIPT_LANG after parsing.
     */
    public void testParseWatch_verifyScriptLangDefault() throws Exception {
        ScheduleRegistry scheduleRegistry = registry(
            new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.SECONDS))
        );
        TriggerEngine<?, ?> triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC());
        TriggerService triggerService = new TriggerService(singleton(triggerEngine));
        ConditionRegistry conditionRegistry = conditionRegistry();
        InputRegistry inputRegistry = registry(SearchInput.TYPE);
        TransformRegistry transformRegistry = transformRegistry();
        ActionRegistry actionRegistry = registry(Collections.emptyList(), conditionRegistry, transformRegistry);
        WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC());
        WatcherSearchTemplateService searchTemplateService = new WatcherSearchTemplateService(scriptService, xContentRegistry());
        XContentBuilder builder = jsonBuilder();
        builder.startObject();
        builder.startObject("trigger");
        builder.startObject("schedule");
        builder.field("interval", "99w");
        builder.endObject();
        builder.endObject();
        builder.startObject("input");
        builder.startObject("search");
        builder.startObject("request");
        builder.startObject("body");
        builder.startObject("query");
        builder.startObject("script");
        // script may be given either as a short string or as an object with "source"
        if (randomBoolean()) {
            builder.field("script", "return true");
        } else {
            builder.startObject("script");
            builder.field("source", "return true");
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        builder.endObject();
        builder.endObject();
        builder.endObject();
        builder.endObject();
        builder.startObject("condition");
        if (randomBoolean()) {
            builder.field("script", "return true");
        } else {
            builder.startObject("script");
            builder.field("source", "return true");
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        // parse in default mode:
        Watch watch = watchParser.parse("_id", false, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L);
        assertThat(((ScriptCondition) watch.condition()).getScript().getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
        WatcherSearchTemplateRequest request = ((SearchInput) watch.input().input()).getRequest();
        SearchRequest searchRequest = searchTemplateService.toSearchRequest(request);
        assertThat(((ScriptQueryBuilder) searchRequest.source().query()).script().getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
    }

    /** A watch without an "input" section defaults to the none input. */
    public void testParseWatchWithoutInput() throws Exception {
        try (XContentBuilder builder = jsonBuilder()) {
            builder.startObject();
            builder.startObject("trigger").startObject("schedule").field("interval", "99w").endObject().endObject();
            builder.startObject("condition").startObject("always").endObject().endObject();
            builder.startObject("actions")
                .startObject("logme")
                .startObject("logging")
                .field("text", "foo")
                .endObject()
                .endObject()
                .endObject();
            builder.endObject();
            WatchParser parser = createWatchparser();
            Watch watch = parser.parse("_id", false, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L);
            assertThat(watch, is(notNullValue()));
            assertThat(watch.input().type(), is(NoneInput.TYPE));
        }
    }

    /** A watch without an "actions" section parses to an empty action list. */
    public void testParseWatchWithoutAction() throws Exception {
        try (XContentBuilder builder = jsonBuilder()) {
            builder.startObject();
            builder.startObject("trigger").startObject("schedule").field("interval", "99w").endObject().endObject();
            builder.startObject("input").startObject("simple").endObject().endObject();
            builder.startObject("condition").startObject("always").endObject().endObject();
            builder.endObject();
            WatchParser parser = createWatchparser();
            Watch watch = parser.parse("_id", false, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L);
            assertThat(watch, is(notNullValue()));
            assertThat(watch.actions(), hasSize(0));
        }
    }

    /** Omitting "trigger" is a hard error: the parser must reject the watch. */
    public void testParseWatchWithoutTriggerDoesNotWork() throws Exception {
        try (XContentBuilder builder = jsonBuilder()) {
            builder.startObject();
            builder.startObject("input").startObject("simple").endObject().endObject();
            builder.startObject("condition").startObject("always").endObject().endObject();
            builder.startObject("actions")
                .startObject("logme")
                .startObject("logging")
                .field("text", "foo")
                .endObject()
                .endObject()
                .endObject();
            builder.endObject();
            WatchParser parser = createWatchparser();
            ElasticsearchParseException e = expectThrows(
                ElasticsearchParseException.class,
                () -> parser.parse("_id", false, BytesReference.bytes(builder), XContentType.JSON, 1L, 1L)
            );
            assertThat(e.getMessage(), is("could not parse watch [_id]. missing required field [trigger]"));
        }
    }

    /**
     * Builds a WatchParser wired with a logging action, an interval schedule,
     * a simple input and the full condition/transform registries.
     */
    private WatchParser createWatchparser() throws Exception {
        LoggingAction loggingAction = new LoggingAction(new TextTemplate("foo"), null, null);
        List<ActionWrapper> actions = Collections.singletonList(
            new ActionWrapper(
                "_logging_",
                randomThrottler(),
                null,
                null,
                new ExecutableLoggingAction(loggingAction, logger, new MockTextTemplateEngine()),
                null,
                null
            )
        );
        ScheduleRegistry scheduleRegistry = registry(
            new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.SECONDS))
        );
        TriggerEngine<?, ?> triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC());
        TriggerService triggerService = new TriggerService(singleton(triggerEngine));
        ConditionRegistry conditionRegistry = conditionRegistry();
        InputRegistry inputRegistry = registry(SimpleInput.TYPE);
        TransformRegistry transformRegistry = transformRegistry();
        ActionRegistry actionRegistry = registry(actions, conditionRegistry, transformRegistry);
        return new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC());
    }

    /** Picks one of the supported schedule types and returns a concrete instance of it. */
    private static Schedule randomSchedule() {
        String type = randomFrom(
            CronSchedule.TYPE,
            HourlySchedule.TYPE,
            DailySchedule.TYPE,
            WeeklySchedule.TYPE,
            MonthlySchedule.TYPE,
            YearlySchedule.TYPE,
            IntervalSchedule.TYPE
        );
        switch (type) {
            case CronSchedule.TYPE:
                return new CronSchedule("0/5 * * * * ? *");
            case HourlySchedule.TYPE:
                return HourlySchedule.builder().minutes(30).build();
            case DailySchedule.TYPE:
                return DailySchedule.builder().atNoon().build();
            case WeeklySchedule.TYPE:
                return WeeklySchedule.builder().time(WeekTimes.builder().on(DayOfWeek.FRIDAY).atMidnight()).build();
            case MonthlySchedule.TYPE:
                return MonthlySchedule.builder().time(MonthTimes.builder().on(1).atNoon()).build();
            case YearlySchedule.TYPE:
                return YearlySchedule.builder().time(YearTimes.builder().in(Month.JANUARY).on(1).atMidnight()).build();
            default:
                return new IntervalSchedule(IntervalSchedule.Interval.seconds(5));
        }
    }

    /** Builds a ScheduleRegistry that knows how to parse exactly the given schedule's type. */
    private static ScheduleRegistry registry(Schedule schedule) {
        Set<Schedule.Parser<?>> parsers = new HashSet<>();
        switch (schedule.type()) {
            case CronSchedule.TYPE:
                parsers.add(new CronSchedule.Parser());
                return new ScheduleRegistry(parsers);
            case HourlySchedule.TYPE:
                parsers.add(new HourlySchedule.Parser());
                return new ScheduleRegistry(parsers);
            case DailySchedule.TYPE:
                parsers.add(new DailySchedule.Parser());
                return new ScheduleRegistry(parsers);
            case WeeklySchedule.TYPE:
                parsers.add(new WeeklySchedule.Parser());
                return new ScheduleRegistry(parsers);
            case MonthlySchedule.TYPE:
                parsers.add(new MonthlySchedule.Parser());
                return new ScheduleRegistry(parsers);
            case YearlySchedule.TYPE:
                parsers.add(new YearlySchedule.Parser());
                return new ScheduleRegistry(parsers);
            case IntervalSchedule.TYPE:
                parsers.add(new IntervalSchedule.Parser());
                return new ScheduleRegistry(parsers);
            default:
                throw new IllegalArgumentException("unknown schedule [" + schedule + "]");
        }
    }

    /** Randomly produces either an executable search input or a simple input. */
    private ExecutableInput<?, ?> randomInput() {
        String type = randomFrom(SearchInput.TYPE, SimpleInput.TYPE);
        switch (type) {
            case SearchInput.TYPE:
                SearchInput searchInput = searchInput(WatcherTestUtils.templateRequest(searchSource(), "idx")).timeout(
                    randomBoolean() ? null : timeValueSeconds(between(1, 10000))
                ).build();
                return new ExecutableSearchInput(searchInput, client, searchTemplateService, null);
            default:
                SimpleInput simpleInput = InputBuilders.simpleInput(singletonMap("_key", "_val")).build();
                return new ExecutableSimpleInput(simpleInput);
        }
    }

    /** Builds an InputRegistry containing the factory for the given input type (simple by default). */
    private InputRegistry registry(String inputType) {
        Map<String, InputFactory<?, ?, ?>> parsers = new HashMap<>();
        switch (inputType) {
            case SearchInput.TYPE:
                parsers.put(SearchInput.TYPE, new SearchInputFactory(settings, client, xContentRegistry(), scriptService));
                return new InputRegistry(parsers);
            default:
                parsers.put(SimpleInput.TYPE, new SimpleInputFactory());
                return new InputRegistry(parsers);
        }
    }

    /** Full condition registry: always, never, compare, array-compare and script conditions. */
    private ConditionRegistry conditionRegistry() {
        Map<String, ConditionFactory> parsers = new HashMap<>();
        parsers.put(InternalAlwaysCondition.TYPE, (c, id, p) -> InternalAlwaysCondition.parse(id, p));
        parsers.put(NeverCondition.TYPE, (c, id, p) -> NeverCondition.parse(id, p));
        parsers.put(ArrayCompareCondition.TYPE, (c, id, p) -> ArrayCompareCondition.parse(c, id, p));
        parsers.put(CompareCondition.TYPE, (c, id, p) -> CompareCondition.parse(c, id, p));
        parsers.put(ScriptCondition.TYPE, (c, id, p) -> ScriptCondition.parse(scriptService, id, p));
        return new ConditionRegistry(parsers, ClockMock.frozen());
    }

    /** Randomly produces a script, search, or chain (search + script) transform. */
    private ExecutableTransform<?, ?> randomTransform() {
        String type = randomFrom(ScriptTransform.TYPE, SearchTransform.TYPE, ChainTransform.TYPE);
        TimeValue timeout = randomBoolean() ? timeValueSeconds(between(1, 10000)) : null;
        ZoneOffset timeZone = randomBoolean() ? ZoneOffset.UTC : null;
        switch (type) {
            case ScriptTransform.TYPE:
                return new ExecutableScriptTransform(new ScriptTransform(mockScript("_script")), logger, scriptService);
            case SearchTransform.TYPE:
                SearchTransform transform = new SearchTransform(templateRequest(searchSource()), timeout, timeZone);
                return new ExecutableSearchTransform(transform, logger, client, searchTemplateService, TimeValue.timeValueMinutes(1));
            default: // chain
                SearchTransform searchTransform = new SearchTransform(templateRequest(searchSource()), timeout, timeZone);
                ScriptTransform scriptTransform = new ScriptTransform(mockScript("_script"));
                ChainTransform chainTransform = new ChainTransform(Arrays.asList(searchTransform, scriptTransform));
                return new ExecutableChainTransform(
                    chainTransform,
                    logger,
                    Arrays.asList(
                        new ExecutableSearchTransform(
                            new SearchTransform(templateRequest(searchSource()), timeout, timeZone),
                            logger,
                            client,
                            searchTemplateService,
                            TimeValue.timeValueMinutes(1)
                        ),
                        new ExecutableScriptTransform(new ScriptTransform(mockScript("_script")), logger, scriptService)
                    )
                );
        }
    }

    /** Registry covering both transform types used by randomTransform(). */
    private TransformRegistry transformRegistry() {
        return new TransformRegistry(
            Map.of(
                ScriptTransform.TYPE,
                new ScriptTransformFactory(scriptService),
                SearchTransform.TYPE,
                new SearchTransformFactory(settings, client, xContentRegistry(), scriptService)
            )
        );
    }

    /**
     * Builds a random subset (possibly empty) of email, index and webhook action
     * wrappers, each with a random throttler, condition and transform.
     */
    private List<ActionWrapper> randomActions() {
        List<ActionWrapper> list = new ArrayList<>();
        if (randomBoolean()) {
            EmailAction action = new EmailAction(
                EmailTemplate.builder().build(),
                null,
                null,
                Profile.STANDARD,
                randomFrom(DataAttachment.JSON, DataAttachment.YAML),
                EmailAttachments.EMPTY_ATTACHMENTS
            );
            list.add(
                new ActionWrapper(
                    "_email_" + randomAlphaOfLength(8),
                    randomThrottler(),
                    AlwaysConditionTests.randomCondition(scriptService),
                    randomTransform(),
                    new ExecutableEmailAction(action, logger, emailService, templateEngine, htmlSanitizer, Collections.emptyMap()),
                    null,
                    null
                )
            );
        }
        if (randomBoolean()) {
            ZoneOffset timeZone = randomBoolean() ? ZoneOffset.UTC : null;
            TimeValue timeout = randomBoolean() ? timeValueSeconds(between(1, 10000)) : null;
            WriteRequest.RefreshPolicy refreshPolicy = randomBoolean() ? null : randomFrom(WriteRequest.RefreshPolicy.values());
            IndexAction action = new IndexAction(
                "_index",
                randomBoolean() ? "123" : null,
                randomBoolean() ? DocWriteRequest.OpType.fromId(randomFrom(new Byte[] { 0, 1 })) : null,
                null,
                timeout,
                timeZone,
                refreshPolicy
            );
            list.add(
                new ActionWrapper(
                    "_index_" + randomAlphaOfLength(8),
                    randomThrottler(),
                    AlwaysConditionTests.randomCondition(scriptService),
                    randomTransform(),
                    new ExecutableIndexAction(action, logger, client, TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30)),
                    null,
                    null
                )
            );
        }
        if (randomBoolean()) {
            HttpRequestTemplate httpRequest = HttpRequestTemplate.builder("test.host", randomIntBetween(8000, 9000))
                .method(randomFrom(HttpMethod.GET, HttpMethod.POST, HttpMethod.PUT))
                .path(new TextTemplate("_url"))
                .build();
            WebhookAction action = new WebhookAction(httpRequest);
            list.add(
                new ActionWrapper(
                    "_webhook_" + randomAlphaOfLength(8),
                    randomThrottler(),
                    AlwaysConditionTests.randomCondition(scriptService),
                    randomTransform(),
                    new ExecutableWebhookAction(action, logger, httpClient, templateEngine),
                    null,
                    null
                )
            );
        }
        return list;
    }

    /** Builds an ActionRegistry with one factory per distinct action type present in {@code actions}. */
    private ActionRegistry registry(List<ActionWrapper> actions, ConditionRegistry conditionRegistry, TransformRegistry transformRegistry) {
        Map<String, ActionFactory> parsers = new HashMap<>();
        for (ActionWrapper action : actions) {
            switch (action.action().type()) {
                case EmailAction.TYPE:
                    parsers.put(
                        EmailAction.TYPE,
                        new EmailActionFactory(settings, emailService, templateEngine, new EmailAttachmentsParser(Collections.emptyMap()))
                    );
                    break;
                case IndexAction.TYPE:
                    parsers.put(IndexAction.TYPE, new IndexActionFactory(settings, client));
                    break;
                case WebhookAction.TYPE:
                    parsers.put(WebhookAction.TYPE, new WebhookActionFactory(httpClient, templateEngine));
                    break;
                case LoggingAction.TYPE:
                    parsers.put(LoggingAction.TYPE, new LoggingActionFactory(new MockTextTemplateEngine()));
                    break;
            }
        }
        return new ActionRegistry(unmodifiableMap(parsers), conditionRegistry, transformRegistry, Clock.systemUTC(), licenseState);
    }

    /** Throttler with a randomly present throttle period (1s..10000s) or none. */
    private ActionThrottler randomThrottler() {
        return new ActionThrottler(Clock.systemUTC(), randomBoolean() ? null : timeValueSeconds(randomIntBetween(1, 10000)), licenseState);
    }

    /** Registers the two query parsers (match_all, script) needed by the search input tests. */
    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return new NamedXContentRegistry(
            Arrays.asList(
                new NamedXContentRegistry.Entry(
                    QueryBuilder.class,
                    new ParseField(MatchAllQueryBuilder.NAME),
                    (p, c) -> MatchAllQueryBuilder.fromXContent(p)
                ),
                new NamedXContentRegistry.Entry(
                    QueryBuilder.class,
                    new ParseField(ScriptQueryBuilder.NAME),
                    (p, c) -> ScriptQueryBuilder.fromXContent(p)
                )
            )
        );
    }

    /**
     * Schedule trigger engine usable only for parsing: all lifecycle operations
     * (start/stop/add/pause/remove) are no-ops.
     */
    public static class ParseOnlyScheduleTriggerEngine extends ScheduleTriggerEngine {

        public ParseOnlyScheduleTriggerEngine(ScheduleRegistry registry, Clock clock) {
            super(registry, clock);
        }

        @Override
        public void start(Collection<Watch> jobs) {}

        @Override
        public void stop() {}

        @Override
        public void add(Watch watch) {}

        @Override
        public void pauseExecution() {}

        @Override
        public boolean remove(String jobId) {
            return false;
        }
    }
}
/* * Copyright 2014-2017 Groupon, Inc * Copyright 2014-2017 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.payment.core.janitor; import java.io.IOException; import java.math.BigDecimal; import java.util.List; import java.util.UUID; import javax.annotation.Nullable; import javax.inject.Inject; import org.joda.time.DateTime; import org.killbill.billing.account.api.AccountInternalApi; import org.killbill.billing.callcontext.InternalCallContext; import org.killbill.billing.callcontext.InternalTenantContext; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.events.PaymentInternalEvent; import org.killbill.billing.payment.api.PluginProperty; import org.killbill.billing.payment.api.TransactionStatus; import org.killbill.billing.payment.core.PaymentPluginServiceRegistration; import org.killbill.billing.payment.core.PaymentTransactionInfoPluginConverter; import org.killbill.billing.payment.core.sm.PaymentControlStateMachineHelper; import org.killbill.billing.payment.core.sm.PaymentStateMachineHelper; import org.killbill.billing.payment.dao.PaymentDao; import org.killbill.billing.payment.dao.PaymentModelDao; import org.killbill.billing.payment.dao.PaymentTransactionModelDao; import org.killbill.billing.payment.plugin.api.PaymentPluginApi; import org.killbill.billing.payment.plugin.api.PaymentPluginStatus; import 
org.killbill.billing.payment.plugin.api.PaymentTransactionInfoPlugin; import org.killbill.billing.payment.provider.DefaultNoOpPaymentInfoPlugin; import org.killbill.billing.util.callcontext.CallContext; import org.killbill.billing.util.callcontext.InternalCallContextFactory; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.config.definition.PaymentConfig; import org.killbill.clock.Clock; import org.killbill.commons.locker.GlobalLocker; import org.killbill.commons.locker.LockFailedException; import org.killbill.notificationq.api.NotificationEvent; import org.killbill.notificationq.api.NotificationQueue; import org.skife.config.TimeSpan; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Predicate; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; public class IncompletePaymentTransactionTask extends CompletionTaskBase<PaymentTransactionModelDao> { private static final Logger log = LoggerFactory.getLogger(IncompletePaymentTransactionTask.class); private static final ImmutableList<TransactionStatus> TRANSACTION_STATUSES_TO_CONSIDER = ImmutableList.<TransactionStatus>builder() .add(TransactionStatus.PENDING) .add(TransactionStatus.UNKNOWN) .build(); private final PaymentPluginServiceRegistration paymentPluginServiceRegistration; @Inject public IncompletePaymentTransactionTask(final InternalCallContextFactory internalCallContextFactory, final PaymentConfig paymentConfig, final PaymentDao paymentDao, final Clock clock, final PaymentStateMachineHelper paymentStateMachineHelper, final PaymentControlStateMachineHelper retrySMHelper, final AccountInternalApi accountInternalApi, final PaymentPluginServiceRegistration paymentPluginServiceRegistration, final GlobalLocker locker) { super(internalCallContextFactory, paymentConfig, paymentDao, clock, 
paymentStateMachineHelper, retrySMHelper, accountInternalApi, locker);
        this.paymentPluginServiceRegistration = paymentPluginServiceRegistration;
    }

    /**
     * No-op for this task: unresolved transactions are discovered through bus events and
     * the notification queue, not by iterating over a result set.
     */
    @Override
    public Iterable<PaymentTransactionModelDao> getItemsForIteration() {
        // This is not triggered by Janitor proper but instead relies on bus event + notificationQ
        return ImmutableList.of();
    }

    /** No-op: see {@link #getItemsForIteration()} — this task is notification-driven. */
    @Override
    public void doIteration(final PaymentTransactionModelDao paymentTransaction) {
        // Nothing
    }

    /**
     * Entry point invoked by the janitor notification queue for a pending/unknown transaction.
     * On a lock failure the work is not lost: a new notification is scheduled (with an
     * incremented attempt number) so the transaction is retried later.
     */
    public void processNotification(final JanitorNotificationKey notificationKey, final UUID userToken, final Long accountRecordId, final long tenantRecordId) {
        try {
            tryToProcessNotification(notificationKey, userToken, accountRecordId, tenantRecordId);
        } catch (final LockFailedException e) {
            log.warn("Error locking accountRecordId='{}', will attempt to retry later", accountRecordId, e);
            final InternalTenantContext internalTenantContext = internalCallContextFactory.createInternalTenantContext(tenantRecordId, accountRecordId);
            final PaymentTransactionModelDao paymentTransaction = paymentDao.getPaymentTransaction(notificationKey.getUuidKey(), internalTenantContext);
            // Only reschedule if the transaction is still in a state the janitor cares about
            if (TRANSACTION_STATUSES_TO_CONSIDER.contains(paymentTransaction.getTransactionStatus())) {
                insertNewNotificationForUnresolvedTransactionIfNeeded(notificationKey.getUuidKey(), paymentTransaction.getTransactionStatus(), notificationKey.getAttemptNumber(), userToken, accountRecordId, tenantRecordId);
            }
        }
    }

    /**
     * Under the account lock, re-reads the transaction, asks the payment plugin for its view of
     * the transaction, and repairs our state if the plugin reports something different.
     *
     * @throws LockFailedException if the per-account lock cannot be acquired
     */
    private void tryToProcessNotification(final JanitorNotificationKey notificationKey, final UUID userToken, final Long accountRecordId, final long tenantRecordId) throws LockFailedException {
        final InternalTenantContext internalTenantContext = internalCallContextFactory.createInternalTenantContext(tenantRecordId, accountRecordId);
        tryToDoJanitorOperationWithAccountLock(new JanitorIterationCallback() {
            @Override
            public Void doIteration() {
                // State may have changed since we originally retrieved with no lock
                final PaymentTransactionModelDao rehydratedPaymentTransaction = paymentDao.getPaymentTransaction(notificationKey.getUuidKey(), internalTenantContext);

                final TenantContext tenantContext = internalCallContextFactory.createTenantContext(internalTenantContext);
                final PaymentModelDao payment = paymentDao.getPayment(rehydratedPaymentTransaction.getPaymentId(), internalTenantContext);

                // Fallback value (plugin status UNDEFINED) used when the plugin cannot be
                // queried or does not know about this transaction.
                final PaymentTransactionInfoPlugin undefinedPaymentTransaction = new DefaultNoOpPaymentInfoPlugin(payment.getId(),
                                                                                                                 rehydratedPaymentTransaction.getId(),
                                                                                                                 rehydratedPaymentTransaction.getTransactionType(),
                                                                                                                 rehydratedPaymentTransaction.getAmount(),
                                                                                                                 rehydratedPaymentTransaction.getCurrency(),
                                                                                                                 rehydratedPaymentTransaction.getCreatedDate(),
                                                                                                                 rehydratedPaymentTransaction.getCreatedDate(),
                                                                                                                 PaymentPluginStatus.UNDEFINED,
                                                                                                                 null,
                                                                                                                 null);
                PaymentTransactionInfoPlugin paymentTransactionInfoPlugin;
                try {
                    final PaymentPluginApi paymentPluginApi = paymentPluginServiceRegistration.getPaymentPluginApi(payment.getPaymentMethodId(), false, internalTenantContext);
                    final List<PaymentTransactionInfoPlugin> result = paymentPluginApi.getPaymentInfo(payment.getAccountId(), payment.getId(), ImmutableList.<PluginProperty>of(), tenantContext);
                    // Find the plugin's record matching our transaction id, else fall back to UNDEFINED
                    paymentTransactionInfoPlugin = Iterables.tryFind(result, new Predicate<PaymentTransactionInfoPlugin>() {
                        @Override
                        public boolean apply(final PaymentTransactionInfoPlugin input) {
                            return input.getKbTransactionPaymentId().equals(rehydratedPaymentTransaction.getId());
                        }
                    }).or(new Supplier<PaymentTransactionInfoPlugin>() {
                        @Override
                        public PaymentTransactionInfoPlugin get() {
                            return undefinedPaymentTransaction;
                        }
                    });
                } catch (final Exception e) {
                    // Best-effort: a misbehaving plugin must not break the janitor; treat as UNDEFINED
                    paymentTransactionInfoPlugin = undefinedPaymentTransaction;
                }
                updatePaymentAndTransactionIfNeeded(payment, notificationKey.getAttemptNumber(), userToken, rehydratedPaymentTransaction, paymentTransactionInfoPlugin, internalTenantContext);
                return null;
            }
        }, internalTenantContext);
    }

    /**
     * Bus-event entry point: schedules a first janitor notification (attempt 0) for any
     * transaction whose status is one the janitor tracks.
     */
    @Override
    public void processPaymentEvent(final PaymentInternalEvent event, final NotificationQueue janitorQueue) {
        if (!TRANSACTION_STATUSES_TO_CONSIDER.contains(event.getStatus())) {
            return;
        }
        insertNewNotificationForUnresolvedTransactionIfNeeded(event.getPaymentTransactionId(), event.getStatus(), 0, event.getUserToken(), event.getSearchKey1(), event.getSearchKey2());
    }

    /**
     * Synchronous repair path (e.g. GET): takes the account lock, refreshes the transaction and
     * attempts the repair. Passes null attemptNumber/userToken so no new notification is queued.
     *
     * @return true if the payment/transaction state was actually updated
     */
    public boolean updatePaymentAndTransactionIfNeededWithAccountLock(final PaymentModelDao payment, final PaymentTransactionModelDao paymentTransaction, final PaymentTransactionInfoPlugin paymentTransactionInfoPlugin, final InternalTenantContext internalTenantContext) {
        // Can happen in the GET case, see PaymentProcessor#toPayment
        if (!TRANSACTION_STATUSES_TO_CONSIDER.contains(paymentTransaction.getTransactionStatus())) {
            // Nothing to do
            return false;
        }
        final Boolean result = doJanitorOperationWithAccountLock(new JanitorIterationCallback() {
            @Override
            public Boolean doIteration() {
                // Refresh under the lock before deciding anything
                final PaymentTransactionModelDao refreshedPaymentTransaction = paymentDao.getPaymentTransaction(paymentTransaction.getId(), internalTenantContext);
                return updatePaymentAndTransactionInternal(payment, null, null, refreshedPaymentTransaction, paymentTransactionInfoPlugin, internalTenantContext);
            }
        }, internalTenantContext);
        return result != null && result;
    }

    /**
     * Notification-path repair: filters out transactions no longer in a tracked status, then
     * delegates to {@link #updatePaymentAndTransactionInternal}.
     */
    private boolean updatePaymentAndTransactionIfNeeded(final PaymentModelDao payment, final int attemptNumber, final UUID userToken, final PaymentTransactionModelDao paymentTransaction, final PaymentTransactionInfoPlugin paymentTransactionInfoPlugin, final InternalTenantContext internalTenantContext) {
        if (!TRANSACTION_STATUSES_TO_CONSIDER.contains(paymentTransaction.getTransactionStatus())) {
            // Nothing to do
            return false;
        }
        return updatePaymentAndTransactionInternal(payment, attemptNumber, userToken, paymentTransaction, paymentTransactionInfoPlugin, internalTenantContext);
    }

    /**
     * Core repair logic. Computes the transaction status implied by the plugin's answer; if it
     * differs from what we have on file, updates payment state, processed amount/currency and
     * gateway error fields. Otherwise (or when the plugin answer is unusable) re-schedules a
     * future notification so we keep polling.
     *
     * @param attemptNumber null on the GET path (no re-scheduling happens then)
     * @param userToken     null on the GET path
     * @return true if the database state was updated
     */
    private boolean updatePaymentAndTransactionInternal(final PaymentModelDao payment, @Nullable final Integer attemptNumber, @Nullable final UUID userToken, final PaymentTransactionModelDao paymentTransaction, final PaymentTransactionInfoPlugin paymentTransactionInfoPlugin, final InternalTenantContext internalTenantContext) {
        final CallContext callContext = createCallContext("IncompletePaymentTransactionTask", internalTenantContext);

        // First obtain the new transactionStatus,
        // Then compute the new paymentState; this one is mostly interesting in case of success (to compute the lastSuccessPaymentState below)
        final TransactionStatus transactionStatus = computeNewTransactionStatusFromPaymentTransactionInfoPlugin(paymentTransactionInfoPlugin, paymentTransaction.getTransactionStatus());
        final String newPaymentState;
        switch (transactionStatus) {
            case PENDING:
                newPaymentState = paymentStateMachineHelper.getPendingStateForTransaction(paymentTransaction.getTransactionType());
                break;
            case SUCCESS:
                newPaymentState = paymentStateMachineHelper.getSuccessfulStateForTransaction(paymentTransaction.getTransactionType());
                break;
            case PAYMENT_FAILURE:
                newPaymentState = paymentStateMachineHelper.getFailureStateForTransaction(paymentTransaction.getTransactionType());
                break;
            case PLUGIN_FAILURE:
                newPaymentState = paymentStateMachineHelper.getErroredStateForTransaction(paymentTransaction.getTransactionType());
                break;
            case UNKNOWN:
            default:
                if (transactionStatus != paymentTransaction.getTransactionStatus()) {
                    log.info("Unable to repair paymentId='{}', paymentTransactionId='{}', currentTransactionStatus='{}', newTransactionStatus='{}'", payment.getId(), paymentTransaction.getId(), paymentTransaction.getTransactionStatus(), transactionStatus);
                }
                // We can't get anything interesting from the plugin...
                insertNewNotificationForUnresolvedTransactionIfNeeded(paymentTransaction.getId(), transactionStatus, attemptNumber, userToken, internalTenantContext.getAccountRecordId(), internalTenantContext.getTenantRecordId());
                return false;
        }

        // Our status did not change, so we just insert a new notification (attemptNumber will be incremented)
        // NOTE(review): the debug message below says "repairing/transitioning" although this is
        // the unchanged-status branch — confirm intent before relying on the log text.
        if (transactionStatus == paymentTransaction.getTransactionStatus()) {
            log.debug("Janitor IncompletePaymentTransactionTask repairing payment {}, transaction {}, transitioning transactionStatus from {} -> {}", payment.getId(), paymentTransaction.getId(), paymentTransaction.getTransactionStatus(), transactionStatus);
            insertNewNotificationForUnresolvedTransactionIfNeeded(paymentTransaction.getId(), transactionStatus, attemptNumber, userToken, internalTenantContext.getAccountRecordId(), internalTenantContext.getTenantRecordId());
            return false;
        }

        // Recompute new lastSuccessPaymentState. This is important to be able to allow new operations on the state machine (for e.g an AUTH_SUCCESS would now allow a CAPTURE operation)
        final String lastSuccessPaymentState = paymentStateMachineHelper.isSuccessState(newPaymentState) ? newPaymentState : null;

        // Update processedAmount and processedCurrency
        final BigDecimal processedAmount;
        if (TransactionStatus.SUCCESS.equals(transactionStatus) || TransactionStatus.PENDING.equals(transactionStatus)) {
            if (paymentTransactionInfoPlugin == null || paymentTransactionInfoPlugin.getAmount() == null) {
                processedAmount = paymentTransaction.getProcessedAmount();
            } else {
                processedAmount = paymentTransactionInfoPlugin.getAmount();
            }
        } else {
            // Failed/errored transactions processed nothing
            processedAmount = BigDecimal.ZERO;
        }
        final Currency processedCurrency;
        if (paymentTransactionInfoPlugin == null || paymentTransactionInfoPlugin.getCurrency() == null) {
            processedCurrency = paymentTransaction.getProcessedCurrency();
        } else {
            processedCurrency = paymentTransactionInfoPlugin.getCurrency();
        }

        // Update the gatewayErrorCode, gatewayError if we got a paymentTransactionInfoPlugin
        final String gatewayErrorCode = paymentTransactionInfoPlugin != null ? paymentTransactionInfoPlugin.getGatewayErrorCode() : paymentTransaction.getGatewayErrorCode();
        final String gatewayError = paymentTransactionInfoPlugin != null ? paymentTransactionInfoPlugin.getGatewayError() : paymentTransaction.getGatewayErrorMsg();

        log.info("Repairing paymentId='{}', paymentTransactionId='{}', currentTransactionStatus='{}', newTransactionStatus='{}'", payment.getId(), paymentTransaction.getId(), paymentTransaction.getTransactionStatus(), transactionStatus);

        final InternalCallContext internalCallContext = internalCallContextFactory.createInternalCallContext(payment.getAccountId(), callContext);
        paymentDao.updatePaymentAndTransactionOnCompletion(payment.getAccountId(), paymentTransaction.getAttemptId(), payment.getId(), paymentTransaction.getTransactionType(), newPaymentState, lastSuccessPaymentState,
                                                           paymentTransaction.getId(), transactionStatus, processedAmount, processedCurrency, gatewayErrorCode, gatewayError, internalCallContext);
        return true;
    }

    // Keep the existing currentTransactionStatus if we can't obtain a better answer from the plugin; if not, return the newTransactionStatus
    private TransactionStatus computeNewTransactionStatusFromPaymentTransactionInfoPlugin(final PaymentTransactionInfoPlugin input, final TransactionStatus currentTransactionStatus) {
        final TransactionStatus newTransactionStatus = PaymentTransactionInfoPluginConverter.toTransactionStatus(input);
        return (newTransactionStatus != TransactionStatus.UNKNOWN) ? newTransactionStatus : currentTransactionStatus;
    }

    /**
     * Computes when the next janitor retry should run, based on the per-tenant retry schedule
     * for UNKNOWN or PENDING transactions.
     *
     * @return the next notification time, or null when the retry schedule is exhausted
     *         (attemptNumber greater than the number of configured retries)
     */
    @VisibleForTesting
    DateTime getNextNotificationTime(final TransactionStatus transactionStatus, final Integer attemptNumber, final InternalTenantContext tenantContext) {
        final List<TimeSpan> retries;
        if (TransactionStatus.UNKNOWN.equals(transactionStatus)) {
            retries = paymentConfig.getUnknownTransactionsRetries(tenantContext);
        } else if (TransactionStatus.PENDING.equals(transactionStatus)) {
            retries = paymentConfig.getPendingTransactionsRetries(tenantContext);
        } else {
            // Defensive: callers should only pass UNKNOWN or PENDING here
            retries = ImmutableList.of();
            log.warn("Unexpected transactionStatus='{}' from janitor, ignore...", transactionStatus);
        }
        if (attemptNumber > retries.size()) {
            return null;
        }
        // attemptNumber is 1-based at this point (incremented by the caller before scheduling)
        final TimeSpan nextDelay = retries.get(attemptNumber - 1);
        return clock.getUTCNow().plusMillis((int) nextDelay.getMillis());
    }

    /**
     * Schedules a future janitor notification for a still-unresolved transaction, incrementing
     * the attempt number. No-op when attemptNumber is null (GET path) or when the retry
     * schedule is exhausted.
     */
    private void insertNewNotificationForUnresolvedTransactionIfNeeded(final UUID paymentTransactionId, final TransactionStatus transactionStatus, @Nullable final Integer attemptNumber, @Nullable final UUID userToken, final Long accountRecordId, final Long tenantRecordId) {
        // When we come from a GET path, we don't want to insert a new notification
        if (attemptNumber == null) {
            return;
        }

        final InternalTenantContext tenantContext = internalCallContextFactory.createInternalTenantContext(tenantRecordId, accountRecordId);

        // Increment value before we insert
        final Integer newAttemptNumber = attemptNumber.intValue() + 1;
        final NotificationEvent key = new JanitorNotificationKey(paymentTransactionId, IncompletePaymentTransactionTask.class.toString(), newAttemptNumber);
        final DateTime notificationTime = getNextNotificationTime(transactionStatus, newAttemptNumber, tenantContext);
        // Will be null in the GET path or when we run out of attempts..
        if (notificationTime != null) {
            try {
                janitorQueue.recordFutureNotification(notificationTime, key, userToken, accountRecordId, tenantRecordId);
            } catch (IOException e) {
                log.warn("Janitor IncompletePaymentTransactionTask : Failed to insert future notification for paymentTransactionId = {}: {}", paymentTransactionId, e.getMessage());
            }
        }
    }
}
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.phpmaven.project.impl; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.URI; import java.net.URL; import java.util.Enumeration; import java.util.List; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.JarInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.apache.maven.execution.MavenSession; import org.apache.maven.plugin.logging.Log; import org.apache.maven.wagon.PathUtils; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.codehaus.plexus.configuration.PlexusConfigurationException; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.phpmaven.core.IComponentFactory; import org.phpmaven.phar.IPharPackagerConfiguration; import org.phpmaven.phpexec.library.PhpException; import com.google.common.base.Preconditions; /** * Static utilities for file handling. 
* * @author Christian Wiedemann * @author Tobias Sarnowski */ public final class FileHelper { private FileHelper() { // we only have static methods } /** * Copies over a file from the sourceDirectory to the targetDirectory preserving its relative subdirectories. * * @param sourceDirectory where the main source directory is * @param targetDirectory where the target directory is * @param sourceFile which file to copy to the target directory * @param forceOverwrite if timestamps should be ignored * @throws IOException if something goes wrong while copying */ public static void copyToFolder(File sourceDirectory, File targetDirectory, File sourceFile, boolean forceOverwrite) throws IOException { final String relativeFile = PathUtils.toRelative( sourceDirectory.getAbsoluteFile(), sourceFile.getAbsolutePath() ); final File targetFile = new File(targetDirectory, relativeFile); if (forceOverwrite) { FileUtils.copyFile(sourceFile, targetFile); } else { FileUtils.copyFileIfModified(sourceFile, targetFile); } } /** * Unzips all files to the given directory (using jar). 
* * @param log Logging * @param targetDirectory where to unpack the files to * @param elements list of files to unpack * @param factory component factory * @param session maven session * @throws IOException if something goes wrong while copying */ public static void unzipElements(Log log, File targetDirectory, List<String> elements, IComponentFactory factory, MavenSession session) throws IOException { Preconditions.checkArgument( !targetDirectory.exists() || targetDirectory.isDirectory(), "Destination Directory"); targetDirectory.mkdirs(); if (!targetDirectory.exists()) { throw new IllegalStateException("Could not create target directory " + targetDirectory.getAbsolutePath()); } log.debug(elements.toString()); for (String element : elements) { log.debug("unpacking " + element); final File sourceFile = new File(element); if (sourceFile.isFile()) { final int pos = sourceFile.getName().lastIndexOf('.'); String extension = sourceFile.getName(); if (pos != -1) { extension = extension.substring(pos + 1); } if ("jar".equals(extension)) { // for backward compatibility to phpmaven1; there we build jar instead of phar unjar(log, sourceFile, targetDirectory); } else if ("phar".equals(extension)) { unphar(log, targetDirectory, factory, session, sourceFile); } else if ("zip".equals(extension)) { // although jar and zips are compatible to each other this is a implementation detail of jvm. // we should not depend on it. so let us divide it. unzip(log, sourceFile, targetDirectory); } else { throw new IOException("Unknown archive format. Unable to extract " + sourceFile.getAbsolutePath()); } } } } /** * Unphar given file to destination directory. 
* * @param log Logging * @param targetDirectory where to unpack the files to * @param factory component factory * @param session maven session * @param sourceFile the jar source file * @throws IOException if something goes wrong while copying */ public static void unphar(Log log, File targetDirectory, IComponentFactory factory, final MavenSession session, final File sourceFile) throws IOException { log.debug("unphar " + sourceFile.getAbsolutePath()); try { final IPharPackagerConfiguration config = factory.lookup( IPharPackagerConfiguration.class, IComponentFactory.EMPTY_CONFIG, session); config.getPharPackager().extractPharTo(sourceFile, targetDirectory, log); } catch (ComponentLookupException e) { throw new IOException( "Error while execution unphar script. Unable to extract " + sourceFile.getAbsolutePath(), e); } catch (PlexusConfigurationException e) { throw new IOException( "Error while execution unphar script. Unable to extract " + sourceFile.getAbsolutePath(), e); } catch (PhpException e) { throw new IOException( "Error while execution unphar script. Unable to extract " + sourceFile.getAbsolutePath(), e); } } /** * Unpacks a jar file. * * @param log Logging * @param jarFile the jar file * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unjar(Log log, File jarFile, File destDir) throws IOException { Preconditions.checkNotNull(jarFile, "JarFile"); final JarFile jar = new JarFile(jarFile); log.debug("unjar " + jarFile.getAbsolutePath()); final Enumeration<JarEntry> items = jar.entries(); while (items.hasMoreElements()) { final JarEntry entry = items.nextElement(); unpackJarEntry(entry, jar.getInputStream(entry), destDir); } } /** * Unpacks a zip file. 
* * @param log Logging * @param zipFile the zip file * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unzip(Log log, File zipFile, File destDir) throws IOException { Preconditions.checkNotNull(zipFile, "ZipFile"); final ZipFile zip = new ZipFile(zipFile); log.debug("unzip " + zipFile.getAbsolutePath()); final Enumeration<? extends ZipEntry> items = zip.entries(); while (items.hasMoreElements()) { final ZipEntry entry = items.nextElement(); unpackZipEntry(entry, zip.getInputStream(entry), destDir); } } /** * Unpacks a jar URI. * * @param jarUri the jar uri * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unjar(URI jarUri, File destDir) throws IOException { Preconditions.checkNotNull(jarUri, "JarFile"); unjar(jarUri.toURL().openStream(), destDir); } /** * Unpacks a jar stream. * * @param inputStream the jar stream * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unjar(InputStream inputStream, File destDir) throws IOException { Preconditions.checkNotNull(inputStream, "InputStream"); final JarInputStream jarInputStream = new JarInputStream(inputStream); while (true) { final JarEntry entry = jarInputStream.getNextJarEntry(); if (entry == null) { break; } unpackJarEntry(entry, jarInputStream, destDir); } } /** * Unpacks a single jar entry. 
* * @param jarEntry the jar entry * @param jarEntryInputStream the source stream of the entry * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unpackJarEntry(JarEntry jarEntry, InputStream jarEntryInputStream, File destDir) throws IOException { Preconditions.checkNotNull(jarEntry, "JarEntry"); Preconditions.checkNotNull(jarEntryInputStream, "JarEntryInputStream"); Preconditions.checkNotNull(destDir, "Destination Directory"); Preconditions.checkArgument(!destDir.exists() || destDir.isDirectory(), "Destination Directory"); unpackZipEntry(jarEntry, jarEntryInputStream, destDir); } /** * Unpacks a single zip entry. * * @param zipEntry the zip entry * @param zipEntryInputStream the source stream of the entry * @param destDir the destination directory * @throws IOException if something goes wrong */ public static void unpackZipEntry(ZipEntry zipEntry, InputStream zipEntryInputStream, File destDir) throws IOException { Preconditions.checkNotNull(zipEntry, "ZipEntry"); Preconditions.checkNotNull(zipEntryInputStream, "ZipEntryInputStream"); Preconditions.checkNotNull(destDir, "Destination Directory"); Preconditions.checkArgument(!destDir.exists() || destDir.isDirectory(), "Destination Directory"); // final name final File destFile = new File(destDir, zipEntry.getName()); // already there if (destFile.exists()) { return; } // just a directory to create if (zipEntry.isDirectory()) { destFile.mkdirs(); return; } else { // ensure parent dir exists destFile.getParentFile().mkdirs(); } OutputStream out = null; try { out = new BufferedOutputStream(new FileOutputStream(destFile)); IOUtil.copy(new BufferedInputStream(zipEntryInputStream), out); } finally { if (out != null) out.close(); } } /** * Reads an url to string; should only be used for non-blocking connections * (f.e. jar file contents and other things). * * @param url the url to be read * @return the results * @throws IOException thrown on problems while reading. 
*/ public static String readUrl(final URL url) throws IOException { final InputStream stream = url.openStream(); final BufferedReader in = new BufferedReader(new InputStreamReader(stream)); String inputLine; final StringBuffer result = new StringBuffer(); while ((inputLine = in.readLine()) != null) { if (result.length() > 0) { result.append("\n"); } result.append(inputLine); } in.close(); return result.toString(); } /** * Resolve a list of file wildcard expressions. * * @param fileList List of strings with filenames/wildcard expressions * @param baseDir the base folder to run the wildcards on * @param caseSensitiveMatch true if the wildcards should be run case sensitive * @return List of matching file names */ public static String[] getWildcardMatches(String[] fileList, File baseDir, boolean caseSensitiveMatch) { final DirectoryScanner scanner = new DirectoryScanner(); scanner.setIncludes(fileList); scanner.setBasedir(baseDir); scanner.setCaseSensitive(caseSensitiveMatch); scanner.scan(); return scanner.getIncludedFiles(); } /** * Counts the number of files within a directory. * @param dir directory * @return number of files */ public static int countFiles(File dir) { int result = 0; for (final File file : dir.listFiles()) { if (file.isFile()) { result ++; } else { result += countFiles(file); } } return result; } }
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.groupby; import java.util.List; import java.util.Map; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Counter; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import 
org.pentaho.di.trans.step.StepMetaInjectionInterface;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;

/**
 * Metadata for the "Group By" transformation step: which fields to group on, which
 * aggregates to compute, and how the step (de)serializes itself to XML.
 *
 * Created on 02-jun-2003
 */
public class GroupByMeta extends BaseStepMeta implements StepMetaInterface {
    private static Class<?> PKG = GroupByMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    // Aggregate type codes; array indexes of typeGroupCode/typeGroupLongDesc must match these values.
    public static final int TYPE_GROUP_NONE = 0;
    public static final int TYPE_GROUP_SUM = 1;
    public static final int TYPE_GROUP_AVERAGE = 2;
    public static final int TYPE_GROUP_MIN = 3;
    public static final int TYPE_GROUP_MAX = 4;
    public static final int TYPE_GROUP_COUNT_ALL = 5;
    public static final int TYPE_GROUP_CONCAT_COMMA = 6;
    public static final int TYPE_GROUP_FIRST = 7;
    public static final int TYPE_GROUP_LAST = 8;
    public static final int TYPE_GROUP_FIRST_INCL_NULL = 9;
    public static final int TYPE_GROUP_LAST_INCL_NULL = 10;
    public static final int TYPE_GROUP_CUMULATIVE_SUM = 11;
    public static final int TYPE_GROUP_CUMULATIVE_AVERAGE = 12;
    public static final int TYPE_GROUP_STANDARD_DEVIATION = 13;
    public static final int TYPE_GROUP_CONCAT_STRING = 14;
    public static final int TYPE_GROUP_COUNT_DISTINCT = 15;
    public static final int TYPE_GROUP_COUNT_ANY = 16;

    // Codes persisted in the XML/repository — must stay stable across releases.
    public static final String typeGroupCode[] = /* WARNING: DO NOT TRANSLATE THIS. WE ARE SERIOUS, DON'T TRANSLATE! */
    {
        "-", "SUM", "AVERAGE", "MIN", "MAX", "COUNT_ALL", "CONCAT_COMMA", "FIRST", "LAST", "FIRST_INCL_NULL", "LAST_INCL_NULL", "CUM_SUM", "CUM_AVG", "STD_DEV","CONCAT_STRING", "COUNT_DISTINCT", "COUNT_ANY",
    };

    // Localized descriptions shown in the UI, parallel to typeGroupCode.
    public static final String typeGroupLongDesc[] = {
        "-",
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.SUM"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.AVERAGE"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.MIN"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.MAX"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.CONCAT_ALL"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.CONCAT_COMMA"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.FIRST"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.LAST"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.FIRST_INCL_NULL"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.LAST_INCL_NULL"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.CUMUMALTIVE_SUM"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.CUMUMALTIVE_AVERAGE"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.STANDARD_DEVIATION"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.CONCAT_STRING"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.COUNT_DISTINCT"),
        BaseMessages.getString(PKG, "GroupByMeta.TypeGroupLongDesc.COUNT_ANY"),
    };

    /** All rows need to pass, adding an extra row at the end of each group/block. */
    private boolean passAllRows;

    /** Directory to store the temp files */
    private String directory;

    /** Temp files prefix... */
    private String prefix;

    /** Indicate that some rows don't need to be considered : TODO: make work in GUI & worker */
    private boolean aggregateIgnored;

    /** name of the boolean field that indicates we need to ignore the row : TODO: make work in GUI & worker */
    private String aggregateIgnoredField;

    /** Fields to group over */
    private String groupField[];

    /** Name of aggregate field */
    private String aggregateField[];

    /** Field name to group over */
    private String subjectField[];

    /** Type of aggregate */
    private int aggregateType[];

    /** Value to use as separator for ex */
    private String valueField[];

    /** Add a linenr in the group, resetting to 0 in a new group. */
    private boolean addingLineNrInGroup;

    /** The fieldname that will contain the added integer field */
    private String lineNrInGroupField;

    /** Flag to indicate that we always give back one row. Defaults to true for existing transformations. */
    private boolean alwaysGivingBackOneRow;

    public GroupByMeta() {
        super(); // allocate BaseStepMeta
    }

    /**
     * @return Returns the aggregateField.
     */
    public String[] getAggregateField() {
        return aggregateField;
    }

    /**
     * @param aggregateField The aggregateField to set.
     */
    public void setAggregateField(String[] aggregateField) {
        this.aggregateField = aggregateField;
    }

    /**
     * @return Returns the aggregateIgnored.
     */
    public boolean isAggregateIgnored() {
        return aggregateIgnored;
    }

    /**
     * @param aggregateIgnored The aggregateIgnored to set.
     */
    public void setAggregateIgnored(boolean aggregateIgnored) {
        this.aggregateIgnored = aggregateIgnored;
    }

    /**
     * @return Returns the aggregateIgnoredField.
     */
    public String getAggregateIgnoredField() {
        return aggregateIgnoredField;
    }

    /**
     * @param aggregateIgnoredField The aggregateIgnoredField to set.
     */
    public void setAggregateIgnoredField(String aggregateIgnoredField) {
        this.aggregateIgnoredField = aggregateIgnoredField;
    }

    /**
     * @return Returns the aggregateType.
     */
    public int[] getAggregateType() {
        return aggregateType;
    }

    /**
     * @param aggregateType The aggregateType to set.
     */
    public void setAggregateType(int[] aggregateType) {
        this.aggregateType = aggregateType;
    }

    /**
     * @return Returns the groupField.
     */
    public String[] getGroupField() {
        return groupField;
    }

    /**
     * @param groupField The groupField to set.
     */
    public void setGroupField(String[] groupField) {
        this.groupField = groupField;
    }

    /**
     * @return Returns the passAllRows.
     */
    public boolean passAllRows() {
        return passAllRows;
    }

    /**
     * @param passAllRows The passAllRows to set.
     */
    public void setPassAllRows(boolean passAllRows) {
        this.passAllRows = passAllRows;
    }

    /**
     * @return Returns the subjectField.
     */
    public String[] getSubjectField() {
        return subjectField;
    }

    /**
     * @param subjectField The subjectField to set.
     */
    public void setSubjectField(String[] subjectField) {
        this.subjectField = subjectField;
    }

    /**
     * @return Returns the valueField.
     */
    public String[] getValueField() {
        return valueField;
    }

    /**
     * @param valueField The valueField to set.
     */
    public void setValueField(String[] valueField) {
        this.valueField = valueField;
    }

    /** Loads the step settings from the given XML step node. */
    public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException {
        readData(stepnode);
    }

    /** Allocates the parallel group/aggregate arrays to the given sizes. */
    public void allocate(int sizegroup, int nrfields) {
        groupField = new String[sizegroup];
        aggregateField = new String[nrfields];
        subjectField = new String[nrfields];
        aggregateType = new int[nrfields];
        valueField= new String[nrfields];
    }

    // NOTE(review): shallow clone — the field arrays are shared with the original;
    // confirm callers re-allocate before mutating a clone.
    public Object clone() {
        Object retval = super.clone();
        return retval;
    }

    /** Parses the step settings out of the XML step node (inverse of getXML()). */
    private void readData(Node stepnode) throws KettleXMLException {
        try {
            passAllRows = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "all_rows")); //$NON-NLS-1$ //$NON-NLS-2$
            aggregateIgnored = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "ignore_aggregate")); //$NON-NLS-1$ //$NON-NLS-2$
            aggregateIgnoredField = XMLHandler.getTagValue(stepnode, "field_ignore"); //$NON-NLS-1$

            directory = XMLHandler.getTagValue(stepnode, "directory"); //$NON-NLS-1$
            prefix = XMLHandler.getTagValue(stepnode, "prefix"); //$NON-NLS-1$

            addingLineNrInGroup = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "add_linenr")); // $NON-NLS-1$
            lineNrInGroupField = XMLHandler.getTagValue(stepnode, "linenr_fieldname");

            Node groupn = XMLHandler.getSubNode(stepnode, "group"); //$NON-NLS-1$
            Node fields = XMLHandler.getSubNode(stepnode, "fields"); //$NON-NLS-1$

            int sizegroup = XMLHandler.countNodes(groupn, "field"); //$NON-NLS-1$
            int nrfields = XMLHandler.countNodes(fields, "field"); //$NON-NLS-1$

            allocate(sizegroup, nrfields);

            for (int i=0;i<sizegroup;i++) {
                Node fnode = XMLHandler.getSubNodeByNr(groupn, "field", i); //$NON-NLS-1$
                groupField[i] = XMLHandler.getTagValue(fnode, "name"); //$NON-NLS-1$
            }

            boolean hasNumberOfValues = false;
            for (int i=0;i<nrfields;i++) {
                Node fnode = XMLHandler.getSubNodeByNr(fields, "field", i); //$NON-NLS-1$
                aggregateField[i] = XMLHandler.getTagValue(fnode, "aggregate"); //$NON-NLS-1$
                subjectField[i] = XMLHandler.getTagValue(fnode, "subject"); //$NON-NLS-1$
                aggregateType[i] = getType(XMLHandler.getTagValue(fnode, "type")); //$NON-NLS-1$

                // Track count-style aggregates: they drive the default for alwaysGivingBackOneRow below
                if (aggregateType[i]==TYPE_GROUP_COUNT_ALL || aggregateType[i]==TYPE_GROUP_COUNT_DISTINCT || aggregateType[i]==TYPE_GROUP_COUNT_ANY) {
                    hasNumberOfValues = true;
                }

                valueField[i] = XMLHandler.getTagValue(fnode, "valuefield");
            }

            String giveBackRow = XMLHandler.getTagValue(stepnode, "give_back_row"); // $NON-NLS-1$
            if (Const.isEmpty(giveBackRow)) {
                // Backward compatibility: older transformations had no "give_back_row" tag
                alwaysGivingBackOneRow = hasNumberOfValues;
            } else {
                alwaysGivingBackOneRow = "Y".equalsIgnoreCase( giveBackRow ); // $NON-NLS-1$
            }
        }
        catch(Exception e) {
            throw new KettleXMLException(BaseMessages.getString(PKG, "GroupByMeta.Exception.UnableToLoadStepInfoFromXML"), e); //$NON-NLS-1$
        }
    }

    /**
     * Looks up an aggregate type by its code or localized description.
     * Returns 0 (TYPE_GROUP_NONE) when the description is not recognized.
     */
    public static final int getType(String desc) {
        for (int i=0;i<typeGroupCode.length;i++) {
            if (typeGroupCode[i].equalsIgnoreCase(desc)) return i;
        }
        for (int i=0;i<typeGroupLongDesc.length;i++) {
            if (typeGroupLongDesc[i].equalsIgnoreCase(desc)) return i;
        }
        return 0;
    }

    /** @return the persisted code for the aggregate type, or null when out of range */
    public static final String getTypeDesc(int i) {
        if (i<0 || i>=typeGroupCode.length) return null;
        return typeGroupCode[i];
    }

    /** @return the localized description for the aggregate type, or null when out of range */
    public static final String getTypeDescLong(int i) {
        if (i<0 || i>=typeGroupLongDesc.length) return null;
        return typeGroupLongDesc[i];
    }

    /** Resets this meta to its default (empty) configuration. */
    public void setDefault() {
        directory="%%java.io.tmpdir%%"; //$NON-NLS-1$
        prefix="grp"; //$NON-NLS-1$

        passAllRows = false;
        aggregateIgnored = false;
        aggregateIgnoredField = null;

        int sizegroup= 0;
        int nrfields = 0;

        allocate( sizegroup, nrfields );
    }

    /**
     * Rewrites the row metadata to reflect this step's output: group fields (or all input
     * fields when passAllRows), followed by one field per aggregate, plus the optional
     * per-group line number. Modifies {@code r} in place.
     */
    public void getFields(RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) {
        // re-assemble a new row of metadata
        //
        RowMetaInterface fields = new RowMeta();

        if (!passAllRows) {
            // Add the grouping fields in the correct order...
            //
            for (int i=0;i<groupField.length;i++) {
                ValueMetaInterface valueMeta = r.searchValueMeta(groupField[i]);
                if (valueMeta!=null) {
                    fields.addValueMeta(valueMeta);
                }
            }
        } else {
            // Add all the original fields from the incoming row meta
            //
            fields.addRowMeta(r);
        }

        // Re-add aggregates
        //
        for (int i=0;i<subjectField.length;i++) {
            ValueMetaInterface subj = r.searchValueMeta(subjectField[i]);
            // COUNT_ANY needs no subject field; all other aggregates require one
            if (subj!=null || aggregateType[i]==TYPE_GROUP_COUNT_ANY) {
                String value_name = aggregateField[i];
                int value_type = ValueMetaInterface.TYPE_NONE;
                int length = -1;
                int precision = -1;

                switch(aggregateType[i]) {
                    case TYPE_GROUP_SUM :
                    case TYPE_GROUP_AVERAGE :
                    case TYPE_GROUP_CUMULATIVE_SUM :
                    case TYPE_GROUP_CUMULATIVE_AVERAGE :
                    case TYPE_GROUP_FIRST :
                    case TYPE_GROUP_LAST :
                    case TYPE_GROUP_FIRST_INCL_NULL :
                    case TYPE_GROUP_LAST_INCL_NULL :
                    case TYPE_GROUP_MIN :
                    case TYPE_GROUP_MAX :
                        value_type = subj.getType();
                        break;
                    case TYPE_GROUP_COUNT_DISTINCT :
                    case TYPE_GROUP_COUNT_ANY :
                    case TYPE_GROUP_COUNT_ALL :
                        value_type = ValueMetaInterface.TYPE_INTEGER;
                        break;
                    case TYPE_GROUP_CONCAT_COMMA :
                        value_type = ValueMetaInterface.TYPE_STRING;
                        break;
                    case TYPE_GROUP_STANDARD_DEVIATION :
                        value_type = ValueMetaInterface.TYPE_NUMBER;
                        break;
                    case TYPE_GROUP_CONCAT_STRING :
                        value_type = ValueMetaInterface.TYPE_STRING;
                        break;
                    default:
                        break;
                }

                // Change type from integer to number in case off averages for cumulative average
                //
                if (aggregateType[i]==TYPE_GROUP_CUMULATIVE_AVERAGE && value_type==ValueMetaInterface.TYPE_INTEGER) {
                    value_type = ValueMetaInterface.TYPE_NUMBER;
                    precision=-1;
                    length=-1;
                } else if (aggregateType[i]==TYPE_GROUP_COUNT_ALL || aggregateType[i]==TYPE_GROUP_COUNT_DISTINCT || aggregateType[i]==TYPE_GROUP_COUNT_ANY ) {
                    length = ValueMetaInterface.DEFAULT_INTEGER_LENGTH;
                    precision = 0;
                }
                // If it ain't numeric, we change it to Number
                //
                else if (aggregateType[i]==TYPE_GROUP_SUM && value_type!=ValueMetaInterface.TYPE_INTEGER && value_type!=ValueMetaInterface.TYPE_NUMBER && value_type!=ValueMetaInterface.TYPE_BIGNUMBER) {
                    value_type = ValueMetaInterface.TYPE_NUMBER;
                    precision=-1;
                    length=-1;
                }

                if (value_type != ValueMetaInterface.TYPE_NONE) {
                    ValueMetaInterface v = new ValueMeta(value_name, value_type);
                    v.setOrigin(origin);
                    v.setLength(length, precision);
                    fields.addValueMeta(v);
                }
            }
        }

        if (passAllRows) {
            // If we pass all rows, we can add a line nr in the group...
            if (addingLineNrInGroup && !Const.isEmpty(lineNrInGroupField)) {
                ValueMetaInterface lineNr = new ValueMeta(lineNrInGroupField, ValueMetaInterface.TYPE_INTEGER);
                lineNr.setLength(ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0);
                lineNr.setOrigin(origin);
                fields.addValueMeta(lineNr);
            }
        }

        // Now that we have all the fields we want, we should clear the original row and replace the values...
        //
        r.clear();
        r.addRowMeta(fields);
    }

    /** Serializes the step settings to XML (inverse of readData()). */
    public String getXML() {
        StringBuffer retval = new StringBuffer(500);

        retval.append(" ").append(XMLHandler.addTagValue("all_rows", passAllRows)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("ignore_aggregate", aggregateIgnored)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("field_ignore", aggregateIgnoredField)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("directory", directory)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("prefix", prefix)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("add_linenr", addingLineNrInGroup)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("linenr_fieldname", lineNrInGroupField)); //$NON-NLS-1$
        retval.append(" ").append(XMLHandler.addTagValue("give_back_row", alwaysGivingBackOneRow)); //$NON-NLS-1$ //$NON-NLS-2$

        retval.append(" <group>").append(Const.CR); //$NON-NLS-1$
        for (int i=0;i<groupField.length;i++) {
            retval.append(" <field>").append(Const.CR); //$NON-NLS-1$
            retval.append(" ").append(XMLHandler.addTagValue("name", groupField[i]));
//$NON-NLS-1$ //$NON-NLS-2$ retval.append(" </field>").append(Const.CR); //$NON-NLS-1$ } retval.append(" </group>").append(Const.CR); //$NON-NLS-1$ retval.append(" <fields>").append(Const.CR); //$NON-NLS-1$ for (int i=0;i<subjectField.length;i++) { retval.append(" <field>").append(Const.CR); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("aggregate", aggregateField[i])); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("subject", subjectField[i])); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("type", getTypeDesc(aggregateType[i]))); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("valuefield", valueField[i])); retval.append(" </field>").append(Const.CR); //$NON-NLS-1$ } retval.append(" </fields>").append(Const.CR); //$NON-NLS-1$ return retval.toString(); } public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException { try { passAllRows = rep.getStepAttributeBoolean(id_step, "all_rows"); //$NON-NLS-1$ aggregateIgnored = rep.getStepAttributeBoolean(id_step, "ignore_aggregate"); //$NON-NLS-1$ aggregateIgnoredField = rep.getStepAttributeString (id_step, "field_ignore"); //$NON-NLS-1$ directory = rep.getStepAttributeString (id_step, "directory"); //$NON-NLS-1$ prefix = rep.getStepAttributeString (id_step, "prefix"); //$NON-NLS-1$ addingLineNrInGroup = rep.getStepAttributeBoolean(id_step, "add_linenr"); // $NON-NLS-1$ lineNrInGroupField = rep.getStepAttributeString(id_step, "linenr_fieldname"); // $NON-NLS-1$ int groupsize = rep.countNrStepAttributes(id_step, "group_name"); //$NON-NLS-1$ int nrvalues = rep.countNrStepAttributes(id_step, "aggregate_name"); //$NON-NLS-1$ allocate(groupsize, nrvalues); for (int i=0;i<groupsize;i++) { groupField[i] = rep.getStepAttributeString(id_step, i, "group_name"); //$NON-NLS-1$ } boolean hasNumberOfValues = false; for (int i=0;i<nrvalues;i++) { 
aggregateField[i] = rep.getStepAttributeString(id_step, i, "aggregate_name"); //$NON-NLS-1$ subjectField[i] = rep.getStepAttributeString(id_step, i, "aggregate_subject"); //$NON-NLS-1$ aggregateType[i] = getType( rep.getStepAttributeString(id_step, i, "aggregate_type") ); //$NON-NLS-1$ if (aggregateType[i]==TYPE_GROUP_COUNT_ALL || aggregateType[i]==TYPE_GROUP_COUNT_DISTINCT || aggregateType[i]==TYPE_GROUP_COUNT_ANY) { hasNumberOfValues = true; } valueField[i] = rep.getStepAttributeString(id_step, i, "aggregate_value_field"); //$NON-NLS-1$ } alwaysGivingBackOneRow = rep.getStepAttributeBoolean(id_step, 0, "give_back_row", hasNumberOfValues); // $NON-NLS-1$ } catch(Exception e) { throw new KettleException(BaseMessages.getString(PKG, "GroupByMeta.Exception.UnexpectedErrorInReadingStepInfoFromRepository"), e); //$NON-NLS-1$ } } public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException { try { rep.saveStepAttribute(id_transformation, id_step, "all_rows", passAllRows); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "ignore_aggregate", aggregateIgnored); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "field_ignore", aggregateIgnoredField); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "directory", directory); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "prefix", prefix); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "add_linenr", addingLineNrInGroup); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "linenr_fieldname", lineNrInGroupField); // $NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, "give_back_row", alwaysGivingBackOneRow); // $NON-NLS-1$ for (int i=0;i<groupField.length;i++) { rep.saveStepAttribute(id_transformation, id_step, i, "group_name", groupField[i]); //$NON-NLS-1$ } for (int i=0;i<subjectField.length;i++) { rep.saveStepAttribute(id_transformation, id_step, i, "aggregate_name", 
aggregateField[i]); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, i, "aggregate_subject", subjectField[i]); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, i, "aggregate_type", getTypeDesc(aggregateType[i])); //$NON-NLS-1$ rep.saveStepAttribute(id_transformation, id_step, i, "aggregate_value_field", valueField[i]); } } catch(Exception e) { throw new KettleException(BaseMessages.getString(PKG, "GroupByMeta.Exception.UnableToSaveStepInfoToRepository")+id_step, e); //$NON-NLS-1$ } } public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) { CheckResult cr; if (input.length>0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "GroupByMeta.CheckResult.ReceivingInfoOK"), stepMeta); //$NON-NLS-1$ remarks.add(cr); } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "GroupByMeta.CheckResult.NoInputError"), stepMeta); //$NON-NLS-1$ remarks.add(cr); } } public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans) { return new GroupBy(stepMeta, stepDataInterface, cnr, transMeta, trans); } public StepDataInterface getStepData() { return new GroupByData(); } /** * @return Returns the directory. */ public String getDirectory() { return directory; } /** * @param directory The directory to set. */ public void setDirectory(String directory) { this.directory = directory; } /** * @return Returns the prefix. */ public String getPrefix() { return prefix; } /** * @param prefix The prefix to set. 
*/ public void setPrefix(String prefix) { this.prefix = prefix; } /** * @return the addingLineNrInGroup */ public boolean isAddingLineNrInGroup() { return addingLineNrInGroup; } /** * @param addingLineNrInGroup the addingLineNrInGroup to set */ public void setAddingLineNrInGroup(boolean addingLineNrInGroup) { this.addingLineNrInGroup = addingLineNrInGroup; } /** * @return the lineNrInGroupField */ public String getLineNrInGroupField() { return lineNrInGroupField; } /** * @param lineNrInGroupField the lineNrInGroupField to set */ public void setLineNrInGroupField(String lineNrInGroupField) { this.lineNrInGroupField = lineNrInGroupField; } /** * @return the alwaysGivingBackOneRow */ public boolean isAlwaysGivingBackOneRow() { return alwaysGivingBackOneRow; } /** * @param alwaysGivingBackOneRow the alwaysGivingBackOneRow to set */ public void setAlwaysGivingBackOneRow(boolean alwaysGivingBackOneRow) { this.alwaysGivingBackOneRow = alwaysGivingBackOneRow; } @Override public StepMetaInjectionInterface getStepMetaInjectionInterface() { return new GroupByMetaInjection(this); } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ecs.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * An object representing a container instance host device. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/Device" target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Device implements Serializable, Cloneable, StructuredPojo { /** * <p> * The path for the device on the host container instance. * </p> */ private String hostPath; /** * <p> * The path inside the container at which to expose the host device. * </p> */ private String containerPath; /** * <p> * The explicit permissions to provide to the container for the device. By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> */ private com.amazonaws.internal.SdkInternalList<String> permissions; /** * <p> * The path for the device on the host container instance. * </p> * * @param hostPath * The path for the device on the host container instance. */ public void setHostPath(String hostPath) { this.hostPath = hostPath; } /** * <p> * The path for the device on the host container instance. * </p> * * @return The path for the device on the host container instance. 
*/ public String getHostPath() { return this.hostPath; } /** * <p> * The path for the device on the host container instance. * </p> * * @param hostPath * The path for the device on the host container instance. * @return Returns a reference to this object so that method calls can be chained together. */ public Device withHostPath(String hostPath) { setHostPath(hostPath); return this; } /** * <p> * The path inside the container at which to expose the host device. * </p> * * @param containerPath * The path inside the container at which to expose the host device. */ public void setContainerPath(String containerPath) { this.containerPath = containerPath; } /** * <p> * The path inside the container at which to expose the host device. * </p> * * @return The path inside the container at which to expose the host device. */ public String getContainerPath() { return this.containerPath; } /** * <p> * The path inside the container at which to expose the host device. * </p> * * @param containerPath * The path inside the container at which to expose the host device. * @return Returns a reference to this object so that method calls can be chained together. */ public Device withContainerPath(String containerPath) { setContainerPath(containerPath); return this; } /** * <p> * The explicit permissions to provide to the container for the device. By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> * * @return The explicit permissions to provide to the container for the device. By default, the container has * permissions for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * @see DeviceCgroupPermission */ public java.util.List<String> getPermissions() { if (permissions == null) { permissions = new com.amazonaws.internal.SdkInternalList<String>(); } return permissions; } /** * <p> * The explicit permissions to provide to the container for the device. 
By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> * * @param permissions * The explicit permissions to provide to the container for the device. By default, the container has * permissions for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * @see DeviceCgroupPermission */ public void setPermissions(java.util.Collection<String> permissions) { if (permissions == null) { this.permissions = null; return; } this.permissions = new com.amazonaws.internal.SdkInternalList<String>(permissions); } /** * <p> * The explicit permissions to provide to the container for the device. By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setPermissions(java.util.Collection)} or {@link #withPermissions(java.util.Collection)} if you want to * override the existing values. * </p> * * @param permissions * The explicit permissions to provide to the container for the device. By default, the container has * permissions for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * @return Returns a reference to this object so that method calls can be chained together. * @see DeviceCgroupPermission */ public Device withPermissions(String... permissions) { if (this.permissions == null) { setPermissions(new com.amazonaws.internal.SdkInternalList<String>(permissions.length)); } for (String ele : permissions) { this.permissions.add(ele); } return this; } /** * <p> * The explicit permissions to provide to the container for the device. By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> * * @param permissions * The explicit permissions to provide to the container for the device. 
By default, the container has * permissions for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * @return Returns a reference to this object so that method calls can be chained together. * @see DeviceCgroupPermission */ public Device withPermissions(java.util.Collection<String> permissions) { setPermissions(permissions); return this; } /** * <p> * The explicit permissions to provide to the container for the device. By default, the container has permissions * for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * </p> * * @param permissions * The explicit permissions to provide to the container for the device. By default, the container has * permissions for <code>read</code>, <code>write</code>, and <code>mknod</code> for the device. * @return Returns a reference to this object so that method calls can be chained together. * @see DeviceCgroupPermission */ public Device withPermissions(DeviceCgroupPermission... permissions) { com.amazonaws.internal.SdkInternalList<String> permissionsCopy = new com.amazonaws.internal.SdkInternalList<String>(permissions.length); for (DeviceCgroupPermission value : permissions) { permissionsCopy.add(value.toString()); } if (getPermissions() == null) { setPermissions(permissionsCopy); } else { getPermissions().addAll(permissionsCopy); } return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getHostPath() != null) sb.append("HostPath: ").append(getHostPath()).append(","); if (getContainerPath() != null) sb.append("ContainerPath: ").append(getContainerPath()).append(","); if (getPermissions() != null) sb.append("Permissions: ").append(getPermissions()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Device == false) return false; Device other = (Device) obj; if (other.getHostPath() == null ^ this.getHostPath() == null) return false; if (other.getHostPath() != null && other.getHostPath().equals(this.getHostPath()) == false) return false; if (other.getContainerPath() == null ^ this.getContainerPath() == null) return false; if (other.getContainerPath() != null && other.getContainerPath().equals(this.getContainerPath()) == false) return false; if (other.getPermissions() == null ^ this.getPermissions() == null) return false; if (other.getPermissions() != null && other.getPermissions().equals(this.getPermissions()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getHostPath() == null) ? 0 : getHostPath().hashCode()); hashCode = prime * hashCode + ((getContainerPath() == null) ? 0 : getContainerPath().hashCode()); hashCode = prime * hashCode + ((getPermissions() == null) ? 
0 : getPermissions().hashCode()); return hashCode; } @Override public Device clone() { try { return (Device) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.ecs.model.transform.DeviceMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Copyright 2018-2021 B2i Healthcare Pte Ltd, http://b2i.sg
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.b2international.snowowl.snomed.datastore.index.taxonomy;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import java.util.Set;

import com.b2international.collections.longs.LongKeyMap;
import com.b2international.collections.longs.LongList;
import com.b2international.collections.longs.LongSet;
import com.b2international.snowowl.snomed.datastore.ConcreteDomainFragment;
import com.b2international.snowowl.snomed.datastore.StatementFragment;

/**
 * Represents a snapshot of the ontology for reasoner input and normal form generation.
 *
 * @since 7.0
 */
public final class ReasonerTaxonomy implements IReasonerTaxonomy {

	// --- Reasoner input: built up-front from the stated view of the ontology ---
	private final InternalIdMap conceptMap;
	private final LongKeyMap<String> fullySpecifiedNames;
	private final InternalIdEdges statedAncestors;
	private final InternalIdEdges statedDescendants;
	private final InternalSctIdSet definingConcepts;
	private final InternalSctIdSet exhaustiveConcepts;
	private final InternalIdMultimap<StatementFragment> statedRelationships;
	private final InternalIdMultimap<StatementFragment> axiomNonIsARelationships;
	private final InternalIdMultimap<StatementFragment> existingInferredRelationships;
	private final InternalIdMultimap<StatementFragment> additionalGroupedRelationships;
	private final InternalIdMultimap<String> axioms;
	private final LongSet neverGroupedTypeIds;
	private final Set<PropertyChain> propertyChains;
	private final InternalIdMultimap<ConcreteDomainFragment> statedConcreteDomainMembers;
	private final InternalIdMultimap<ConcreteDomainFragment> inferredConcreteDomainMembers;
	private final InternalIdMultimap<ConcreteDomainFragment> additionalGroupedConcreteDomainMembers;

	// --- Reasoner output: populated only via withInferences(...), may be unset ---
	private final InternalIdEdges inferredAncestors;
	private final InternalSctIdSet unsatisfiableConcepts;
	private final InternalSctIdMultimap equivalentConcepts;
	private final LongList iterationOrder;

	/**
	 * Package-private: instances are created by the taxonomy builder and by
	 * {@link #withInferences}. Arguments are stored as-is (no defensive copies).
	 */
	/*package*/ ReasonerTaxonomy(
			final InternalIdMap conceptMap,
			final LongKeyMap<String> fullySpecifiedNames,
			final InternalIdEdges statedAncestors,
			final InternalIdEdges statedDescendants,
			final InternalSctIdSet definingConcepts,
			final InternalSctIdSet exhaustiveConcepts,
			final InternalIdMultimap<StatementFragment> statedRelationships,
			final InternalIdMultimap<StatementFragment> axiomNonIsARelationships,
			final InternalIdMultimap<StatementFragment> existingInferredRelationships,
			final InternalIdMultimap<StatementFragment> additionalGroupedRelationships,
			final InternalIdMultimap<String> axioms,
			final LongSet neverGroupedTypeIds,
			final Set<PropertyChain> propertyChains,
			final InternalIdMultimap<ConcreteDomainFragment> statedConcreteDomainMembers,
			final InternalIdMultimap<ConcreteDomainFragment> inferredConcreteDomainMembers,
			final InternalIdMultimap<ConcreteDomainFragment> additionalGroupedConcreteDomainMembers,
			final InternalIdEdges inferredAncestors,
			final InternalSctIdSet unsatisfiableConcepts,
			final InternalSctIdMultimap equivalentConcepts,
			final LongList iterationOrder) {

		this.conceptMap = conceptMap;
		this.fullySpecifiedNames = fullySpecifiedNames;
		this.statedAncestors = statedAncestors;
		this.statedDescendants = statedDescendants;
		this.definingConcepts = definingConcepts;
		this.exhaustiveConcepts = exhaustiveConcepts;
		this.statedRelationships = statedRelationships;
		this.axiomNonIsARelationships = axiomNonIsARelationships;
		this.existingInferredRelationships = existingInferredRelationships;
		this.additionalGroupedRelationships = additionalGroupedRelationships;
		this.axioms = axioms;
		this.neverGroupedTypeIds = neverGroupedTypeIds;
		this.propertyChains = propertyChains;
		this.statedConcreteDomainMembers = statedConcreteDomainMembers;
		this.inferredConcreteDomainMembers = inferredConcreteDomainMembers;
		this.additionalGroupedConcreteDomainMembers = additionalGroupedConcreteDomainMembers;
		this.inferredAncestors = inferredAncestors;
		this.unsatisfiableConcepts = unsatisfiableConcepts;
		this.equivalentConcepts = equivalentConcepts;
		this.iterationOrder = iterationOrder;
	}

	public InternalIdMap getConceptMap() {
		return conceptMap;
	}

	public LongKeyMap<String> getFullySpecifiedNames() {
		return fullySpecifiedNames;
	}

	public InternalIdEdges getStatedAncestors() {
		return statedAncestors;
	}

	public InternalIdEdges getStatedDescendants() {
		return statedDescendants;
	}

	// The three accessors below guard against reading reasoner output before it
	// has been attached via withInferences(...).
	public InternalIdEdges getInferredAncestors() {
		return checkNotNull(inferredAncestors, "Inferred ancestors are unset on this taxonomy.");
	}

	@Override
	public InternalSctIdSet getUnsatisfiableConcepts() {
		return checkNotNull(unsatisfiableConcepts, "Unsatisfiable concept IDs are unset on this taxonomy.");
	}

	@Override
	public InternalSctIdMultimap getEquivalentConcepts() {
		return checkNotNull(equivalentConcepts, "Inferred equivalences are unset on this taxonomy.");
	}

	public InternalSctIdSet getDefiningConcepts() {
		return definingConcepts;
	}

	public InternalSctIdSet getExhaustiveConcepts() {
		return exhaustiveConcepts;
	}

	public InternalIdMultimap<StatementFragment> getStatedRelationships() {
		return statedRelationships;
	}

	public InternalIdMultimap<StatementFragment> getAxiomNonIsARelationships() {
		return axiomNonIsARelationships;
	}

	public InternalIdMultimap<StatementFragment> getExistingInferredRelationships() {
		return existingInferredRelationships;
	}

	public InternalIdMultimap<StatementFragment> getAdditionalGroupedRelationships() {
		return additionalGroupedRelationships;
	}

	public InternalIdMultimap<String> getAxioms() {
		return axioms;
	}

	public LongSet getNeverGroupedTypeIds() {
		return neverGroupedTypeIds;
	}

	public Set<PropertyChain> getPropertyChains() {
		return propertyChains;
	}

	public InternalIdMultimap<ConcreteDomainFragment> getStatedConcreteDomainMembers() {
		return statedConcreteDomainMembers;
	}

	public InternalIdMultimap<ConcreteDomainFragment> getInferredConcreteDomainMembers() {
		return inferredConcreteDomainMembers;
	}

	public InternalIdMultimap<ConcreteDomainFragment> getAdditionalGroupedConcreteDomainMembers() {
		return additionalGroupedConcreteDomainMembers;
	}

	public LongList getIterationOrder() {
		return iterationOrder;
	}

	/**
	 * Returns a new taxonomy combining this instance's reasoner input with the
	 * given reasoner output. May be called at most once per taxonomy: the
	 * checkState calls below reject instances that already carry inferences.
	 * This instance itself is not modified.
	 */
	public ReasonerTaxonomy withInferences(final InternalIdEdges newInferredAncestors,
			final InternalSctIdSet newUnsatisfiableConcepts,
			final InternalSctIdMultimap newEquivalentConcepts,
			final LongList iterationOrder) {

		checkNotNull(newInferredAncestors, "Inferred ancestors may not be null.");
		checkNotNull(newUnsatisfiableConcepts, "Inferred unsatisfiable concepts may not be null.");
		checkNotNull(newEquivalentConcepts, "Inferred equivalent concept sets may not be null.");
		checkNotNull(iterationOrder, "Inferred concept iteration order may not be null.");
		checkState(this.inferredAncestors == null, "Inferred ancestors are already present in this taxonomy.");
		checkState(this.unsatisfiableConcepts == null, "Inferred unsatisfiable concepts are already present in this taxonomy.");
		checkState(this.equivalentConcepts == null, "Inferred equivalent concept sets are already present in this taxonomy.");
		checkState(this.iterationOrder == null, "Inferred concept iteration order is already set in this taxonomy.");

		return new ReasonerTaxonomy(conceptMap,
				fullySpecifiedNames,
				statedAncestors,
				statedDescendants,
				definingConcepts,
				exhaustiveConcepts,
				statedRelationships,
				axiomNonIsARelationships,
				existingInferredRelationships,
				additionalGroupedRelationships,
				axioms,
				neverGroupedTypeIds,
				propertyChains,
				statedConcreteDomainMembers,
				inferredConcreteDomainMembers,
				additionalGroupedConcreteDomainMembers,
				newInferredAncestors,
				newUnsatisfiableConcepts,
				newEquivalentConcepts,
				iterationOrder);
	}
}
package net.jxta.impl.cm;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.net.URI;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

import net.jxta.id.IDFactory;
import net.jxta.impl.cm.Srdi.Entry;
import net.jxta.impl.util.FakeSystemClock;
import net.jxta.impl.util.JavaSystemClock;
import net.jxta.impl.util.TimeUtils;
import net.jxta.impl.util.threads.TaskManager;
import net.jxta.peer.PeerID;
import net.jxta.peergroup.PeerGroup;
import net.jxta.peergroup.PeerGroupID;
import net.jxta.test.util.JUnitRuleMockery;

import org.jmock.Expectations;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Abstract contract test for SRDI index backends.
 *
 * <p>Concrete subclasses name the backend implementation under test via
 * {@link #getBackendClassname()} and construct backend instances via
 * {@link #createBackend(PeerGroup, String)}. The suite then verifies the
 * common {@code Srdi} behavior (add/remove/query/expiry/GC, plus isolation
 * between groups and between indices) against that backend.
 */
public abstract class AbstractSrdiIndexBackendTest {

    // Fixed, well-known peer IDs so expected results are deterministic across runs.
    public static final PeerID PEER_ID = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E504720503250335D5E0326CF3E4271A498E9D5CB98C7C703"));
    public static final PeerID PEER_ID_2 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E50472050325033212AC0685A254A879825EC23B36214EE03"));
    public static final PeerID PEER_ID_3 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E5047205032503364652E32BCBC4C8596D3CFE9613AE68903"));
    public static final PeerID PEER_ID_4 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E504720503250337B1043A089C6481B85A9CE0D4586662A03"));
    public static final PeerID PEER_ID_5 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E50472050325033624C1724F1CF4038BACF9C81719672A003"));
    public static final PeerID PEER_ID_6 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E5047205032503383D5217E1EBD4A97AA38C5DC3A32130903"));
    public static final PeerID PEER_ID_7 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E504720503250337B083C30A3F74643884195C51FD4894E03"));
    public static final PeerID PEER_ID_8 = PeerID.create(URI.create("urn:jxta:uuid-59616261646162614E504720503250332B22AC234DCD40A3902FB7073613E9E403"));

    // Two distinct peer groups, used to verify that indices in different groups are isolated.
    public static final PeerGroupID GROUP_ID_1 = PeerGroupID.create(URI.create("urn:jxta:uuid-7B96885D59E6498CB1E4C380479967CE02"));
    public static final PeerGroupID GROUP_ID_2 = PeerGroupID.create(URI.create("urn:jxta:uuid-631A2779A3E548748586A011B837A38302"));

    // "No threshold": pass the maximum int so queries return every match.
    private static final int NO_THRESHOLD = Integer.MAX_VALUE;

    /** Fully-qualified class name of the backend implementation under test. */
    public abstract String getBackendClassname();

    // Saved value of the backend system property, restored in tearDown().
    private String oldBackendValue;

    protected Srdi srdiIndex;
    // Fake clock installed into TimeUtils so tests can control entry expiry deterministically.
    protected FakeSystemClock clock;
    protected EntryComparator comparator;
    protected PeerGroup group1;
    protected PeerGroup group2;
    // Same index name as srdiIndex but in a different group — for group-isolation tests.
    protected Srdi srdiIndexForGroup2;
    // Different index name in the same group as srdiIndex — for index-isolation tests.
    private Srdi alternativeIndexForGroup1;
    protected TaskManager taskManager;

    @Rule
    public JUnitRuleMockery mockContext = new JUnitRuleMockery();

    @Rule
    public TemporaryFolder testFileStore = new TemporaryFolder();

    /**
     * Selects the backend via the {@code Srdi} system property, mocks the two peer
     * groups, builds three indices (two names in group1, one in group2), and
     * installs a fake clock.
     */
    @Before
    public void setUp() throws Exception {
        taskManager = new TaskManager();
        oldBackendValue = System.getProperty(Srdi.SRDI_INDEX_BACKEND_SYSPROP);
        System.setProperty(Srdi.SRDI_INDEX_BACKEND_SYSPROP, getBackendClassname());
        group1 = mockContext.mock(PeerGroup.class, "group1");
        group2 = mockContext.mock(PeerGroup.class, "group2");
        mockContext.checking(new Expectations() {{
            // Both groups hand back the shared TaskManager whenever asked.
            ignoring(group1).getTaskManager();
            will(returnValue(taskManager));
            ignoring(group2).getTaskManager();
            will(returnValue(taskManager));
        }});
        // Backend-specific construction expectations are supplied by the subclass.
        mockContext.checking(createExpectationsForConstruction_withPeerGroup_IndexName(group1, GROUP_ID_1, "group1"));
        mockContext.checking(createExpectationsForConstruction_withPeerGroup_IndexName(group2, GROUP_ID_2, "group2"));
        srdiIndex = new Srdi(createBackend(group1, "testIndex"), Srdi.NO_AUTO_GC, taskManager.getScheduledExecutorService());
        srdiIndexForGroup2 = new Srdi(createBackend(group2, "testIndex"), Srdi.NO_AUTO_GC, taskManager.getScheduledExecutorService());
        alternativeIndexForGroup1 = new Srdi(createBackend(group1, "testIndex2"), Srdi.NO_AUTO_GC, taskManager.getScheduledExecutorService());
        clock = new FakeSystemClock();
        comparator = new EntryComparator();
        TimeUtils.setClock(clock);
    }

    /** Creates a backend instance for the given group and index name. */
    protected abstract SrdiAPI createBackend(PeerGroup group, String indexName) throws Exception;

    /**
     * Restores the global clock and the backend system property.
     * NOTE(review): only {@code srdiIndex} is stopped here — presumably the other
     * two indices are cleaned up by TaskManager shutdown; verify against Srdi.stop().
     */
    @After
    public void tearDown() throws Exception {
        srdiIndex.stop();
        TimeUtils.resetClock();
        taskManager.shutdown();
        if(oldBackendValue == null) {
            System.clearProperty(Srdi.SRDI_INDEX_BACKEND_SYSPROP);
        } else {
            System.setProperty(Srdi.SRDI_INDEX_BACKEND_SYSPROP, oldBackendValue);
        }
    }

    /** A single add must be retrievable with its expiration and peer ID intact. */
    @Test
    public void testAdd() throws Exception {
        srdiIndex.add("a", "b", "c", PEER_ID, 10000L);
        List<Entry> record = srdiIndex.getRecord("a", "b", "c");
        assertNotNull(record);
        assertEquals(1, record.size());
        assertEquals(10000L, record.get(0).expiration);
        assertEquals(PEER_ID, record.get(0).peerid);
    }

    /** Re-adding the same key/peer must update (not duplicate) the entry's expiry. */
    @Test
    public void testAdd_twiceShouldUpdateExpiry() throws Exception {
        srdiIndex.add("a", "b", "c", PEER_ID, 10000L);
        srdiIndex.add("a", "b", "c", PEER_ID, 15000L);
        List<Entry> results = srdiIndex.getRecord("a", "b", "c");
        assertNotNull(results);
        assertEquals(1, results.size());
        assertEquals(15000L, results.get(0).expiration);
        assertEquals(PEER_ID, results.get(0).peerid);
    }

    /** Stored expiry is absolute: current (fake) time plus the relative lifetime passed to add(). */
    @Test
    public void testAdd_calculatesAbsoluteExpiry() throws Exception {
        clock.currentTime = 30000L;
        srdiIndex.add("a", "b", "c", PEER_ID, 5000L);
        srdiIndex.add("a", "b", "c", PEER_ID_2, 6000L);
        List<Entry> results = srdiIndex.getRecord("a", "b", "c");
        assertNotNull(results);
        assertEquals(2, results.size());
        assertContains(results, comparator, new Entry(PEER_ID, 35000L), new Entry(PEER_ID_2, 36000L));
    }

    /** remove(peer) drops all of that peer's entries (after GC) but nobody else's. */
    @Test
    public void testRemove() throws Exception {
        srdiIndex.add("a", "b", "c", PEER_ID, 1000L);
        srdiIndex.add("a", "c", "d", PEER_ID, 1000L);
        // this record should not be removed, uses a different peer id
        srdiIndex.add("a", "c", "d", PEER_ID_2, 1000L);
        srdiIndex.remove(PEER_ID);
        // remove does not necessarily take effect until the next index GC
        srdiIndex.garbageCollect();
        assertEquals(0, srdiIndex.getRecord("a", "b", "c").size());
        List<Entry> results = srdiIndex.getRecord("a", "c", "d");
        assertEquals(1, results.size());
        assertContains(results, comparator, new Entry(PEER_ID_2, 1000L));
    }

    /** Adds 100 generated peers to one key, then removes them one at a time, checking the survivors each step. */
    @Test
    public void testBulkAddAndRemove() throws Exception {
        Queue<PeerID> peers = new LinkedList<PeerID>();
        for(int i=0; i < 100; i++) {
            PeerID peer = IDFactory.newPeerID(PeerGroupID.defaultNetPeerGroupID);
            peers.add(peer);
            srdiIndex.add("a", "b", "c", peer, 1000L);
        }
        while(peers.size() > 0) {
            assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), peers.toArray(new PeerID[0]));
            srdiIndex.remove(peers.remove());
        }
    }

    /** getRecord must return every entry for the exact (pkey, attribute, value) triple and nothing else. */
    @Test
    public void testGetRecord_forMultipleMatches() throws Exception {
        srdiIndex.add("a", "b", "c", PEER_ID, 1000L);
        srdiIndex.add("a", "b", "c", PEER_ID_2, 2000L);
        srdiIndex.add("a", "b", "c", PEER_ID_3, 3000L);
        // these entries should not be returned
        srdiIndex.add("a", "b", "d", PEER_ID_4, 4000L); // wrong value
        srdiIndex.add("b", "b", "c", PEER_ID_5, 1000L); // wrong primary key
        srdiIndex.add("a", "c", "c", PEER_ID_6, 1000L); // wrong attribute
        List<Entry> results = srdiIndex.getRecord("a", "b", "c");
        assertNotNull(results);
        assertEquals(3, results.size());
        assertContains(results, comparator, new Entry(PEER_ID, 1000L), new Entry(PEER_ID_2, 2000L), new Entry(PEER_ID_3, 3000L));
    }

    /** A query with no wildcard is an exact, case-sensitive value match. */
    @Test
    public void testQuery_exactMatch() throws Exception {
        srdiIndex.add("a", "b", "test", PEER_ID, 1000L);
        srdiIndex.add("a", "b", "test", PEER_ID_2, 1000L);
        // these entries should not be returned
        srdiIndex.add("a", "b", "testing", PEER_ID_3, 1000L); // is not exactly "test"
        srdiIndex.add("a", "b", "tEsT", PEER_ID_4, 1000L); // wrong case
        srdiIndex.add("a", "c", "test", PEER_ID_5, 1000L); // wrong attribute
        srdiIndex.add("b", "b", "test", PEER_ID_6, 1000L); // wrong primary key
        List<PeerID> matches = srdiIndex.query("a", "b", "test", NO_THRESHOLD);
        assertNotNull(matches);
        assertEquals(2, matches.size());
        assertContains(matches, PEER_ID, PEER_ID_2);
    }
@Test public void testQuery_startsWith() throws Exception { srdiIndex.add("a", "b", "test", PEER_ID, 1000L); srdiIndex.add("a", "b", "testing", PEER_ID_2, 1000L); // these entries should not be returned srdiIndex.add("a", "b", "alsotesting", PEER_ID_3, 1000L); // does not start with "test" srdiIndex.add("a", "c", "test123", PEER_ID_4, 1000L); // wrong attribute srdiIndex.add("a", "b", "tEst", PEER_ID_5, 1000L); // wrong case srdiIndex.add("b", "b", "testing", PEER_ID_6, 1000L); // wrong primary key List<PeerID> results = srdiIndex.query("a", "b", "test*", NO_THRESHOLD); assertEquals(2, results.size()); assertContains(results, PEER_ID, PEER_ID_2); } @Test public void testQuery_endsWith() throws Exception { srdiIndex.add("a", "b", "alpha", PEER_ID, 1000L); srdiIndex.add("a", "b", "delta", PEER_ID_2, 1000L); srdiIndex.add("a", "b", "a", PEER_ID_3, 1000L); // these entries should not be returned srdiIndex.add("a", "b", "charlie", PEER_ID_4, 1000L); // does not end in "a" srdiIndex.add("a", "c", "alpha", PEER_ID_5, 1000L); // wrong attribute srdiIndex.add("a", "b", "alphA", PEER_ID_6, 1000L); // wrong case srdiIndex.add("b", "b", "alpha", PEER_ID_7, 1000L); // wrong primary key List<PeerID> results = srdiIndex.query("a", "b", "*a", NO_THRESHOLD); assertEquals(3, results.size()); assertContains(results, PEER_ID, PEER_ID_2, PEER_ID_3); } @Test public void testQuery_contains() throws Exception { srdiIndex.add("a", "b", "elf", PEER_ID, 1000L); srdiIndex.add("a", "b", "golfer", PEER_ID_2, 1000L); srdiIndex.add("a", "b", "lfx", PEER_ID_3, 1000L); srdiIndex.add("a", "b", "lf", PEER_ID_4, 1000L); // these entries should not be returned srdiIndex.add("a", "b", "planet", PEER_ID_5, 1000L); // does not contain "lf" srdiIndex.add("a", "c", "selfish", PEER_ID_6, 1000L); // wrong attribute srdiIndex.add("a", "b", "lFoo", PEER_ID_7, 1000L); // wrong case srdiIndex.add("b", "b", "golfer", PEER_ID_8, 1000L); // wrong primary key List<PeerID> results = srdiIndex.query("a", "b", "*lf*", 
NO_THRESHOLD); assertEquals(4, results.size()); assertContains(results, PEER_ID, PEER_ID_2, PEER_ID_3, PEER_ID_4); } @Test public void testQuery_withThreshold() throws Exception { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); srdiIndex.add("a", "b", "c", PEER_ID_2, 1000L); srdiIndex.add("a", "b", "c", PEER_ID_3, 1000L); srdiIndex.add("a", "b", "c", PEER_ID_4, 1000L); List<PeerID> results = srdiIndex.query("a", "b", "c", 2); assertEquals(2, results.size()); assertTrue(containsXOf(results, 2, PEER_ID, PEER_ID_2, PEER_ID_3, PEER_ID_4)); } @Test public void testQuery_withLargeResultSet() throws Exception { PeerID[] ids = new PeerID[500]; PeerID[] nonExpired = new PeerID[250]; for(int i=0; i < 250; i++) { ids[i] = IDFactory.newPeerID(PeerGroupID.defaultNetPeerGroupID); srdiIndex.add("a", "b", "c", ids[i], 1000L * i); } for(int i=0; i < 250; i++) { ids[i+250] = IDFactory.newPeerID(PeerGroupID.defaultNetPeerGroupID); nonExpired[i] = ids[i+250]; srdiIndex.add("a", "b", "c", ids[i+250], 1000L * (i+250)); } assertEquals(500, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); assertTrue(containsXOf(srdiIndex.query("a", "b", "c", 100), 100, ids)); clock.currentTime = 250000; assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), nonExpired); } @Test public void testQuery_primaryKeyOnly() throws Exception { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); srdiIndex.add("a", "c", "c", PEER_ID, 1000L); srdiIndex.add("a", "e", "d", PEER_ID_2, 1000L); srdiIndex.add("b", "x", "y", PEER_ID, 1000L); List<PeerID> results = srdiIndex.query("a", null, null, NO_THRESHOLD); assertEquals(2, results.size()); assertContains(results, PEER_ID, PEER_ID_2); } @Test public void testGarbageCollect() throws Exception { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); srdiIndex.add("a", "b", "c", PEER_ID_2, 2000L); srdiIndex.add("a", "b", "c", PEER_ID_3, 3000L); srdiIndex.add("a", "b", "c", PEER_ID_4, 4000L); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID, PEER_ID_2, 
PEER_ID_3, PEER_ID_4); clock.currentTime = 1500L; srdiIndex.garbageCollect(); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID_2, PEER_ID_3, PEER_ID_4); clock.currentTime = 2500L; srdiIndex.garbageCollect(); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID_3, PEER_ID_4); clock.currentTime = 3500L; srdiIndex.garbageCollect(); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID_4); clock.currentTime = 4500L; srdiIndex.garbageCollect(); assertEquals(0, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); } @Test public void testGarbageCollect_automatic() throws Exception { TimeUtils.setClock(new JavaSystemClock()); Srdi srdiIndexWithAutoGC = new Srdi(createBackend(group1, "gcIndex"), 500L, taskManager.getScheduledExecutorService()); srdiIndexWithAutoGC.add("a", "b", "c", PEER_ID, 500L); assertEquals(1, srdiIndexWithAutoGC.query("a", "b", "c", NO_THRESHOLD).size()); Thread.sleep(1000L); assertEquals(0, srdiIndexWithAutoGC.query("a", "b", "c", NO_THRESHOLD).size()); } @Test public void testClear() throws Exception { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID); srdiIndex.clear(); assertEquals(0, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); srdiIndex.add("a", "b", "c", PEER_ID_2, 1000L); assertEquals(1, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); assertEquals(PEER_ID_2, srdiIndex.query("a", "b", "c", NO_THRESHOLD).get(0)); } @Test public void testDataSurvivesRestart() throws Exception { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); srdiIndex.stop(); Srdi restarted = new Srdi(createBackend(group1, "testIndex"), Srdi.NO_AUTO_GC, taskManager.getScheduledExecutorService()); assertEquals(1, restarted.query("a", "b", "c", NO_THRESHOLD).size()); assertEquals(PEER_ID, restarted.query("a", "b", "c", NO_THRESHOLD).get(0)); } @Test public void testClearViaStatic() throws Exception { Srdi index = srdiIndex; index.add("a", "b", "c", 
PEER_ID, 1000L); index.stop(); Srdi index2 = alternativeIndexForGroup1; index2.add("a", "b", "c", PEER_ID_2, 1000L); index2.stop(); Srdi.clearSrdi(group1); Srdi restarted = new Srdi(group1, "testIndex"); assertEquals(0, restarted.query("a", "b", "c", NO_THRESHOLD).size()); restarted.stop(); Srdi restarted2 = new Srdi(group1, "testIndex2"); assertEquals(0, restarted2.query("a", "b", "c", NO_THRESHOLD).size()); restarted.stop(); } @Test public void testClearViaStatic_groupsWithSameStoreAreIsolated() { srdiIndex.add("a", "b", "c", PEER_ID, 1000L); srdiIndexForGroup2.add("a", "b", "c", PEER_ID, 1000L); srdiIndex.stop(); srdiIndexForGroup2.stop(); Srdi.clearSrdi(group1); Srdi group1IndexRestarted = new Srdi(group1, "testIndex"); Srdi group2IndexRestarted = new Srdi(group2, "testIndex"); assertTrue(group1IndexRestarted.query("a", "b", "c", NO_THRESHOLD).isEmpty()); assertContains(group2IndexRestarted.query("a", "b", "c", NO_THRESHOLD), PEER_ID); } @Test public void testAdd_GroupIsolation_withinSameStore() { checkAddIsolation(srdiIndex, srdiIndexForGroup2); } protected void checkAddIsolation(Srdi a, Srdi b) { // sanity check: there should be no results on a newly created index assertEquals(0, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); assertEquals(0, srdiIndexForGroup2.query("a", "b", "c", NO_THRESHOLD).size()); srdiIndex.add("a", "b", "c", PEER_ID, 1000L); // we should not see the result in index2 assertEquals(1, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); assertTrue(srdiIndexForGroup2.query("a", "b", "c", NO_THRESHOLD).isEmpty()); srdiIndexForGroup2.add("a", "b", "c", PEER_ID_2, 1000L); // each index should have a different peer id assertEquals(1, srdiIndex.query("a", "b", "c", NO_THRESHOLD).size()); assertContains(srdiIndex.query("a", "b", "c", NO_THRESHOLD), PEER_ID); assertEquals(1, srdiIndexForGroup2.query("a", "b", "c", NO_THRESHOLD).size()); assertContains(srdiIndexForGroup2.query("a", "b", "c", NO_THRESHOLD), PEER_ID_2); } @Test public void 
testClear_GroupIsolation_withinSameStore() { checkClearIsolation(srdiIndex, srdiIndexForGroup2); } protected void checkClearIsolation(Srdi a, Srdi b) { a.add("a", "b", "c", PEER_ID, 1000L); b.add("a", "b", "c", PEER_ID, 1000L); a.clear(); // clear should only have affected the first index assertTrue(a.query("a", "b", "c", NO_THRESHOLD).isEmpty()); assertContains(b.query("a", "b", "c", NO_THRESHOLD), PEER_ID); } @Test public void testRemove_GroupIsolation_withinSameStore() { checkRemoveIsolation(srdiIndex, srdiIndexForGroup2); } protected void checkRemoveIsolation(Srdi a, Srdi b) { a.add("a", "b", "c", PEER_ID, 1000L); b.add("a", "b", "c", PEER_ID, 1000L); a.remove(PEER_ID); assertTrue(a.query("a", "b", "c", NO_THRESHOLD).isEmpty()); assertContains(b.query("a", "b", "c", NO_THRESHOLD), PEER_ID); } @Test public void testAdd_IndexIsolation_withinSameGroup() { checkAddIsolation(srdiIndex, alternativeIndexForGroup1); } @Test public void testClear_IndexIsolation_withinSameGroup() { checkClearIsolation(srdiIndex, alternativeIndexForGroup1); } @Test public void testRemove_IndexIsolation_withinSameGroup() { checkRemoveIsolation(srdiIndex, srdiIndexForGroup2); } @Test public void testConstruction_withGroup_IndexName() { System.setProperty(Srdi.SRDI_INDEX_BACKEND_SYSPROP, getBackendClassname()); final PeerGroup group = mockContext.mock(PeerGroup.class); mockContext.checking(new Expectations() {{ ignoring(group).getTaskManager(); will(returnValue(taskManager)); }}); mockContext.checking(createExpectationsForConstruction_withPeerGroup_IndexName(group, GROUP_ID_1, "testGroup")); Srdi srdiIndex = new Srdi(group, "testIndex"); assertEquals(getBackendClassname(), srdiIndex.getBackendClassName()); srdiIndex.stop(); } protected abstract Expectations createExpectationsForConstruction_withPeerGroup_IndexName(final PeerGroup mockGroup, final PeerGroupID groupId, String groupName); protected <T> void assertContains(List<T> entries, T... 
expected) { assertContains(entries, null, expected); } protected <T> void assertContains(List<T> entries, Comparator<T> comparator, T... expected) { HashMap<T, T> toFind = new HashMap<T, T>(); for(T e : expected) { toFind.put(e, e); } for(T e : entries) { if(toFind.containsKey(e)) { T match = toFind.get(e); assertEquals(match, e); if(comparator != null) { assertEquals(0, comparator.compare(match, e)); } toFind.remove(e); } } assertTrue("Expected entries not found: " + toFind.keySet(), toFind.isEmpty()); } private class EntryComparator implements Comparator<Entry> { public int compare(Entry a, Entry b) { if(a.peerid.equals(b.peerid) && a.expiration == b.expiration) { return 0; } else if(a.expiration < b.expiration) { return -1; } return 1; } } protected <T> boolean containsXOf(Collection<T> set, int numExpected, T... expectedSet) { int numMatches = 0; for (T expected : expectedSet) { if(set.contains(expected)) { numMatches++; } } return numMatches == numExpected; } }
/*
 * Copyright 2016 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp.ijs;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Comparator.comparing;

import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.AbstractCompiler;
import com.google.javascript.jscomp.CompilerPass;
import com.google.javascript.jscomp.DiagnosticType;
import com.google.javascript.jscomp.JSError;
import com.google.javascript.jscomp.NodeTraversal;
import com.google.javascript.jscomp.NodeUtil;
import com.google.javascript.jscomp.Scope;
import com.google.javascript.jscomp.Var;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.JSType.Nullability;
import java.util.Comparator;
import java.util.List;
import javax.annotation.Nullable;

/**
 * The goal of this pass is to shrink the AST, preserving only typing, not behavior.
 *
 * <p>To do this, it does things like removing function/method bodies, rvalues that are not needed,
 * expressions that are not declarations, etc.
 *
 * <p>This is conceptually similar to the ijar tool[1] that bazel uses to shrink jars into minimal
 * versions that can be used equivalently for compilation of downstream dependencies.
 *
 * <p>[1] https://github.com/bazelbuild/bazel/blob/master/third_party/ijar/README.txt
 */
public class ConvertToTypedInterface implements CompilerPass {

  // Warning for an inferred @const whose type the compiler *can* suggest.
  static final DiagnosticType CONSTANT_WITH_SUGGESTED_TYPE =
      DiagnosticType.warning(
          "JSC_CONSTANT_WITH_SUGGESTED_TYPE",
          "Constants in top-level should have types explicitly specified.\n"
              + "You may want specify this type as:\t@const '{'{0}'}'");

  // Warning for an inferred @const with no type information available at all.
  static final DiagnosticType CONSTANT_WITHOUT_EXPLICIT_TYPE =
      DiagnosticType.warning(
          "JSC_CONSTANT_WITHOUT_EXPLICIT_TYPE",
          "Constants in top-level should have types explicitly specified.");

  // Warning for type definitions hidden inside goog.scope (emitted under $jscomp roots).
  static final DiagnosticType GOOG_SCOPE_HIDDEN_TYPE =
      DiagnosticType.warning(
          "JSC_GOOG_SCOPE_HIDDEN_TYPE",
          "Please do not use goog.scope to hide declarations.\n"
              + "It is preferable to either create an @private namespaced declaration, or migrate "
              + "to goog.module.");

  // Call expressions whose statements must survive the stripping, because they
  // carry module/namespace/typing significance rather than runtime behavior.
  private static final ImmutableSet<String> CALLS_TO_PRESERVE =
      ImmutableSet.of(
          "Polymer",
          "goog.addSingletonGetter",
          "goog.define",
          "goog.forwardDeclare",
          "goog.module",
          "goog.module.declareLegacyNamespace",
          "goog.declareModuleId",
          "goog.provide",
          "goog.require",
          "goog.requireType");

  private final AbstractCompiler compiler;

  public ConvertToTypedInterface(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  /**
   * Reports {@code diagnostic} on {@code node} unless the node comes from a
   * test file, which is exempt from .i.js generation diagnostics.
   */
  private static void maybeReport(
      AbstractCompiler compiler, Node node, DiagnosticType diagnostic, String... fillers) {
    String sourceName = NodeUtil.getSourceName(node);
    if (sourceName.endsWith("_test.js") || sourceName.endsWith("_test.closure.js")) {
      // Allow _test.js files and their tsickle generated
      // equivalents to avoid emitting errors at .i.js generation time.
      // We expect these files to not be consumed by any other downstream libraries.
      return;
    }
    compiler.report(JSError.make(node, diagnostic, fillers));
  }

  /**
   * Warns on a to-be-inferred, non-extern, non-private @const declaration,
   * including the compiler-inferred type in the message when one is available.
   */
  private static void maybeWarnForConstWithoutExplicitType(
      AbstractCompiler compiler, PotentialDeclaration decl) {
    if (decl.isConstToBeInferred()
        && !decl.getLhs().isFromExterns()
        && !JsdocUtil.isPrivate(decl.getJsDoc())) {
      Node nameNode = decl.getLhs();
      if (nameNode.getJSType() == null) {
        maybeReport(compiler, nameNode, CONSTANT_WITHOUT_EXPLICIT_TYPE);
      } else {
        maybeReport(
            compiler,
            nameNode,
            CONSTANT_WITH_SUGGESTED_TYPE,
            nameNode.getJSType().toAnnotationString(Nullability.EXPLICIT));
      }
    }
  }

  @Override
  public void process(Node externs, Node root) {
    // Each SCRIPT child of the root is processed independently.
    for (Node script = root.getFirstChild(); script != null; script = script.getNext()) {
      processFile(script);
    }
  }

  /**
   * Shrinks one script: drops fill files entirely, otherwise strips
   * non-declarations, propagates @const JSDoc, then simplifies declarations.
   */
  private void processFile(Node scriptNode) {
    checkArgument(scriptNode.isScript());
    String sourceFileName = scriptNode.getSourceFileName();
    if (AbstractCompiler.isFillFileName(sourceFileName)) {
      scriptNode.detach();
      return;
    }
    FileInfo currentFile = new FileInfo();
    NodeTraversal.traverse(compiler, scriptNode, new RemoveNonDeclarations());
    NodeTraversal.traverse(compiler, scriptNode, new PropagateConstJsdoc(currentFile));
    new SimplifyDeclarations(compiler, currentFile).simplifyAll();
  }

  /** Resolves a NAME rhs to its Var in the given scope; null for non-NAME nodes or unknown names. */
  @Nullable
  private static Var findNameDeclaration(Scope scope, Node rhs) {
    if (!rhs.isName()) {
      return null;
    }
    return scope.getVar(rhs.getString());
  }

  /**
   * First traversal: deletes everything that is not (part of) a declaration —
   * function bodies, non-declaration expressions, control-flow statements —
   * hoisting any declarations buried inside control flow up to the enclosing
   * statement position.
   */
  private static class RemoveNonDeclarations implements NodeTraversal.Callback {

    @Override
    public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
      switch (n.getToken()) {
        case FUNCTION:
          // Empty every function body except named-class constructors (their
          // bodies may declare fields via this.x assignments).
          if (!ClassUtil.isConstructor(n) || !ClassUtil.hasNamedClass(n)) {
            Node body = n.getLastChild();
            if (!body.isBlock() || body.hasChildren()) {
              t.reportCodeChange(body);
              body.replaceWith(IR.block().srcref(body));
              NodeUtil.markFunctionsDeleted(body, t.getCompiler());
            }
          }
          return true;
        case EXPR_RESULT:
          Node expr = n.getFirstChild();
          switch (expr.getToken()) {
            case CALL:
              Node callee = expr.getFirstChild();
              // goog.scope is expected to have been rewritten before this pass runs.
              checkState(!callee.matchesQualifiedName("goog.scope"));
              if (CALLS_TO_PRESERVE.contains(callee.getQualifiedName())) {
                return true;
              }
              NodeUtil.deleteNode(n, t.getCompiler());
              return false;
            case ASSIGN:
              Node lhs = expr.getFirstChild();
              // Keep only assignments that can act as declarations:
              // qualified names at the right scope, or this.prop in a ctor.
              if (!lhs.isQualifiedName()
                  || (lhs.isName() && !t.inGlobalScope() && !t.inModuleScope())
                  || (!ClassUtil.isThisProp(lhs)
                      && !t.inGlobalHoistScope()
                      && !t.inModuleHoistScope())) {
                NodeUtil.deleteNode(n, t.getCompiler());
                return false;
              }
              return true;
            case GETPROP:
              // A bare qualified name is only a declaration if it carries JSDoc.
              if (!expr.isQualifiedName() || expr.getJSDocInfo() == null) {
                NodeUtil.deleteNode(n, t.getCompiler());
                return false;
              }
              return true;
            default:
              NodeUtil.deleteNode(n, t.getCompiler());
              return false;
          }
        case COMPUTED_PROP:
          NodeUtil.deleteNode(n, t.getCompiler());
          return false;
        case THROW:
        case RETURN:
        case BREAK:
        case CONTINUE:
        case DEBUGGER:
        case EMPTY:
          if (NodeUtil.isStatementParent(parent)) {
            NodeUtil.deleteNode(n, t.getCompiler());
          }
          return false;
        case LABEL:
        case IF:
        case SWITCH:
        case CASE:
        case WHILE:
          // First child can't have declaration. Statement itself will be removed post-order.
          NodeUtil.deleteNode(n.getFirstChild(), t.getCompiler());
          return true;
        case TRY:
        case DO:
          // Second child can't have declarations. Statement itself will be removed post-order.
          NodeUtil.deleteNode(n.getSecondChild(), t.getCompiler());
          return true;
        case FOR:
          // Vanilla for(init; cond; incr): delete cond; after that delete, the
          // old incr has become the second child and is removed below.
          NodeUtil.deleteNode(n.getSecondChild(), t.getCompiler());
          // fall-through
        case FOR_OF:
        case FOR_AWAIT_OF:
        case FOR_IN:
          NodeUtil.deleteNode(n.getSecondChild(), t.getCompiler());
          Node initializer = n.removeFirstChild();
          if (initializer.isVar()) {
            // Keep var initializers: hoist them into the loop body so the
            // post-order visit can lift them out with the other declarations.
            n.getLastChild().addChildToFront(initializer);
          }
          return true;
        case CONST:
        case LET:
          // Block-scoped declarations only survive at global/module level.
          if (!t.inGlobalScope() && !t.inModuleScope()) {
            NodeUtil.removeChild(parent, n);
            t.reportCodeChange(parent);
            return false;
          }
          return true;
        case VAR:
          // var is hoisted, so it survives anywhere in a global/module hoist scope.
          if (!t.inGlobalHoistScope() && !t.inModuleHoistScope()) {
            NodeUtil.removeChild(parent, n);
            t.reportCodeChange(parent);
            return false;
          }
          return true;
        case MODULE_BODY:
        case CLASS:
        case DEFAULT_CASE:
        case BLOCK:
        case EXPORT:
        case IMPORT:
          return true;
        default:
          checkState(!NodeUtil.isStatement(n), n.getToken());
          return true;
      }
    }

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getToken()) {
        case TRY:
        case LABEL:
        case DEFAULT_CASE:
        case CASE:
        case DO:
        case WHILE:
        case FOR:
        case FOR_IN:
        case FOR_OF:
        case FOR_AWAIT_OF:
        case IF:
        case SWITCH:
          // Post-order: replace the control-flow statement with whatever
          // declarations remain inside it.
          if (n.hasParent()) {
            Node children = n.removeChildren();
            parent.addChildrenAfter(children, n);
            NodeUtil.removeChild(parent, n);
            t.reportCodeChange();
          }
          break;
        case VAR:
        case LET:
        case CONST:
          splitNameDeclarationsAndRemoveDestructuring(n, t);
          break;
        case BLOCK:
          // Flatten bare blocks (but not function bodies) into their parent.
          if (!parent.isFunction()) {
            parent.addChildrenAfter(n.removeChildren(), n);
            n.detach();
            t.reportCodeChange(parent);
          }
          break;
        default:
          break;
      }
    }

    /**
     * Does three simplifications to const/let/var nodes.
     *
     * <ul>
     *   <li>Splits them so that each declaration is a separate statement.
     *   <li>Removes non-import and non-alias destructuring statements, which we assume are not
     *       type declarations.
     *   <li>Moves inline JSDoc annotations onto the declaration nodes.
     * </ul>
     */
    static void splitNameDeclarationsAndRemoveDestructuring(Node n, NodeTraversal t) {
      checkArgument(NodeUtil.isNameDeclaration(n));
      JSDocInfo sharedJsdoc = n.getJSDocInfo();
      boolean isExport = n.getParent().isExport();
      Node statement = isExport ? n.getParent() : n;
      // Peel LHSs off the back of the declaration until only one remains.
      while (n.hasChildren()) {
        Node lhsToSplit = n.getLastChild();
        if (lhsToSplit.isDestructuringLhs()
            && !PotentialDeclaration.isImportRhs(lhsToSplit.getLastChild())
            && !PotentialDeclaration.isAliasDeclaration(lhsToSplit, lhsToSplit.getLastChild())) {
          // Remove destructuring statements, which we assume are not type declarations
          NodeUtil.markFunctionsDeleted(lhsToSplit, t.getCompiler());
          NodeUtil.removeChild(n, lhsToSplit);
          t.reportCodeChange();
          continue;
        }
        JSDocInfo nameJsdoc = lhsToSplit.getJSDocInfo();
        lhsToSplit.setJSDocInfo(null);
        JSDocInfo mergedJsdoc = JsdocUtil.mergeJsdocs(sharedJsdoc, nameJsdoc);
        if (n.hasOneChild()) {
          // Last remaining LHS stays on the original statement.
          n.setJSDocInfo(mergedJsdoc);
          return;
        }
        // A name declaration with more than one LHS is split into separate declarations.
        Node rhs = lhsToSplit.hasChildren() ? lhsToSplit.removeFirstChild() : null;
        Node newDeclaration =
            NodeUtil.newDeclaration(lhsToSplit.detach(), rhs, n.getToken()).srcref(n);
        newDeclaration.setJSDocInfo(mergedJsdoc);
        if (isExport) {
          newDeclaration = IR.export(newDeclaration).srcref(statement);
        }
        newDeclaration.insertAfter(statement);
        t.reportCodeChange();
      }
    }
  }

  /**
   * Second traversal: for each @const with an rvalue, derives JSDoc from the
   * rvalue (or, for this.prop assignments, from the declaration the NAME rvalue
   * refers to) and attaches it to the declaration.
   */
  private static class PropagateConstJsdoc extends ProcessConstJsdocCallback {

    PropagateConstJsdoc(FileInfo currentFile) {
      super(currentFile);
    }

    @Override
    protected void processConstWithRhs(NodeTraversal t, Node nameNode) {
      checkArgument(
          nameNode.isQualifiedName() || nameNode.isStringKey() || nameNode.isDestructuringLhs(),
          nameNode);
      Node jsdocNode = NodeUtil.getBestJSDocInfoNode(nameNode);
      JSDocInfo originalJsdoc = jsdocNode.getJSDocInfo();
      Node rhs = NodeUtil.getRValueOfLValue(nameNode);
      JSDocInfo newJsdoc = JsdocUtil.getJSDocForRhs(rhs, originalJsdoc);
      if (newJsdoc == null && ClassUtil.isThisProp(nameNode)) {
        // Fall back: copy the JSDoc of the variable being aliased onto this.prop.
        Var decl = findNameDeclaration(t.getScope(), rhs);
        newJsdoc = JsdocUtil.getJSDocForName(decl, originalJsdoc);
      }
      if (newJsdoc != null) {
        jsdocNode.setJSDocInfo(newJsdoc);
        t.reportCodeChange();
      }
    }
  }

  /**
   * Final phase: removes duplicate declarations of the same name, strips
   * declarations that are really updates, empties function parameter defaults,
   * and stamps unusable-type JSDoc on declarations without usable typing.
   */
  private static class SimplifyDeclarations {
    private final AbstractCompiler compiler;
    private final FileInfo currentFile;

    /** Levels of JSDoc, starting from those most likely to be on the canonical declaration. */
    enum TypingLevel {
      TYPED_JSDOC_DECLARATION,
      UNTYPED_JSDOC_DECLARATION,
      NO_JSDOC,
    }

    /** Number of '.' characters in a qualified name — a proxy for namespace depth. */
    static int countDots(String name) {
      int count = 0;
      for (int i = 0; i < name.length(); i++) {
        if (name.charAt(i) == '.') {
          count++;
        }
      }
      return count;
    }

    // Process shallower names before deeper ones so namespaces are handled
    // before their members.
    static final Comparator<String> SHORT_TO_LONG =
        comparing(SimplifyDeclarations::countDots);

    // Among duplicate declarations of one name, prefer the one with typed
    // JSDoc, then untyped JSDoc, then none.
    static final Comparator<PotentialDeclaration> DECLARATIONS_FIRST =
        comparing(
            decl -> {
              JSDocInfo jsdoc = decl.getJsDoc();
              if (jsdoc == null) {
                return TypingLevel.NO_JSDOC;
              }
              if (jsdoc.getTypeNodes().isEmpty()) {
                return TypingLevel.UNTYPED_JSDOC_DECLARATION;
              }
              return TypingLevel.TYPED_JSDOC_DECLARATION;
            });

    SimplifyDeclarations(AbstractCompiler compiler, FileInfo currentFile) {
      this.compiler = compiler;
      this.currentFile = currentFile;
    }

    /** Keeps only the best-typed declaration of each name (this.* props exempt). */
    private void removeDuplicateDeclarations() {
      for (String name : currentFile.getDeclarations().keySet()) {
        if (name.startsWith("this.")) {
          continue;
        }
        List<PotentialDeclaration> declList = currentFile.getDeclarations().get(name);
        declList.sort(DECLARATIONS_FIRST);
        while (declList.size() > 1) {
          // Don't remove the first declaration (at index 0)
          PotentialDeclaration decl = declList.remove(1);
          decl.remove(compiler);
        }
      }
    }

    void simplifyAll() {
      // Remove duplicate assignments to the same symbol
      removeDuplicateDeclarations();
      // Simplify all names in the top-level scope.
      @SuppressWarnings("StreamToIterable")
      Iterable<String> seenNames =
          currentFile.getDeclarations().keySet().stream().sorted(SHORT_TO_LONG)::iterator;
      for (String name : seenNames) {
        for (PotentialDeclaration decl : currentFile.getDeclarations().get(name)) {
          processDeclaration(name, decl);
        }
      }
    }

    /** Removes or simplifies one declaration, recursing into function/class rvalues. */
    private void processDeclaration(String name, PotentialDeclaration decl) {
      if (shouldRemove(name, decl)) {
        decl.remove(compiler);
        return;
      }
      if (decl.getRhs() != null && decl.getRhs().isFunction()) {
        processFunction(decl.getRhs());
      } else if (decl.getRhs() != null && isClass(decl.getRhs())) {
        processClass(decl.getRhs());
      }
      setUndeclaredToUnusableType(decl);
      decl.simplify(compiler);
    }

    /** Drops empty class members and simplifies the parameters of each remaining method. */
    private void processClass(Node n) {
      checkArgument(isClass(n));
      for (Node member = n.getLastChild().getFirstChild(); member != null; ) {
        Node next = member.getNext();
        if (member.isEmpty()) {
          NodeUtil.deleteNode(member, compiler);
        } else {
          processFunction(member.getLastChild());
        }
        member = next;
      }
    }

    private void processFunction(Node n) {
      checkArgument(n.isFunction());
      processFunctionParameters(n.getSecondChild());
    }

    /** Replaces default parameter values with `undefined` — values are behavior, not typing. */
    private void processFunctionParameters(Node paramList) {
      checkArgument(paramList.isParamList());
      for (Node arg = paramList.getFirstChild(); arg != null; arg = arg.getNext()) {
        if (arg.isDefaultValue()) {
          Node rhs = arg.getLastChild();
          rhs.replaceWith(NodeUtil.newUndefinedNode(rhs));
          compiler.reportChangeToEnclosingScope(arg);
        }
      }
    }

    /** True for class literals and goog.defineClass calls. */
    private static boolean isClass(Node n) {
      return n.isClass() || NodeUtil.isCallTo(n, "goog.defineClass");
    }

    /** Leading segment of a qualified name ("a.b.c" -> "a"). */
    private static String rootName(String qualifiedName) {
      int dotIndex = qualifiedName.indexOf('.');
      if (dotIndex == -1) {
        return qualifiedName;
      }
      return qualifiedName.substring(0, dotIndex);
    }

    /**
     * Decides whether this declaration should be dropped: all $jscomp-rooted
     * names (warning if they hide a type), and apparent updates to names not
     * declared/provided in this file.
     */
    private boolean shouldRemove(String name, PotentialDeclaration decl) {
      if ("$jscomp".equals(rootName(name))) {
        if (decl.isDetached()) {
          return true;
        }
        // These are created by goog.scope processing, but clash with each other
        // and should not be depended on.
        if (decl.getRhs() != null && decl.getRhs().isClass()
            || decl.getJsDoc() != null && decl.getJsDoc().containsTypeDefinition()) {
          maybeReport(compiler, decl.getLhs(), GOOG_SCOPE_HIDDEN_TYPE);
        }
        return true;
      }
      // This looks like an update rather than a declaration in this file.
      return !name.startsWith("this.")
          && !decl.isDefiniteDeclaration()
          && !currentFile.isPrefixProvided(name)
          && !currentFile.isStrictPrefixDeclared(name);
    }

    /**
     * For declarations without a usable declared type (and that aren't
     * namespaces, Symbol calls, or preserved), warns and replaces their JSDoc
     * with the "unusable type" marker.
     */
    private void setUndeclaredToUnusableType(PotentialDeclaration decl) {
      Node nameNode = decl.getLhs();
      JSDocInfo jsdoc = decl.getJsDoc();
      if (decl.shouldPreserve()
          || NodeUtil.isNamespaceDecl(nameNode)
          || (decl.getRhs() != null && NodeUtil.isCallTo(decl.getRhs(), "Symbol"))
          || (jsdoc != null && jsdoc.containsDeclaration() && !decl.isConstToBeInferred())) {
        return;
      }
      maybeWarnForConstWithoutExplicitType(compiler, decl);
      Node jsdocNode = NodeUtil.getBestJSDocInfoNode(nameNode);
      jsdocNode.setJSDocInfo(JsdocUtil.getUnusableTypeJSDoc(jsdoc));
    }
  }
}
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.views.toolbar; import android.content.Context; import android.graphics.drawable.Animatable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.MenuItem; import com.facebook.drawee.backends.pipeline.Fresco; import com.facebook.drawee.controller.BaseControllerListener; import com.facebook.drawee.drawable.ScalingUtils; import com.facebook.drawee.generic.GenericDraweeHierarchy; import com.facebook.drawee.generic.GenericDraweeHierarchyBuilder; import com.facebook.drawee.interfaces.DraweeController; import com.facebook.drawee.view.DraweeHolder; import com.facebook.drawee.view.MultiDraweeHolder; import com.facebook.imagepipeline.image.ImageInfo; import com.facebook.imagepipeline.image.QualityInfo; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.uimanager.PixelUtil; import javax.annotation.Nullable; /** * Custom implementation of the {@link Toolbar} widget that adds support for remote images in logo * and navigationIcon using fresco. 
*/ public class ReactToolbar extends Toolbar { private static final String PROP_ACTION_ICON = "icon"; private static final String PROP_ACTION_SHOW = "show"; private static final String PROP_ACTION_SHOW_WITH_TEXT = "showWithText"; private static final String PROP_ACTION_TITLE = "title"; private static final String PROP_ICON_URI = "uri"; private static final String PROP_ICON_WIDTH = "width"; private static final String PROP_ICON_HEIGHT = "height"; private final DraweeHolder mLogoHolder; private final DraweeHolder mNavIconHolder; private final DraweeHolder mOverflowIconHolder; private final MultiDraweeHolder<GenericDraweeHierarchy> mActionsHolder = new MultiDraweeHolder<>(); private IconControllerListener mLogoControllerListener; private IconControllerListener mNavIconControllerListener; private IconControllerListener mOverflowIconControllerListener; /** * Attaches specific icon width & height to a BaseControllerListener which will be used to * create the Drawable */ private abstract class IconControllerListener extends BaseControllerListener<ImageInfo> { private final DraweeHolder mHolder; private IconImageInfo mIconImageInfo; public IconControllerListener(DraweeHolder holder) { mHolder = holder; } public void setIconImageInfo(IconImageInfo iconImageInfo) { mIconImageInfo = iconImageInfo; } @Override public void onFinalImageSet(String id, @Nullable ImageInfo imageInfo, @Nullable Animatable animatable) { super.onFinalImageSet(id, imageInfo, animatable); final ImageInfo info = mIconImageInfo != null ? 
mIconImageInfo : imageInfo; setDrawable(new DrawableWithIntrinsicSize(mHolder.getTopLevelDrawable(), info)); } protected abstract void setDrawable(Drawable d); } private class ActionIconControllerListener extends IconControllerListener { private final MenuItem mItem; ActionIconControllerListener(MenuItem item, DraweeHolder holder) { super(holder); mItem = item; } @Override protected void setDrawable(Drawable d) { mItem.setIcon(d); ReactToolbar.this.requestLayout(); } } /** * Simple implementation of ImageInfo, only providing width & height */ private static class IconImageInfo implements ImageInfo { private int mWidth; private int mHeight; public IconImageInfo(int width, int height) { mWidth = width; mHeight = height; } @Override public int getWidth() { return mWidth; } @Override public int getHeight() { return mHeight; } @Override public QualityInfo getQualityInfo() { return null; } } public ReactToolbar(Context context) { super(context); mLogoHolder = DraweeHolder.create(createDraweeHierarchy(), context); mNavIconHolder = DraweeHolder.create(createDraweeHierarchy(), context); mOverflowIconHolder = DraweeHolder.create(createDraweeHierarchy(), context); mLogoControllerListener = new IconControllerListener(mLogoHolder) { @Override protected void setDrawable(Drawable d) { setLogo(d); } }; mNavIconControllerListener = new IconControllerListener(mNavIconHolder) { @Override protected void setDrawable(Drawable d) { setNavigationIcon(d); } }; mOverflowIconControllerListener = new IconControllerListener(mOverflowIconHolder) { @Override protected void setDrawable(Drawable d) { setOverflowIcon(d); } }; } private final Runnable mLayoutRunnable = new Runnable() { @Override public void run() { measure( MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(getHeight(), MeasureSpec.EXACTLY)); layout(getLeft(), getTop(), getRight(), getBottom()); } }; @Override public void requestLayout() { super.requestLayout(); // The toolbar relies on a 
measure + layout pass happening after it calls requestLayout(). // Without this, certain calls (e.g. setLogo) only take effect after a second invalidation. post(mLayoutRunnable); } @Override public void onDetachedFromWindow() { super.onDetachedFromWindow(); detachDraweeHolders(); } @Override public void onStartTemporaryDetach() { super.onStartTemporaryDetach(); detachDraweeHolders(); } @Override public void onAttachedToWindow() { super.onAttachedToWindow(); attachDraweeHolders(); } @Override public void onFinishTemporaryDetach() { super.onFinishTemporaryDetach(); attachDraweeHolders(); } private void detachDraweeHolders() { mLogoHolder.onDetach(); mNavIconHolder.onDetach(); mOverflowIconHolder.onDetach(); mActionsHolder.onDetach(); } private void attachDraweeHolders() { mLogoHolder.onAttach(); mNavIconHolder.onAttach(); mOverflowIconHolder.onAttach(); mActionsHolder.onAttach(); } /* package */ void setLogoSource(@Nullable ReadableMap source) { setIconSource(source, mLogoControllerListener, mLogoHolder); } /* package */ void setNavIconSource(@Nullable ReadableMap source) { setIconSource(source, mNavIconControllerListener, mNavIconHolder); } /* package */ void setOverflowIconSource(@Nullable ReadableMap source) { setIconSource(source, mOverflowIconControllerListener, mOverflowIconHolder); } /* package */ void setActions(@Nullable ReadableArray actions) { Menu menu = getMenu(); menu.clear(); mActionsHolder.clear(); if (actions != null) { for (int i = 0; i < actions.size(); i++) { ReadableMap action = actions.getMap(i); MenuItem item = menu.add(Menu.NONE, Menu.NONE, i, action.getString(PROP_ACTION_TITLE)); if (action.hasKey(PROP_ACTION_ICON)) { setMenuItemIcon(item, action.getMap(PROP_ACTION_ICON)); } int showAsAction = action.hasKey(PROP_ACTION_SHOW) ? 
action.getInt(PROP_ACTION_SHOW) : MenuItem.SHOW_AS_ACTION_NEVER; if (action.hasKey(PROP_ACTION_SHOW_WITH_TEXT) && action.getBoolean(PROP_ACTION_SHOW_WITH_TEXT)) { showAsAction = showAsAction | MenuItem.SHOW_AS_ACTION_WITH_TEXT; } item.setShowAsAction(showAsAction); } } } private void setMenuItemIcon(final MenuItem item, ReadableMap iconSource) { DraweeHolder<GenericDraweeHierarchy> holder = DraweeHolder.create(createDraweeHierarchy(), getContext()); ActionIconControllerListener controllerListener = new ActionIconControllerListener(item, holder); controllerListener.setIconImageInfo(getIconImageInfo(iconSource)); setIconSource(iconSource, controllerListener, holder); mActionsHolder.add(holder); } /** * Sets an icon for a specific icon source. If the uri indicates an icon * to be somewhere remote (http/https) or on the local filesystem, it uses fresco to load it. * Otherwise it loads the Drawable from the Resources and directly returns it via a callback */ private void setIconSource(ReadableMap source, IconControllerListener controllerListener, DraweeHolder holder) { String uri = source != null ? 
source.getString(PROP_ICON_URI) : null; if (uri == null) { controllerListener.setIconImageInfo(null); controllerListener.setDrawable(null); } else if (uri.startsWith("http://") || uri.startsWith("https://") || uri.startsWith("file://")) { controllerListener.setIconImageInfo(getIconImageInfo(source)); DraweeController controller = Fresco.newDraweeControllerBuilder() .setUri(Uri.parse(uri)) .setControllerListener(controllerListener) .setOldController(holder.getController()) .build(); holder.setController(controller); holder.getTopLevelDrawable().setVisible(true, true); } else { controllerListener.setDrawable(getDrawableByName(uri)); } } private GenericDraweeHierarchy createDraweeHierarchy() { return new GenericDraweeHierarchyBuilder(getResources()) .setActualImageScaleType(ScalingUtils.ScaleType.FIT_CENTER) .setFadeDuration(0) .build(); } private int getDrawableResourceByName(String name) { return getResources().getIdentifier( name, "drawable", getContext().getPackageName()); } private Drawable getDrawableByName(String name) { int drawableResId = getDrawableResourceByName(name); if (drawableResId != 0) { return getResources().getDrawable(getDrawableResourceByName(name)); } else { return null; } } private IconImageInfo getIconImageInfo(ReadableMap source) { if (source.hasKey(PROP_ICON_WIDTH) && source.hasKey(PROP_ICON_HEIGHT)) { final int width = Math.round(PixelUtil.toPixelFromDIP(source.getInt(PROP_ICON_WIDTH))); final int height = Math.round(PixelUtil.toPixelFromDIP(source.getInt(PROP_ICON_HEIGHT))); return new IconImageInfo(width, height); } else { return null; } } }
/** * SIX OVAL - https://nakamura5akihito.github.io/ * Copyright (C) 2010 Akihito Nakamura * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.opensec.six.oval.model.independent; import io.opensec.six.oval.model.ComponentType; import io.opensec.six.oval.model.ElementRef; import io.opensec.six.oval.model.Family; import io.opensec.six.oval.model.definitions.EntityObjectStringType; import io.opensec.six.oval.model.definitions.Filter; import io.opensec.six.oval.model.definitions.Set; import io.opensec.six.oval.model.definitions.SystemObjectType; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; /** * The SQL57 object is used by a sql test to define the specific database * and query to be evaluated. * * @author Akihito Nakamura, AIST * @see <a href="http://oval.mitre.org/language/">OVAL Language</a> */ public class Sql57Object extends SystemObjectType { // XSD model: // choice( // set // sequence( // engine // version // connection_string // sql // filter // ) // ) private Set set; private EntityObjectEngineType engine; //{1..1} private EntityObjectStringType version; //{1..1} private EntityObjectStringType connection_string; //{1..1} private EntityObjectStringType sql; //{1..1} private final Collection<Filter> filter = new ArrayList<Filter>(); //{0..*} /** * Constructor. 
*/ public Sql57Object() { this( null, 0 ); } public Sql57Object( final String id, final int version ) { this( id, version, null ); } public Sql57Object( final String id, final int version, final String comment ) { super( id, version, comment ); _oval_family = Family.INDEPENDENT; _oval_component = ComponentType.SQL57; } /** */ public void setSet( final Set set ) { this.set = set; } public Set getSet() { return set; } /** */ public void setEngine( final EntityObjectEngineType engine ) { this.engine = engine; } public EntityObjectEngineType getEngine() { return engine; } /** */ public void setVersion( final EntityObjectStringType version ) { this.version = version; } public EntityObjectStringType getVersion() { return version; } /** */ public void setConnectionString( final EntityObjectStringType connection_string ) { this.connection_string = connection_string; } public EntityObjectStringType getConnectionString() { return connection_string; } /** */ public void setSql( final EntityObjectStringType sql ) { this.sql = sql; } public EntityObjectStringType getSql() { return sql; } /** */ public void setFilter( final Collection<? 
extends Filter> filterList ) { if (filter != filterList) { filter.clear(); if (filterList != null && filterList.size() > 0) { filter.addAll( filterList ); } } } public boolean addFilter( final Filter filter ) { if (filter == null) { return false; } return this.filter.add( filter ); } public Collection<Filter> getFilter() { return filter; } public Iterator<Filter> iterateFilter() { return filter.iterator(); } //********************************************************************* // DefinitionsElement //********************************************************************* @Override public Collection<ElementRef> ovalGetElementRef() { Collection<ElementRef> ref_list = new ArrayList<ElementRef>(); ref_list.add( getEngine() ); ref_list.add( getVersion() ); ref_list.add( getConnectionString() ); ref_list.add( getSql() ); ref_list.addAll( getFilter() ); return ref_list; } //************************************************************** // java.lang.Object //************************************************************** @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals( final Object obj ) { if (!(obj instanceof Sql57Object)) { return false; } return super.equals( obj ); } @Override public String toString() { return "sql57_object[" + super.toString() + ", set=" + getSet() + ", engine=" + getEngine() + ", version=" + getVersion() + ", conection_string=" + getConnectionString() + ", sql=" + getSql() + ", filter=" + getFilter() + "]"; } } //
package org.apache.maven.archiva.repository.content;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.AbstractRepositoryLayerTestCase;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.layout.LayoutException;

import java.io.File;

/**
 * RepositoryRequestTest
 *
 * @version $Id$
 */
public class RepositoryRequestTest
    extends AbstractRepositoryLayerTestCase
{
    // ---- Invalid request paths: none of these should parse to an artifact. ----

    public void testInvalidRequestEmptyPath()
    {
        assertInvalidRequest( "" );
    }

    public void testInvalidRequestSlashOnly()
    {
        assertInvalidRequest( "//" );
    }

    public void testInvalidRequestNoArtifactId()
    {
        assertInvalidRequest( "groupId/jars/-1.0.jar" );
    }

    public void testInvalidLegacyRequestBadLocation()
    {
        assertInvalidRequest( "org.apache.maven.test/jars/artifactId-1.0.war" );
    }

    public void testInvalidRequestTooShort()
    {
        assertInvalidRequest( "org.apache.maven.test/artifactId-2.0.jar" );
    }

    public void testInvalidDefaultRequestBadLocation()
    {
        assertInvalidRequest( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
    }

    // ---- Valid request paths in both legacy (groupId/type/file) and default
    //      (group/dirs/artifact/version/file) layouts. Each pair exercises the
    //      same artifact through both layouts. ----

    public void testValidLegacyGanymed()
        throws Exception
    {
        assertValid( "ch.ethz.ganymed/jars/ganymed-ssh2-build210.jar", "ch.ethz.ganymed", "ganymed-ssh2", "build210",
                     null, "jar" );
    }

    public void testValidDefaultGanymed()
        throws Exception
    {
        assertValid( "ch/ethz/ganymed/ganymed-ssh2/build210/ganymed-ssh2-build210.jar", "ch.ethz.ganymed",
                     "ganymed-ssh2", "build210", null, "jar" );
    }

    public void testValidLegacyJavaxComm()
        throws Exception
    {
        assertValid( "javax/jars/comm-3.0-u1.jar", "javax", "comm", "3.0-u1", null, "jar" );
    }

    public void testValidDefaultJavaxComm()
        throws Exception
    {
        assertValid( "javax/comm/3.0-u1/comm-3.0-u1.jar", "javax", "comm", "3.0-u1", null, "jar" );
    }

    public void testValidLegacyJavaxPersistence()
        throws Exception
    {
        assertValid( "javax.persistence/jars/ejb-3.0-public_review.jar", "javax.persistence", "ejb",
                     "3.0-public_review", null, "jar" );
    }

    public void testValidDefaultJavaxPersistence()
        throws Exception
    {
        assertValid( "javax/persistence/ejb/3.0-public_review/ejb-3.0-public_review.jar", "javax.persistence", "ejb",
                     "3.0-public_review", null, "jar" );
    }

    public void testValidLegacyMavenTestPlugin()
        throws Exception
    {
        assertValid( "maven/jars/maven-test-plugin-1.8.2.jar", "maven", "maven-test-plugin", "1.8.2", null, "jar" );
    }

    public void testValidDefaultMavenTestPlugin()
        throws Exception
    {
        assertValid( "maven/maven-test-plugin/1.8.2/maven-test-plugin-1.8.2.pom", "maven", "maven-test-plugin",
                     "1.8.2", null, "pom" );
    }

    public void testValidLegacyCommonsLangJavadoc()
        throws Exception
    {
        assertValid( "commons-lang/javadoc.jars/commons-lang-2.1-javadoc.jar", "commons-lang", "commons-lang", "2.1",
                     "javadoc", "javadoc" );
    }

    public void testValidDefaultCommonsLangJavadoc()
        throws Exception
    {
        assertValid( "commons-lang/commons-lang/2.1/commons-lang-2.1-javadoc.jar", "commons-lang", "commons-lang",
                     "2.1", "javadoc", "javadoc" );
    }

    public void testValidLegacyDerbyPom()
        throws Exception
    {
        assertValid( "org.apache.derby/poms/derby-10.2.2.0.pom", "org.apache.derby", "derby", "10.2.2.0", null,
                     "pom" );
        // Starting slash should not prevent detection.
        assertValid( "/org.apache.derby/poms/derby-10.2.2.0.pom", "org.apache.derby", "derby", "10.2.2.0", null,
                     "pom" );
    }

    public void testValidDefaultDerbyPom()
        throws Exception
    {
        assertValid( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0.pom", "org.apache.derby", "derby", "10.2.2.0",
                     null, "pom" );
    }

    public void testValidLegacyGeronimoEjbSpec()
        throws Exception
    {
        assertValid( "org.apache.geronimo.specs/jars/geronimo-ejb_2.1_spec-1.0.1.jar", "org.apache.geronimo.specs",
                     "geronimo-ejb_2.1_spec", "1.0.1", null, "jar" );
    }

    public void testValidDefaultGeronimoEjbSpec()
        throws Exception
    {
        assertValid( "org/apache/geronimo/specs/geronimo-ejb_2.1_spec/1.0.1/geronimo-ejb_2.1_spec-1.0.1.jar",
                     "org.apache.geronimo.specs", "geronimo-ejb_2.1_spec", "1.0.1", null, "jar" );
    }

    public void testValidLegacyLdapSnapshot()
        throws Exception
    {
        assertValid( "directory-clients/poms/ldap-clients-0.9.1-SNAPSHOT.pom", "directory-clients", "ldap-clients",
                     "0.9.1-SNAPSHOT", null, "pom" );
    }

    public void testValidDefaultLdapSnapshot()
        throws Exception
    {
        assertValid( "directory-clients/ldap-clients/0.9.1-SNAPSHOT/ldap-clients-0.9.1-SNAPSHOT.pom",
                     "directory-clients", "ldap-clients", "0.9.1-SNAPSHOT", null, "pom" );
    }

    public void testValidLegacyTestArchSnapshot()
        throws Exception
    {
        assertValid( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom", "test.maven-arch", "test-arch",
                     "2.0.3-SNAPSHOT", null, "pom" );
    }

    public void testValidDefaultTestArchSnapshot()
        throws Exception
    {
        assertValid( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.pom", "test.maven-arch",
                     "test-arch", "2.0.3-SNAPSHOT", null, "pom" );
    }

    public void testValidLegacyOddDottedArtifactId()
        throws Exception
    {
        assertValid( "com.company.department/poms/com.company.department.project-0.2.pom", "com.company.department",
                     "com.company.department.project", "0.2", null, "pom" );
    }

    public void testValidDefaultOddDottedArtifactId()
        throws Exception
    {
        assertValid( "com/company/department/com.company.department.project/0.2/com.company.department.project-0.2.pom",
                     "com.company.department", "com.company.department.project", "0.2", null, "pom" );
    }

    public void testValidLegacyTimestampedSnapshot()
        throws Exception
    {
        assertValid( "org.apache.archiva.test/jars/redonkulous-3.1-beta-1-20050831.101112-42.jar",
                     "org.apache.archiva.test", "redonkulous", "3.1-beta-1-20050831.101112-42", null, "jar" );
    }

    public void testValidDefaultTimestampedSnapshot()
        throws Exception
    {
        assertValid( "org/apache/archiva/test/redonkulous/3.1-beta-1-SNAPSHOT/redonkulous-3.1-beta-1-20050831.101112-42.jar",
                     "org.apache.archiva.test", "redonkulous", "3.1-beta-1-20050831.101112-42", null, "jar" );
    }

    // ---- Classification predicates on the shared `repoRequest` fixture. ----

    public void testIsSupportFile()
    {
        // Checksum/signature suffixes mark a request as a support file.
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.sha1" ) );
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.md5" ) );
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.asc" ) );
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.pgp" ) );
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1" ) );
        assertTrue( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.md5" ) );

        assertFalse( repoRequest.isSupportFile( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom" ) );
        assertFalse( repoRequest.isSupportFile( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.jar" ) );
        assertFalse( repoRequest.isSupportFile( "org/apache/archiva/archiva-api/1.0/archiva-api-1.0.xml.zip" ) );
        assertFalse( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz" ) );
        assertFalse( repoRequest.isSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml" ) );
        assertFalse( repoRequest.isSupportFile( "org/apache/derby/derby/maven-metadata.xml" ) );
    }

    public void testIsMetadata()
    {
        assertTrue( repoRequest.isMetadata( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml" ));
        assertTrue( repoRequest.isMetadata( "org/apache/derby/derby/maven-metadata.xml" ));

        assertFalse( repoRequest.isMetadata( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom" ) );
        assertFalse( repoRequest.isMetadata( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.jar" ) );
        assertFalse( repoRequest.isMetadata( "org/apache/archiva/archiva-api/1.0/archiva-api-1.0.xml.zip" ) );
        assertFalse( repoRequest.isMetadata( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz" ) );
        assertFalse( repoRequest.isMetadata( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.pgp" ) );
        // A checksum of a metadata file is a support file, not metadata itself.
        assertFalse( repoRequest.isMetadata( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1" ) );
    }

    public void testIsMetadataSupportFile()
    {
        assertFalse( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml" ));
        assertFalse( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/maven-metadata.xml" ));
        assertTrue( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/maven-metadata.xml.sha1" ));
        assertTrue( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/maven-metadata.xml.md5" ));

        assertFalse( repoRequest.isMetadataSupportFile( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom" ) );
        assertFalse( repoRequest.isMetadataSupportFile( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.jar" ) );
        assertFalse( repoRequest.isMetadataSupportFile( "org/apache/archiva/archiva-api/1.0/archiva-api-1.0.xml.zip" ) );
        assertFalse( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz" ) );
        assertFalse( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.pgp" ) );
        assertTrue( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1" ) );
        assertTrue( repoRequest.isMetadataSupportFile( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.md5" ) );
    }

    public void testIsDefault()
    {
        assertFalse( repoRequest.isDefault( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom" ) );
        assertFalse( repoRequest.isDefault( "directory-clients/poms/ldap-clients-0.9.1-SNAPSHOT.pom" ) );
        assertFalse( repoRequest.isDefault( "commons-lang/jars/commons-lang-2.1-javadoc.jar" ) );

        assertTrue( repoRequest.isDefault( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.jar" ) );
        assertTrue( repoRequest.isDefault( "org/apache/archiva/archiva-api/1.0/archiva-api-1.0.xml.zip" ) );
        assertTrue( repoRequest.isDefault( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz" ) );
        assertTrue( repoRequest.isDefault( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.pgp" ) );
        assertTrue( repoRequest.isDefault( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1" ) );

        assertTrue( repoRequest.isDefault( "eclipse/jdtcore/maven-metadata.xml" ) );
        assertTrue( repoRequest.isDefault( "eclipse/jdtcore/maven-metadata.xml.sha1" ) );
        assertTrue( repoRequest.isDefault( "eclipse/jdtcore/maven-metadata.xml.md5" ) );

        // Degenerate inputs are never "default layout".
        assertFalse( repoRequest.isDefault( null ) );
        assertFalse( repoRequest.isDefault( "" ) );
        assertFalse( repoRequest.isDefault( "foo" ) );
        assertFalse( repoRequest.isDefault( "some.short/path" ) );
    }

    public void testIsLegacy()
    {
        assertTrue( repoRequest.isLegacy( "test.maven-arch/poms/test-arch-2.0.3-SNAPSHOT.pom" ) );
        assertTrue( repoRequest.isLegacy( "directory-clients/poms/ldap-clients-0.9.1-SNAPSHOT.pom" ) );
        assertTrue( repoRequest.isLegacy( "commons-lang/jars/commons-lang-2.1-javadoc.jar" ) );

        assertFalse( repoRequest.isLegacy( "test/maven-arch/test-arch/2.0.3-SNAPSHOT/test-arch-2.0.3-SNAPSHOT.jar" ) );
        assertFalse( repoRequest.isLegacy( "org/apache/archiva/archiva-api/1.0/archiva-api-1.0.xml.zip" ) );
        assertFalse( repoRequest.isLegacy( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz" ) );
        assertFalse( repoRequest.isLegacy( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0-bin.tar.gz.pgp" ) );
        assertFalse( repoRequest.isLegacy( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1" ) );

        // Degenerate inputs are never "legacy layout".
        assertFalse( repoRequest.isLegacy( null ) );
        assertFalse( repoRequest.isLegacy( "" ) );
        assertFalse( repoRequest.isLegacy( "some.short/path" ) );
    }

    // Builds a throwaway managed repository fixture with the requested layout
    // ("default" or "legacy") rooted under target/test-repo.
    private ManagedRepositoryContent createManagedRepo( String layout )
        throws Exception
    {
        File repoRoot = getTestFile( "target/test-repo" );
        return createManagedRepositoryContent( "test-internal", "Internal Test Repo", repoRoot, layout );
    }

    /**
     * [MRM-481] Artifact requests with a .xml.zip extension fail with a 404 Error
     */
    public void testToNativePathArtifactDefaultToDefaultDualExtension()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test (artifact) default to default - dual extension
        assertEquals( "org/project/example-presentation/3.2/example-presentation-3.2.xml.zip",
                      repoRequest.toNativePath( "org/project/example-presentation/3.2/example-presentation-3.2.xml.zip",
                                                repository ) );
    }

    /**
     * [MRM-481] Artifact requests with a .xml.zip extension fail with a 404 Error
     */
    public void testToNativePathArtifactLegacyToDefaultDualExtension()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // TODO: this is a good test case for maven 1 -> maven 2 link, since m2 doesn't support the distribution-zip type

        // Test (artifact) legacy to default - dual extension
        // NOTE: The detection of a dual extension is flawed.
        assertEquals( "org/project/example-presentation/3.2.xml/example-presentation-3.2.xml.zip",
                      repoRequest.toNativePath( "org.project/zips/example-presentation-3.2.xml.zip", repository ) );
    }

    public void testToNativePathMetadataDefaultToDefault()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test (metadata) default to default
        assertEquals( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1",
                      repoRequest.toNativePath( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml.sha1",
                                                repository ) );
    }

    public void testNativePathPomLegacyToDefault()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test (pom) legacy to default
        assertEquals( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0.pom",
                      repoRequest.toNativePath( "org.apache.derby/poms/derby-10.2.2.0.pom", repository ) );
    }

    public void testNativePathPomLegacyToLegacy()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (pom) legacy to default
        assertEquals( "org.apache.derby/poms/derby-10.2.2.0.pom",
                      repoRequest.toNativePath( "org.apache.derby/poms/derby-10.2.2.0.pom", repository ) );
    }

    public void testNativePathPomLegacyToDefaultEjb()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test (pom) legacy to default
        assertEquals( "mygroup/myejb/1.0/myejb-1.0.jar",
                      repoRequest.toNativePath( "mygroup/ejbs/myejb-1.0.jar", repository ) );
    }

    public void testNativePathPomLegacyToLegacyEjb()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (pom) legacy to default
        assertEquals( "mygroup/ejbs/myejb-1.0.jar",
                      repoRequest.toNativePath( "mygroup/ejbs/myejb-1.0.jar", repository ) );
    }

    public void testNativePathPomLegacyToLegacyStrutsModule()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (pom) legacy to default
        assertEquals( "WebPortal/struts-modules/eventsDB-1.2.3.struts-module",
                      repoRequest.toNativePath( "WebPortal/struts-modules/eventsDB-1.2.3.struts-module",
                                                repository ) );
    }

    public void testNativePathSupportFileLegacyToDefault()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test (supportfile) legacy to default
        assertEquals( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0.jar.sha1",
                      repoRequest.toNativePath( "org.apache.derby/jars/derby-10.2.2.0.jar.sha1", repository ) );
    }

    public void testNativePathBadRequestTooShort()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test bad request path (too short)
        try
        {
            repoRequest.toNativePath( "org.apache.derby/license.txt", repository );
            fail( "Should have thrown an exception about a too short path." );
        }
        catch ( LayoutException e )
        {
            // expected path.
        }
    }

    public void testNativePathBadRequestBlank()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test bad request path (too short)
        try
        {
            repoRequest.toNativePath( "", repository );
            fail( "Should have thrown an exception about an blank request." );
        }
        catch ( LayoutException e )
        {
            // expected path.
        }
    }

    public void testNativePathBadRequestNull()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test bad request path (too short)
        try
        {
            repoRequest.toNativePath( null, repository );
            fail( "Should have thrown an exception about an null request." );
        }
        catch ( LayoutException e )
        {
            // expected path.
        }
    }

    public void testNativePathBadRequestUnknownType()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "default" );

        // Test bad request path (too short)
        try
        {
            repoRequest.toNativePath( "org/apache/derby/derby/10.2.2.0/license.txt", repository );
            fail( "Should have thrown an exception about an invalid type." );
        }
        catch ( LayoutException e )
        {
            // expected path.
        }
    }

    public void testToNativePathLegacyMetadataDefaultToLegacy()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (metadata) default to legacy

        // Special Case: This direction is not supported, should throw a LayoutException.
        try
        {
            repoRequest.toNativePath( "org/apache/derby/derby/10.2.2.0/maven-metadata.xml", repository );
            fail("Should have thrown a LayoutException, can't translate a maven-metadata.xml to a legacy layout.");
        }
        catch(LayoutException e)
        {
            // expected path.
        }
    }

    public void testNativePathPomDefaultToLegacy()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (pom) default to legacy
        assertEquals( "org.apache.derby/poms/derby-10.2.2.0.pom",
                      repoRequest.toNativePath( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0.pom", repository ) );
    }

    public void testNativePathSupportFileDefaultToLegacy()
        throws Exception
    {
        ManagedRepositoryContent repository = createManagedRepo( "legacy" );

        // Test (supportfile) default to legacy
        assertEquals( "org.apache.derby/jars/derby-10.2.2.0.jar.sha1",
                      repoRequest.toNativePath( "org/apache/derby/derby/10.2.2.0/derby-10.2.2.0.jar.sha1",
                                                repository ) );
    }

    // Asserts that `path` parses to exactly the given artifact coordinates.
    // (Continues beyond this chunk.)
    private void assertValid( String path, String groupId, String artifactId, String version, String classifier,
                              String type )
        throws Exception
    {
        String expectedId =
            "ArtifactReference - " + groupId + ":" + artifactId + ":" + version + ":" + ( classifier != null ?
classifier + ":" : "" ) + type; ArtifactReference reference = repoRequest.toArtifactReference( path ); assertNotNull( expectedId + " - Should not be null.", reference ); assertEquals( expectedId + " - Group ID", groupId, reference.getGroupId() ); assertEquals( expectedId + " - Artifact ID", artifactId, reference.getArtifactId() ); if ( StringUtils.isNotBlank( classifier ) ) { assertEquals( expectedId + " - Classifier", classifier, reference.getClassifier() ); } assertEquals( expectedId + " - Version ID", version, reference.getVersion() ); assertEquals( expectedId + " - Type", type, reference.getType() ); } private void assertInvalidRequest( String path ) { try { repoRequest.toArtifactReference( path ); fail( "Expected a LayoutException on an invalid path [" + path + "]" ); } catch ( LayoutException e ) { /* expected path */ } } private RepositoryRequest repoRequest; @Override protected void setUp() throws Exception { super.setUp(); repoRequest = (RepositoryRequest) lookup( RepositoryRequest.class ); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.server.cluster.impl; import java.io.PrintWriter; import java.io.StringWriter; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.QueueConfiguration; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClusterTopologyListener; import org.apache.activemq.artemis.api.core.client.TopologyMember; import org.apache.activemq.artemis.api.core.management.CoreNotificationType; import 
org.apache.activemq.artemis.api.core.management.ManagementHelper; import org.apache.activemq.artemis.core.client.impl.AfterConnectInternalListener; import org.apache.activemq.artemis.core.client.impl.ClientSessionFactoryInternal; import org.apache.activemq.artemis.core.client.impl.ServerLocatorImpl; import org.apache.activemq.artemis.core.client.impl.ServerLocatorInternal; import org.apache.activemq.artemis.core.client.impl.Topology; import org.apache.activemq.artemis.core.client.impl.TopologyManager; import org.apache.activemq.artemis.core.client.impl.TopologyMemberImpl; import org.apache.activemq.artemis.core.config.impl.ConfigurationImpl; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.PostOffice; import org.apache.activemq.artemis.core.postoffice.impl.PostOfficeImpl; import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.NodeManager; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.cluster.ActiveMQServerSideProtocolManagerFactory; import org.apache.activemq.artemis.core.server.cluster.Bridge; import org.apache.activemq.artemis.core.server.cluster.ClusterConnection; import org.apache.activemq.artemis.core.server.cluster.ClusterControl; import org.apache.activemq.artemis.core.server.cluster.ClusterManager; import org.apache.activemq.artemis.core.server.cluster.ClusterManager.IncomingInterceptorLookingForExceptionMessage; import org.apache.activemq.artemis.core.server.cluster.MessageFlowRecord; import org.apache.activemq.artemis.core.server.cluster.RemoteQueueBinding; import org.apache.activemq.artemis.core.server.group.impl.Proposal; import org.apache.activemq.artemis.core.server.group.impl.Response; import 
org.apache.activemq.artemis.core.server.management.ManagementService;
import org.apache.activemq.artemis.core.server.management.Notification;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.ExecutorFactory;
import org.apache.activemq.artemis.utils.FutureLatch;
import org.apache.activemq.artemis.utils.collections.TypedProperties;
import org.jboss.logging.Logger;

/**
 * A cluster connection: maintains one store-and-forward (SnF) queue and bridge per remote
 * node (see {@code records}) and reacts to cluster topology updates (node up/down).
 */
public final class ClusterConnectionImpl implements ClusterConnection, AfterConnectInternalListener, TopologyManager {

   private static final Logger logger = Logger.getLogger(ClusterConnectionImpl.class);

   // Name prefix for the internal store-and-forward queues.
   private static final String SN_PREFIX = "sf.";

   /**
    * When getting member on node-up and down we have to remove the name from the transport config
    * as the setting we build here doesn't need to consider the name, so use the same name on all
    * the instances.
    */
   private static final String TRANSPORT_CONFIG_NAME = "topology-member";

   private final ExecutorFactory executorFactory;

   private final Executor executor;

   private final ActiveMQServer server;

   private final PostOffice postOffice;

   private final ManagementService managementService;

   private final SimpleString name;

   private final SimpleString address;

   private final long clientFailureCheckPeriod;

   private final long connectionTTL;

   private final long retryInterval;

   private final long callTimeout;

   private final long callFailoverTimeout;

   private final double retryIntervalMultiplier;

   private final long maxRetryInterval;

   private final int initialConnectAttempts;

   private final int reconnectAttempts;

   private final boolean useDuplicateDetection;

   private final MessageLoadBalancingType messageLoadBalancingType;

   private final int confirmationWindowSize;

   private final int producerWindowSize;

   /**
    * Guard for the field {@link #records}. Note that the field is {@link ConcurrentHashMap},
    * however we need the guard to synchronize multiple step operations during topology updates.
    */
   private final Object recordsGuard = new Object();

   // One message-flow record (bridge + remote bindings) per remote node ID.
   private final Map<String, MessageFlowRecord> records = new ConcurrentHashMap<>();

   private final ScheduledExecutorService scheduledExecutor;

   private final int maxHops;

   private final NodeManager nodeManager;

   private volatile boolean started;

   private final String clusterUser;

   private final String clusterPassword;

   private final ClusterConnector clusterConnector;

   private ServerLocatorInternal serverLocator;

   private final TransportConfiguration connector;

   private final boolean allowDirectConnectionsOnly;

   private final Set<TransportConfiguration> allowableConnections = new HashSet<>();

   private final ClusterManager manager;

   private final int minLargeMessageSize;

   // Stuff that used to be on the ClusterManager

   private final Topology topology;

   private volatile boolean stopping = false;

   private LiveNotifier liveNotifier = null;

   private final long clusterNotificationInterval;

   private final int clusterNotificationAttempts;

   private final String storeAndForwardPrefix;

   private boolean splitBrainDetection;

   /**
    * Constructor used when the cluster members are given as a static list of connectors.
    */
   public ClusterConnectionImpl(final ClusterManager manager,
                                final TransportConfiguration[] staticTranspConfigs,
                                final TransportConfiguration connector,
                                final SimpleString name,
                                final SimpleString address,
                                final int minLargeMessageSize,
                                final long clientFailureCheckPeriod,
                                final long connectionTTL,
                                final long retryInterval,
                                final double retryIntervalMultiplier,
                                final long maxRetryInterval,
                                final int initialConnectAttempts,
                                final int reconnectAttempts,
                                final long callTimeout,
                                final long callFailoverTimeout,
                                final boolean useDuplicateDetection,
                                final MessageLoadBalancingType messageLoadBalancingType,
                                final int confirmationWindowSize,
                                final int producerWindowSize,
                                final ExecutorFactory executorFactory,
                                final ActiveMQServer server,
                                final PostOffice postOffice,
                                final ManagementService managementService,
                                final ScheduledExecutorService scheduledExecutor,
                                final int maxHops,
                                final NodeManager nodeManager,
                                final String clusterUser,
                                final String clusterPassword,
                                final boolean allowDirectConnectionsOnly,
                                final long clusterNotificationInterval,
                                final int clusterNotificationAttempts) throws Exception {
      this.nodeManager = nodeManager;

      this.connector = connector;

      this.name = name;

      this.address = address;

      this.clientFailureCheckPeriod = clientFailureCheckPeriod;

      this.connectionTTL = connectionTTL;

      this.retryInterval = retryInterval;

      this.retryIntervalMultiplier = retryIntervalMultiplier;

      this.maxRetryInterval = maxRetryInterval;

      this.initialConnectAttempts = initialConnectAttempts;

      this.reconnectAttempts = reconnectAttempts;

      this.useDuplicateDetection = useDuplicateDetection;

      this.messageLoadBalancingType = messageLoadBalancingType;

      this.confirmationWindowSize = confirmationWindowSize;

      this.producerWindowSize = producerWindowSize;

      this.executorFactory = executorFactory;

      this.clusterNotificationInterval = clusterNotificationInterval;

      this.clusterNotificationAttempts = clusterNotificationAttempts;

      this.executor = executorFactory.getExecutor();

      this.topology = new Topology(this, executor);

      this.server = server;

      this.postOffice = postOffice;

      this.managementService = managementService;

      this.scheduledExecutor = scheduledExecutor;

      this.maxHops = maxHops;

      this.clusterUser = clusterUser;

      this.clusterPassword = clusterPassword;

      this.allowDirectConnectionsOnly = allowDirectConnectionsOnly;

      this.manager = manager;

      this.callTimeout = callTimeout;

      this.callFailoverTimeout = callFailoverTimeout;

      this.minLargeMessageSize = minLargeMessageSize;

      clusterConnector = new StaticClusterConnector(staticTranspConfigs);

      if (staticTranspConfigs != null && staticTranspConfigs.length > 0) {
         // a cluster connection will connect to other nodes only if they are directly connected
         // through a static list of connectors or broadcasting using UDP.
if (allowDirectConnectionsOnly) {
   // Remember which connectors we may bridge to directly; names are normalized so that
   // lookups in nodeUP() compare only the transport parameters, not the config name.
   for (TransportConfiguration configuration : staticTranspConfigs) {
      allowableConnections.add(configuration.newTransportConfig(TRANSPORT_CONFIG_NAME));
   }
}
}

this.storeAndForwardPrefix = server.getInternalNamingPrefix() + SN_PREFIX;
}

/**
 * Constructor used when cluster members are located through a discovery (broadcast) group
 * rather than a static connector list.
 */
public ClusterConnectionImpl(final ClusterManager manager,
                             DiscoveryGroupConfiguration dg,
                             final TransportConfiguration connector,
                             final SimpleString name,
                             final SimpleString address,
                             final int minLargeMessageSize,
                             final long clientFailureCheckPeriod,
                             final long connectionTTL,
                             final long retryInterval,
                             final double retryIntervalMultiplier,
                             final long maxRetryInterval,
                             final int initialConnectAttempts,
                             final int reconnectAttempts,
                             final long callTimeout,
                             final long callFailoverTimeout,
                             final boolean useDuplicateDetection,
                             final MessageLoadBalancingType messageLoadBalancingType,
                             final int confirmationWindowSize,
                             final int producerWindowSize,
                             final ExecutorFactory executorFactory,
                             final ActiveMQServer server,
                             final PostOffice postOffice,
                             final ManagementService managementService,
                             final ScheduledExecutorService scheduledExecutor,
                             final int maxHops,
                             final NodeManager nodeManager,
                             final String clusterUser,
                             final String clusterPassword,
                             final boolean allowDirectConnectionsOnly,
                             final long clusterNotificationInterval,
                             final int clusterNotificationAttempts) throws Exception {
   this.nodeManager = nodeManager;

   this.connector = connector;

   this.name = name;

   this.address = address;

   this.clientFailureCheckPeriod = clientFailureCheckPeriod;

   this.connectionTTL = connectionTTL;

   this.retryInterval = retryInterval;

   this.retryIntervalMultiplier = retryIntervalMultiplier;

   this.maxRetryInterval = maxRetryInterval;

   this.minLargeMessageSize = minLargeMessageSize;

   this.initialConnectAttempts = initialConnectAttempts;

   this.reconnectAttempts = reconnectAttempts;

   this.callTimeout = callTimeout;

   this.callFailoverTimeout = callFailoverTimeout;

   this.useDuplicateDetection = useDuplicateDetection;

   this.messageLoadBalancingType = messageLoadBalancingType;

   this.confirmationWindowSize = confirmationWindowSize;

   this.producerWindowSize = producerWindowSize;

   this.executorFactory = executorFactory;

   this.clusterNotificationInterval = clusterNotificationInterval;

   this.clusterNotificationAttempts = clusterNotificationAttempts;

   this.executor = executorFactory.getExecutor();

   this.topology = new Topology(this, executor);

   this.server = server;

   this.postOffice = postOffice;

   this.managementService = managementService;

   this.scheduledExecutor = scheduledExecutor;

   this.maxHops = maxHops;

   this.clusterUser = clusterUser;

   this.clusterPassword = clusterPassword;

   this.allowDirectConnectionsOnly = allowDirectConnectionsOnly;

   clusterConnector = new DiscoveryClusterConnector(dg);

   this.manager = manager;

   this.storeAndForwardPrefix = server.getInternalNamingPrefix() + SN_PREFIX;
}

@Override
public void start() throws Exception {
   synchronized (this) {
      if (started) {
         return;
      }
      stopping = false;
      started = true;
      activate();
   }
}

@Override
public void flushExecutor() {
   // Push a latch through the executor and wait for it: once it completes,
   // everything that was queued before it has run.
   FutureLatch future = new FutureLatch();
   executor.execute(future);
   if (!future.await(10000)) {
      ActiveMQServerLogger.LOGGER.couldNotFinishExecutor(this.toString());
      server.threadDump();
   }
}

@Override
public void stop() throws Exception {
   if (!started) {
      return;
   }
   stopping = true;
   if (logger.isDebugEnabled()) {
      logger.debug(this + "::stopping ClusterConnection");
   }

   if (serverLocator != null) {
      serverLocator.removeClusterTopologyListener(this);
   }

   logger.debug("Cluster connection being stopped for node" + nodeManager.getNodeId() +
                   ", server = " + this.server + " serverLocator = " + serverLocator);

   synchronized (this) {
      for (MessageFlowRecord record : records.values()) {
         try {
            record.close();
         } catch (Exception ignore) {
            // best effort: keep closing the remaining records
         }
      }
   }

   if (managementService != null) {
      TypedProperties props = new TypedProperties();
      props.putSimpleStringProperty(new SimpleString("name"), name);
      //nodeID can be null if there's only a backup
      SimpleString nodeId = nodeManager.getNodeId();
      Notification notification = new Notification(nodeId == null ? null : nodeId.toString(), CoreNotificationType.CLUSTER_CONNECTION_STOPPED, props);
      managementService.sendNotification(notification);
   }

   // Close the locator asynchronously, on the cluster connection's own executor.
   executor.execute(new Runnable() {
      @Override
      public void run() {
         synchronized (ClusterConnectionImpl.this) {
            closeLocator(serverLocator);
            serverLocator = null;
         }
      }
   });

   started = false;
}

/**
 * Closes the given locator if it is non-null.
 *
 * @param locator
 */
private void closeLocator(final ServerLocatorInternal locator) {
   if (locator != null)
      locator.close();
}

private TopologyMember getLocalMember() {
   return topology.getMember(manager.getNodeId());
}

@Override
public void addClusterTopologyListener(final ClusterTopologyListener listener) {
   topology.addClusterTopologyListener(listener);
}

@Override
public void removeClusterTopologyListener(final ClusterTopologyListener listener) {
   topology.removeClusterTopologyListener(listener);
}

@Override
public Topology getTopology() {
   return topology;
}

@Override
public void nodeAnnounced(final long uniqueEventID,
                          final String nodeID,
                          final String backupGroupName,
                          final String scaleDownGroupName,
                          final Pair<TransportConfiguration, TransportConfiguration> connectorPair,
                          final boolean backup) {
   if (logger.isDebugEnabled()) {
      logger.debug(this + "::NodeAnnounced, backup=" + backup + nodeID + connectorPair);
   }

   TransportConfiguration live = connectorPair.getA();
   TransportConfiguration backupTC = connectorPair.getB();
   TopologyMemberImpl newMember = new TopologyMemberImpl(nodeID, backupGroupName, scaleDownGroupName, live, backupTC);
   newMember.setUniqueEventID(uniqueEventID);
   if (backup) {
      topology.updateBackup(new TopologyMemberImpl(nodeID, backupGroupName, scaleDownGroupName, live, backupTC));
   } else {
      topology.updateMember(uniqueEventID, nodeID, newMember);
   }
}

/**
 * This is the implementation of TopologyManager. It is used to reject eventual updates from a split brain server.
 *
 * @param uniqueEventID
 * @param nodeId
 * @param memberInput
 * @return
 */
@Override
public boolean updateMember(long uniqueEventID, String nodeId, TopologyMemberImpl memberInput) {
   if (splitBrainDetection && nodeId.equals(nodeManager.getNodeId().toString())) {
      // An update for our own node ID arriving with a different live connector is a
      // possible split brain: warn and force our own connector back into the member.
      if (memberInput.getLive() != null && !memberInput.getLive().isSameParams(connector)) {
         ActiveMQServerLogger.LOGGER.possibleSplitBrain(nodeId, memberInput.toString());
      }
      memberInput.setLive(connector);
   }
   return true;
}

/**
 * From topologyManager
 *
 * @param uniqueEventID
 * @param nodeId
 * @return
 */
@Override
public boolean removeMember(final long uniqueEventID, final String nodeId) {
   if (splitBrainDetection && nodeId.equals(nodeManager.getNodeId().toString())) {
      // Refuse to remove ourselves from the topology; likely a split-brain peer.
      ActiveMQServerLogger.LOGGER.possibleSplitBrain(nodeId, nodeId);
      return false;
   }
   return true;
}

@Override
public void setSplitBrainDetection(boolean splitBrainDetection) {
   this.splitBrainDetection = splitBrainDetection;
}

@Override
public boolean isSplitBrainDetection() {
   return splitBrainDetection;
}

@Override
public void onConnection(ClientSessionFactoryInternal sf) {
   TopologyMember localMember = getLocalMember();
   if (localMember != null) {
      ClusterControl clusterControl = manager.getClusterController().connectToNodeInCluster(sf);
      try {
         clusterControl.authorize();
         clusterControl.sendNodeAnnounce(localMember.getUniqueEventID(), manager.getNodeId(), manager.getBackupGroupName(), manager.getScaleDownGroupName(), false, localMember.getLive(), localMember.getBackup());
      } catch (ActiveMQException e) {
         ActiveMQServerLogger.LOGGER.clusterControlAuthfailure();
      }
   } else {
      ActiveMQServerLogger.LOGGER.noLocalMemborOnClusterConnection(this);
   }
   // TODO: shouldn't we send the current time here? and change the current topology?
// sf.sendNodeAnnounce(System.currentTimeMillis(),
// manager.getNodeId(),
// false,
// localMember.getConnector().a,
// localMember.getConnector().b);
}

@Override
public boolean isStarted() {
   return started;
}

@Override
public SimpleString getName() {
   return name;
}

@Override
public String getNodeID() {
   return nodeManager == null ? null : (nodeManager.getNodeId() == null ? null : nodeManager.getNodeId().toString());
}

@Override
public ActiveMQServer getServer() {
   return server;
}

@Override
public boolean isNodeActive(String nodeId) {
   MessageFlowRecord rec = records.get(nodeId);
   if (rec == null) {
      return false;
   }
   return rec.getBridge().isConnected();
}

@Override
public long getCallTimeout() {
   return callTimeout;
}

/**
 * Returns a map of remote node ID -> remote address for every record whose bridge
 * currently has a forwarding connection.
 */
@Override
public Map<String, String> getNodes() {
   synchronized (recordsGuard) {
      Map<String, String> nodes = new HashMap<>();
      for (Entry<String, MessageFlowRecord> entry : records.entrySet()) {
         RemotingConnection fwdConnection = entry.getValue().getBridge().getForwardingConnection();
         if (fwdConnection != null) {
            nodes.put(entry.getKey(), fwdConnection.getRemoteAddress());
         }
      }
      return nodes;
   }
}

// Builds and starts the main server locator for this cluster connection; called from start().
private synchronized void activate() throws Exception {
   if (!started) {
      return;
   }

   if (logger.isDebugEnabled()) {
      logger.debug("Activating cluster connection nodeID=" + nodeManager.getNodeId() + " for server=" + this.server);
   }

   liveNotifier = new LiveNotifier();
   liveNotifier.updateAsLive();
   liveNotifier.schedule();

   serverLocator = clusterConnector.createServerLocator();

   if (serverLocator != null) {

      if (!useDuplicateDetection) {
         logger.debug("DuplicateDetection is disabled, sending clustered messages blocked");
      }

      final TopologyMember currentMember = topology.getMember(manager.getNodeId());

      if (currentMember == null) {
         // sanity check only
         throw new IllegalStateException("InternalError! The ClusterConnection doesn't know about its own node = " + this);
      }

      serverLocator.setNodeID(nodeManager.getNodeId().toString());
      serverLocator.setIdentity("(main-ClusterConnection::" + server.toString() + ")");
      serverLocator.setReconnectAttempts(0);
      serverLocator.setClusterConnection(true);
      serverLocator.setClusterTransportConfiguration(connector);
      serverLocator.setInitialConnectAttempts(-1);
      serverLocator.setClientFailureCheckPeriod(clientFailureCheckPeriod);
      serverLocator.setConnectionTTL(connectionTTL);
      serverLocator.setConfirmationWindowSize(confirmationWindowSize);
      // if not using duplicate detection, we will send blocked
      serverLocator.setBlockOnDurableSend(!useDuplicateDetection);
      serverLocator.setBlockOnNonDurableSend(!useDuplicateDetection);
      serverLocator.setCallTimeout(callTimeout);
      serverLocator.setCallFailoverTimeout(callFailoverTimeout);
      serverLocator.setProducerWindowSize(producerWindowSize);

      if (retryInterval > 0) {
         this.serverLocator.setRetryInterval(retryInterval);
      }

      serverLocator.setAfterConnectionInternalListener(this);

      serverLocator.setProtocolManagerFactory(ActiveMQServerSideProtocolManagerFactory.getInstance(serverLocator, server.getStorageManager()));

      serverLocator.start(server.getExecutorFactory().getExecutor());
   }

   if (managementService != null) {
      TypedProperties props = new TypedProperties();
      props.putSimpleStringProperty(new SimpleString("name"), name);
      Notification notification = new Notification(nodeManager.getNodeId().toString(), CoreNotificationType.CLUSTER_CONNECTION_STARTED, props);
      logger.debug("sending notification: " + notification);
      managementService.sendNotification(notification);
   }
   //we add as a listener after we have sent the cluster start notif as the listener may start sending notifs before
   addClusterTopologyListener(this);
}

@Override
public TransportConfiguration getConnector() {
   return connector;
}

// ClusterTopologyListener implementation ------------------------------------------------------------------

@Override
public void nodeDown(final long eventUID, final String nodeID) {
   /*
    * we don't do anything when a node down is received. The bridges will take care themselves when they should disconnect
    * and/or clear their bindings. This is to avoid closing a record when we don't want to.
    */
}

@Override
public void nodeUP(final TopologyMember topologyMember, final boolean last) {
   if (stopping) {
      return;
   }
   final String nodeID = topologyMember.getNodeId();

   if (logger.isDebugEnabled()) {
      // NOTE(review): oddly named local variable 'ClusterTestBase' (just a log prefix); kept as-is.
      String ClusterTestBase = "receiving nodeUP for nodeID=";
      logger.debug(this + ClusterTestBase + nodeID + " connectionPair=" + topologyMember);
   }

   // discard notifications about ourselves unless it's from our backup
   if (nodeID.equals(nodeManager.getNodeId().toString())) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::informing about backup to itself, nodeUUID=" + nodeManager.getNodeId() + ", connectorPair=" + topologyMember + ", this = " + this);
      }
      return;
   }

   // if the node is more than 1 hop away, we do not create a bridge for direct cluster connection
   if (allowDirectConnectionsOnly && !allowableConnections.contains(topologyMember.getLive().newTransportConfig(TRANSPORT_CONFIG_NAME))) {
      return;
   }

   // FIXME required to prevent cluster connections w/o discovery group
   // and empty static connectors to create bridges... ugly!
   if (serverLocator == null) {
      return;
   }
   /*we don't create bridges to backups*/
   if (topologyMember.getLive() == null) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + " ignoring call with nodeID=" + nodeID + ", topologyMember=" + topologyMember + ", last=" + last);
      }
      return;
   }

   synchronized (recordsGuard) {
      try {
         MessageFlowRecord record = records.get(nodeID);

         if (record == null) {
            if (logger.isDebugEnabled()) {
               logger.debug(this + "::Creating record for nodeID=" + nodeID + ", topologyMember=" + topologyMember);
            }

            // New node - create a new flow record
            final SimpleString queueName = getSfQueueName(nodeID);

            Binding queueBinding = postOffice.getBinding(queueName);

            Queue queue;

            if (queueBinding != null) {
               queue = (Queue) queueBinding.getBindable();
            } else {
               // Add binding in storage so the queue will get reloaded on startup and we can find it - it's never
               // actually routed to at that address though
               queue = server.createQueue(new QueueConfiguration(queueName).setRoutingType(RoutingType.MULTICAST).setAutoCreateAddress(true).setMaxConsumers(-1).setPurgeOnNoConsumers(false));
            }

            // There are a few things that will behave differently when it's an internal queue
            // such as we don't hold groupIDs inside the SnF queue
            queue.setInternalQueue(true);

            createNewRecord(topologyMember.getUniqueEventID(), nodeID, topologyMember.getLive(), queueName, queue, true);
         } else {
            if (logger.isTraceEnabled()) {
               logger.trace(this + " ignored nodeUp record for " + topologyMember + " on nodeID=" + nodeID + " as the record already existed");
            }
         }
      } catch (Exception e) {
         ActiveMQServerLogger.LOGGER.errorUpdatingTopology(e);
      }
   }
}

// Name of the store-and-forward queue used for the given remote node.
public SimpleString getSfQueueName(String nodeID) {
   return new SimpleString(storeAndForwardPrefix + name + "."
+ nodeID);
   }

   /**
    * Publishes this node as a live member in the cluster topology, registering a
    * TopologyMemberImpl keyed by this server's node ID with only the local connector set.
    */
   @Override
   public synchronized void informClusterOfBackup() {
      String nodeID = server.getNodeID().toString();

      TopologyMemberImpl localMember = new TopologyMemberImpl(nodeID, null, null, null, connector);

      topology.updateAsLive(nodeID, localMember);
   }

   /**
    * Aggregates bridge metrics across every message-flow record of this cluster
    * connection. Records whose bridge has not been created yet contribute zero.
    *
    * @return combined pending-acknowledgement and acknowledged message counts
    */
   @Override
   public ClusterConnectionMetrics getMetrics() {
      long messagesPendingAcknowledgement = 0;
      long messagesAcknowledged = 0;
      for (MessageFlowRecord record : records.values()) {
         final BridgeMetrics metrics = record.getBridge() != null ? record.getBridge().getMetrics() : null;
         messagesPendingAcknowledgement += metrics != null ? metrics.getMessagesPendingAcknowledgement() : 0;
         messagesAcknowledged += metrics != null ? metrics.getMessagesAcknowledged() : 0;
      }
      return new ClusterConnectionMetrics(messagesPendingAcknowledgement, messagesAcknowledged);
   }

   /**
    * Looks up the bridge metrics for a single remote node.
    *
    * @param nodeId target node ID of the message-flow record
    * @return the bridge's metrics, or {@code null} if no record or bridge exists for that node
    */
   @Override
   public BridgeMetrics getBridgeMetrics(String nodeId) {
      final MessageFlowRecord record = records.get(nodeId);
      return record != null && record.getBridge() != null ? record.getBridge().getMetrics() : null;
   }

   /**
    * Creates a message-flow record and its cluster-connection bridge towards a newly
    * discovered node, configuring a dedicated ServerLocator for the target.
    *
    * @param eventUID     UID of the topology event that triggered the record
    * @param targetNodeID node ID of the remote server
    * @param connector    transport configuration used to reach the remote server
    * @param queueName    name of the local store-and-forward queue
    * @param queue        the local store-and-forward queue the bridge consumes from
    * @param start        whether to start the bridge immediately
    */
   private void createNewRecord(final long eventUID,
                                final String targetNodeID,
                                final TransportConfiguration connector,
                                final SimpleString queueName,
                                final Queue queue,
                                final boolean start) throws Exception {
      String nodeId;

      synchronized (this) {
         // Bail out quietly if the cluster connection was stopped or never fully started.
         if (!started) {
            return;
         }

         if (serverLocator == null) {
            return;
         }

         nodeId = serverLocator.getNodeID();
      }

      final ServerLocatorInternal targetLocator = new ServerLocatorImpl(topology, true, connector);

      // The locator itself never retries; reconnect handling is the bridge's job.
      targetLocator.setReconnectAttempts(0);

      targetLocator.setInitialConnectAttempts(0);
      targetLocator.setClientFailureCheckPeriod(clientFailureCheckPeriod);
      targetLocator.setConnectionTTL(connectionTTL);
      targetLocator.setConfirmationWindowSize(confirmationWindowSize);

      // When duplicate detection is enabled the sends don't have to block for safety.
      targetLocator.setBlockOnDurableSend(!useDuplicateDetection);
      targetLocator.setBlockOnNonDurableSend(!useDuplicateDetection);
      targetLocator.setRetryInterval(retryInterval);
      targetLocator.setMaxRetryInterval(maxRetryInterval);
      targetLocator.setRetryIntervalMultiplier(retryIntervalMultiplier);
      targetLocator.setMinLargeMessageSize(minLargeMessageSize);
      targetLocator.setCallTimeout(serverLocator.getCallTimeout());
      targetLocator.setCallFailoverTimeout(serverLocator.getCallFailoverTimeout());

      // No producer flow control on the bridges by default, as we don't want to lock the queues
      targetLocator.setProducerWindowSize(this.producerWindowSize);

      targetLocator.setAfterConnectionInternalListener(this);

      // NOTE(review): this configures the shared serverLocator rather than the freshly
      // created targetLocator, and it is repeated on every record creation — confirm it
      // shouldn't be targetLocator.setProtocolManagerFactory(...) instead.
      serverLocator.setProtocolManagerFactory(ActiveMQServerSideProtocolManagerFactory.getInstance(serverLocator, server.getStorageManager()));

      targetLocator.setNodeID(nodeId);

      targetLocator.setClusterTransportConfiguration(serverLocator.getClusterTransportConfiguration());

      if (retryInterval > 0) {
         targetLocator.setRetryInterval(retryInterval);
      }

      targetLocator.disableFinalizeCheck();
      targetLocator.addIncomingInterceptor(new IncomingInterceptorLookingForExceptionMessage(manager, executorFactory.getExecutor()));

      MessageFlowRecordImpl record = new MessageFlowRecordImpl(targetLocator, eventUID, targetNodeID, connector, queueName, queue);

      ClusterConnectionBridge bridge = new ClusterConnectionBridge(this, manager, targetLocator, serverLocator, initialConnectAttempts, reconnectAttempts, retryInterval, retryIntervalMultiplier, maxRetryInterval, nodeManager.getUUID(), record.getEventUID(), record.getTargetNodeID(), record.getQueueName(), record.getQueue(), executorFactory.getExecutor(), null, null, scheduledExecutor, null, useDuplicateDetection, clusterUser, clusterPassword, server, managementService.getManagementAddress(), managementService.getManagementNotificationAddress(), record, record.getConnector(), storeAndForwardPrefix, server.getStorageManager());

      targetLocator.setIdentity("(Cluster-connection-bridge::" + bridge.toString() + "::" + this.toString() + ")");

      if (logger.isDebugEnabled()) {
         logger.debug("creating record between " + this.connector + " and " + connector + bridge);
      }

      record.setBridge(bridge);

      records.put(targetNodeID, record);

      if (start) {
         bridge.start();
      }

      // Warn when the configured ID cache cannot cover the confirmation window,
      // since that combination could let duplicate messages slip through.
      if ( !ConfigurationImpl.checkoutDupCacheSize(serverLocator.getConfirmationWindowSize(),server.getConfiguration().getIDCacheSize())) {
         ActiveMQServerLogger.LOGGER.duplicateCacheSizeWarning(server.getConfiguration().getIDCacheSize(), serverLocator.getConfirmationWindowSize());
      }
   }

   // Inner classes -----------------------------------------------------------------------------------

   /**
    * Per-remote-node flow record: tracks the bridge towards one node and the remote
    * queue bindings learned from that node's notifications.
    */
   private class MessageFlowRecordImpl implements MessageFlowRecord {

      private BridgeImpl bridge;

      private final long eventUID;

      private final String targetNodeID;

      private final TransportConfiguration connector;

      private final ServerLocatorInternal targetLocator;

      private final SimpleString queueName;

      private boolean disconnected = false;

      private final Queue queue;

      // Remote bindings keyed by cluster name, as advertised by the remote node.
      private final Map<SimpleString, RemoteQueueBinding> bindings = new HashMap<>();

      private volatile boolean isClosed = false;

      // Set once the initial RESET_QUEUE_DATA notification has been received;
      // binding notifications are ignored until then (see onMessage).
      private volatile boolean reset = false;

      private MessageFlowRecordImpl(final ServerLocatorInternal targetLocator,
                                    final long eventUID,
                                    final String targetNodeID,
                                    final TransportConfiguration connector,
                                    final SimpleString queueName,
                                    final Queue queue) {
         this.targetLocator = targetLocator;
         this.queue = queue;
         this.targetNodeID = targetNodeID;
         this.connector = connector;
         this.queueName = queueName;
         this.eventUID = eventUID;
      }

      /* (non-Javadoc)
       * @see java.lang.Object#toString()
       */
      @Override
      public String toString() {
         return "MessageFlowRecordImpl [nodeID=" + targetNodeID + ", connector=" + connector + ", queueName=" + queueName + ", queue=" + queue + ", isClosed=" + isClosed + ", reset=" + reset + "]";
      }

      @Override
      public void serverDisconnected() {
         this.disconnected = true;
      }

      @Override
      public String getAddress() {
         return address != null ?
address.toString() : "";
      }

      /**
       * @return the eventUID
       */
      public long getEventUID() {
         return eventUID;
      }

      /**
       * @return the nodeID
       */
      public String getTargetNodeID() {
         return targetNodeID;
      }

      /**
       * @return the connector
       */
      public TransportConfiguration getConnector() {
         return connector;
      }

      /**
       * @return the queueName
       */
      public SimpleString getQueueName() {
         return queueName;
      }

      /**
       * @return the queue
       */
      public Queue getQueue() {
         return queue;
      }

      @Override
      public int getMaxHops() {
         return maxHops;
      }

      /*
       * we should only ever close a record when the node itself has gone down or in the case of scale down where we know
       * the node is being completely destroyed and in this case we will migrate to another server/Bridge.
       * */
      @Override
      public void close() throws Exception {
         if (logger.isTraceEnabled()) {
            logger.trace("Stopping bridge " + bridge);
         }

         isClosed = true;

         // Drop all remote bindings before stopping the bridge.
         clearBindings();

         if (disconnected) {
            bridge.disconnect();
         }

         bridge.stop();

         // Tear down the target locator asynchronously on the bridge's executor;
         // cleanup() when the remote side disconnected us, close() otherwise.
         bridge.getExecutor().execute(new Runnable() {
            @Override
            public void run() {
               try {
                  if (disconnected) {
                     targetLocator.cleanup();
                  } else {
                     targetLocator.close();
                  }
               } catch (Exception ignored) {
                  // best-effort shutdown: log and continue
                  logger.debug(ignored.getMessage(), ignored);
               }
            }
         });
      }

      @Override
      public boolean isClosed() {
         return isClosed;
      }

      @Override
      public void reset() throws Exception {
         resetBindings();
      }

      public void setBridge(final BridgeImpl bridge) {
         this.bridge = bridge;
      }

      @Override
      public Bridge getBridge() {
         return bridge;
      }

      /**
       * Entry point for cluster notification messages arriving over this record's
       * bridge; handles the reset handshake, then dispatches on notification type.
       */
      @Override
      public synchronized void onMessage(final ClientMessage message) {
         if (logger.isDebugEnabled()) {
            logger.debug("ClusterCommunication::Flow record on " + clusterConnector + " Receiving message " + message);
         }
         try {
            // Reset the bindings
            if (message.containsProperty(PostOfficeImpl.HDR_RESET_QUEUE_DATA)) {
               reset = true;

               return;
            } else if (message.containsProperty(PostOfficeImpl.HDR_RESET_QUEUE_DATA_COMPLETE)) {
               clearDisconnectedBindings();
               return;
            }

            if (!reset) {
               logger.debug("Notification being ignored since first reset wasn't received yet: " +
(RemoteQueueBinding binding : new HashSet<>(bindings.values())) {
            resetBinding(binding.getClusterName());
         }
      }

      /** Removes only the bindings whose remote end is currently disconnected. */
      private synchronized void clearDisconnectedBindings() throws Exception {
         logger.debug(ClusterConnectionImpl.this + " reset bindings");
         for (RemoteQueueBinding binding : new HashSet<>(bindings.values())) {
            if (!binding.isConnected()) {
               removeBinding(binding.getClusterName());
            }
         }
      }

      /** Disconnects every binding and clears the reset flag so notifications are re-gated. */
      @Override
      public synchronized void disconnectBindings() throws Exception {
         logger.debug(ClusterConnectionImpl.this + " disconnect bindings");
         reset = false;
         for (RemoteQueueBinding binding : new HashSet<>(bindings.values())) {
            disconnectBinding(binding.getClusterName());
         }
      }

      /**
       * Handles a BINDING_ADDED notification: creates (or reconnects) the remote queue
       * binding advertised by the remote node and registers it with the post office.
       */
      private synchronized void doBindingAdded(final ClientMessage message) throws Exception {
         if (logger.isTraceEnabled()) {
            logger.trace(ClusterConnectionImpl.this + " Adding binding " + message);
         }

         if (!message.containsProperty(ManagementHelper.HDR_DISTANCE)) {
            throw new IllegalStateException("distance is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_ADDRESS)) {
            throw new IllegalStateException("queueAddress is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) {
            throw new IllegalStateException("clusterName is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_ROUTING_NAME)) {
            throw new IllegalStateException("routingName is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_BINDING_ID)) {
            throw new IllegalStateException("queueID is null");
         }

         Integer distance = message.getIntProperty(ManagementHelper.HDR_DISTANCE);

         SimpleString queueAddress = message.getSimpleStringProperty(ManagementHelper.HDR_ADDRESS);

         SimpleString clusterName = message.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME);

         SimpleString routingName = message.getSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME);

         SimpleString filterString = message.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING);

         Long queueID = message.getLongProperty(ManagementHelper.HDR_BINDING_ID);

         RemoteQueueBinding existingBinding = (RemoteQueueBinding) postOffice.getBinding(clusterName);

         if (existingBinding != null) {
            if (queueID.equals(existingBinding.getRemoteQueueID())) {
               if (!existingBinding.isConnected()) {
                  existingBinding.connect();
                  return;
               }
               // Sanity check - this means the binding has already been added via another bridge, probably max
               // hops is too high
               // or there are multiple cluster connections for the same address

               ActiveMQServerLogger.LOGGER.remoteQueueAlreadyBoundOnClusterConnection(this, clusterName);
               return;
            }
            //this could happen during jms non-durable failover while the qname doesn't change but qid
            //will be re-generated in backup. In that case a new remote binding will be created
            //and put it to the map and old binding removed.
            if (logger.isTraceEnabled()) {
               logger.trace("Removing binding because qid changed " + queueID + " old: " + existingBinding.getRemoteQueueID());
            }
            removeBinding(clusterName);
         }

         RemoteQueueBinding binding = new RemoteQueueBindingImpl(server.getStorageManager().generateID(), queueAddress, clusterName, routingName, queueID, filterString, queue, bridge.getName(), distance + 1, messageLoadBalancingType);

         if (logger.isTraceEnabled()) {
            logger.trace("Adding binding " + clusterName + " into " + ClusterConnectionImpl.this);
         }

         bindings.put(clusterName, binding);

         try {
            postOffice.addBinding(binding);
         } catch (Exception ignore) {
            // NOTE(review): failures to register the binding with the post office are
            // silently swallowed here — presumably a duplicate-binding race; confirm
            // this is intentional, since the local map was already updated above.
         }
      }

      /** Handles a BINDING_REMOVED notification by dropping the named binding. */
      private void doBindingRemoved(final ClientMessage message) throws Exception {
         if (logger.isTraceEnabled()) {
            logger.trace(ClusterConnectionImpl.this + " Removing binding " + message);
         }
         if (!message.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) {
            throw new IllegalStateException("clusterName is null");
         }

         SimpleString clusterName = message.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME);

         removeBinding(clusterName);
      }

      /** Removes a binding from the local map and the post office; warns if unknown. */
      private synchronized void removeBinding(final SimpleString clusterName) throws Exception {
         RemoteQueueBinding binding = bindings.remove(clusterName);

         if (binding ==
throw new IllegalStateException("distance is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) {
            throw new IllegalStateException("clusterName is null");
         }

         Integer distance = message.getIntProperty(ManagementHelper.HDR_DISTANCE);

         SimpleString clusterName = message.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME);

         message.putIntProperty(ManagementHelper.HDR_DISTANCE, distance + 1);

         SimpleString filterString = message.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING);

         RemoteQueueBinding binding = bindings.get(clusterName);

         if (binding == null) {
            throw new IllegalStateException("Cannot find binding for " + clusterName + " on " + ClusterConnectionImpl.this);
         }

         binding.addConsumer(filterString);

         // Need to propagate the consumer add
         TypedProperties props = new TypedProperties();

         props.putSimpleStringProperty(ManagementHelper.HDR_ADDRESS, binding.getAddress());

         props.putSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME, clusterName);

         props.putSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME, binding.getRoutingName());

         props.putIntProperty(ManagementHelper.HDR_DISTANCE, distance + 1);

         Queue theQueue = (Queue) binding.getBindable();

         props.putIntProperty(ManagementHelper.HDR_CONSUMER_COUNT, theQueue.getConsumerCount());

         if (filterString != null) {
            props.putSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING, filterString);
         }

         Notification notification = new Notification(null, CoreNotificationType.CONSUMER_CREATED, props);

         managementService.sendNotification(notification);
      }

      /**
       * Handles a CONSUMER_CLOSED notification: removes the consumer from the remote
       * binding and re-emits the notification with the hop distance incremented.
       */
      private synchronized void doConsumerClosed(final ClientMessage message) throws Exception {
         if (logger.isTraceEnabled()) {
            logger.trace(ClusterConnectionImpl.this + " Consumer closed " + message);
         }
         if (!message.containsProperty(ManagementHelper.HDR_DISTANCE)) {
            throw new IllegalStateException("distance is null");
         }

         if (!message.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) {
            throw new IllegalStateException("clusterName is null");
         }

         Integer distance = message.getIntProperty(ManagementHelper.HDR_DISTANCE);

         SimpleString clusterName = message.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME);

         message.putIntProperty(ManagementHelper.HDR_DISTANCE, distance + 1);

         SimpleString filterString = message.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING);

         RemoteQueueBinding binding = bindings.get(clusterName);

         if (binding == null) {
            throw new IllegalStateException("Cannot find binding for " + clusterName);
         }

         binding.removeConsumer(filterString);

         // Need to propagate the consumer close
         TypedProperties props = new TypedProperties();

         props.putSimpleStringProperty(ManagementHelper.HDR_ADDRESS, binding.getAddress());

         props.putSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME, clusterName);

         props.putSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME, binding.getRoutingName());

         props.putIntProperty(ManagementHelper.HDR_DISTANCE, distance + 1);

         Queue theQueue = (Queue) binding.getBindable();

         props.putIntProperty(ManagementHelper.HDR_CONSUMER_COUNT, theQueue.getConsumerCount());

         if (filterString != null) {
            props.putSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING, filterString);
         }

         Notification notification = new Notification(null, CoreNotificationType.CONSUMER_CLOSED, props);

         managementService.sendNotification(notification);
      }
   }

   // for testing only
   public Map<String, MessageFlowRecord> getRecords() {
      return records;
   }

   @Override
   public String toString() {
      return "ClusterConnectionImpl@" + System.identityHashCode(this) + "[nodeUUID=" + nodeManager.getNodeId() + ", connector=" + connector + ", address=" + address + ", server=" + server + "]";
   }

   /** Human-readable dump of this cluster connection and its flow records. */
   @Override
   public String describe() {
      StringWriter str = new StringWriter();
      PrintWriter out = new PrintWriter(str);

      out.println(this);
      out.println("***************************************");
      out.println(name + " connected to");
      for (MessageFlowRecord messageFlow : records.values()) {
         out.println("\t Bridge = " + messageFlow.getBridge());
         out.println("\t Flow Record = " + messageFlow);
      }
      out.println("***************************************");

      return str.toString();
   }

   /** Strategy for creating the server locator this cluster connection joins with. */
   private interface ClusterConnector {

      ServerLocatorInternal createServerLocator();
   }

   /** Builds a locator from a fixed, statically configured list of connectors. */
   private final class StaticClusterConnector implements ClusterConnector {

      private final TransportConfiguration[] tcConfigs;

      private StaticClusterConnector(TransportConfiguration[] tcConfigs) {
         this.tcConfigs = tcConfigs;
      }

      @Override
      public ServerLocatorInternal createServerLocator() {
         if (tcConfigs != null && tcConfigs.length > 0) {
            if (logger.isDebugEnabled()) {
               logger.debug(ClusterConnectionImpl.this + "Creating a serverLocator for " + Arrays.toString(tcConfigs));
            }
            ServerLocatorImpl locator = new ServerLocatorImpl(topology, true, tcConfigs);
            locator.setClusterConnection(true);
            return locator;
         }
         // No static connectors configured: no locator can be built.
         return null;
      }

      @Override
      public String toString() {
         return "StaticClusterConnector [tcConfigs=" + Arrays.toString(tcConfigs) + "]";
      }
   }

   /** Builds a locator backed by a discovery group configuration. */
   private final class DiscoveryClusterConnector implements ClusterConnector {

      private final DiscoveryGroupConfiguration dg;

      private DiscoveryClusterConnector(DiscoveryGroupConfiguration dg) {
         this.dg = dg;
      }

      @Override
      public ServerLocatorInternal createServerLocator() {
         return new ServerLocatorImpl(topology, true, dg);
      }
   }

   /** Checks the supplied cluster credentials against this connection's configured ones. */
   @Override
   public boolean verify(String clusterUser0, String clusterPassword0) {
      return clusterUser.equals(clusterUser0) && clusterPassword.equals(clusterPassword0);
   }

   /** Removes and closes the flow record for the given node, logging any failure. */
   @Override
   public void removeRecord(String targetNodeID) {
      logger.debug("Removing record for: " + targetNodeID);
      MessageFlowRecord record = records.remove(targetNodeID);
      try {
         if (record != null) {
            record.close();
         }
      } catch (Exception e) {
         ActiveMQServerLogger.LOGGER.failedToRemoveRecord(e);
      }
   }

   /** Disconnects (but keeps) the bindings of the flow record for the given node. */
   @Override
   public void disconnectRecord(String targetNodeID) {
      logger.debug("Disconnecting record for: " + targetNodeID);
      MessageFlowRecord record = records.get(targetNodeID);
      try {
         if (record != null) {
            record.disconnectBindings();
         }
      } catch (Exception e) {
         ActiveMQServerLogger.LOGGER.failedToDisconnectBindings(e);
      }
   }

   /**
    * Periodically re-announces this node as live in the topology, up to
    * clusterNotificationAttempts times, spaced by clusterNotificationInterval.
    */
   private final class LiveNotifier implements Runnable {

      int notificationsSent = 0;

      @Override
      public void run() {
         resendLive();
         schedule();
      }

      public void schedule() {
         if (started && !stopping && notificationsSent++ < clusterNotificationAttempts) {
            scheduledExecutor.schedule(this, clusterNotificationInterval, TimeUnit.MILLISECONDS);
         }
      }

      public void updateAsLive() {
         if (!stopping && started) {
            topology.updateAsLive(manager.getNodeId(), new TopologyMemberImpl(manager.getNodeId(), manager.getBackupGroupName(), manager.getScaleDownGroupName(), connector, null));
         }
      }

      public void resendLive() {
         if (!stopping && started) {
            topology.resendNode(manager.getNodeId());
         }
      }
   }
}
/* * Copyright 2018-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.testutil; import static java.nio.charset.StandardCharsets.UTF_8; import com.facebook.buck.io.file.MostFiles; import com.facebook.buck.rules.keys.config.impl.BuckVersion; import com.facebook.buck.util.BuckConstant; import com.facebook.buck.util.environment.Platform; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.net.URL; import java.nio.channels.Channels; import java.nio.file.DirectoryStream; import java.nio.file.FileSystem; import java.nio.file.FileSystemNotFoundException; import java.nio.file.FileSystems; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.spi.FileSystemProvider; import java.util.ArrayDeque; import java.util.HashMap; import java.util.Map; import java.util.Optional; 
import java.util.Queue;
import java.util.Set;

/**
 * {@link AbstractWorkspace} is a directory that contains a Buck project, complete with build files.
 * It requires that its implementations provide a way of running a buckCommand.
 *
 * <p>It provides ways of adding testdata to the Buck projects, complete with BuildFiles, and ways
 * of overriding the copied .buckconfig by adding configs to a BuckConfigLocal.
 */
public abstract class AbstractWorkspace {

  /**
   * Describes at what fixtureLevel a file was in when it was saved. Needed since when we save
   * fixtured files, they will no longer have those fixtures attached
   */
  private enum FixtureLevel {
    NOT_A_FIXTURE(0),
    FIXTURE(1),
    TEST_CLASS_FIXTURE(2),
    TEST_CASE_FIXTURE(3);

    private final int level;

    FixtureLevel(int level) {
      this.level = level;
    }

    /** @return integer value of the fixture level */
    @JsonValue
    public int getLevel() {
      return level;
    }
  }

  protected static final String FIXTURE_SUFFIX = "fixture";
  protected static final String EXPECTED_SUFFIX = "expected";

  protected Path destPath;
  // Pending .buckconfig.local overrides: {section -> {key -> value}}.
  private final Map<String, Map<String, String>> localConfigs = new HashMap<>();
  // Whether one-time setup (version stamp, default configs, watchman config) has run yet.
  private boolean firstTemplateAdded = false;
  private Optional<String> testClassFixtureSuffix = Optional.empty();
  private Optional<String> testCaseFixtureSuffix = Optional.empty();
  // Most specific fixture level already written for each destination path.
  private Map<Path, FixtureLevel> writtenFileFixtureLevel = new HashMap<>();

  /**
   * Constructor for AbstractWorkspace
   *
   * @param destPath is the folder where the project will be stored. It is generally a temporary
   *     folder
   */
  protected AbstractWorkspace(Path destPath) {
    this.destPath = destPath;
  }

  /**
   * Constructor for AbstractWorkspace. Please note that if you use this constructor, you MUST set
   * destPath before using methods that rely on it.
   */
  protected AbstractWorkspace() {}

  /**
   * After attaching testClassName and testName as fixture suffixes, the files with those suffixes
   * will replace any with just the fixture suffix.
   *
   * <p>Example: BUCK.fixture < BUCK.fixtureTestClass < BUCK.fixtureTestClassTestCase
   */
  public void attachTestSpecificFixtureSuffixes(String testClassName, String testName) {
    testClassFixtureSuffix = Optional.of(FIXTURE_SUFFIX + testClassName);
    testCaseFixtureSuffix = Optional.of(testClassFixtureSuffix.get() + testName);
  }

  /** Returns the mutable option map for a config section, creating it on first access. */
  private Map<String, String> getBuckConfigLocalSection(String section) {
    Map<String, String> newValue = new HashMap<>();
    Map<String, String> oldValue = localConfigs.putIfAbsent(section, newValue);
    if (oldValue != null) {
      return oldValue;
    }
    return newValue;
  }

  /** Serializes the accumulated overrides into the workspace's .buckconfig.local file. */
  private void saveBuckConfigLocal() throws IOException {
    writeContentsToPath(convertToBuckConfig(localConfigs), ".buckconfig.local");
  }

  /** Renders {section -> {key -> value}} as INI-style buckconfig text. */
  protected static String convertToBuckConfig(Map<String, Map<String, String>> configs) {
    StringBuilder contents = new StringBuilder();
    for (Map.Entry<String, Map<String, String>> section : configs.entrySet()) {
      contents.append("[").append(section.getKey()).append("]\n\n");
      for (Map.Entry<String, String> option : section.getValue().entrySet()) {
        contents.append(option.getKey()).append(" = ").append(option.getValue()).append("\n");
      }
      contents.append("\n");
    }
    return contents.toString();
  }

  /**
   * Overrides buckconfig options with the given value at the given section's option.
   *
   * @param section is the section name where option is defined
   * @param key is the option name
   * @param value is the new value for the option
   * @throws IOException when saving the new BuckConfigLocal has an issue
   */
  public void addBuckConfigLocalOption(String section, String key, String value)
      throws IOException {
    getBuckConfigLocalSection(section).put(key, value);
    saveBuckConfigLocal();
  }

  /**
   * Overrides buckconfig options with the given values in a map of {section: {key: value}}
   *
   * @throws IOException when saving the new BuckConfigLocal has an issue
   */
  public void addBuckConfigLocalOptions(Map<String, Map<String, String>> localConfigs)
      throws IOException {
    if (localConfigs.isEmpty()) {
      // avoid saving if there's nothing in the map
      return;
    }
    for (Map.Entry<String, Map<String, String>> section : localConfigs.entrySet()) {
      for (Map.Entry<String, String> option : section.getValue().entrySet()) {
        getBuckConfigLocalSection(section.getKey()).put(option.getKey(), option.getValue());
      }
    }
    saveBuckConfigLocal();
  }

  /**
   * Removes overriding buckconfig options at the given section's option. If the option is not being
   * overridden, then nothing happens.
   *
   * @param section is the section name where option is defined
   * @param key is the option name
   * @throws IOException when saving the new BuckConfigLocal has an issue
   */
  public void removeBuckConfigLocalOption(String section, String key) throws IOException {
    getBuckConfigLocalSection(section).remove(key);
    saveBuckConfigLocal();
  }

  /** Stamp the buck-out directory if it exists and isn't stamped already */
  private void stampBuckVersion() throws IOException {
    if (!Files.exists(destPath.resolve(BuckConstant.getBuckOutputPath()))) {
      return;
    }
    try (OutputStream outputStream =
        new BufferedOutputStream(
            Channels.newOutputStream(
                Files.newByteChannel(
                    destPath.resolve(BuckConstant.getBuckOutputPath().resolve(".currentversion")),
                    ImmutableSet.<OpenOption>of(
                        StandardOpenOption.CREATE, StandardOpenOption.WRITE))))) {
      outputStream.write(BuckVersion.getVersion().getBytes(Charsets.UTF_8));
    }
  }

  /** Rejects templates that ship their own .buckconfig.local, which would be clobbered. */
  private void ensureNoLocalBuckConfig(Path templatePath) throws IOException {
    if (Files.exists(templatePath.resolve(".buckconfig.local"))) {
      throw new IllegalStateException(
          "Found a .buckconfig.local in the Workspace template, which is illegal."
              + " Use addBuckConfigLocalOption instead.");
    }
  }

  /** Applies the default local buckconfig overrides used by all integration tests. */
  private void addDefaultLocalBuckConfigs() throws IOException {
    // Disable the directory cache by default. Tests that want to enable it can call
    // `enableDirCache` on this object. Only do this if a .buckconfig.local file does not already
    // exist, however (we assume the test knows what it is doing at that point).
    addBuckConfigLocalOption("cache", "mode", "");

    // Limit the number of threads by default to prevent multiple integration tests running at the
    // same time from creating a quadratic number of threads. Tests can disable this using
    // `disableThreadLimitOverride`.
addBuckConfigLocalOption("build", "threads", "2"); } private void ensureWatchmanConfig() throws IOException { // We have to have .watchmanconfig on windows, otherwise we have problems with deleting stuff // from buck-out while watchman indexes/touches files. if (!Files.exists(getPath(".watchmanconfig"))) { writeContentsToPath("{\"ignore_dirs\":[\"buck-out\",\".buckd\"]}", ".watchmanconfig"); } } private FileSystem getOrCreateJarFileSystem(URI jarURI) throws IOException { try { return FileSystems.getFileSystem(jarURI); } catch (FileSystemNotFoundException e) { Map<String, String> env = new HashMap<>(); env.put("create", "true"); return FileSystems.newFileSystem(jarURI, env); } } private FixtureLevel extensionIsFixture(String extension) { if (extension.equals(FIXTURE_SUFFIX)) { return FixtureLevel.FIXTURE; } if (testClassFixtureSuffix.isPresent() && extension.equals(testClassFixtureSuffix.get())) { return FixtureLevel.TEST_CLASS_FIXTURE; } if (testCaseFixtureSuffix.isPresent() && extension.equals(testCaseFixtureSuffix.get())) { return FixtureLevel.TEST_CASE_FIXTURE; } return FixtureLevel.NOT_A_FIXTURE; } private Optional<Path> copyFilePath(Path contentPath) { String fileName = contentPath.getFileName().toString(); String extension = com.google.common.io.Files.getFileExtension(fileName); if (extension.equals(EXPECTED_SUFFIX)) { return Optional.empty(); } FixtureLevel fixtureLevel = extensionIsFixture(extension); if (fixtureLevel == FixtureLevel.NOT_A_FIXTURE) { return Optional.of(contentPath); } Path outputPath = contentPath .getParent() .resolve(com.google.common.io.Files.getNameWithoutExtension(fileName)); // Only copy fixture if it has not yet been copied, or if only less specific fixtures have // already been written if (fixtureLevel.getLevel() < writtenFileFixtureLevel.getOrDefault(outputPath, FixtureLevel.NOT_A_FIXTURE).getLevel()) { return Optional.empty(); } writtenFileFixtureLevel.put(outputPath, fixtureLevel); return Optional.of(outputPath); } private void 
copyTemplateContentsToDestPath( FileSystemProvider provider, Path templatePath, Path contentPath) throws IOException { Optional<Path> optionalOutputPath = copyFilePath(contentPath); if (!optionalOutputPath.isPresent()) { return; } Path outputPath = templatePath.relativize(optionalOutputPath.get()); Path targetPath = destPath.resolve(outputPath.toString()); try (InputStream inStream = provider.newInputStream(contentPath); FileOutputStream outStream = new FileOutputStream(targetPath.toString())) { byte[] buffer = new byte[inStream.available()]; inStream.read(buffer); outStream.write(buffer); } if (Platform.detect() == Platform.WINDOWS) { return; } // require that certain files are executable. // the jar process removes any granularity around this, so we give everything the permission Set<PosixFilePermission> targetPermissions = Files.getPosixFilePermissions(targetPath); targetPermissions.add(PosixFilePermission.OWNER_EXECUTE); targetPermissions.add(PosixFilePermission.GROUP_EXECUTE); targetPermissions.add(PosixFilePermission.OTHERS_EXECUTE); Files.setPosixFilePermissions(targetPath, targetPermissions); } private void preAddTemplateActions(Path templatePath) throws IOException { ensureNoLocalBuckConfig(templatePath); } private void postAddTemplateActions() throws IOException { if (!firstTemplateAdded) { firstTemplateAdded = true; stampBuckVersion(); addDefaultLocalBuckConfigs(); ensureWatchmanConfig(); } } /** * This will copy the template directory, renaming files named {@code foo.fixture} to {@code foo} * in the process. Files whose names end in {@code .expected} will not be copied. */ public void addTemplateToWorkspace(Path templatePath) throws IOException { preAddTemplateActions(templatePath); // renames those with FIXTURE_SUFFIX, removes those with EXPECTED_SUFFIX MostFiles.copyRecursively( templatePath, destPath, (Path path) -> copyFilePath(path).orElse(null)); if (Platform.detect() == Platform.WINDOWS) { // Hack for symlinks on Windows. 
      SimpleFileVisitor<Path> copyDirVisitor =
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path path, BasicFileAttributes attrs)
                throws IOException {
              // On Windows, symbolic links from git repository are checked out as normal files
              // containing a one-line path. In order to distinguish them, paths are read and
              // pointed files are trued to locate. Once the pointed file is found, it will be
              // copied to target.
              // On NTFS length of path must be greater than 0 and less than 4096.
              if (attrs.size() > 0 && attrs.size() <= 4096) {
                String linkTo = new String(Files.readAllBytes(path), UTF_8);
                Path linkToFile;
                try {
                  linkToFile = templatePath.resolve(linkTo);
                } catch (InvalidPathException e) {
                  // Let's assume we were reading a normal text file, and not something meant to
                  // be a link.
                  return FileVisitResult.CONTINUE;
                }
                if (Files.isRegularFile(linkToFile)) {
                  // The "link" names an existing file: replace the placeholder with its contents.
                  Files.copy(linkToFile, path, StandardCopyOption.REPLACE_EXISTING);
                } else if (Files.isDirectory(linkToFile)) {
                  // The "link" names a directory: replace the placeholder with a recursive copy.
                  Files.delete(path);
                  MostFiles.copyRecursively(linkToFile, path);
                }
              }
              return FileVisitResult.CONTINUE;
            }
          };
      Files.walkFileTree(destPath, copyDirVisitor);
    }
    postAddTemplateActions();
  }

  /**
   * Copies a template to the Workspace regardless of what provider the template has its path
   * This is needed since using the "regular" copying mechanics with our destPath and
   * a non-default FileSystemProvider causes a ProviderMismatchException
   */
  private void copyTemplateToWorkspace(FileSystemProvider provider, Path templatePath)
      throws IOException {
    // Breadth-first walk of the template tree; all reads go through the template's own
    // FileSystemProvider so jar-backed paths work.
    Queue<Path> contentQueue = new ArrayDeque<>();
    addDirectoryContentToQueue(provider, templatePath, contentQueue);
    while (!contentQueue.isEmpty()) {
      Path contentPath = contentQueue.remove();
      if (Files.isDirectory(contentPath)) {
        Files.createDirectory(destPath.resolve(templatePath.relativize(contentPath).toString()));
        addDirectoryContentToQueue(provider, contentPath, contentQueue);
      } else {
        copyTemplateContentsToDestPath(provider, templatePath, contentPath);
      }
    }
  }

  /**
   * This will copy the template directory, renaming files named {@code foo.fixture} to {@code foo}
   * in the process. Files whose names end in {@code .expected} will not be copied.
   *
   * <p>Assumes that {@param testDataResource} is contained in a jar.
   */
  public void addTemplateToWorkspace(URL testDataResource, String templateName) throws Exception {
    // not using "!/" as the ZipFileProvider (which is what is used for Jar files) treats "/" as the
    // root of the jar, and we want to keep it in the split
    String[] jarSplit = testDataResource.toURI().toString().split("!");
    URI jarURI = URI.create(jarSplit[0]);
    FileSystem testDataFS = getOrCreateJarFileSystem(jarURI);
    FileSystemProvider provider = testDataFS.provider();
    Path templatePath = testDataFS.getPath(jarSplit[1], templateName);
    preAddTemplateActions(templatePath);
    copyTemplateToWorkspace(provider, templatePath);
    postAddTemplateActions();
  }

  /** Enqueues the direct children of dirPath, read through the given provider. */
  private void addDirectoryContentToQueue(
      FileSystemProvider provider, Path dirPath, Queue<Path> contentQueue) throws IOException {
    try (DirectoryStream<Path> dirStream = provider.newDirectoryStream(dirPath, entry -> true)) {
      for (Path contents : dirStream) {
        contentQueue.add(contents);
      }
    }
  }

  /**
   * Runs Buck with the specified list of command-line arguments.
   *
   * @param args to pass to {@code buck}, so that could be {@code ["build", "//path/to:target"]},
   *     {@code ["project"]}, etc.
   * @return the result of running Buck, which includes the exit code, stdout, and stderr.
   */
  public abstract ProcessResult runBuckCommand(String... args) throws Exception;

  /**
   * Runs Buck with the specified list of command-line arguments with the given map of environment
   * variables.
   *
   * @param environment set of environment variables to override
   * @param args to pass to {@code buck}, so that could be {@code ["build", "//path/to:target"]},
   *     {@code ["project"]}, etc.
   * @return the result of running Buck, which includes the exit code, stdout, and stderr.
   */
  public abstract ProcessResult runBuckCommand(
      ImmutableMap<String, String> environment, String... args) throws Exception;

  /**
   * Gets the path the workspace is held.
   *
   * @return the path the workspace is held
   */
  public Path getDestPath() {
    return destPath;
  }

  /**
   * Resolves the given path relative to the path the workspace is held.
   *
   * @param pathRelativeToWorkspaceRoot is the path relative to the workspace's destPath
   * @return absolute path of the given path relative to the workspace's destPath
   */
  public Path getPath(Path pathRelativeToWorkspaceRoot) {
    return destPath.resolve(pathRelativeToWorkspaceRoot);
  }

  /**
   * Resolves the given path relative to the path the workspace is held.
   *
   * @param pathRelativeToWorkspaceRoot is the path relative to the workspace's destPath
   * @return absolute path of the given path relative to the workspace's destPath
   */
  public Path getPath(String pathRelativeToWorkspaceRoot) {
    return destPath.resolve(pathRelativeToWorkspaceRoot);
  }

  /**
   * Resolves the given path relative to the path the workspace is held and returns its contents
   *
   * @param pathRelativeToWorkspaceRoot is the path relative to the workspace's destPath
   * @return file contents at absolute path of the given path relative to the workspace's destPath
   */
  public String getFileContents(Path pathRelativeToWorkspaceRoot) throws IOException {
    return getFileContentsWithAbsolutePath(getPath(pathRelativeToWorkspaceRoot));
  }

  /**
   * Resolves the given path relative to the path the workspace is held and returns its contents
   *
   * @param pathRelativeToWorkspaceRoot is the path relative to the workspace's destPath
   * @return file contents at absolute path of the given path relative to the workspace's destPath
   */
  public String getFileContents(String pathRelativeToWorkspaceRoot) throws IOException {
    return getFileContentsWithAbsolutePath(getPath(pathRelativeToWorkspaceRoot));
  }

  // Reads the file at the given absolute path, preferring a platform-specific sibling
  // ("name.<platform>.ext" or "name.<platform>") when one exists.
  private String getFileContentsWithAbsolutePath(Path path) throws IOException {
    String platformExt = null;
    switch (Platform.detect()) {
      case LINUX:
        platformExt = "linux";
        break;
      case MACOS:
        platformExt = "macos";
        break;
      case WINDOWS:
        platformExt = "win";
        break;
      case FREEBSD:
        platformExt = "freebsd";
        break;
      case UNKNOWN:
        // Leave platformExt as null.
        break;
    }
    if (platformExt != null) {
      String extension = com.google.common.io.Files.getFileExtension(path.toString());
      String basename = com.google.common.io.Files.getNameWithoutExtension(path.toString());
      Path platformPath =
          extension.length() > 0
              ?
              path.getParent()
                      .resolve(String.format("%s.%s.%s", basename, platformExt, extension))
              : path.getParent().resolve(String.format("%s.%s", basename, platformExt));
      if (platformPath.toFile().exists()) {
        path = platformPath;
      }
    }
    return new String(Files.readAllBytes(path), UTF_8);
  }

  /**
   * Copies the file at source (relative to workspace root) to dest (relative to workspace root)
   *
   * @param source source path of file relative to workspace root
   * @param dest destination path of file relative to workspace root
   * @throws IOException
   */
  public void copyFile(String source, String dest) throws IOException {
    Path destination = getPath(dest);
    // Delete first so the copy also succeeds when the destination already exists.
    Files.deleteIfExists(destination);
    Files.copy(getPath(source), destination);
  }

  /**
   * Copies the files at source (absolute) to pathRelativeToWorkspaceRoot
   *
   * @param source source path of file relative to workspace root
   * @param pathRelativeToWorkspaceRoot destination directory of files relative to workspace root
   * @throws IOException
   */
  public void copyRecursively(Path source, Path pathRelativeToWorkspaceRoot) throws IOException {
    MostFiles.copyRecursively(source, destPath.resolve(pathRelativeToWorkspaceRoot));
  }

  /**
   * Moves the file at source (relative to workspace root) to dest (relative to workspace root)
   *
   * @param source source path of file relative to workspace root
   * @param dest destination path of file relative to workspace root
   * @throws IOException
   */
  public void move(String source, String dest) throws IOException {
    Files.move(getPath(source), getPath(dest));
  }

  /**
   * Replaces all instances of target with replacement at the given path
   *
   * @param pathRelativeToWorkspaceRoot path of file to replace contents of
   * @param target string to replace
   * @param replacement string to replace with
   * @return True if any contents of the file were replaced, False otherwise
   * @throws IOException
   */
  public boolean replaceFileContents(
      String pathRelativeToWorkspaceRoot, String target, String replacement) throws IOException {
    String fileContents =
        getFileContents(pathRelativeToWorkspaceRoot);
    String newFileContents = fileContents.replace(target, replacement);
    writeContentsToPath(newFileContents, pathRelativeToWorkspaceRoot);
    // True only when at least one occurrence of target was actually present.
    return !newFileContents.equals(fileContents);
  }

  /**
   * Create file (or overwrite existing file) with given contents at
   *
   * @param contents contents to write to the file
   * @param pathRelativeToWorkspaceRoot destination path of file relative to workspace root
   * @param options options (the same ones that Files.write takes)
   * @throws IOException
   */
  public void writeContentsToPath(
      String contents, String pathRelativeToWorkspaceRoot, OpenOption... options)
      throws IOException {
    Files.write(getPath(pathRelativeToWorkspaceRoot), contents.getBytes(UTF_8), options);
  }

  /** @return the specified path resolved against the root of this workspace. */
  public Path resolve(Path pathRelativeToWorkspaceRoot) {
    return destPath.resolve(pathRelativeToWorkspaceRoot);
  }

  /** @return the specified path resolved against the root of this workspace. */
  public Path resolve(String pathRelativeToWorkspaceRoot) {
    return destPath.resolve(pathRelativeToWorkspaceRoot);
  }
}
package com.googlecode.mp4parser.authoring.tracks.h265;

import com.coremedia.iso.IsoTypeReader;
import com.googlecode.mp4parser.DataSource;
import com.googlecode.mp4parser.FileDataSourceImpl;
import com.googlecode.mp4parser.authoring.Sample;
import com.googlecode.mp4parser.authoring.SampleImpl;
import com.googlecode.mp4parser.h264.read.CAVLCReader;
import com.googlecode.mp4parser.util.ByteBufferByteChannel;
import com.mp4parser.iso14496.part15.HevcDecoderConfigurationRecord;

import java.io.EOFException;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

/**
 * Created by sannies on 08.09.2014.
 */
public class H265TrackImplOld {
    // Non-VCL HEVC NAL unit type codes (ISO/IEC 23008-2, Table 7-1).
    public static final int VPS_NUT = 32;
    public static final int SPS_NUT = 33;
    public static final int PPS_NUT = 34;
    public static final int AUD_NUT = 35;
    public static final int PREFIX_SEI_NUT = 39;
    public static final int RSV_NVCL41 = 41;
    public static final int RSV_NVCL42 = 42;
    public static final int RSV_NVCL43 = 43;
    public static final int RSV_NVCL44 = 44;
    public static final int UNSPEC48 = 48;
    public static final int UNSPEC49 = 49;
    public static final int UNSPEC50 = 50;
    public static final int UNSPEC51 = 51;
    public static final int UNSPEC52 = 52;
    public static final int UNSPEC53 = 53;
    public static final int UNSPEC54 = 54;
    public static final int UNSPEC55 = 55;

    // VCL HEVC NAL unit type codes.
    private static final int TRAIL_N = 0;
    private static final int TRAIL_R = 1;
    private static final int TSA_N = 2;
    private static final int TSA_R = 3;
    private static final int STSA_N = 4;
    private static final int STSA_R = 5;
    private static final int RADL_N = 6;
    private static final int RADL_R = 7;
    private static final int RASL_N = 8;
    private static final int RASL_R = 9;
    private static final int BLA_W_LP = 16;
    private static final int BLA_W_RADL = 17;
    private static final int BLA_N_LP = 18;
    private static final int
            IDR_W_RADL = 19;
    private static final int IDR_N_LP = 20;
    private static final int CRA_NUT = 21;

    // Size of the look-ahead window used when scanning for NAL start codes.
    private static final long BUFFER = 1024 * 1024;

    // Parameter sets keyed by the sample number at which they were encountered.
    LinkedHashMap<Long, ByteBuffer> videoParamterSets = new LinkedHashMap<Long, ByteBuffer>();
    LinkedHashMap<Long, ByteBuffer> sequenceParamterSets = new LinkedHashMap<Long, ByteBuffer>();
    LinkedHashMap<Long, ByteBuffer> pictureParamterSets = new LinkedHashMap<Long, ByteBuffer>();

    List<Long> syncSamples = new ArrayList<Long>();
    List<Sample> samples = new ArrayList<Sample>();

    /**
     * Parses the given start-code-delimited H.265 stream into samples (one per access unit),
     * collecting VPS/SPS/PPS NAL units along the way.
     */
    public H265TrackImplOld(DataSource ds) throws IOException {
        LookAhead la = new LookAhead(ds);
        long sampleNo = 1;
        List<ByteBuffer> accessUnit = new ArrayList<ByteBuffer>();
        int accessUnitNalType = 0;
        ByteBuffer nal;
        while ((nal = findNextNal(la)) != null) {
            NalUnitHeader nalUnitHeader = getNalUnitHeader(nal);
            //System.err.println(String.format("type: %3d - layer: %3d - tempId: %3d",
            //        nalUnitHeader.nalUnitType, nalUnitHeader.nuhLayerId, nalUnitHeader.nuhTemporalIdPlusOne));
            switch (nalUnitHeader.nalUnitType) {
                case VPS_NUT:
                    videoParamterSets.put(sampleNo, nal);
                    break;
                case SPS_NUT:
                    sequenceParamterSets.put(sampleNo, nal);
                    break;
                case PPS_NUT:
                    pictureParamterSets.put(sampleNo, nal);
                    break;
            }
            if (nalUnitHeader.nalUnitType < 32) {
                accessUnitNalType = nalUnitHeader.nalUnitType; // All NAL in one Access Unit Sample have same nal unit type
            }
            if (isFirstOfAU(nalUnitHeader.nalUnitType, nal, accessUnit) && !accessUnit.isEmpty()) {
                // Debug dump of the access unit that is about to become a sample.
                System.err.println("##########################");
                for (ByteBuffer byteBuffer : accessUnit) {
                    NalUnitHeader _nalUnitHeader = getNalUnitHeader(byteBuffer);
                    System.err.println(String.format("type: %3d - layer: %3d - tempId: %3d - size: %3d",
                            _nalUnitHeader.nalUnitType, _nalUnitHeader.nuhLayerId,
                            _nalUnitHeader.nuhTemporalIdPlusOne, byteBuffer.limit()));
                }
                System.err.println(" ##########################");
                samples.add(createSample(accessUnit));
                accessUnit.clear();
                sampleNo++;
            }
            accessUnit.add(nal);
            if (accessUnitNalType >= 16 &&
                    accessUnitNalType <= 21) {
                // VCL types 16..21 (BLA/IDR/CRA) are random access points -> sync samples.
                syncSamples.add(sampleNo);
            }
        }
        System.err.println("");
        HevcDecoderConfigurationRecord hvcC = new HevcDecoderConfigurationRecord();
        hvcC.setArrays(getArrays());
        hvcC.setAvgFrameRate(0);
    }

    // Ad-hoc manual test entry point with a hard-coded local file path.
    public static void main(String[] args) throws IOException {
        new H265TrackImplOld(new FileDataSourceImpl("c:\\content\\test-UHD-HEVC_01_FMV_Med_track1.hvc"));
    }

    /**
     * Scans forward for the next 00 00 01 start code and returns the NAL unit that follows it,
     * or null at end of stream.
     */
    private ByteBuffer findNextNal(LookAhead la) throws IOException {
        try {
            while (!la.nextThreeEquals001()) {
                la.discardByte();
            }
            la.discardNext3AndMarkStart();
            while (!la.nextThreeEquals000or001orEof()) {
                la.discardByte();
            }
            return la.getNal();
        } catch (EOFException e) {
            return null;
        }
    }

    /** Parses a profile_tier_level() syntax structure (ISO/IEC 23008-2, clause 7.3.3). */
    public void profile_tier_level(int maxNumSubLayersMinus1, CAVLCReader r) throws IOException {
        int general_profile_space = r.readU(2, "general_profile_space ");
        boolean general_tier_flag = r.readBool("general_tier_flag");
        int general_profile_idc = r.readU(5, "general_profile_idc");
        boolean[] general_profile_compatibility_flag = new boolean[32];
        for (int j = 0; j < 32; j++) {
            general_profile_compatibility_flag[j] = r.readBool("general_profile_compatibility_flag[" + j + "]");
        }
        boolean general_progressive_source_flag = r.readBool("general_progressive_source_flag");
        boolean general_interlaced_source_flag = r.readBool("general_interlaced_source_flag");
        boolean general_non_packed_constraint_flag = r.readBool("general_non_packed_constraint_flag");
        boolean general_frame_only_constraint_flag = r.readBool("general_frame_only_constraint_flag");
        long general_reserved_zero_44bits = r.readU(44, "general_reserved_zero_44bits");
        int general_level_idc = r.readU(8, "general_level_idc");
        boolean[] sub_layer_profile_present_flag = new boolean[maxNumSubLayersMinus1];
        boolean[] sub_layer_level_present_flag = new boolean[maxNumSubLayersMinus1];
        for (int i = 0; i < maxNumSubLayersMinus1; i++) {
            sub_layer_profile_present_flag[i] = r.readBool("sub_layer_profile_present_flag[" + i + "]");
            sub_layer_level_present_flag[i] =
                    r.readBool("sub_layer_level_present_flag[" + i + "]");
        }
        if (maxNumSubLayersMinus1 > 0) {
            // Byte-alignment: reserved_zero_2bits for sub-layer slots up to 8.
            for (int i = maxNumSubLayersMinus1; i < 8; i++) {
                r.readU(2, "reserved_zero_2bits");
            }
        }
        int[] sub_layer_profile_space = new int[maxNumSubLayersMinus1];
        boolean[] sub_layer_tier_flag = new boolean[maxNumSubLayersMinus1];
        int[] sub_layer_profile_idc = new int[maxNumSubLayersMinus1];
        boolean[][] sub_layer_profile_compatibility_flag = new boolean[maxNumSubLayersMinus1][32];
        boolean[] sub_layer_progressive_source_flag = new boolean[maxNumSubLayersMinus1];
        boolean[] sub_layer_interlaced_source_flag = new boolean[maxNumSubLayersMinus1];
        boolean[] sub_layer_non_packed_constraint_flag = new boolean[maxNumSubLayersMinus1];
        boolean[] sub_layer_frame_only_constraint_flag = new boolean[maxNumSubLayersMinus1];
        int[] sub_layer_level_idc = new int[maxNumSubLayersMinus1];
        for (int i = 0; i < maxNumSubLayersMinus1; i++) {
            if (sub_layer_profile_present_flag[i]) {
                sub_layer_profile_space[i] = r.readU(2, "sub_layer_profile_space[" + i + "]");
                sub_layer_tier_flag[i] = r.readBool("sub_layer_tier_flag[" + i + "]");
                sub_layer_profile_idc[i] = r.readU(5, "sub_layer_profile_idc[" + i + "]");
                for (int j = 0; j < 32; j++) {
                    sub_layer_profile_compatibility_flag[i][j] = r.readBool("sub_layer_profile_compatibility_flag[" + i + "][" + j + "]");
                }
                sub_layer_progressive_source_flag[i] = r.readBool("sub_layer_progressive_source_flag[" + i + "]");
                sub_layer_interlaced_source_flag[i] = r.readBool("sub_layer_interlaced_source_flag[" + i + "]");
                sub_layer_non_packed_constraint_flag[i] = r.readBool("sub_layer_non_packed_constraint_flag[" + i + "]");
                sub_layer_frame_only_constraint_flag[i] = r.readBool("sub_layer_frame_only_constraint_flag[" + i + "]");
                r.readNBit(44, "reserved");
            }
            if (sub_layer_level_present_flag[i])
                sub_layer_level_idc[i] = r.readU(8, "sub_layer_level_idc");
        }
    }

    /** Parses a video_parameter_set_rbsp() (ISO/IEC 23008-2, clause 7.3.2.1). */
    public int getFrameRate(ByteBuffer vps) throws IOException {
        CAVLCReader r = new CAVLCReader(Channels.newInputStream(new
ByteBufferByteChannel((ByteBuffer) vps.position(0)))); int vps_parameter_set_id = r.readU(4, "vps_parameter_set_id"); int vps_reserved_three_2bits = r.readU(2, "vps_reserved_three_2bits"); int vps_max_layers_minus1 = r.readU(6, "vps_max_layers_minus1"); int vps_max_sub_layers_minus1 = r.readU(3, "vps_max_sub_layers_minus1"); boolean vps_temporal_id_nesting_flag = r.readBool("vps_temporal_id_nesting_flag"); int vps_reserved_0xffff_16bits = r.readU(16, "vps_reserved_0xffff_16bits"); profile_tier_level(vps_max_sub_layers_minus1, r); boolean vps_sub_layer_ordering_info_present_flag = r.readBool("vps_sub_layer_ordering_info_present_flag"); int[] vps_max_dec_pic_buffering_minus1 = new int[vps_sub_layer_ordering_info_present_flag ? 0 : vps_max_sub_layers_minus1]; int[] vps_max_num_reorder_pics = new int[vps_sub_layer_ordering_info_present_flag ? 0 : vps_max_sub_layers_minus1]; int[] vps_max_latency_increase_plus1 = new int[vps_sub_layer_ordering_info_present_flag ? 0 : vps_max_sub_layers_minus1]; for (int i = (vps_sub_layer_ordering_info_present_flag ? 
0 : vps_max_sub_layers_minus1); i <= vps_max_sub_layers_minus1; i++) { vps_max_dec_pic_buffering_minus1[i] = r.readUE("vps_max_dec_pic_buffering_minus1[" + i + "]"); vps_max_num_reorder_pics[i] = r.readUE("vps_max_dec_pic_buffering_minus1[" + i + "]"); vps_max_latency_increase_plus1[i] = r.readUE("vps_max_dec_pic_buffering_minus1[" + i + "]"); } int vps_max_layer_id = r.readU(6, "vps_max_layer_id"); int vps_num_layer_sets_minus1 = r.readUE("vps_num_layer_sets_minus1"); boolean[][] layer_id_included_flag = new boolean[vps_num_layer_sets_minus1][vps_max_layer_id]; for (int i = 1; i <= vps_num_layer_sets_minus1; i++) { for (int j = 0; j <= vps_max_layer_id; j++) { layer_id_included_flag[i][j] = r.readBool("layer_id_included_flag[" + i + "][" + j + "]"); } } boolean vps_timing_info_present_flag = r.readBool("vps_timing_info_present_flag"); if (vps_timing_info_present_flag) { long vps_num_units_in_tick = r.readU(32, "vps_num_units_in_tick"); long vps_time_scale = r.readU(32, "vps_time_scale"); boolean vps_poc_proportional_to_timing_flag = r.readBool("vps_poc_proportional_to_timing_flag"); if (vps_poc_proportional_to_timing_flag) { int vps_num_ticks_poc_diff_one_minus1 = r.readUE("vps_num_ticks_poc_diff_one_minus1"); } int vps_num_hrd_parameters = r.readUE("vps_num_hrd_parameters"); int hrd_layer_set_idx[] = new int[vps_num_hrd_parameters]; boolean cprms_present_flag[] = new boolean[vps_num_hrd_parameters]; for (int i = 0; i < vps_num_hrd_parameters; i++) { hrd_layer_set_idx[i] = r.readUE("hrd_layer_set_idx[" + i + "]"); if (i > 0) { cprms_present_flag[i] = r.readBool("cprms_present_flag[" + i + "]"); } else { cprms_present_flag[0] = true; } hrd_parameters(cprms_present_flag[i], vps_max_sub_layers_minus1, r); } } boolean vps_extension_flag = r.readBool("vps_extension_flag"); if (vps_extension_flag) { while (r.moreRBSPData()) { boolean vps_extension_data_flag = r.readBool("vps_extension_data_flag"); } } r.readTrailingBits(); return 0; } private void hrd_parameters(boolean 
commonInfPresentFlag, int maxNumSubLayersMinus1, CAVLCReader r) throws IOException { boolean nal_hrd_parameters_present_flag = false; boolean vcl_hrd_parameters_present_flag = false; boolean sub_pic_hrd_params_present_flag = false; if (commonInfPresentFlag) { nal_hrd_parameters_present_flag = r.readBool("nal_hrd_parameters_present_flag"); vcl_hrd_parameters_present_flag = r.readBool("vcl_hrd_parameters_present_flag"); if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) { sub_pic_hrd_params_present_flag = r.readBool("sub_pic_hrd_params_present_flag"); if (sub_pic_hrd_params_present_flag) { int tick_divisor_minus2 = r.readU(8, "tick_divisor_minus2"); int du_cpb_removal_delay_increment_length_minus1 = r.readU(5, "du_cpb_removal_delay_increment_length_minus1"); boolean sub_pic_cpb_params_in_pic_timing_sei_flag = r.readBool("sub_pic_cpb_params_in_pic_timing_sei_flag"); int dpb_output_delay_du_length_minus1 = r.readU(5, "dpb_output_delay_du_length_minus1"); } int bit_rate_scale = r.readU(4, "bit_rate_scale"); int cpb_size_scale = r.readU(4, "cpb_size_scale"); if (sub_pic_hrd_params_present_flag) { int cpb_size_du_scale = r.readU(4, "cpb_size_du_scale"); } int initial_cpb_removal_delay_length_minus1 = r.readU(5, "initial_cpb_removal_delay_length_minus1"); int au_cpb_removal_delay_length_minus1 = r.readU(5, "au_cpb_removal_delay_length_minus1"); int dpb_output_delay_length_minus1 = r.readU(5, "dpb_output_delay_length_minus1"); } } boolean fixed_pic_rate_general_flag[] = new boolean[maxNumSubLayersMinus1]; boolean fixed_pic_rate_within_cvs_flag[] = new boolean[maxNumSubLayersMinus1]; boolean low_delay_hrd_flag[] = new boolean[maxNumSubLayersMinus1]; int cpb_cnt_minus1[] = new int[maxNumSubLayersMinus1]; int elemental_duration_in_tc_minus1[] = new int[maxNumSubLayersMinus1]; for (int i = 0; i <= maxNumSubLayersMinus1; i++) { fixed_pic_rate_general_flag[i] = r.readBool("fixed_pic_rate_general_flag[" + i + "]"); if (!fixed_pic_rate_general_flag[i]) { 
fixed_pic_rate_within_cvs_flag[i] = r.readBool("fixed_pic_rate_within_cvs_flag[" + i + "]"); } if (fixed_pic_rate_within_cvs_flag[i]) { elemental_duration_in_tc_minus1[i] = r.readUE("elemental_duration_in_tc_minus1[" + i + "]"); } else { low_delay_hrd_flag[i] = r.readBool("low_delay_hrd_flag[" + i + "]"); } if (!low_delay_hrd_flag[i]) { cpb_cnt_minus1[i] = r.readUE("cpb_cnt_minus1[" + i + "]"); } if (nal_hrd_parameters_present_flag) { sub_layer_hrd_parameters(i, cpb_cnt_minus1[i], sub_pic_hrd_params_present_flag, r); } if (vcl_hrd_parameters_present_flag) { sub_layer_hrd_parameters(i, cpb_cnt_minus1[i], sub_pic_hrd_params_present_flag, r); } } } void sub_layer_hrd_parameters(int subLayerId, int cpbCnt, boolean sub_pic_hrd_params_present_flag, CAVLCReader r) throws IOException { int bit_rate_value_minus1[] = new int[cpbCnt]; int cpb_size_value_minus1[] = new int[cpbCnt]; int cpb_size_du_value_minus1[] = new int[cpbCnt]; int bit_rate_du_value_minus1[] = new int[cpbCnt]; boolean cbr_flag[] = new boolean[cpbCnt]; for (int i = 0; i <= cpbCnt; i++) { bit_rate_value_minus1[i] = r.readUE("bit_rate_value_minus1[" + i + "]"); cpb_size_value_minus1[i] = r.readUE("cpb_size_value_minus1[" + i + "]"); if (sub_pic_hrd_params_present_flag) { cpb_size_du_value_minus1[i] = r.readUE("cpb_size_du_value_minus1[" + i + "]"); bit_rate_du_value_minus1[i] = r.readUE("bit_rate_du_value_minus1[" + i + "]"); } cbr_flag[i] = r.readBool("cbr_flag[" + i + "]"); } } private List<HevcDecoderConfigurationRecord.Array> getArrays() { HevcDecoderConfigurationRecord.Array vpsArray = new HevcDecoderConfigurationRecord.Array(); vpsArray.array_completeness = true; vpsArray.nal_unit_type = VPS_NUT; vpsArray.nalUnits = new ArrayList<byte[]>(); for (ByteBuffer byteBuffer : videoParamterSets.values()) { byte[] ps = new byte[byteBuffer.limit()]; byteBuffer.position(0); byteBuffer.get(ps); vpsArray.nalUnits.add(ps); } HevcDecoderConfigurationRecord.Array spsArray = new HevcDecoderConfigurationRecord.Array(); 
spsArray.array_completeness = true; spsArray.nal_unit_type = SPS_NUT; spsArray.nalUnits = new ArrayList<byte[]>(); for (ByteBuffer byteBuffer : sequenceParamterSets.values()) { byte[] ps = new byte[byteBuffer.limit()]; byteBuffer.position(0); byteBuffer.get(ps); spsArray.nalUnits.add(ps); } HevcDecoderConfigurationRecord.Array ppsArray = new HevcDecoderConfigurationRecord.Array(); ppsArray.array_completeness = true; ppsArray.nal_unit_type = SPS_NUT; ppsArray.nalUnits = new ArrayList<byte[]>(); for (ByteBuffer byteBuffer : pictureParamterSets.values()) { byte[] ps = new byte[byteBuffer.limit()]; byteBuffer.position(0); byteBuffer.get(ps); ppsArray.nalUnits.add(ps); } return Arrays.asList(vpsArray, spsArray, ppsArray); } boolean isFirstOfAU(int nalUnitType, ByteBuffer nalUnit, List<ByteBuffer> accessUnit) { if (accessUnit.isEmpty()) { return true; } boolean vclPresentInCurrentAU = getNalUnitHeader(accessUnit.get(accessUnit.size() - 1)).nalUnitType <= 31; switch (nalUnitType) { case VPS_NUT: case SPS_NUT: case PPS_NUT: case AUD_NUT: case PREFIX_SEI_NUT: case RSV_NVCL41: case RSV_NVCL42: case RSV_NVCL43: case RSV_NVCL44: case UNSPEC48: case UNSPEC49: case UNSPEC50: case UNSPEC51: case UNSPEC52: case UNSPEC53: case UNSPEC54: case UNSPEC55: if (vclPresentInCurrentAU) { return true; } } switch (nalUnitType) { case TRAIL_N: case TRAIL_R: case TSA_N: case TSA_R: case STSA_N: case STSA_R: case RADL_N: case RADL_R: case RASL_N: case RASL_R: case BLA_W_LP: case BLA_W_RADL: case BLA_N_LP: case IDR_W_RADL: case IDR_N_LP: case CRA_NUT: byte b[] = new byte[50]; nalUnit.position(0); nalUnit.get(b); nalUnit.position(2); int firstRsbp8Bit = IsoTypeReader.readUInt8(nalUnit); return vclPresentInCurrentAU && (firstRsbp8Bit & 0x80) > 0; } return false; } public NalUnitHeader getNalUnitHeader(ByteBuffer nal) { nal.position(0); int nal_unit_header = IsoTypeReader.readUInt16(nal); NalUnitHeader nalUnitHeader = new NalUnitHeader(); nalUnitHeader.forbiddenZeroFlag = (nal_unit_header & 0x8000) 
                >> 15;
        nalUnitHeader.nalUnitType = (nal_unit_header & 0x7E00) >> 9;
        nalUnitHeader.nuhLayerId = (nal_unit_header & 0x1F8) >> 3;
        nalUnitHeader.nuhTemporalIdPlusOne = (nal_unit_header & 0x7);
        return nalUnitHeader;
    }

    /**
     * Builds a length-prefixed sample from an access unit: each NAL unit payload is preceded by
     * its size as a 4-byte big-endian integer.
     */
    protected Sample createSample(List<ByteBuffer> nals) {
        byte[] sizeInfo = new byte[nals.size() * 4];
        ByteBuffer sizeBuf = ByteBuffer.wrap(sizeInfo);
        for (ByteBuffer b : nals) {
            sizeBuf.putInt(b.remaining());
        }
        ByteBuffer[] data = new ByteBuffer[nals.size() * 2];
        for (int i = 0; i < nals.size(); i++) {
            // Interleave each 4-byte length prefix with its NAL unit payload.
            data[2 * i] = ByteBuffer.wrap(sizeInfo, i * 4, 4);
            data[2 * i + 1] = nals.get(i);
        }
        return new SampleImpl(data);
    }

    public enum PARSE_STATE {
        AUD_SEI_SLICE,
        SEI_SLICE,
        SLICE_OES_EOB,
    }

    /** Decoded fields of the 2-byte HEVC NAL unit header. */
    public static class NalUnitHeader {
        int forbiddenZeroFlag;
        int nalUnitType;
        int nuhLayerId;
        int nuhTemporalIdPlusOne;
    }

    /** Sliding window over the DataSource used to locate NAL start codes. */
    class LookAhead {
        long bufferStartPos = 0;
        int inBufferPos = 0;
        DataSource dataSource;
        ByteBuffer buffer;
        // Absolute stream position of the current NAL unit's first byte.
        long start;

        LookAhead(DataSource dataSource) throws IOException {
            this.dataSource = dataSource;
            fillBuffer();
        }

        public void fillBuffer() throws IOException {
            buffer = dataSource.map(bufferStartPos, Math.min(dataSource.size() - bufferStartPos, BUFFER));
        }

        boolean nextThreeEquals001() throws IOException {
            if (buffer.limit() - inBufferPos >= 3) {
                return (buffer.get(inBufferPos) == 0 &&
                        buffer.get(inBufferPos + 1) == 0 &&
                        buffer.get(inBufferPos + 2) == 1);
            } else {
                if (bufferStartPos + inBufferPos == dataSource.size()) {
                    throw new EOFException();
                }
                throw new RuntimeException("buffer repositioning require");
            }
        }

        boolean nextThreeEquals000or001orEof() throws IOException {
            if (buffer.limit() - inBufferPos >= 3) {
                return ((buffer.get(inBufferPos) == 0 &&
                        buffer.get(inBufferPos + 1) == 0 &&
                        (buffer.get(inBufferPos + 2) == 0 || buffer.get(inBufferPos + 2) == 1)));
            } else {
                if (bufferStartPos + inBufferPos + 3 > dataSource.size()) {
                    return bufferStartPos + inBufferPos == dataSource.size();
                } else {
                    // Slide the window so it begins at the current NAL start, then retry.
                    bufferStartPos = start;
                    inBufferPos = 0;
                    fillBuffer();
                    return
                            nextThreeEquals000or001orEof();
                }
            }
        }

        void discardByte() {
            inBufferPos++;
        }

        // Skips the 3-byte start code and records the start of the NAL unit that follows it.
        void discardNext3AndMarkStart() {
            inBufferPos += 3;
            start = bufferStartPos + inBufferPos;
        }

        /** Returns a slice covering the NAL unit between the marked start and the current position. */
        public ByteBuffer getNal() {
            if (start >= bufferStartPos) {
                buffer.position((int) (start - bufferStartPos));
                Buffer sample = buffer.slice();
                sample.limit((int) (inBufferPos - (start - bufferStartPos)));
                return (ByteBuffer) sample;
            } else {
                throw new RuntimeException("damn! NAL exceeds buffer"); // this can only happen if NAL is bigger than the buffer
            }
        }
    }
}
package com.nwalsh.saxon;

import java.util.Stack;
import org.xml.sax.*;
import org.w3c.dom.*;
import javax.xml.transform.TransformerException;
import com.icl.saxon.output.*;
import com.icl.saxon.om.*;
import com.icl.saxon.Controller;
import com.icl.saxon.tree.AttributeCollection;

/**
 * <p>Saxon extension to unwrap links in a result tree fragment.</p>
 *
 * <p>$Id: UnwrapLinksEmitter.java,v 1.1 2002/06/26 11:02:05 nwalsh Exp $</p>
 *
 * <p>Copyright (C) 2000, 2002 Norman Walsh.</p>
 *
 * <p>This class provides the guts of a
 * <a href="http://saxon.sf.net/">Saxon 6.*</a>
 * implementation of a link unwrapper.</p>
 *
 * <p>The general design is this: the stylesheets construct a result tree
 * fragment for some environment. Then the result tree fragment
 * is "replayed" through the UnwrapLinksEmitter; the UnwrapLinksEmitter
 * builds a new result tree fragment from this event stream with top-level
 * links unwrapped. That RTF is returned. Note that only a <i>single</i>
 * level of unwrapping is performed. This is clearly a crude implementation.</p>
 *
 * <p>Mechanically, a nested link (an HTML/XHTML <code>a</code> start tag seen
 * while another <code>a</code> is already open) is "unwrapped" by closing all
 * currently open elements, emitting the nested link at the top level, and then
 * reopening the previously closed elements (with their <code>id</code>/<code>name</code>
 * attributes stripped so IDs are not duplicated).</p>
 *
 * <p><b>Change Log:</b></p>
 * <dl>
 * <dt>1.0</dt>
 * <dd><p>Initial release.</p></dd>
 * </dl>
 *
 * @author Norman Walsh
 * <a href="mailto:ndw@nwalsh.com">ndw@nwalsh.com</a>
 *
 * @version $Id: UnwrapLinksEmitter.java,v 1.1 2002/06/26 11:02:05 nwalsh Exp $
 */
public class UnwrapLinksEmitter extends CopyEmitter {
  /** A stack preserving information about currently open elements. */
  protected Stack elementStack = null;
  /** Elements closed for an unwrap, waiting to be reopened (see endElement). */
  protected Stack saveStack = null;

  /** The FO namespace name. */
  protected static String foURI = "http://www.w3.org/1999/XSL/Format";

  /** The XHTML namespace name. */
  protected static String xhURI = "http://www.w3.org/1999/xhtml";

  /** Is the stylesheet currently running an FO stylesheet? */
  protected boolean foStylesheet = false;

  /** Are we currently in a link? How deep? */
  protected int linkDepth = 0;
  /** Depth of elements opened while skipping (inside an unwrapped nested link). */
  protected int skipDepth = 0;
  /** Name pool fingerprint of a no-namespace "a" element. */
  protected int htmlAFingerprint = 0;
  /** Name pool fingerprint of an XHTML "a" element. */
  protected int xhtmlAFingerprint = 0;
  /** True while replaying events inside a nested link being unwrapped. */
  protected boolean inSkip = false;
  /** Set when links are nested deeper than one unwrap can fix; caller should rerun. */
  protected boolean tryAgain = false;

  /** <p>Constructor for the UnwrapLinksEmitter.</p>
   *
   * @param controller The Saxon controller for the current transformation.
   * @param namePool The name pool to use for constructing elements and attributes.
   * @param foStylesheet Is this an FO stylesheet?
   */
  public UnwrapLinksEmitter(Controller controller,
                            NamePool namePool,
                            boolean foStylesheet) {
    super(controller,namePool);
    elementStack = new Stack();
    this.foStylesheet = foStylesheet;

    // Links may arrive as html "a" (no namespace) or xhtml "a"; remember both.
    htmlAFingerprint = namePool.getFingerprint("", "a");
    xhtmlAFingerprint = namePool.getFingerprint(xhURI, "a");
  }

  /** Process start element events.
   *
   * <p>When a link starts inside another link (and we are not already
   * skipping), every open element is closed in the output so the nested
   * link is emitted at the top level; the closed elements are remembered
   * on {@link #saveStack} to be reopened when the nested link ends.</p>
   */
  public void startElement(int nameCode,
                           org.xml.sax.Attributes attributes,
                           int[] namespaces,
                           int nscount)
    throws TransformerException {

    int thisFingerprint = namePool.getFingerprint(nameCode);
    boolean isLink = (thisFingerprint == htmlAFingerprint
                      || thisFingerprint == xhtmlAFingerprint);

    if (isLink) {
      linkDepth++;
      // A link starting while already skipping means more than one level of
      // nesting: one pass cannot unwrap it, so ask the caller to rerun.
      tryAgain = tryAgain || inSkip;
    }

    if (isLink && linkDepth > 1 && !inSkip) {
      inSkip = true;

      // Close all the open elements in the output stream.  tempStack is used
      // to restore elementStack to its original order afterwards, since we
      // still need it to mirror the real open-element state.
      saveStack = new Stack();
      Stack tempStack = new Stack();
      while (!elementStack.empty()) {
        StartElementInfo elem = (StartElementInfo) elementStack.pop();
        rtfEmitter.endElement(elem.getNameCode());
        saveStack.push(elem);
        tempStack.push(elem);
      }

      while (!tempStack.empty()) {
        StartElementInfo elem = (StartElementInfo) tempStack.pop();
        elementStack.push(elem);
      }
    }

    if (inSkip) {
      skipDepth++;
    } else {
      // (intentionally empty — nothing extra to do outside a skip)
    }

    rtfEmitter.startElement(nameCode,attributes,namespaces,nscount);

    StartElementInfo sei = new StartElementInfo(nameCode, attributes,
                                                namespaces, nscount);
    elementStack.push(sei);
  }

  /** Process end element events.
   *
   * <p>When the skipped (nested) link region closes completely, the elements
   * closed in {@link #startElement} are reopened — minus their
   * <code>id</code> (and, for non-FO, <code>name</code>) attributes, so that
   * IDs are not emitted twice in the document.</p>
   */
  public void endElement(int nameCode)
    throws TransformerException {

    int thisFingerprint = namePool.getFingerprint(nameCode);
    boolean isLink = (thisFingerprint == htmlAFingerprint
                      || thisFingerprint == xhtmlAFingerprint);

    rtfEmitter.endElement(nameCode);
    elementStack.pop();

    if (isLink) {
      linkDepth--;
    }

    if (inSkip) {
      skipDepth--;
      inSkip = (skipDepth > 0);
      if (!inSkip) {
        // Reopen all the ones we closed before...
        while (!saveStack.empty()) {
          StartElementInfo elem = (StartElementInfo) saveStack.pop();

          AttributeCollection attr = (AttributeCollection)elem.getAttributes();
          AttributeCollection newAttr = new AttributeCollection(namePool);

          for (int acount = 0; acount < attr.getLength(); acount++) {
            String localName = attr.getLocalName(acount);
            String type = attr.getType(acount);
            String value = attr.getValue(acount);
            String uri = attr.getURI(acount);
            String prefix = "";

            // NOTE(review): getLocalName appears to return a prefixed QName
            // here, which is why the prefix is split off manually — confirm
            // against the Saxon 6 AttributeCollection API.
            if (localName.indexOf(':') > 0) {
              prefix = localName.substring(0, localName.indexOf(':'));
              localName = localName.substring(localName.indexOf(':')+1);
            }

            if (uri.equals("")
                && ((foStylesheet && localName.equals("id"))
                    || (!foStylesheet && (localName.equals("id")
                                          || localName.equals("name"))))) {
              // skip this attribute — reopened copies must not repeat IDs
            } else {
              newAttr.addAttribute(prefix, uri, localName, type, value);
            }
          }

          rtfEmitter.startElement(elem.getNameCode(),
                                  newAttr,
                                  elem.getNamespaces(),
                                  elem.getNSCount());
        }
      }
    }
  }

  /**
   * Returns true if links were nested too deeply for a single pass;
   * the caller should replay the result through a fresh emitter.
   */
  public boolean tryAgain()
    throws TransformerException {
    return tryAgain;
  }

  /**
   * <p>A private class for maintaining the information required to call
   * the startElement method.</p>
   *
   * <p>In order to close and reopen elements, information about those
   * elements has to be maintained. This class is just the little record
   * that we push on the stack to keep track of that info.</p>
   */
  private class StartElementInfo {
    private int _nameCode;
    org.xml.sax.Attributes _attributes;
    int[] _namespaces;
    int _nscount;

    public StartElementInfo(int nameCode,
                            org.xml.sax.Attributes attributes,
                            int[] namespaces,
                            int nscount) {
      _nameCode = nameCode;
      _attributes = attributes;
      _namespaces = namespaces;
      _nscount = nscount;
    }

    public int getNameCode() {
      return _nameCode;
    }

    public org.xml.sax.Attributes getAttributes() {
      return _attributes;
    }

    public int[] getNamespaces() {
      return _namespaces;
    }

    public int getNSCount() {
      return _nscount;
    }
  }
}
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.alerts.actions.pagerduty;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.hawkular.alerts.actions.api.ActionMessage;
import org.hawkular.alerts.actions.api.ActionPluginListener;
import org.hawkular.alerts.actions.api.ActionPluginSender;
import org.hawkular.alerts.actions.api.ActionResponseMessage;
import org.hawkular.alerts.actions.api.MsgLogger;
import org.hawkular.alerts.actions.api.Plugin;
import org.hawkular.alerts.actions.api.Sender;
import org.hawkular.alerts.api.json.JsonUtil;
import org.hawkular.alerts.api.model.action.Action;
import org.hawkular.alerts.api.model.event.Alert;
import org.hawkular.alerts.api.model.event.Event;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.InstanceCreator;
import com.squareup.pagerduty.incidents.NotifyResult;
import com.squareup.pagerduty.incidents.PagerDuty;
import com.squareup.pagerduty.incidents.Trigger;

import retrofit.RestAdapter;
import retrofit.converter.GsonConverter;

/**
 * Listens to pagerduty bus notifications and interacts with PagerDuty REST API.
 *
 * @author Thomas Segismont
 */
@Plugin(name = "pagerduty")
public class PagerDutyPlugin implements ActionPluginListener {
    /** System property holding the PagerDuty service API key. */
    static final String API_KEY_PROPERTY = "org.hawkular.actions.pagerduty.api.key";
    static final String API_KEY = System.getProperty(API_KEY_PROPERTY);

    private final MsgLogger msgLog = MsgLogger.LOGGER;

    Map<String, String> defaultProperties = new HashMap<>();
    /** Configured PagerDuty client; null when {@link #setup()} failed (e.g. no API key). */
    PagerDuty pagerDuty;

    @Sender
    ActionPluginSender sender;

    private static final String MESSAGE_PROCESSED = "PROCESSED";
    private static final String MESSAGE_FAILED = "FAILED";

    public PagerDutyPlugin() {
        defaultProperties.put("description", "Default PagerDuty plugin description");
        setup();
    }

    @Override
    public Set<String> getProperties() {
        return defaultProperties.keySet();
    }

    @Override
    public Map<String, String> getDefaultProperties() {
        return defaultProperties;
    }

    /**
     * Triggers a PagerDuty incident for the given action message and reports
     * the outcome (PROCESSED/FAILED) back through the plugin sender.
     *
     * @param msg the action message to forward to PagerDuty
     */
    @Override
    public void process(ActionMessage msg) throws Exception {
        if (pagerDuty == null) {
            msgLog.errorCannotProcessMessage("pagerduty", "Plugin is not started");
            return;
        }
        NotifyResult result = null;
        try {
            Trigger trigger = new Trigger.Builder(prepareMessage(msg)).build();
            result = pagerDuty.notify(trigger);
        } catch (Exception e) {
            msgLog.errorCannotProcessMessage("pagerduty", e.getMessage());
        }
        // BUG FIX: previously a thrown exception left result == null, which fell
        // through to the success branch and marked the message PROCESSED even
        // though delivery failed.  A null result is now treated as a failure.
        if (result == null || !"success".equals(result.status())) {
            if (result != null) {
                msgLog.errorCannotProcessMessage("pagerduty", result.message());
            }
            Action failedAction = msg.getAction();
            failedAction.setResult(MESSAGE_FAILED);
            sendResult(failedAction);
        } else {
            msgLog.infoActionReceived("pagerduty", msg.toString());
            Action successAction = msg.getAction();
            successAction.setResult(MESSAGE_PROCESSED);
            sendResult(successAction);
        }
    }

    /**
     * Builds the PagerDuty client from {@link #API_KEY}.  Leaves
     * {@link #pagerDuty} null (and logs) when the key is missing or the
     * client cannot be constructed; {@link #process} guards on that.
     */
    void setup() {
        if (isBlank(API_KEY)) {
            String msg = "Configure " + API_KEY;
            msgLog.errorCannotBeStarted("pagerduty", msg);
            return;
        }
        try {
            InstanceCreator<NotifyResult> notifyResultCreator = buildNotifyResultCreator();
            Gson gson = new GsonBuilder().registerTypeAdapter(NotifyResult.class, notifyResultCreator).create();
            RestAdapter restAdapter = new RestAdapter.Builder().setEndpoint("https://events.pagerduty.com")
                    .setConverter(new GsonConverter(gson)).build();
            pagerDuty = PagerDuty.create(API_KEY, restAdapter);
        } catch (Exception e) {
            msgLog.errorCannotBeStarted("pagerduty", e.getLocalizedMessage());
        }
    }

    /**
     * Creates a Gson InstanceCreator for the package-private NotifyResult type
     * via reflection, and verifies up front (with sentinel values "1"/"2"/"3")
     * that the client library's constructor still has the expected
     * (status, message, incidentKey) shape — failing fast if it does not.
     *
     * @throws RuntimeException when the PagerDuty client is binary-incompatible
     */
    InstanceCreator<NotifyResult> buildNotifyResultCreator() {
        Constructor<NotifyResult> constructor;
        try {
            constructor = NotifyResult.class.getDeclaredConstructor(
                    String.class, String.class, String.class
            );
            if (!constructor.isAccessible()) {
                constructor.setAccessible(true);
            }
        } catch (Exception e) {
            throw new RuntimeException("Pager Duty Java client is not compatible", e);
        }
        NotifyResult notifyResult;
        try {
            notifyResult = constructor.newInstance("1", "2", "3");
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new RuntimeException("Pager Duty Java client is not compatible", e);
        }
        if (!(
                "1".equals(notifyResult.status())
                        && "2".equals(notifyResult.message())
                        && "3".equals(notifyResult.incidentKey())
        )) {
            throw new RuntimeException("Pager Duty Java client is not compatible");
        }
        return type -> {
            try {
                return constructor.newInstance("", "", "");
            } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
                throw new RuntimeException(e);
            }
        };
    }

    private boolean isBlank(String value) {
        return value == null || value.trim().isEmpty();
    }

    /**
     * Renders a short human-readable incident description from the message's
     * event (Alert or generic Event); falls back to a timestamped placeholder
     * (and logs a warning) when the message carries no event.
     */
    private String prepareMessage(ActionMessage msg) {
        String preparedMsg = null;
        Event event = msg.getAction() != null ? msg.getAction().getEvent() : null;

        if (event != null) {
            if (event instanceof Alert) {
                Alert alert = (Alert) event;
                preparedMsg = "Alert : " + alert.getTriggerId() + " at " + alert.getCtime() + " -- Severity: " +
                        alert.getSeverity().toString();
            } else {
                preparedMsg = "Event [" + event.getCategory() + "] " + event.getText() + " at " + event.getCtime();
            }
        } else {
            preparedMsg = "Message received without data at " + System.currentTimeMillis();
            msgLog.warnMessageReceivedWithoutPayload("pagerduty");
        }
        return preparedMsg;
    }

    /**
     * Publishes the action's result (PROCESSED/FAILED) back on the bus.
     *
     * @throws IllegalStateException when the sender or action is missing
     */
    private void sendResult(Action action) {
        if (sender == null) {
            throw new IllegalStateException("ActionPluginSender is not present in the plugin");
        }
        if (action == null) {
            throw new IllegalStateException("Action to update result must be not null");
        }
        ActionResponseMessage newMessage = sender.createMessage(ActionResponseMessage.Operation.RESULT);
        newMessage.getPayload().put("action", JsonUtil.toJson(action));
        try {
            sender.send(newMessage);
        } catch (Exception e) {
            msgLog.error("Error sending ActionResponseMessage", e);
        }
    }
}
/*
 * Copyright 2011 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.powermock.core;

import org.powermock.core.spi.MethodInvocationControl;
import org.powermock.core.spi.NewInvocationControl;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Hold mock objects that should be used instead of the concrete implementation.
 * Mock transformers may use this class to gather information on which classes
 * and methods that are mocked.
 *
 * <p>All state is static; access is serialized through the synchronized
 * accessors below.
 */
public class MockRepository {

    private static Set<Object> objectsToAutomaticallyReplayAndVerify = new IdentityHashSet<Object>();

    private static Map<Class<?>, NewInvocationControl<?>> newSubstitutions =
            new HashMap<Class<?>, NewInvocationControl<?>>();

    /**
     * Holds info about general method invocation mocks for classes.
     */
    private static Map<Class<?>, MethodInvocationControl> classMocks =
            new HashMap<Class<?>, MethodInvocationControl>();

    /**
     * Holds info about general method invocation mocks for instances.
     */
    private static Map<Object, MethodInvocationControl> instanceMocks =
            new ListMap<Object, MethodInvocationControl>();

    /**
     * Holds info about which methods that should return a substitute/another
     * instance instead of the default instance.
     */
    private static Map<Method, Object> substituteReturnValues = new HashMap<Method, Object>();

    /**
     * Holds info about which methods that are proxied.
     */
    private static Map<Method, InvocationHandler> methodProxies = new HashMap<Method, InvocationHandler>();

    /**
     * Holds info about which class that should have their static initializers
     * suppressed.
     */
    private static Set<String> suppressStaticInitializers = new HashSet<String>();

    /**
     * Sometimes mock frameworks needs to store additional state. They can do
     * this using this key/value based approach.
     */
    private static Map<String, Object> additionalState = new HashMap<String, Object>();

    /**
     * Set of constructors that should be suppressed.
     */
    private static final Set<Constructor<?>> suppressConstructor = new HashSet<Constructor<?>>();

    /**
     * Set of methods that should be suppressed.
     */
    private static final Set<Method> suppressMethod = new HashSet<Method>();

    /**
     * Set of fields that should be suppressed.
     */
    private static final Set<Field> suppressField = new HashSet<Field>();

    /**
     * Set of field types that should always be suppressed regardless of
     * instance.
     */
    private static final Set<String> suppressFieldTypes = new HashSet<String>();

    /**
     * Set of runnables that will be executed after the test (method) is completed.
     */
    private static final Set<Runnable> afterMethodRunners = new HashSet<Runnable>();

    /**
     * Clear all state of the mock repository except for static initializers.
     * The reason for not clearing static initializers is that when running in a
     * suite with many tests the clear method is invoked after each test. This
     * means that before the test that needs to suppress the static initializer
     * has been reach the state of the MockRepository would have been wiped out.
     * This is generally not a problem because most state will be added again
     * but suppression of static initializers are different because this state
     * can only be set once per class per CL. That's why we cannot remove this
     * state.
     */
    public static synchronized void clear() {
        newSubstitutions.clear();
        classMocks.clear();
        instanceMocks.clear();
        objectsToAutomaticallyReplayAndVerify.clear();
        additionalState.clear();
        suppressConstructor.clear();
        suppressMethod.clear();
        substituteReturnValues.clear();
        suppressField.clear();
        suppressFieldTypes.clear();
        methodProxies.clear();
        // Run (then discard) the per-test cleanup hooks registered via
        // addAfterMethodRunner.
        for (Runnable runnable : afterMethodRunners) {
            runnable.run();
        }
        afterMethodRunners.clear();
    }

    /**
     * Removes an object from the MockRepository if it exists.
     * A {@code Class} is removed from both the new-substitution and the
     * class-mock tables; any other object is removed from the instance mocks.
     */
    public static void remove(Object mock) {
        if (mock instanceof Class<?>) {
            // Map.remove is a no-op for absent keys; no containsKey check needed.
            newSubstitutions.remove(mock);
            classMocks.remove(mock);
        } else {
            instanceMocks.remove(mock);
        }
    }

    public static synchronized MethodInvocationControl getStaticMethodInvocationControl(Class<?> type) {
        return classMocks.get(type);
    }

    public static synchronized MethodInvocationControl putStaticMethodInvocationControl(Class<?> type,
            MethodInvocationControl invocationControl) {
        return classMocks.put(type, invocationControl);
    }

    public static synchronized MethodInvocationControl removeClassMethodInvocationControl(Class<?> type) {
        return classMocks.remove(type);
    }

    public static synchronized MethodInvocationControl getInstanceMethodInvocationControl(Object instance) {
        return instanceMocks.get(instance);
    }

    public static synchronized MethodInvocationControl putInstanceMethodInvocationControl(Object instance,
            MethodInvocationControl invocationControl) {
        return instanceMocks.put(instance, invocationControl);
    }

    // NOTE(review): despite the name this removes from classMocks (same as
    // removeClassMethodInvocationControl), not from instanceMocks.  Kept as-is
    // for compatibility — verify against callers before renaming or redirecting.
    public static synchronized MethodInvocationControl removeInstanceMethodInvocationControl(Class<?> type) {
        return classMocks.remove(type);
    }

    public static synchronized NewInvocationControl<?> getNewInstanceControl(Class<?> type) {
        return newSubstitutions.get(type);
    }

    public static synchronized NewInvocationControl<?> putNewInstanceControl(Class<?> type,
            NewInvocationControl<?> control) {
        return newSubstitutions.put(type, control);
    }

    /**
     * Add a fully qualified class name for a class that should have its static
     * initializers suppressed.
     *
     * @param className
     *            The fully qualified class name for a class that should have
     *            its static initializers suppressed.
     */
    public static synchronized void addSuppressStaticInitializer(String className) {
        suppressStaticInitializers.add(className);
    }

    /**
     * Remove a fully qualified class name for a class that should no longer
     * have its static initializers suppressed.
     *
     * @param className
     *            The fully qualified class name for a class that should no
     *            longer have its static initializers suppressed.
     */
    public static synchronized void removeSuppressStaticInitializer(String className) {
        suppressStaticInitializers.remove(className);
    }

    /**
     * Check whether or not a class with the fully qualified name should have
     * its static initializers suppressed.
     *
     * @param className
     *            The fully qualified class name to check.
     * @return {@code true} if class with the fully qualified name
     *         {@code className} should have its static initializers
     *         suppressed, {@code false} otherwise.
     */
    public static synchronized boolean shouldSuppressStaticInitializerFor(String className) {
        return suppressStaticInitializers.contains(className);
    }

    /**
     * @return All classes that should be automatically replayed or verified.
     */
    public static synchronized Set<Object> getObjectsToAutomaticallyReplayAndVerify() {
        return Collections.unmodifiableSet(objectsToAutomaticallyReplayAndVerify);
    }

    /**
     * Add classes that should be automatically replayed or verified.
     */
    public static synchronized void addObjectsToAutomaticallyReplayAndVerify(Object... objects) {
        Collections.addAll(objectsToAutomaticallyReplayAndVerify, objects);
    }

    /**
     * When a mock framework API needs to store additional state not applicable
     * for the other methods, it may use this method to do so.
     *
     * @param key
     *            The key under which the <tt>value</tt> is stored.
     * @param value
     *            The value to store under the specified <tt>key</tt>.
     * @return The previous object under the specified <tt>key</tt> or
     *         {@code null}.
     */
    public static synchronized Object putAdditionalState(String key, Object value) {
        return additionalState.put(key, value);
    }

    public static synchronized Object removeAdditionalState(String key) {
        return additionalState.remove(key);
    }

    public static synchronized InvocationHandler removeMethodProxy(Method method) {
        return methodProxies.remove(method);
    }

    /**
     * Retrieve state based on the supplied key.
     */
    @SuppressWarnings("unchecked")
    public static synchronized <T> T getAdditionalState(String key) {
        return (T) additionalState.get(key);
    }

    /**
     * Add a method to suppress.
     *
     * @param method
     *            The method to suppress.
     */
    public static synchronized void addMethodToSuppress(Method method) {
        suppressMethod.add(method);
    }

    /**
     * Add a field to suppress.
     *
     * @param field
     *            The field to suppress.
     */
    public static synchronized void addFieldToSuppress(Field field) {
        suppressField.add(field);
    }

    /**
     * Add a field type to suppress. All fields of this type will be suppressed.
     *
     * @param fieldType
     *            The fully-qualified name to a type. All fields of this type
     *            will be suppressed.
     */
    public static synchronized void addFieldTypeToSuppress(String fieldType) {
        suppressFieldTypes.add(fieldType);
    }

    /**
     * Add a constructor to suppress.
     *
     * @param constructor
     *            The constructor to suppress.
     */
    public static synchronized void addConstructorToSuppress(Constructor<?> constructor) {
        suppressConstructor.add(constructor);
    }

    /**
     * @return {@code true} if the <tt>method</tt> should be proxied.
     */
    public static synchronized boolean hasMethodProxy(Method method) {
        return methodProxies.containsKey(method);
    }

    /**
     * @return {@code true} if the <tt>method</tt> should be suppressed.
     */
    public static synchronized boolean shouldSuppressMethod(Method method, Class<?> objectType)
            throws ClassNotFoundException {
        for (Method suppressedMethod : suppressMethod) {
            Class<?> suppressedMethodClass = suppressedMethod.getDeclaringClass();
            // BUG FIX: the original compared suppressedMethodClass.getClass()
            // with objectType.getClass() — both are java.lang.Class, so that
            // isAssignableFrom check was always true and the hierarchy test was
            // vacuous.  Compare the classes themselves: the declaring class
            // must be a supertype of (or equal to) objectType.
            if (suppressedMethodClass.isAssignableFrom(objectType)
                    && suppressedMethod.getName().equals(method.getName())
                    && ClassLocator.getCallerClass().getName()
                            .equals(suppressedMethodClass.getName())) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return {@code true} if the <tt>field</tt> should be suppressed.
     */
    public static synchronized boolean shouldSuppressField(Field field) {
        return suppressField.contains(field) || suppressFieldTypes.contains(field.getType().getName());
    }

    /**
     * @return {@code true} if the <tt>constructor</tt> should be
     *         suppressed.
     */
    public static synchronized boolean shouldSuppressConstructor(Constructor<?> constructor) {
        return suppressConstructor.contains(constructor);
    }

    /**
     * @return {@code true} if the <tt>method</tt> has a substitute return
     *         value.
     */
    public static synchronized boolean shouldStubMethod(Method method) {
        return substituteReturnValues.containsKey(method);
    }

    /**
     * @return The substitute return value for a particular method, may be
     *         {@code null}.
     */
    public static synchronized Object getMethodToStub(Method method) {
        return substituteReturnValues.get(method);
    }

    /**
     * Set a substitute return value for a method. Whenever this method will be
     * called the {@code value} will be returned instead.
     *
     * @return The previous substitute value if any.
     */
    public static synchronized Object putMethodToStub(Method method, Object value) {
        return substituteReturnValues.put(method, value);
    }

    /**
     * @return The proxy for a particular method, may be {@code null}.
     */
    public static synchronized InvocationHandler getMethodProxy(Method method) {
        return methodProxies.get(method);
    }

    /**
     * Set a proxy for a method. Whenever this method is called the invocation
     * handler will be invoked instead.
     *
     * @return The previous method proxy if any.
     */
    public static synchronized InvocationHandler putMethodProxy(Method method, InvocationHandler invocationHandler) {
        return methodProxies.put(method, invocationHandler);
    }

    /**
     * Add a {@link Runnable} that will be executed after each test
     * @param runnable - an instance of {@link Runnable} that will be executed.
     */
    public static synchronized void addAfterMethodRunner(Runnable runnable) {
        afterMethodRunners.add(runnable);
    }
}
/*
 Copyright (c) 2014 Wolfgang Imig

 This file is part of the library "Java Add-in for Microsoft Office".

 This file must be used according to the terms of MIT License,
 http://opensource.org/licenses/MIT
 */
package com.wilutions.joa;

import com.wilutions.com.AsyncResult;
import com.wilutions.com.BackgTask;
import com.wilutions.com.ComException;
import com.wilutions.com.Dispatch;
import com.wilutions.com.DispatchImpl;
import com.wilutions.com.WindowHandle;
import com.wilutions.com.reg.DeclRegistryValue;
import com.wilutions.mslib.office.CustomTaskPane;
import com.wilutions.mslib.office.MsoCTPDockPosition;
import com.wilutions.mslib.office._CustomTaskPane;
import com.wilutions.mslib.office._CustomTaskPaneEvents;

/**
 * Base class for task panes.
 * This class wraps a native task pane COM object that is created by
 * the Microsoft Office application in a call to {@link OfficeAddin#createTaskPaneWindowAsync}.
 * When a TaskPane object is shown in {@link #showAsync(CustomTaskPane, AsyncResult)}, it is attached to the event interface
 * {@link _CustomTaskPaneEvents} of the COM object.
 * http://msdn.microsoft.com/en-us/library/microsoft.office.core._customtaskpaneevents_members.aspx
 * @see <a href="http://msdn.microsoft.com/en-us/library/aa942864.aspx">Custom Task Panes</a>
 */
public abstract class TaskPane extends DispatchImpl implements WindowHandle, _CustomTaskPaneEvents {

	/**
	 * Task pane object of the Office application.
	 * Null until {@link #showAsync} is called and again after {@link #close()}.
	 */
	protected CustomTaskPane customTaskPane;

	// Last known dock position, size and visibility; persisted in the registry
	// via @DeclRegistryValue so the pane reopens where the user left it.
	@DeclRegistryValue
	private TaskPanePosition position = new TaskPanePosition();

	/**
	 * Constructor.
	 */
	public TaskPane() {
	}

	/**
	 * Delete the task pane's view objects.
	 * Call this function to close the task pane.
	 * Saves the current dock position/size/visibility into {@link #position}
	 * first, then detaches events and releases the COM object.
	 */
	public void close() {
		if (customTaskPane != null) {

			// Remind dock position and visibility in persistent members.
			try {
				position.setDockPosition(customTaskPane.getDockPosition());
				position.setVisible(customTaskPane.getVisible());
				position.setWidth(customTaskPane.getWidth());
				position.setHeight(customTaskPane.getHeight());
			} catch (ComException e) {
				// Best-effort: losing the saved position is acceptable on COM failure.
				e.printStackTrace();
			}

			// DO NOT DELETE CTP,
			// This causes a crash in Outlook in customTaskPane.releaseComObject
			//customTaskPane.Delete();

			customTaskPane.releaseEvents(this);

			// Release the underlying COM object.
			customTaskPane.releaseComObject();
		}
		customTaskPane = null;
	}

	/**
	 * Create and show the task pane's view.
	 * @param hwndParent Native parent window handle.
	 * @param asyncResult Expression to be called after the new task pane was made visible.
	 * This call is made from the Tookit's UI thread. This parameter can be null.
	 */
	protected abstract void createAndShowEmbeddedWindowAsync(final long hwndParent, AsyncResult<Boolean> asyncResult);

	/**
	 * Build a Java window inside the given task pane created by Office.
	 * Restores the persisted dock position and size, shows the pane if it was
	 * visible last time, then embeds the Java view into the pane's ActiveX
	 * content control.
	 * @param taskPane COM object created by Office in a call to {@link OfficeAddin#createTaskPaneWindowAsync(TaskPane, String, Object, com.wilutions.com.AsyncResult)}.
	 * @param asyncResult Callback object to be called after the new task pane is made visible. This parameter can be null.
	 * @throws ComException Thrown, if a COM related error occurs.
	 */
	public void showAsync(final CustomTaskPane taskPane, AsyncResult<Boolean> asyncResult) throws ComException {
		this.customTaskPane = taskPane;

		// Attach this object as event handler for _CustomTaskPaneEvents.
		this.customTaskPane.withEvents(this);

		// Show the task pane at the last dock position.
		customTaskPane.setDockPosition(position.getDockPosition());

		// Width/Height: only the dimension(s) the dock position allows are
		// restored; 0 means "never saved", so Office's default is kept.
		switch (position.getDockPosition().value) {
		case MsoCTPDockPosition._msoCTPDockPositionLeft:
		case MsoCTPDockPosition._msoCTPDockPositionRight:
			if (position.getWidth() != 0) customTaskPane.setWidth(position.getWidth());
			break;
		case MsoCTPDockPosition._msoCTPDockPositionTop:
		case MsoCTPDockPosition._msoCTPDockPositionBottom:
			if (position.getHeight() != 0) customTaskPane.setHeight(position.getHeight());
			break;
		default:
			// Floating: both dimensions apply.
			if (position.getWidth() != 0) customTaskPane.setWidth(position.getWidth());
			if (position.getHeight() != 0) customTaskPane.setHeight(position.getHeight());
		}

		// Show the task pane
		if (position.isVisible()) {
			setVisible(true);
		}

		// OfficeAddin.createTaskPaneWindowAsync has created the task pane
		// and delegates the UI handling to the JoaBridgeCtrl ActiveX.
		// We use the JoaBridgeCtrl as the parent window for the Java window.

		// Get the native window handle of the JoaBridgeCtrl
		Dispatch ctrl = taskPane.getContentControl().as(Dispatch.class);
		final long hwndJoaCtrl = ((Number) ctrl._get("HWND")).longValue();

		// Create view
		createAndShowEmbeddedWindowAsync(hwndJoaCtrl, asyncResult);
	}

	// Default no-op event handler; subclasses may override.
	@Override
	public void onDockPositionStateChange(final _CustomTaskPane ctp) throws ComException {
	}

	// Default no-op event handler; subclasses may override.
	@Override
	public void onVisibleStateChange(final _CustomTaskPane ctp) throws ComException {
	}

	/**
	 * Returns true, if the task pane is visible.
	 * @return true, if visible.
	 * @throws ComException Thrown, if a COM related error occurs.
	 */
	public boolean isVisible() throws ComException {
		return customTaskPane != null && customTaskPane.getVisible();
	}

	/**
	 * Show or hide the task pane.
	 * @param v true to show, false to hide.
	 * @throws ComException Thrown, if a COM related error occurs.
	 */
	public void setVisible(final boolean v) throws ComException {
		setVisible(v, null);
	}

	/**
	 * Show or hide the task pane.
	 * @param v true to show, false to hide.
	 * @param asyncResult Called with {@code true} on success, or the thrown
	 *        exception on failure. Can be null.
	 * @throws ComException Thrown, if a COM related error occurs.
	 */
	public void setVisible(final boolean v, AsyncResult<Boolean> asyncResult) throws ComException {
		// Call taskPane.setVisible in background thread.
		// Otherwise Outlook/Java might hang in setVisible().
		// NOTE(review): isVisible() is checked on the calling thread while the
		// update runs in the background — concurrent calls could race; confirm
		// callers serialize visibility changes.
		if (customTaskPane != null && v != isVisible()) {
			BackgTask.run(() -> {
				Throwable ex = null;
				try {
					position.setVisible(v);
					customTaskPane.setVisible(Boolean.valueOf(v));
				} catch (ComException e) {
					e.printStackTrace();
					ex = e;
				} finally {
					if (asyncResult != null) {
						asyncResult.setAsyncResult(ex == null, ex);
					}
				}
			});
		}
	}

	/**
	 * Return true, if Office has created a native task pane object.
	 * @return true, if native task pane object was created.
	 */
	public boolean hasWindow() {
		return customTaskPane != null;
	}

	// Default width used when the pane is (re)created; stored in the
	// persisted position record.
	public void setDefaultWidth(int v) {
		position.setWidth(v);
	}

	public int getDefaultWidth() {
		return position.getWidth();
	}

	// Default height used when the pane is (re)created; stored in the
	// persisted position record.
	public void setDefaultHeight(int v) {
		position.setHeight(v);
	}

	public int getDefaultHeight() {
		return position.getHeight();
	}

	public TaskPanePosition getPosition() {
		return position;
	}

	public void setPosition(TaskPanePosition position) {
		this.position = position;
	}
}
package br.com.janes.ecommerce.config;

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.servlet.InstrumentedFilter;
import com.codahale.metrics.servlets.MetricsServlet;
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.JHipsterProperties;
import io.github.jhipster.web.filter.CachingHttpHeadersFilter;
import io.undertow.Undertow;
import io.undertow.Undertow.Builder;
import io.undertow.UndertowOptions;
import org.apache.commons.io.FilenameUtils;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.context.embedded.undertow.UndertowEmbeddedServletContainerFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.xnio.OptionMap;

import javax.servlet.*;
import java.util.*;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.options;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * Unit tests for the WebConfigurer class.
 *
 * @see WebConfigurer
 */
public class WebConfigurerTest {

    // Object under test.
    private WebConfigurer webConfigurer;

    // Spied servlet context so filter/servlet registrations can be verified.
    private MockServletContext servletContext;

    // Mock environment used to switch between dev/prod Spring profiles.
    private MockEnvironment env;

    private JHipsterProperties props;

    private MetricRegistry metricRegistry;

    @Before
    public void setup() {
        // Spy the servlet context and stub addFilter/addServlet so the
        // registrations performed by WebConfigurer can be verified below.
        servletContext = spy(new MockServletContext());
        doReturn(new MockFilterRegistration())
            .when(servletContext).addFilter(anyString(), any(Filter.class));
        doReturn(new MockServletRegistration())
            .when(servletContext).addServlet(anyString(), any(Servlet.class));

        env = new MockEnvironment();
        props = new JHipsterProperties();
        webConfigurer = new WebConfigurer(env, props);
        metricRegistry = new MetricRegistry();
        webConfigurer.setMetricRegistry(metricRegistry);
    }

    @Test
    public void testStartUpProdServletContext() throws ServletException {
        // In production, metrics AND the caching-headers filter are registered.
        env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION);
        webConfigurer.onStartup(servletContext);

        assertThat(servletContext.getAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE)).isEqualTo(metricRegistry);
        assertThat(servletContext.getAttribute(MetricsServlet.METRICS_REGISTRY)).isEqualTo(metricRegistry);
        verify(servletContext).addFilter(eq("webappMetricsFilter"), any(InstrumentedFilter.class));
        verify(servletContext).addServlet(eq("metricsServlet"), any(MetricsServlet.class));
        verify(servletContext).addFilter(eq("cachingHttpHeadersFilter"), any(CachingHttpHeadersFilter.class));
    }

    @Test
    public void testStartUpDevServletContext() throws ServletException {
        // In development, metrics are registered but the caching-headers
        // filter must NOT be (assets should not be cached while developing).
        env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_DEVELOPMENT);
        webConfigurer.onStartup(servletContext);

        assertThat(servletContext.getAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE)).isEqualTo(metricRegistry);
        assertThat(servletContext.getAttribute(MetricsServlet.METRICS_REGISTRY)).isEqualTo(metricRegistry);
        verify(servletContext).addFilter(eq("webappMetricsFilter"), any(InstrumentedFilter.class));
        verify(servletContext).addServlet(eq("metricsServlet"), any(MetricsServlet.class));
        verify(servletContext, never()).addFilter(eq("cachingHttpHeadersFilter"), any(CachingHttpHeadersFilter.class));
    }

    @Test
    public void testCustomizeServletContainer() {
        // Verifies MIME mappings, document root and that HTTP/2 stays off by
        // default (props.http.version not set).
        env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION);
        UndertowEmbeddedServletContainerFactory container = new UndertowEmbeddedServletContainerFactory();
        webConfigurer.customize(container);
        assertThat(container.getMimeMappings().get("abs")).isEqualTo("audio/x-mpeg");
        assertThat(container.getMimeMappings().get("html")).isEqualTo("text/html;charset=utf-8");
        assertThat(container.getMimeMappings().get("json")).isEqualTo("text/html;charset=utf-8");
        if (container.getDocumentRoot() != null) {
            assertThat(container.getDocumentRoot().getPath()).isEqualTo(FilenameUtils.separatorsToSystem("target/www"));
        }

        // Replay the builder customizers and inspect Undertow's private
        // serverOptions map via reflection.
        Builder builder = Undertow.builder();
        container.getBuilderCustomizers().forEach(c -> c.customize(builder));
        OptionMap.Builder serverOptions = (OptionMap.Builder) ReflectionTestUtils.getField(builder, "serverOptions");
        assertThat(serverOptions.getMap().get(UndertowOptions.ENABLE_HTTP2)).isNull();
    }

    @Test
    public void testUndertowHttp2Enabled() {
        // Opting into HTTP/2 must flip Undertow's ENABLE_HTTP2 option.
        props.getHttp().setVersion(JHipsterProperties.Http.Version.V_2_0);
        UndertowEmbeddedServletContainerFactory container = new UndertowEmbeddedServletContainerFactory();
        webConfigurer.customize(container);
        Builder builder = Undertow.builder();
        container.getBuilderCustomizers().forEach(c -> c.customize(builder));
        OptionMap.Builder serverOptions = (OptionMap.Builder) ReflectionTestUtils.getField(builder, "serverOptions");
        assertThat(serverOptions.getMap().get(UndertowOptions.ENABLE_HTTP2)).isTrue();
    }

    @Test
    public void testCorsFilterOnApiPath() throws Exception {
        // With CORS fully configured, /api/** must answer pre-flight OPTIONS
        // and plain GET requests with the CORS response headers.
        props.getCors().setAllowedOrigins(Collections.singletonList("*"));
        props.getCors().setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE"));
        props.getCors().setAllowedHeaders(Collections.singletonList("*"));
        props.getCors().setMaxAge(1800L);
        props.getCors().setAllowCredentials(true);

        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();

        mockMvc.perform(
            options("/api/test-cors")
                .header(HttpHeaders.ORIGIN, "other.domain.com")
                .header(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "POST"))
            .andExpect(status().isOk())
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "other.domain.com"))
            .andExpect(header().string(HttpHeaders.VARY, "Origin"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET,POST,PUT,DELETE"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "1800"));

        mockMvc.perform(
            get("/api/test-cors")
                .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "other.domain.com"));
    }

    @Test
    public void testCorsFilterOnOtherPath() throws Exception {
        // CORS is mapped to /api/** only; other paths get no CORS headers.
        props.getCors().setAllowedOrigins(Collections.singletonList("*"));
        props.getCors().setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE"));
        props.getCors().setAllowedHeaders(Collections.singletonList("*"));
        props.getCors().setMaxAge(1800L);
        props.getCors().setAllowCredentials(true);

        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();

        mockMvc.perform(
            get("/test/test-cors")
                .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
    }

    @Test
    public void testCorsFilterDeactivated() throws Exception {
        // allowedOrigins == null means the CORS filter is disabled entirely.
        props.getCors().setAllowedOrigins(null);

        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();

        mockMvc.perform(
            get("/api/test-cors")
                .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
    }

    @Test
    public void testCorsFilterDeactivated2() throws Exception {
        // An empty allowedOrigins list disables the CORS filter as well.
        props.getCors().setAllowedOrigins(new ArrayList<>());

        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();

        mockMvc.perform(
            get("/api/test-cors")
                .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
    }

    /**
     * No-op FilterRegistration returned by the stubbed servlet context.
     * Only exists so WebConfigurer's registration code can run; every
     * method is a stub.
     */
    static class MockFilterRegistration implements FilterRegistration, FilterRegistration.Dynamic {

        @Override
        public void addMappingForServletNames(EnumSet<DispatcherType> dispatcherTypes, boolean isMatchAfter, String... servletNames) {
        }

        @Override
        public Collection<String> getServletNameMappings() {
            return null;
        }

        @Override
        public void addMappingForUrlPatterns(EnumSet<DispatcherType> dispatcherTypes, boolean isMatchAfter, String... urlPatterns) {
        }

        @Override
        public Collection<String> getUrlPatternMappings() {
            return null;
        }

        @Override
        public void setAsyncSupported(boolean isAsyncSupported) {
        }

        @Override
        public String getName() {
            return null;
        }

        @Override
        public String getClassName() {
            return null;
        }

        @Override
        public boolean setInitParameter(String name, String value) {
            return false;
        }

        @Override
        public String getInitParameter(String name) {
            return null;
        }

        @Override
        public Set<String> setInitParameters(Map<String, String> initParameters) {
            return null;
        }

        @Override
        public Map<String, String> getInitParameters() {
            return null;
        }
    }

    /**
     * No-op ServletRegistration returned by the stubbed servlet context.
     * Same purpose as MockFilterRegistration above.
     */
    static class MockServletRegistration implements ServletRegistration, ServletRegistration.Dynamic {

        @Override
        public void setLoadOnStartup(int loadOnStartup) {
        }

        @Override
        public Set<String> setServletSecurity(ServletSecurityElement constraint) {
            return null;
        }

        @Override
        public void setMultipartConfig(MultipartConfigElement multipartConfig) {
        }

        @Override
        public void setRunAsRole(String roleName) {
        }

        @Override
        public void setAsyncSupported(boolean isAsyncSupported) {
        }

        @Override
        public Set<String> addMapping(String... urlPatterns) {
            return null;
        }

        @Override
        public Collection<String> getMappings() {
            return null;
        }

        @Override
        public String getRunAsRole() {
            return null;
        }

        @Override
        public String getName() {
            return null;
        }

        @Override
        public String getClassName() {
            return null;
        }

        @Override
        public boolean setInitParameter(String name, String value) {
            return false;
        }

        @Override
        public String getInitParameter(String name) {
            return null;
        }

        @Override
        public Set<String> setInitParameters(Map<String, String> initParameters) {
            return null;
        }

        @Override
        public Map<String, String> getInitParameters() {
            return null;
        }
    }
}
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.http.jetty; import static java.util.Objects.requireNonNull; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Consumer; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.RequestLog; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.SessionIdManager; import org.eclipse.jetty.server.handler.HandlerWrapper; import org.eclipse.jetty.util.component.Container; import org.eclipse.jetty.util.component.LifeCycle; import com.linecorp.armeria.server.http.jetty.JettyServiceConfig.Bean; /** * Builds a {@link JettyService}. Use {@link JettyService#forServer(String, Server)} if you have a configured * Jetty {@link Server} instance. */ public final class JettyServiceBuilder { private final Map<String, Object> attrs = new LinkedHashMap<>(); private final List<Bean> beans = new ArrayList<>(); private final List<HandlerWrapper> handlerWrappers = new ArrayList<>(); private final List<Container.Listener> eventListeners = new ArrayList<>(); private final List<LifeCycle.Listener> lifeCycleListeners = new ArrayList<>(); private final List<Consumer<? 
super Server>> configurators = new ArrayList<>(); private String hostname; private Boolean dumpAfterStart; private Boolean dumpBeforeStop; private Handler handler; private RequestLog requestLog; private SessionIdManager sessionIdManager; private Long stopTimeoutMillis; /** * Sets the default hostname of the Jetty {@link Server}. */ public JettyServiceBuilder hostname(String hostname) { this.hostname = requireNonNull(hostname, "hostname"); return this; } /** * Puts the specified attribute into the Jetty {@link Server}. * * @see Server#setAttribute(String, Object) */ public JettyServiceBuilder attr(String name, Object attribute) { attrs.put(requireNonNull(name, "name"), requireNonNull(attribute, "attribute")); return this; } /** * Adds the specified bean to the Jetty {@link Server}. * * @see Server#addBean(Object) */ public JettyServiceBuilder bean(Object bean) { beans.add(new Bean(bean, null)); return this; } /** * Adds the specified bean to the Jetty {@link Server}. * * @see Server#addBean(Object, boolean) */ public JettyServiceBuilder bean(Object bean, boolean managed) { beans.add(new Bean(bean, managed)); return this; } /** * Sets whether the Jetty {@link Server} needs to dump its configuration after it started up. * * @see Server#setDumpAfterStart(boolean) */ public JettyServiceBuilder dumpAfterStart(boolean dumpAfterStart) { this.dumpAfterStart = dumpAfterStart; return this; } /** * Sets whether the Jetty {@link Server} needs to dump its configuration before it shuts down. * * @see Server#setDumpBeforeStop(boolean) */ public JettyServiceBuilder dumpBeforeStop(boolean dumpBeforeStop) { this.dumpBeforeStop = dumpBeforeStop; return this; } /** * Sets the {@link Handler} of the Jetty {@link Server}. * * @see Server#setHandler(Handler) */ public JettyServiceBuilder handler(Handler handler) { this.handler = requireNonNull(handler, "handler"); return this; } /** * Adds the specified {@link HandlerWrapper} to the Jetty {@link Server}. 
* * @see Server#insertHandler(HandlerWrapper) */ public JettyServiceBuilder handlerWrapper(HandlerWrapper handlerWrapper) { handlerWrappers.add(requireNonNull(handlerWrapper, "handlerWrapper")); return this; } /** * Sets the {@link RequestLog} of the Jetty {@link Server}. * * @see Server#setRequestLog(RequestLog) */ public JettyServiceBuilder requestLog(RequestLog requestLog) { this.requestLog = requireNonNull(requestLog, "requestLog"); return this; } /** * Sets the {@link SessionIdManager} of the Jetty {@link Server}. * * @see Server#setSessionIdManager(SessionIdManager) */ public JettyServiceBuilder sessionIdManager(SessionIdManager sessionIdManager) { this.sessionIdManager = requireNonNull(sessionIdManager, "sessionIdManager"); return this; } /** * Sets the graceful stop time of the {@link Server#stop()} in milliseconds. * * @see Server#setStopTimeout(long) */ public JettyServiceBuilder stopTimeoutMillis(long stopTimeoutMillis) { this.stopTimeoutMillis = stopTimeoutMillis; return this; } /** * Adds the specified event listener to the Jetty {@link Server}. */ public JettyServiceBuilder eventListener(Container.Listener eventListener) { eventListeners.add(requireNonNull(eventListener, "eventListener")); return this; } /** * Adds the specified life cycle listener to the Jetty {@link Server}. */ public JettyServiceBuilder lifeCycleListener(LifeCycle.Listener lifeCycleListener) { lifeCycleListeners.add(requireNonNull(lifeCycleListener, "lifeCycleListener")); return this; } /** * Adds a {@link Consumer} that performs additional configuration operations against * the Jetty {@link Server} created by a {@link JettyService}. */ public JettyServiceBuilder configurator(Consumer<? super Server> configurator) { configurators.add(requireNonNull(configurator, "configurator")); return this; } /** * Creates a new {@link JettyService}. 
*/ public JettyService build() { return JettyService.forConfig(new JettyServiceConfig( hostname, dumpAfterStart, dumpBeforeStop, stopTimeoutMillis, handler, requestLog, sessionIdManager, attrs, beans, handlerWrappers, eventListeners, lifeCycleListeners, configurators)); } @Override public String toString() { return JettyServiceConfig.toString( this, hostname, dumpAfterStart, dumpBeforeStop, stopTimeoutMillis, handler, requestLog, sessionIdManager, attrs, beans, handlerWrappers, eventListeners, lifeCycleListeners, configurators); } }
package org.openapitools.model; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.util.ArrayList; import java.util.List; import org.openapitools.model.PipelineActivityartifacts; import javax.validation.constraints.*; import javax.validation.Valid; import io.swagger.annotations.*; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; import com.fasterxml.jackson.annotation.JsonTypeName; @JsonTypeName("PipelineActivity") @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaJAXRSSpecServerCodegen", date = "2022-02-13T02:22:19.792787Z[Etc/UTC]")public class PipelineActivity { private @Valid String propertyClass; private @Valid List<PipelineActivityartifacts> artifacts = new ArrayList<PipelineActivityartifacts>(); private @Valid Integer durationInMillis; private @Valid Integer estimatedDurationInMillis; private @Valid String enQueueTime; private @Valid String endTime; private @Valid String id; private @Valid String organization; private @Valid String pipeline; private @Valid String result; private @Valid String runSummary; private @Valid String startTime; private @Valid String state; private @Valid String type; private @Valid String commitId; /** **/ public PipelineActivity propertyClass(String propertyClass) { this.propertyClass = propertyClass; return this; } @ApiModelProperty(value = "") @JsonProperty("_class") public String getPropertyClass() { return propertyClass; } @JsonProperty("_class") public void setPropertyClass(String propertyClass) { this.propertyClass = propertyClass; } /** **/ public PipelineActivity artifacts(List<PipelineActivityartifacts> artifacts) { this.artifacts = artifacts; return this; } @ApiModelProperty(value = "") @JsonProperty("artifacts") public List<PipelineActivityartifacts> getArtifacts() { return artifacts; } @JsonProperty("artifacts") 
public void setArtifacts(List<PipelineActivityartifacts> artifacts) { this.artifacts = artifacts; } public PipelineActivity addArtifactsItem(PipelineActivityartifacts artifactsItem) { if (this.artifacts == null) { this.artifacts = new ArrayList<PipelineActivityartifacts>(); } this.artifacts.add(artifactsItem); return this; } public PipelineActivity removeArtifactsItem(PipelineActivityartifacts artifactsItem) { if (artifactsItem != null && this.artifacts != null) { this.artifacts.remove(artifactsItem); } return this; } /** **/ public PipelineActivity durationInMillis(Integer durationInMillis) { this.durationInMillis = durationInMillis; return this; } @ApiModelProperty(value = "") @JsonProperty("durationInMillis") public Integer getDurationInMillis() { return durationInMillis; } @JsonProperty("durationInMillis") public void setDurationInMillis(Integer durationInMillis) { this.durationInMillis = durationInMillis; } /** **/ public PipelineActivity estimatedDurationInMillis(Integer estimatedDurationInMillis) { this.estimatedDurationInMillis = estimatedDurationInMillis; return this; } @ApiModelProperty(value = "") @JsonProperty("estimatedDurationInMillis") public Integer getEstimatedDurationInMillis() { return estimatedDurationInMillis; } @JsonProperty("estimatedDurationInMillis") public void setEstimatedDurationInMillis(Integer estimatedDurationInMillis) { this.estimatedDurationInMillis = estimatedDurationInMillis; } /** **/ public PipelineActivity enQueueTime(String enQueueTime) { this.enQueueTime = enQueueTime; return this; } @ApiModelProperty(value = "") @JsonProperty("enQueueTime") public String getEnQueueTime() { return enQueueTime; } @JsonProperty("enQueueTime") public void setEnQueueTime(String enQueueTime) { this.enQueueTime = enQueueTime; } /** **/ public PipelineActivity endTime(String endTime) { this.endTime = endTime; return this; } @ApiModelProperty(value = "") @JsonProperty("endTime") public String getEndTime() { return endTime; } @JsonProperty("endTime") 
public void setEndTime(String endTime) { this.endTime = endTime; } /** **/ public PipelineActivity id(String id) { this.id = id; return this; } @ApiModelProperty(value = "") @JsonProperty("id") public String getId() { return id; } @JsonProperty("id") public void setId(String id) { this.id = id; } /** **/ public PipelineActivity organization(String organization) { this.organization = organization; return this; } @ApiModelProperty(value = "") @JsonProperty("organization") public String getOrganization() { return organization; } @JsonProperty("organization") public void setOrganization(String organization) { this.organization = organization; } /** **/ public PipelineActivity pipeline(String pipeline) { this.pipeline = pipeline; return this; } @ApiModelProperty(value = "") @JsonProperty("pipeline") public String getPipeline() { return pipeline; } @JsonProperty("pipeline") public void setPipeline(String pipeline) { this.pipeline = pipeline; } /** **/ public PipelineActivity result(String result) { this.result = result; return this; } @ApiModelProperty(value = "") @JsonProperty("result") public String getResult() { return result; } @JsonProperty("result") public void setResult(String result) { this.result = result; } /** **/ public PipelineActivity runSummary(String runSummary) { this.runSummary = runSummary; return this; } @ApiModelProperty(value = "") @JsonProperty("runSummary") public String getRunSummary() { return runSummary; } @JsonProperty("runSummary") public void setRunSummary(String runSummary) { this.runSummary = runSummary; } /** **/ public PipelineActivity startTime(String startTime) { this.startTime = startTime; return this; } @ApiModelProperty(value = "") @JsonProperty("startTime") public String getStartTime() { return startTime; } @JsonProperty("startTime") public void setStartTime(String startTime) { this.startTime = startTime; } /** **/ public PipelineActivity state(String state) { this.state = state; return this; } @ApiModelProperty(value = "") 
@JsonProperty("state") public String getState() { return state; } @JsonProperty("state") public void setState(String state) { this.state = state; } /** **/ public PipelineActivity type(String type) { this.type = type; return this; } @ApiModelProperty(value = "") @JsonProperty("type") public String getType() { return type; } @JsonProperty("type") public void setType(String type) { this.type = type; } /** **/ public PipelineActivity commitId(String commitId) { this.commitId = commitId; return this; } @ApiModelProperty(value = "") @JsonProperty("commitId") public String getCommitId() { return commitId; } @JsonProperty("commitId") public void setCommitId(String commitId) { this.commitId = commitId; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } PipelineActivity pipelineActivity = (PipelineActivity) o; return Objects.equals(this.propertyClass, pipelineActivity.propertyClass) && Objects.equals(this.artifacts, pipelineActivity.artifacts) && Objects.equals(this.durationInMillis, pipelineActivity.durationInMillis) && Objects.equals(this.estimatedDurationInMillis, pipelineActivity.estimatedDurationInMillis) && Objects.equals(this.enQueueTime, pipelineActivity.enQueueTime) && Objects.equals(this.endTime, pipelineActivity.endTime) && Objects.equals(this.id, pipelineActivity.id) && Objects.equals(this.organization, pipelineActivity.organization) && Objects.equals(this.pipeline, pipelineActivity.pipeline) && Objects.equals(this.result, pipelineActivity.result) && Objects.equals(this.runSummary, pipelineActivity.runSummary) && Objects.equals(this.startTime, pipelineActivity.startTime) && Objects.equals(this.state, pipelineActivity.state) && Objects.equals(this.type, pipelineActivity.type) && Objects.equals(this.commitId, pipelineActivity.commitId); } @Override public int hashCode() { return Objects.hash(propertyClass, artifacts, durationInMillis, estimatedDurationInMillis, enQueueTime, 
endTime, id, organization, pipeline, result, runSummary, startTime, state, type, commitId); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class PipelineActivity {\n"); sb.append(" propertyClass: ").append(toIndentedString(propertyClass)).append("\n"); sb.append(" artifacts: ").append(toIndentedString(artifacts)).append("\n"); sb.append(" durationInMillis: ").append(toIndentedString(durationInMillis)).append("\n"); sb.append(" estimatedDurationInMillis: ").append(toIndentedString(estimatedDurationInMillis)).append("\n"); sb.append(" enQueueTime: ").append(toIndentedString(enQueueTime)).append("\n"); sb.append(" endTime: ").append(toIndentedString(endTime)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" organization: ").append(toIndentedString(organization)).append("\n"); sb.append(" pipeline: ").append(toIndentedString(pipeline)).append("\n"); sb.append(" result: ").append(toIndentedString(result)).append("\n"); sb.append(" runSummary: ").append(toIndentedString(runSummary)).append("\n"); sb.append(" startTime: ").append(toIndentedString(startTime)).append("\n"); sb.append(" state: ").append(toIndentedString(state)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" commitId: ").append(toIndentedString(commitId)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
package com.giggs.heroquest.game; import android.app.Dialog; import android.content.DialogInterface; import android.content.DialogInterface.OnDismissListener; import android.content.Intent; import android.content.res.Resources; import android.graphics.Color; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.giggs.heroquest.R; import com.giggs.heroquest.activities.AdventureActivity; import com.giggs.heroquest.activities.HomeActivity; import com.giggs.heroquest.activities.adapters.ItemsAdapter; import com.giggs.heroquest.activities.fragments.StoryFragment; import com.giggs.heroquest.game.base.MyBaseGameActivity; import com.giggs.heroquest.game.gui.GameMenu; import com.giggs.heroquest.game.gui.Loading; import com.giggs.heroquest.game.gui.RewardDialog; import com.giggs.heroquest.game.gui.items.ItemInfoInGame; import com.giggs.heroquest.game.gui.skills.UseSkill; import com.giggs.heroquest.models.Game; import com.giggs.heroquest.models.Quest; import com.giggs.heroquest.models.Reward; import com.giggs.heroquest.models.characters.Hero; import com.giggs.heroquest.models.characters.Unit; import com.giggs.heroquest.models.items.Item; import com.giggs.heroquest.models.skills.ActiveSkill; import com.giggs.heroquest.models.skills.PassiveSkill; import com.giggs.heroquest.models.skills.Skill; import com.giggs.heroquest.utils.ApplicationUtils; import com.giggs.heroquest.views.CustomAlertDialog; import com.giggs.heroquest.views.LifeBar; import it.sephiroth.android.library.widget.AdapterView; import it.sephiroth.android.library.widget.HListView; public class GUIManager { private static final String TAG = "GUIManager"; private MyBaseGameActivity mGameActivity; private Resources 
mResources; private Hero mHero; private Dialog mLoadingScreen, mGameMenuDialog, mRewardDialog, mBagDialog, mItemInfoDialog; private TextView mBigLabel; private Animation mBigLabelAnimation; private ViewGroup mQueueLayout, mLifeLayout, mSpiritLayout; private ViewGroup mSkillButtonsLayout; private CustomAlertDialog mConfirmDialog; public GUIManager(MyBaseGameActivity activity) { mGameActivity = activity; mResources = mGameActivity.getResources(); } public void initGUI() { mQueueLayout = (ViewGroup) mGameActivity.findViewById(R.id.queue); mGameActivity.findViewById(R.id.bag).setOnClickListener(mGameActivity); mBigLabel = (TextView) mGameActivity.findViewById(R.id.bigLabel); mBigLabelAnimation = AnimationUtils.loadAnimation(mGameActivity, R.anim.big_label_in_game); mBigLabelAnimation.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { mBigLabel.setAnimation(null); mBigLabel.setVisibility(View.GONE); } @Override public void onAnimationRepeat(Animation animation) { } }); mSkillButtonsLayout = (ViewGroup) mGameActivity.findViewById(R.id.skillButtonsLayout); mLifeLayout = (ViewGroup) mGameActivity.findViewById(R.id.hero_life); mSpiritLayout = (ViewGroup) mGameActivity.findViewById(R.id.hero_spirit); } public void setData(Hero hero) { mHero = hero; } public void displayBigLabel(final String text, final int color) { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { mBigLabel.setVisibility(View.VISIBLE); mBigLabel.setText("" + text); mBigLabel.setTextColor(mResources.getColor(color)); mBigLabel.startAnimation(mBigLabelAnimation); } }); } public void hideLoadingScreen() { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { mLoadingScreen.dismiss(); } }); } public void openGameMenu() { mGameMenuDialog = new GameMenu(mGameActivity, new View.OnClickListener() { @Override public void onClick(View v) { 
showLeaveQuestConfirmDialog(); } }, new View.OnClickListener() { @Override public void onClick(View v) { mGameActivity.startActivity(new Intent(mGameActivity, HomeActivity.class)); mGameActivity.finish(); } }); mGameMenuDialog.setOnDismissListener(new OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { mGameActivity.resumeGame(); } }); mGameMenuDialog.show(); } public void showLeaveQuestConfirmDialog() { mConfirmDialog = new CustomAlertDialog(mGameActivity, R.style.Dialog, mGameActivity.getString(R.string.confirm_leave_quest), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { if (which == R.id.ok_btn) { Intent intent = new Intent(mGameActivity, AdventureActivity.class); mHero.reset(); mGameActivity.getGame().setHero(mHero); intent.putExtra(Game.class.getName(), mGameActivity.getGame()); mGameActivity.startActivity(intent); mGameActivity.finish(); } dialog.dismiss(); } }); mConfirmDialog.show(); } public void showFinishQuestConfirmDialog() { mConfirmDialog = new CustomAlertDialog(mGameActivity, R.style.Dialog, mGameActivity.getString(R.string.confirm_finish_quest), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { if (which == R.id.ok_btn) { finishQuest(); } dialog.dismiss(); } }); mConfirmDialog.show(); } private void finishQuest() { Game game = mGameActivity.getGame(); Quest currentQuest = game.getQuest(); // get quest reward if (currentQuest.getReward() != null) { mHero.addGold(currentQuest.getReward().getGold()); } mHero.reset(); game.setHero(mHero); // quest is finished go to book chooser activity game.finishQuest(); game.setQuest(null); if (currentQuest.getOutroText(mResources) > 0) { // show outro text Bundle args = new Bundle(); args.putInt(StoryFragment.ARGUMENT_STORY, currentQuest.getOutroText(mResources)); args.putBoolean(StoryFragment.ARGUMENT_IS_OUTRO, true); ApplicationUtils.openDialogFragment(mGameActivity, new 
StoryFragment(), args); } else { // go directly to the book chooser Intent intent = new Intent(mGameActivity, AdventureActivity.class); intent.putExtra(Game.class.getName(), game); mGameActivity.startActivity(intent); mGameActivity.finish(); } } public void onPause() { if (mGameMenuDialog != null) { mGameMenuDialog.dismiss(); } if (mLoadingScreen != null) { mLoadingScreen.dismiss(); } if (mRewardDialog != null) { mRewardDialog.dismiss(); } if (mBagDialog != null) { mBagDialog.dismiss(); } if (mItemInfoDialog != null) { mItemInfoDialog.dismiss(); } if (mConfirmDialog != null) { mConfirmDialog.dismiss(); } } public void showLoadingScreen() { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { mLoadingScreen = new Loading(mGameActivity); mLoadingScreen.show(); } }); } public void updateHeroLayout() { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { mLifeLayout.removeAllViews(); ImageView view; int sizeInPixels = ApplicationUtils.convertDpToPixels(mGameActivity.getApplicationContext(), 25); LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(sizeInPixels, sizeInPixels); params.setMargins(0, 0, -10, 0); for (int n = 0; n < mHero.getHp(); n++) { view = new ImageView(mGameActivity.getApplicationContext()); view.setImageResource(R.drawable.ic_health); view.setLayoutParams(params); if (mHero.getCurrentHP() <= n) { view.setColorFilter(Color.argb(150, 0, 0, 0)); } mLifeLayout.addView(view); } mSpiritLayout.removeAllViews(); for (int n = 0; n < mHero.getSpirit(); n++) { view = new ImageView(mGameActivity.getApplicationContext()); view.setImageResource(R.drawable.ic_spirit); view.setLayoutParams(params); if (mHero.getCurrentSpirit() <= n) { view.setColorFilter(Color.argb(150, 0, 0, 0)); } mSpiritLayout.addView(view); } } }); } public void updateQueue(final Unit activeCharacter, final Quest quest) { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { updateQueueCharacter((ViewGroup) 
mQueueLayout.findViewById(R.id.activeCharacter), activeCharacter); if (quest.getQueue().size() > 1) { updateQueueCharacter((ViewGroup) mQueueLayout.findViewById(R.id.nextCharacter), quest.getQueue().get(0)); } else { mQueueLayout.findViewById(R.id.nextCharacter).setVisibility(View.GONE); } if (quest.getQueue().size() > 2) { updateQueueCharacter((ViewGroup) mQueueLayout.findViewById(R.id.nextnextCharacter), quest.getQueue().get(1)); } else { mQueueLayout.findViewById(R.id.nextnextCharacter).setVisibility(View.GONE); } } }); } private void updateQueueCharacter(ViewGroup view, Unit unit) { view.setVisibility(View.VISIBLE); ((ImageView) view.findViewById(R.id.image)).setImageResource(unit.getImage(mResources)); ((LifeBar) view.findViewById(R.id.life)).updateLife(unit.getLifeRatio()); } public void showReward(final Reward reward, final OnDismissListener onDismissListener) { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { if (mRewardDialog == null || !mRewardDialog.isShowing()) { mRewardDialog = new RewardDialog(mGameActivity, reward); mRewardDialog.setOnDismissListener(onDismissListener); mRewardDialog.show(); } } }); } public void showBag() { mBagDialog = new Dialog(mGameActivity, R.style.Dialog); mBagDialog.setContentView(R.layout.in_game_bag); mBagDialog.setCancelable(true); mBagDialog.setCanceledOnTouchOutside(true); HListView bagItemsListView = (HListView) mBagDialog.findViewById(R.id.bag); final ItemsAdapter bagAdapter = new ItemsAdapter(mGameActivity.getApplicationContext(), R.layout.item, mHero.getItems()); bagItemsListView.setAdapter(bagAdapter); bagItemsListView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) { view.setTag(R.string.item, bagAdapter.getItem(position)); mGameActivity.onClick(view); } }); mBagDialog.findViewById(R.id.close_bag).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { 
mBagDialog.dismiss(); } }); mBagDialog.show(); } public void showItemInfo(Item item, ItemInfoInGame.OnItemActionSelected onItemActionCallback) { if (mItemInfoDialog == null || !mItemInfoDialog.isShowing()) { mItemInfoDialog = new ItemInfoInGame(mGameActivity, item, mHero, onItemActionCallback); mItemInfoDialog.show(); } } public void updateSkillButtons() { mGameActivity.runOnUiThread(new Runnable() { @Override public void run() { LayoutInflater inflater = mGameActivity.getLayoutInflater(); mSkillButtonsLayout.removeAllViews(); View skillButton; for (final Skill skill : mHero.getSkills()) { skillButton = inflater.inflate(R.layout.in_game_skill_button, null); skillButton.setTag(R.string.show_skill, skill); ((ImageView) skillButton.findViewById(R.id.image)).setImageResource(skill.getImage(mResources)); ((ImageView) skillButton.findViewById(R.id.image)).setColorFilter(Color.argb(120, 255, 255, 255)); if (skill instanceof PassiveSkill || skill instanceof ActiveSkill && ((ActiveSkill) skill).isUsed()) { ApplicationUtils.setAlpha(skillButton, 0.5f); } skillButton.setOnClickListener(mGameActivity); mSkillButtonsLayout.addView(skillButton); } } }); } public void showUseSkillInfo(Skill skill) { if (mItemInfoDialog == null || !mItemInfoDialog.isShowing()) { mItemInfoDialog = new UseSkill(mGameActivity, skill, new OnClickListener() { @Override public void onClick(View v) { mGameActivity.onClick(v); } }); mItemInfoDialog.show(); } } public void hideBag() { if (mBagDialog != null) { mBagDialog.dismiss(); } } }
package com.swfarm.biz.ebay.job;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.scheduling.quartz.QuartzJobBean;

import com.swfarm.biz.chain.bo.CustomerOrder;
import com.swfarm.biz.chain.bo.Semaphore;
import com.swfarm.biz.chain.srv.ChainQueryService;
import com.swfarm.biz.chain.srv.ChainService;
import com.swfarm.biz.ebay.bo.EbayAccount;
import com.swfarm.biz.ebay.srv.EbayQueryService;
import com.swfarm.biz.ebay.srv.EbayService;
import com.swfarm.biz.ebay.srv.SynchronizationService;
import com.swfarm.biz.oa.bo.ImportResultVoucher;
import com.swfarm.biz.oa.bo.ImportResultVoucherItem;
import com.swfarm.biz.oa.bo.User;
import com.swfarm.biz.oa.srv.SysConfigService;
import com.swfarm.biz.oa.srv.UserService;
import com.swfarm.biz.warehouse.srv.WarehouseService;
import com.swfarm.pub.framework.Env;
import com.swfarm.pub.utils.SpringUtils;

/**
 * Quartz job that pushes the local fulfillment status of customer orders back
 * to eBay, once per configured eBay account. Two passes are made: one for
 * virtual-allocation orders and one for regular orders. Orders that fail to
 * synchronize are collected into an {@link ImportResultVoucher} for review.
 */
public class SynchronizeTransactionStatusTask extends QuartzJobBean {

    Logger logger = Logger.getLogger(SynchronizeTransactionStatusTask.class);

    private EbayService ebayService;
    private ChainService chainService;
    private ChainQueryService chainQueryService;
    private WarehouseService warehouseService;
    private UserService userService;
    private SynchronizationService synchronizationService;

    // Account numbers to process; when left empty it is filled lazily with
    // every known eBay account (see ensureAccountsLoaded()).
    private List accounts = new ArrayList();

    public void setEbayService(EbayService ebayService) {
        this.ebayService = ebayService;
    }

    public void setChainService(ChainService chainService) {
        this.chainService = chainService;
    }

    public void setChainQueryService(ChainQueryService chainQueryService) {
        this.chainQueryService = chainQueryService;
    }

    public void setWarehouseService(WarehouseService warehouseService) {
        this.warehouseService = warehouseService;
    }

    public void setUserService(UserService userService) {
        this.userService = userService;
    }

    public void setSynchronizationService(SynchronizationService synchronizationService) {
        this.synchronizationService = synchronizationService;
    }

    public void setAccounts(List accounts) {
        this.accounts = accounts;
    }

    /** Looks up the full account record for an account number (may return null). */
    public EbayAccount getEbayAccount(String account) {
        return this.ebayService.findEbayAccountByName(account);
    }

    /**
     * If no accounts were injected, fills {@link #accounts} with every distinct
     * account number known to the eBay query service. Previously duplicated in
     * both push methods.
     */
    private void ensureAccountsLoaded() {
        if (CollectionUtils.isEmpty(this.accounts)) {
            this.accounts = new ArrayList();
            EbayQueryService ebayQueryService = (EbayQueryService) SpringUtils.getBean("ebayQueryService");
            List<EbayAccount> ebayAccounts = ebayQueryService.findEbayAccounts();
            for (EbayAccount ebayAccount : ebayAccounts) {
                String accountNumber = ebayAccount.getAccountNumber();
                if (!this.accounts.contains(accountNumber)) {
                    this.accounts.add(accountNumber);
                }
            }
        }
    }

    /**
     * Synchronizes the eBay status of virtual-allocation orders (only those
     * that already carry a virtual allocation product voucher number).
     * Failures per account are persisted via saveImportResultVoucher.
     */
    public void pushVirtualAllocateOrderStatus() {
        logger.warn("*********start to synchronize virtual transaction status*********");
        ensureAccountsLoaded();
        for (Object accountObj : this.accounts) {
            String account = (String) accountObj;
            EbayAccount ebayAccount = this.getEbayAccount(account);
            if (ebayAccount == null) {
                continue;
            }
            List customerOrders = this.chainService
                    .findNeedUpdateEbayStatusVirtualAllocateOrders(ebayAccount.getAccountNumber());
            Map failedCustomerOrders = new HashMap();
            for (Object orderObj : customerOrders) {
                CustomerOrder customerOrder = (CustomerOrder) orderObj;
                // Only orders that already have a virtual allocation voucher are pushed.
                if (StringUtils.isNotEmpty(customerOrder.getVirtualAllocationProductVoucherNo())) {
                    try {
                        CustomerOrder updatedCustomerOrder = this.synchronizationService
                                .updateCustomerOrderEbayStatus(failedCustomerOrders,
                                        customerOrder.getId(), false, true);
                        logger.warn("account number [" + account + "] customer order ["
                                + updatedCustomerOrder.getCustomerOrderNo() + "] status updated ["
                                + updatedCustomerOrder.getEbayStatusUpdated() + "]");
                    } catch (Exception e) {
                        // Best effort: one failed order must not stop the batch.
                        logger.warn(e);
                    }
                }
            }
            // Record any failures; the returned voucher id is not needed here.
            this.saveImportResultVoucher(account, failedCustomerOrders);
        }
        logger.warn("**********finish synchronize virtual transaction status**********");
    }

    /**
     * Synchronizes the eBay status of all regular customer orders that still
     * need an update. Failures per account are persisted via
     * saveImportResultVoucher.
     */
    public void pushTransactionStatus() {
        logger.warn("*********start to synchronize transaction status*********");
        ensureAccountsLoaded();
        for (Object accountObj : this.accounts) {
            String account = (String) accountObj;
            EbayAccount ebayAccount = this.getEbayAccount(account);
            if (ebayAccount == null) {
                continue;
            }
            List customerOrders = this.chainService
                    .findNeedUpdateEbayStatusCustomerOrders(ebayAccount.getAccountNumber());
            Map failedCustomerOrders = new HashMap();
            for (Object orderObj : customerOrders) {
                CustomerOrder customerOrder = (CustomerOrder) orderObj;
                try {
                    CustomerOrder updatedCustomerOrder = this.synchronizationService
                            .updateCustomerOrderEbayStatus(failedCustomerOrders,
                                    customerOrder.getId(), false, null);
                    logger.warn("account number [" + account + "] customer order ["
                            + updatedCustomerOrder.getCustomerOrderNo() + "] status updated ["
                            + updatedCustomerOrder.getEbayStatusUpdated() + "]");
                } catch (Exception e) {
                    // Best effort: one failed order must not stop the batch.
                    logger.warn(e);
                }
            }
            // Record any failures; the returned voucher id is not needed here.
            this.saveImportResultVoucher(account, failedCustomerOrders);
        }
        logger.warn("**********finish synchronize transaction status**********");
    }

    /**
     * Persists the failed orders of one account as an import-result voucher.
     *
     * @param account             the eBay account number being processed
     * @param failedCustomerOrders map of CustomerOrder -> error message
     * @return the id of the saved voucher, or null when there were no failures
     */
    private Long saveImportResultVoucher(String account, Map failedCustomerOrders) {
        if (failedCustomerOrders.size() == 0) {
            return null;
        }
        ImportResultVoucher importResultVoucher = new ImportResultVoucher();
        User executeUser = userService.findUserByUsername("system");
        importResultVoucher.setImportUserId(executeUser.getId());
        importResultVoucher.setImportFileName("synchronizetransactionstatus");
        importResultVoucher.setImportType("synchronizetransactionstatus");
        importResultVoucher.setImportDate(new Date());
        for (Object entryObj : failedCustomerOrders.entrySet()) {
            Map.Entry entry = (Map.Entry) entryObj;
            CustomerOrder customerOrder = (CustomerOrder) entry.getKey();
            String errorMsg = (String) entry.getValue();
            ImportResultVoucherItem importResultVoucherItem = new ImportResultVoucherItem();
            importResultVoucherItem.setImportItemIdentifier(customerOrder.getAccountNumber()
                    + "-" + customerOrder.getSaleChannelOrderId()
                    + "-" + customerOrder.getShippedTime());
            importResultVoucherItem.setImportResult(ImportResultVoucher.IMPORT_RESULT_FAILED);
            importResultVoucherItem.setImportMessage(errorMsg);
            importResultVoucher.addImportResultVoucherItem(importResultVoucherItem);
        }
        SysConfigService sysConfigService = (SysConfigService) SpringUtils.getBean("sysConfigService");
        sysConfigService.saveImportResultVoucher(importResultVoucher);
        return importResultVoucher.getId();
    }

    /**
     * Quartz entry point. Quartz instantiates the job itself, so all
     * collaborators are wired from the Spring context here. The job runs only
     * when no semaphore is registered for this job detail on this server.
     */
    @Override
    protected void executeInternal(JobExecutionContext jobExecutionContext)
            throws JobExecutionException {
        this.ebayService = (EbayService) SpringUtils.getBean("ebayService");
        this.chainService = (ChainService) SpringUtils.getBean("chainService");
        this.chainQueryService = (ChainQueryService) SpringUtils.getBean("chainQueryService");
        this.warehouseService = (WarehouseService) SpringUtils.getBean("warehouseService");
        this.userService = (UserService) SpringUtils.getBean("userService");
        this.synchronizationService = (SynchronizationService) SpringUtils.getBean("synchronizationService");

        String jobDetailName = jobExecutionContext.getJobDetail().getName();
        logger.warn("jobDetailName >> " + jobDetailName);
        // Reuse the field wired above instead of fetching the bean a second time
        // into a shadowing local (defect in the original).
        Semaphore semaphore = this.chainService
                .findSemaphoreByJobServerPrefixAndJobDetailName(Env.JOB_SERVER_PREFIX, jobDetailName);
        if (semaphore == null) {
            this.pushVirtualAllocateOrderStatus();
            this.pushTransactionStatus();
        }
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.process;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nullable;

import java.io.OutputStream;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Base class for objects that track an external process and broadcast its
 * lifecycle to {@link ProcessListener}s.
 *
 * The lifecycle is a monotonic state machine advanced only via compare-and-set
 * on {@link #myState}: INITIAL -> RUNNING -> TERMINATING -> TERMINATED.
 * Subclasses implement how the process is actually destroyed/detached; this
 * class guarantees ordering of the listener callbacks.
 */
public abstract class ProcessHandler extends UserDataHolderBase {
  private static final Logger LOG = Logger.getInstance("#com.intellij.execution.process.ProcessHandler");

  /**
   * todo: replace with an overridable method [nik]
   *
   * @deprecated
   */
  public static final Key<Boolean> SILENTLY_DESTROY_ON_CLOSE = Key.create("SILENTLY_DESTROY_ON_CLOSE");

  // Copy-on-write so listeners can be added/removed while events are being fired.
  private final List<ProcessListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  // Lifecycle states; transitions happen only forward, via CAS on myState.
  private static final int STATE_INITIAL = 0;
  private static final int STATE_RUNNING = 1;
  private static final int STATE_TERMINATING = 2;
  private static final int STATE_TERMINATED = 3;
  private final AtomicInteger myState = new AtomicInteger(STATE_INITIAL);

  // Released ("up") exactly once, when the process reaches TERMINATED; waitFor() blocks on it.
  private final Semaphore myWaitSemaphore;
  // Dynamic proxy that fans every event out to all registered listeners.
  private final ProcessListener myEventMulticaster;
  // Defers destroy/detach/terminate work until startNotify() has run.
  private final TasksRunner myAfterStartNotifiedRunner;

  protected ProcessHandler() {
    myEventMulticaster = createEventMulticaster();
    // Semaphore starts "down"; it is raised in notifyTerminated() once the process ends.
    myWaitSemaphore = new Semaphore();
    myWaitSemaphore.down();
    myAfterStartNotifiedRunner = new TasksRunner();
    myListeners.add(myAfterStartNotifiedRunner);
  }

  /**
   * Marks the process as started (INITIAL -> RUNNING) and fires
   * {@link ProcessListener#startNotified}. Must be called exactly once;
   * a second call is logged as an error.
   */
  public void startNotify() {
    if (myState.compareAndSet(STATE_INITIAL, STATE_RUNNING)) {
      myEventMulticaster.startNotified(new ProcessEvent(this));
    }
    else {
      LOG.error("startNotify called already");
    }
  }

  /** Subclass hook: actually kill the underlying process. */
  protected abstract void destroyProcessImpl();

  /** Subclass hook: stop watching the process without killing it. */
  protected abstract void detachProcessImpl();

  public abstract boolean detachIsDefault();

  /**
   * Wait for process execution.
   *
   * @return true if target process has actually ended; false if we stopped watching the process execution and don't know if it has completed.
   */
  public boolean waitFor() {
    try {
      myWaitSemaphore.waitFor();
      return true;
    }
    catch (ProcessCanceledException e) {
      return false;
    }
  }

  /**
   * Same as {@link #waitFor()} with a timeout.
   *
   * @return the semaphore's result within the timeout, or false when the wait
   *         was canceled.
   */
  public boolean waitFor(long timeoutInMilliseconds) {
    try {
      return myWaitSemaphore.waitFor(timeoutInMilliseconds);
    }
    catch (ProcessCanceledException e) {
      return false;
    }
  }

  /**
   * Requests process destruction. Runs after startNotify (queued otherwise);
   * only the RUNNING -> TERMINATING transition triggers the actual destroy,
   * so repeated calls are no-ops.
   */
  public void destroyProcess() {
    myAfterStartNotifiedRunner.execute(new Runnable() {
      @Override
      public void run() {
        if (myState.compareAndSet(STATE_RUNNING, STATE_TERMINATING)) {
          fireProcessWillTerminate(true);
          destroyProcessImpl();
        }
      }
    });
  }

  /**
   * Requests detaching from the process (it keeps running). Same queuing and
   * CAS-guard semantics as {@link #destroyProcess()}.
   */
  public void detachProcess() {
    myAfterStartNotifiedRunner.execute(new Runnable() {
      @Override
      public void run() {
        if (myState.compareAndSet(STATE_RUNNING, STATE_TERMINATING)) {
          fireProcessWillTerminate(false);
          detachProcessImpl();
        }
      }
    });
  }

  public boolean isProcessTerminated() {
    return myState.get() == STATE_TERMINATED;
  }

  public boolean isProcessTerminating() {
    return myState.get() == STATE_TERMINATING;
  }

  public void addProcessListener(final ProcessListener listener) {
    myListeners.add(listener);
  }

  public void removeProcessListener(final ProcessListener listener) {
    myListeners.remove(listener);
  }

  /** Called by subclasses when the process was detached; reported with exit code 0. */
  protected void notifyProcessDetached() {
    notifyTerminated(0, false);
  }

  /** Called by subclasses when the process exited with the given code. */
  protected void notifyProcessTerminated(final int exitCode) {
    notifyTerminated(exitCode, true);
  }

  /**
   * Drives the final transitions. If still RUNNING, fires processWillTerminate
   * first (TERMINATING); then TERMINATING -> TERMINATED fires processTerminated
   * and releases the wait semaphore — in a finally block, so waiters are
   * unblocked even if a listener throws.
   */
  private void notifyTerminated(final int exitCode, final boolean willBeDestroyed) {
    myAfterStartNotifiedRunner.execute(new Runnable() {
      @Override
      public void run() {
        LOG.assertTrue(isStartNotified(), "Start notify is not called");

        if (myState.compareAndSet(STATE_RUNNING, STATE_TERMINATING)) {
          try {
            fireProcessWillTerminate(willBeDestroyed);
          }
          catch (Throwable e) {
            if (!isCanceledException(e)) {
              LOG.error(e);
            }
          }
        }

        if (myState.compareAndSet(STATE_TERMINATING, STATE_TERMINATED)) {
          try {
            myEventMulticaster.processTerminated(new ProcessEvent(ProcessHandler.this, exitCode));
          }
          catch (Throwable e) {
            if (!isCanceledException(e)) {
              LOG.error(e);
            }
          }
          finally {
            myWaitSemaphore.up();
          }
        }
      }
    });
  }

  /** Broadcasts a chunk of process output to all listeners. */
  public void notifyTextAvailable(final String text, final Key outputType) {
    final ProcessEvent event = new ProcessEvent(this, text);
    myEventMulticaster.onTextAvailable(event, outputType);
  }

  @Nullable
  public abstract OutputStream getProcessInput();

  private void fireProcessWillTerminate(final boolean willBeDestroyed) {
    LOG.assertTrue(isStartNotified(), "All events should be fired after startNotify is called");
    myEventMulticaster.processWillTerminate(new ProcessEvent(this), willBeDestroyed);
  }

  public boolean isStartNotified() {
    return myState.get() > STATE_INITIAL;
  }

  public boolean isSilentlyDestroyOnClose() {
    return false;
  }

  /**
   * Builds a dynamic proxy implementing ProcessListener that forwards each
   * call to every registered listener, logging (but not propagating)
   * non-cancellation exceptions so one bad listener cannot starve the rest.
   */
  private ProcessListener createEventMulticaster() {
    final Class<ProcessListener> listenerClass = ProcessListener.class;
    return (ProcessListener)Proxy.newProxyInstance(listenerClass.getClassLoader(), new Class[]{listenerClass}, new InvocationHandler() {
      @Override
      public Object invoke(Object object, Method method, Object[] params) throws Throwable {
        for (ProcessListener listener : myListeners) {
          try {
            method.invoke(listener, params);
          }
          catch (Throwable e) {
            if (!isCanceledException(e)) {
              LOG.error(e);
            }
          }
        }
        return null;
      }
    });
  }

  /** True when the throwable is a reflective wrapper around ProcessCanceledException. */
  private static boolean isCanceledException(Throwable e) {
    final boolean value = e instanceof InvocationTargetException && e.getCause() instanceof ProcessCanceledException;
    if (value) {
      LOG.info(e);
    }
    return value;
  }

  /**
   * Queues tasks submitted before startNotify() and runs them once it fires;
   * tasks submitted afterwards run immediately on the caller's thread.
   */
  private final class TasksRunner extends ProcessAdapter {
    private final List<Runnable> myPendingTasks = new ArrayList<Runnable>();

    @Override
    public void startNotified(ProcessEvent event) {
      removeProcessListener(this);
      // at this point it is guaranteed that nothing will be added to myPendingTasks
      runPendingTasks();
    }

    public void execute(Runnable task) {
      if (isStartNotified()) {
        task.run();
      }
      else {
        synchronized (myPendingTasks) {
          myPendingTasks.add(task);
        }
        // Re-check after enqueueing: startNotified may have raced past the first
        // check, in which case we must drain the queue ourselves.
        if (isStartNotified()) {
          runPendingTasks();
        }
      }
    }

    private void runPendingTasks() {
      final Runnable[] tasks;
      synchronized (myPendingTasks) {
        tasks = myPendingTasks.toArray(new Runnable[myPendingTasks.size()]);
        myPendingTasks.clear();
      }
      for (Runnable task : tasks) {
        task.run();
      }
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.llap.registry.impl; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.security.auth.login.AppConfigurationEntry; import com.google.common.collect.Sets; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; import org.apache.curator.framework.imps.CuratorFrameworkState; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; import 
org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode; import org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode.Mode; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.utils.CloseableUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.LlapUtil; import org.apache.hadoop.hive.llap.io.api.LlapProxy; import org.apache.hadoop.hive.llap.registry.ServiceInstance; import org.apache.hadoop.hive.llap.registry.ServiceInstanceSet; import org.apache.hadoop.hive.llap.registry.ServiceInstanceStateChangeListener; import org.apache.hadoop.hive.llap.registry.ServiceRegistry; import org.apache.hadoop.registry.client.binding.RegistryTypeUtils; import org.apache.hadoop.registry.client.binding.RegistryUtils; import org.apache.hadoop.registry.client.binding.RegistryUtils.ServiceRecordMarshal; import org.apache.hadoop.registry.client.types.AddressTypes; import org.apache.hadoop.registry.client.types.Endpoint; import org.apache.hadoop.registry.client.types.ProtocolTypes; import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.util.KerberosUtil; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.KeeperException.InvalidACLException; import org.apache.zookeeper.client.ZooKeeperSaslClient; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import 
com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * ZooKeeper-backed {@link ServiceRegistry} for LLAP daemons. Each daemon publishes an
 * ephemeral-sequential "worker" znode (plus a "slot" znode) under a per-user path; clients
 * watch that path to discover live instances.
 */
public class LlapZookeeperRegistryImpl implements ServiceRegistry {
  private static final Logger LOG = LoggerFactory.getLogger(LlapZookeeperRegistryImpl.class);

  /**
   * IPC endpoint names.
   */
  private static final String IPC_SERVICES = "services";
  private static final String IPC_MNG = "llapmng";
  private static final String IPC_SHUFFLE = "shuffle";
  private static final String IPC_LLAP = "llap";
  private static final String IPC_OUTPUTFORMAT = "llapoutputformat";

  // Root ZK namespaces used when ConfVars.LLAP_ZK_REGISTRY_NAMESPACE is not set.
  private final static String SASL_NAMESPACE = "llap-sasl";
  private final static String UNSECURE_NAMESPACE = "llap-unsecure";
  private final static String USER_SCOPE_PATH_PREFIX = "user-";
  private static final String DISABLE_MESSAGE =
      "Set " + ConfVars.LLAP_VALIDATE_ACLS.varname + " to false to disable ACL validation";
  // znode name prefixes for the two kinds of children under workersPath.
  private static final String WORKER_PREFIX = "worker-";
  private static final String SLOT_PREFIX = "slot-";

  private final Configuration conf;
  private final CuratorFramework zooKeeperClient;

  // userPathPrefix is the path specific to the user for which ACLs should be restrictive.
  // workersPath is the directory path where all the worker znodes are located.
  private final String userPathPrefix, workersPath;
  private String userNameFromPrincipal; // Only set when setting up the secure config for ZK.
  private PersistentEphemeralNode znode;
  private SlotZnode slotZnode;
  private String znodePath; // unique identity for this instance

  private final ServiceRecordMarshal encoder; // to marshal/unmarshal znode data

  // to be used by clients of ServiceRegistry; both created lazily in checkPathChildrenCache().
  private DynamicServiceInstanceSet instances;
  private PathChildrenCache instancesCache;

  // Restart-sensitive unique id of this daemon process, published in the ServiceRecord.
  private static final UUID uniq = UUID.randomUUID();
  private static final String UNIQUE_IDENTIFIER = "llap.unique.id";

  private Set<ServiceInstanceStateChangeListener> stateChangeListeners;
  // Secondary indexes over discovered instances (by znode path and by host);
  // writes are guarded by instanceCacheLock.
  private final Map<String, Set<ServiceInstance>> pathToInstanceCache;
  private final Map<String, Set<ServiceInstance>> nodeToInstanceCache;
  private final Lock instanceCacheLock = new ReentrantLock();

  // get local hostname
  private static final String hostname;

  static {
    String localhost = "localhost";
    try {
      localhost = InetAddress.getLocalHost().getCanonicalHostName();
    } catch (UnknownHostException uhe) {
      // ignore: fall back to "localhost" rather than failing class initialization
    }
    hostname = localhost;
  }

  /**
   * @param instanceName registry id; becomes part of the ZK path.
   * @param conf source configuration; copied, with yarn-site.xml added as a resource.
   */
  public LlapZookeeperRegistryImpl(String instanceName, Configuration conf) {
    this.conf = new Configuration(conf);
    this.conf.addResource(YarnConfiguration.YARN_SITE_CONFIGURATION_FILE);
    String zkEnsemble = getQuorumServers(this.conf);
    this.encoder = new RegistryUtils.ServiceRecordMarshal();
    int sessionTimeout = (int) HiveConf.getTimeVar(conf,
        ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT, TimeUnit.MILLISECONDS);
    int baseSleepTime = (int) HiveConf
        .getTimeVar(conf, ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME, TimeUnit.MILLISECONDS);
    int maxRetries = HiveConf.getIntVar(conf, ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES);

    // sample path: /llap-sasl/hiveuser/hostname/workers/worker-0000000
    // worker-0000000 is the sequence number which will be retained until session timeout. If a
    // worker does not respond due to communication interruptions it will retain the same sequence
    // number when it returns back. If session timeout expires, the node will be deleted and new
    // addition of the same node (restart) will get next sequence number
    this.userPathPrefix = USER_SCOPE_PATH_PREFIX + getZkPathUser(this.conf);
    this.workersPath = "/" + userPathPrefix + "/" + instanceName + "/workers";
    this.instancesCache = null;
    this.instances = null;
    this.stateChangeListeners = new HashSet<>();
    this.pathToInstanceCache = new ConcurrentHashMap<>();
    this.nodeToInstanceCache = new ConcurrentHashMap<>();

    final boolean isSecure = UserGroupInformation.isSecurityEnabled();
    ACLProvider zooKeeperAclProvider = new ACLProvider() {
      @Override
      public List<ACL> getDefaultAcl() {
        // We always return something from getAclForPath so this should not happen.
        LOG.warn("getDefaultAcl was called");
        return Lists.newArrayList(ZooDefs.Ids.OPEN_ACL_UNSAFE);
      }

      @Override
      public List<ACL> getAclForPath(String path) {
        if (!isSecure || path == null || !path.contains(userPathPrefix)) {
          // No security or the path is below the user path - full access.
          return Lists.newArrayList(ZooDefs.Ids.OPEN_ACL_UNSAFE);
        }
        return createSecureAcls();
      }
    };

    String rootNs = HiveConf.getVar(conf, ConfVars.LLAP_ZK_REGISTRY_NAMESPACE);
    if (rootNs == null) {
      rootNs = isSecure ? SASL_NAMESPACE : UNSECURE_NAMESPACE; // The normal path.
    }

    // Create a CuratorFramework instance to be used as the ZooKeeper client
    // Use the zooKeeperAclProvider to create appropriate ACLs
    this.zooKeeperClient = CuratorFrameworkFactory.builder()
        .connectString(zkEnsemble)
        .sessionTimeoutMs(sessionTimeout)
        .aclProvider(zooKeeperAclProvider)
        .namespace(rootNs)
        .retryPolicy(new ExponentialBackoffRetry(baseSleepTime, maxRetries))
        .build();

    LOG.info("Llap Zookeeper Registry is enabled with registryid: " + instanceName);
  }

  /** Builds the secure-mode ACLs: world-readable, full control for the creator only. */
  private static List<ACL> createSecureAcls() {
    // Read all to the world
    List<ACL> nodeAcls = new ArrayList<ACL>(ZooDefs.Ids.READ_ACL_UNSAFE);
    // Create/Delete/Write/Admin to creator
    nodeAcls.addAll(ZooDefs.Ids.CREATOR_ALL_ACL);
    return nodeAcls;
  }

  /**
   * Get the ensemble server addresses from the configuration. The format is: host1:port,
   * host2:port..
   *
   * @param conf configuration holding the quorum hosts and client port
   **/
  private String getQuorumServers(Configuration conf) {
    String[] hosts = conf.getTrimmedStrings(ConfVars.HIVE_ZOOKEEPER_QUORUM.varname);
    String port = conf.get(ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.varname,
        ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.getDefaultValue());
    StringBuilder quorum = new StringBuilder();
    for (int i = 0; i < hosts.length; i++) {
      quorum.append(hosts[i].trim());
      if (!hosts[i].contains(":")) {
        // if the hostname doesn't contain a port, add the configured port to hostname
        quorum.append(":");
        quorum.append(port);
      }
      if (i != hosts.length - 1) {
        quorum.append(",");
      }
    }
    return quorum.toString();
  }

  /** Returns the user name used to scope the registry path in ZooKeeper. */
  private String getZkPathUser(Configuration conf) {
    // External LLAP clients would need to set LLAP_ZK_REGISTRY_USER to the LLAP daemon user
    // (hive), rather than relying on RegistryUtils.currentUser().
    String user = HiveConf.getVar(conf, ConfVars.LLAP_ZK_REGISTRY_USER, RegistryUtils.currentUser());
    return user;
  }
  /** RPC endpoint for the LLAP protocol on this daemon. */
  public Endpoint getRpcEndpoint() {
    final int rpcPort = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_RPC_PORT);
    return RegistryTypeUtils.ipcEndpoint(IPC_LLAP, new InetSocketAddress(hostname, rpcPort));
  }

  /** Shuffle endpoint (YARN shuffle port) on this daemon. */
  public Endpoint getShuffleEndpoint() {
    final int shufflePort = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT);
    // HTTP today, but might not be
    return RegistryTypeUtils.inetAddrEndpoint(IPC_SHUFFLE, ProtocolTypes.PROTOCOL_TCP, hostname,
        shufflePort);
  }

  /** Web UI endpoint; scheme depends on LLAP_DAEMON_WEB_SSL. */
  public Endpoint getServicesEndpoint() {
    final int servicePort = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_WEB_PORT);
    final boolean isSSL = HiveConf.getBoolVar(conf, ConfVars.LLAP_DAEMON_WEB_SSL);
    final String scheme = isSSL ? "https" : "http";
    final URL serviceURL;
    try {
      serviceURL = new URL(scheme, hostname, servicePort, "");
      return RegistryTypeUtils.webEndpoint(IPC_SERVICES, serviceURL.toURI());
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    } catch (URISyntaxException e) {
      throw new RuntimeException("llap service URI for " + hostname + " is invalid", e);
    }
  }

  /** Management RPC endpoint on this daemon. */
  public Endpoint getMngEndpoint() {
    return RegistryTypeUtils.ipcEndpoint(IPC_MNG, new InetSocketAddress(hostname,
        HiveConf.getIntVar(conf, ConfVars.LLAP_MANAGEMENT_RPC_PORT)));
  }

  /** Output-format service endpoint on this daemon. */
  public Endpoint getOutputFormatEndpoint() {
    return RegistryTypeUtils.ipcEndpoint(IPC_OUTPUTFORMAT, new InetSocketAddress(hostname,
        HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT)));
  }

  /**
   * Registers this daemon: publishes a ServiceRecord carrying all endpoints and the llap.*
   * configuration under an ephemeral-sequential worker znode, creates the companion slot
   * znode, and (optionally) validates/repairs ACLs on the workers directory.
   *
   * @return the restart-sensitive unique id of this instance
   * @throws IOException if znode creation, the existence check, or ACL validation fails
   */
  @Override
  public String register() throws IOException {
    ServiceRecord srv = new ServiceRecord();
    Endpoint rpcEndpoint = getRpcEndpoint();
    srv.addInternalEndpoint(rpcEndpoint);
    srv.addInternalEndpoint(getMngEndpoint());
    srv.addInternalEndpoint(getShuffleEndpoint());
    srv.addExternalEndpoint(getServicesEndpoint());
    srv.addInternalEndpoint(getOutputFormatEndpoint());

    for (Map.Entry<String, String> kv : this.conf) {
      if (kv.getKey().startsWith(HiveConf.PREFIX_LLAP)
          || kv.getKey().startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
        // TODO: read this somewhere useful, like the task scheduler
        srv.set(kv.getKey(), kv.getValue());
      }
    }

    // restart sensitive instance id
    srv.set(UNIQUE_IDENTIFIER, uniq.toString());

    // Create a znode under the rootNamespace parent for this instance of the server
    try {
      // PersistentEphemeralNode will make sure the ephemeral node created on server will be present
      // even under connection or session interruption (will automatically handle retries)
      znode = new PersistentEphemeralNode(zooKeeperClient, Mode.EPHEMERAL_SEQUENTIAL,
          workersPath + "/" + WORKER_PREFIX, encoder.toBytes(srv));

      // start the creation of znodes
      znode.start();

      // We'll wait for 120s for node creation
      long znodeCreationTimeout = 120;
      if (!znode.waitForInitialCreate(znodeCreationTimeout, TimeUnit.SECONDS)) {
        throw new Exception(
            "Max znode creation wait time: " + znodeCreationTimeout + "s exhausted");
      }
      znodePath = znode.getActualPath();

      slotZnode = new SlotZnode(
          zooKeeperClient, workersPath, SLOT_PREFIX, WORKER_PREFIX, uniq.toString());
      if (!slotZnode.start(znodeCreationTimeout, TimeUnit.SECONDS)) {
        throw new Exception(
            "Max znode creation wait time: " + znodeCreationTimeout + "s exhausted");
      }

      if (HiveConf.getBoolVar(conf, ConfVars.LLAP_VALIDATE_ACLS)) {
        try {
          checkAndSetAcls();
        } catch (Exception ex) {
          throw new IOException("Error validating or setting ACLs. " + DISABLE_MESSAGE, ex);
        }
      }
      if (zooKeeperClient.checkExists().forPath(znodePath) == null) {
        // No node exists, throw exception
        throw new Exception("Unable to create znode for this LLAP instance on ZooKeeper.");
      }
      LOG.info(
          "Registered node. Created a znode on ZooKeeper for LLAP instance: rpc: {}, shuffle: {},"
              + " webui: {}, mgmt: {}, znodePath: {} ", rpcEndpoint, getShuffleEndpoint(),
          getServicesEndpoint(), getMngEndpoint(), znodePath);
    } catch (Exception e) {
      // Clean up any half-created znodes before rethrowing.
      LOG.error("Unable to create a znode for this server instance", e);
      CloseableUtils.closeQuietly(znode);
      CloseableUtils.closeQuietly(slotZnode);
      throw (e instanceof IOException) ? (IOException) e : new IOException(e);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Created zknode with path: {} service record: {}", znodePath, srv);
    }
    return uniq.toString();
  }

  /**
   * Validates the ACLs on the "workers" directory; if they allow anything beyond read access
   * to anyone other than the expected SASL user, rewrites the ACLs on the whole subtree.
   * No-op when security is disabled.
   */
  private void checkAndSetAcls() throws Exception {
    if (!UserGroupInformation.isSecurityEnabled()) return;
    // We are trying to check ACLs on the "workers" directory, which no one except us should be
    // able to write to. Higher-level directories shouldn't matter - we don't read them.
    String pathToCheck = workersPath;
    List<ACL> acls = zooKeeperClient.getACL().forPath(pathToCheck);
    if (acls == null || acls.isEmpty()) {
      // Can there be no ACLs? There's some access (to get ACLs), so assume it means free for all.
      LOG.warn("No ACLs on " + pathToCheck + "; setting up ACLs. " + DISABLE_MESSAGE);
      setUpAcls(pathToCheck);
      return;
    }
    // This could be brittle.
    assert userNameFromPrincipal != null;
    Id currentUser = new Id("sasl", userNameFromPrincipal);
    for (ACL acl : acls) {
      if ((acl.getPerms() & ~ZooDefs.Perms.READ) == 0 || currentUser.equals(acl.getId())) {
        continue; // Read permission/no permissions, or the expected user.
      }
      LOG.warn("The ACL " + acl + " is unnacceptable for " + pathToCheck
          + "; setting up ACLs. " + DISABLE_MESSAGE);
      setUpAcls(pathToCheck);
      return;
    }
  }
" + DISABLE_MESSAGE); setUpAcls(pathToCheck); return; } } private void setUpAcls(String path) throws Exception { List<ACL> acls = createSecureAcls(); LinkedList<String> paths = new LinkedList<>(); paths.add(path); while (!paths.isEmpty()) { String currentPath = paths.poll(); List<String> children = zooKeeperClient.getChildren().forPath(currentPath); if (children != null) { for (String child : children) { paths.add(currentPath + "/" + child); } } zooKeeperClient.setACL().withACL(acls).forPath(currentPath); } } @Override public void unregister() throws IOException { // Nothing for the zkCreate models } private class DynamicServiceInstance implements ServiceInstance { private final ServiceRecord srv; private final String host; private final int rpcPort; private final int mngPort; private final int shufflePort; private final int outputFormatPort; private final String serviceAddress; private final Resource resource; public DynamicServiceInstance(ServiceRecord srv) throws IOException { this.srv = srv; if (LOG.isTraceEnabled()) { LOG.trace("Working with ServiceRecord: {}", srv); } final Endpoint shuffle = srv.getInternalEndpoint(IPC_SHUFFLE); final Endpoint rpc = srv.getInternalEndpoint(IPC_LLAP); final Endpoint mng = srv.getInternalEndpoint(IPC_MNG); final Endpoint outputFormat = srv.getInternalEndpoint(IPC_OUTPUTFORMAT); final Endpoint services = srv.getExternalEndpoint(IPC_SERVICES); this.host = RegistryTypeUtils.getAddressField(rpc.addresses.get(0), AddressTypes.ADDRESS_HOSTNAME_FIELD); this.rpcPort = Integer.parseInt(RegistryTypeUtils.getAddressField(rpc.addresses.get(0), AddressTypes.ADDRESS_PORT_FIELD)); this.mngPort = Integer.parseInt(RegistryTypeUtils.getAddressField(mng.addresses.get(0), AddressTypes.ADDRESS_PORT_FIELD)); this.shufflePort = Integer.parseInt(RegistryTypeUtils.getAddressField(shuffle.addresses.get(0), AddressTypes.ADDRESS_PORT_FIELD)); this.outputFormatPort = Integer.valueOf(RegistryTypeUtils.getAddressField(outputFormat.addresses.get(0), 
AddressTypes.ADDRESS_PORT_FIELD)); this.serviceAddress = RegistryTypeUtils.getAddressField(services.addresses.get(0), AddressTypes.ADDRESS_URI); String memStr = srv.get(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname, ""); String coreStr = srv.get(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname, ""); try { this.resource = Resource.newInstance(Integer.parseInt(memStr), Integer.parseInt(coreStr)); } catch (NumberFormatException ex) { throw new IOException("Invalid resource configuration for a LLAP node: memory " + memStr + ", vcores " + coreStr); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DynamicServiceInstance other = (DynamicServiceInstance) o; return this.getWorkerIdentity().equals(other.getWorkerIdentity()); } @Override public int hashCode() { return getWorkerIdentity().hashCode(); } @Override public String getWorkerIdentity() { return srv.get(UNIQUE_IDENTIFIER); } @Override public String getHost() { return host; } @Override public int getRpcPort() { return rpcPort; } @Override public int getShufflePort() { return shufflePort; } @Override public String getServicesAddress() { return serviceAddress; } @Override public Map<String, String> getProperties() { return srv.attributes(); } @Override public Resource getResource() { return resource; } @Override public String toString() { return "DynamicServiceInstance [id=" + getWorkerIdentity() + ", host=" + host + ":" + rpcPort + " with resources=" + getResource() + ", shufflePort=" + getShufflePort() + ", servicesAddress=" + getServicesAddress() + ", mgmtPort=" + getManagementPort() + "]"; } @Override public int getManagementPort() { return mngPort; } @Override public int getOutputFormatPort() { return outputFormatPort; } // TODO: This needs a hashCode/equality implementation if used as a key in various structures. // A new ServiceInstance is created each time. 
  /** Adds an instance to both secondary indexes (by znode path and by host). */
  private void addToCache(String path, String host, ServiceInstance instance) {
    instanceCacheLock.lock();
    try {
      putInCache(path, pathToInstanceCache, instance);
      putInCache(host, nodeToInstanceCache, instance);
    } finally {
      instanceCacheLock.unlock();
    }
    LOG.debug("Added path={}, host={} instance={} to cache."
        + " pathToInstanceCache:size={}, nodeToInstanceCache:size={}",
        path, host, instance, pathToInstanceCache.size(), nodeToInstanceCache.size());
  }

  // NOTE(review): this drops ALL instances cached under 'host', not only the one at 'path';
  // presumably there is a single daemon per host - confirm before supporting multiple.
  private void removeFromCache(String path, String host) {
    instanceCacheLock.lock();
    try {
      pathToInstanceCache.remove(path);
      nodeToInstanceCache.remove(host);
    } finally {
      instanceCacheLock.unlock();
    }
    LOG.debug("Removed path={}, host={} from cache."
        + " pathToInstanceCache:size={}, nodeToInstanceCache:size={}",
        path, host, pathToInstanceCache.size(), nodeToInstanceCache.size());
  }

  /** Adds {@code instance} to cache[key], creating the set on first use. Caller holds the lock. */
  private void putInCache(String key, Map<String, Set<ServiceInstance>> cache,
      ServiceInstance instance) {
    Set<ServiceInstance> instanceSet = cache.get(key);
    if (instanceSet == null) {
      instanceSet = Sets.newHashSet();
      cache.put(key, instanceSet);
    }
    instanceSet.add(instance);
  }

  /**
   * Client-facing view over the PathChildrenCache contents and the secondary caches.
   */
  private class DynamicServiceInstanceSet implements ServiceInstanceSet {
    private final PathChildrenCache instancesCache;

    public DynamicServiceInstanceSet(final PathChildrenCache cache) {
      this.instancesCache = cache;
      populateCache();
    }

    // Seeds the path/host caches from the initial snapshot of worker znodes.
    private void populateCache() {
      for (ChildData childData : instancesCache.getCurrentData()) {
        byte[] data = getWorkerData(childData);
        if (data == null) continue;
        try {
          ServiceRecord srv = encoder.fromBytes(childData.getPath(), data);
          ServiceInstance instance = new DynamicServiceInstance(srv);
          addToCache(childData.getPath(), instance.getHost(), instance);
        } catch (IOException e) {
          LOG.error("Unable to decode data for zkpath: {}."
              + " Ignoring from current instances list..", childData.getPath());
        }
      }
    }

    @Override
    public Collection<ServiceInstance> getAll() {
      // Flatten the per-path sets into one de-duplicated set.
      Set<ServiceInstance> instances = new HashSet<>();
      for (Set<ServiceInstance> instanceSet : pathToInstanceCache.values()) {
        instances.addAll(instanceSet);
      }
      return instances;
    }

    // Derives the YARN ApplicationId from the first worker record carrying a container id.
    public ApplicationId getApplicationId() {
      for (ChildData childData : instancesCache.getCurrentData()) {
        byte[] data = getWorkerData(childData);
        if (data == null) continue;
        ServiceRecord sr = null;
        try {
          sr = encoder.fromBytes(childData.getPath(), data);
        } catch (IOException e) {
          LOG.error("Unable to decode data for zkpath: {}."
              + " Ignoring from current instances list..", childData.getPath());
          continue;
        }
        String containerStr = sr.get(HiveConf.ConfVars.LLAP_DAEMON_CONTAINER_ID.varname);
        if (containerStr == null || containerStr.isEmpty()) continue;
        return ContainerId.fromString(containerStr).getApplicationAttemptId().getApplicationId();
      }
      return null;
    }

    // Returns the payload of a worker znode, or null for slot/empty/unknown nodes.
    private byte[] getWorkerData(ChildData childData) {
      if (childData == null) return null;
      byte[] data = childData.getData();
      if (data == null) return null;
      if (!extractNodeName(childData).startsWith(WORKER_PREFIX)) return null;
      return data;
    }

    /**
     * Returns workers ordered by slot number. With consistentIndexes, gaps left by missing
     * workers are filled with InactiveServiceInstance placeholders so indexes stay stable.
     */
    @Override
    public Collection<ServiceInstance> getAllInstancesOrdered(boolean consistentIndexes) {
      Map<String, Long> slotByWorker = new HashMap<String, Long>();
      Set<ServiceInstance> unsorted = Sets.newHashSet();
      for (ChildData childData : instancesCache.getCurrentData()) {
        if (childData == null) continue;
        byte[] data = childData.getData();
        if (data == null) continue;
        String nodeName = extractNodeName(childData);
        if (nodeName.startsWith(WORKER_PREFIX)) {
          Set<ServiceInstance> instances = pathToInstanceCache.get(childData.getPath());
          if (instances != null) {
            unsorted.addAll(instances);
          }
        } else if (nodeName.startsWith(SLOT_PREFIX)) {
          // Slot znode name encodes the slot number; payload is the worker id.
          slotByWorker.put(extractWorkerIdFromSlot(childData),
              Long.parseLong(nodeName.substring(SLOT_PREFIX.length())));
        } else {
          LOG.info("Ignoring unknown node {}", childData.getPath());
        }
      }

      TreeMap<Long, ServiceInstance> sorted = new TreeMap<>();
      long maxSlot = Long.MIN_VALUE;
      for (ServiceInstance worker : unsorted) {
        Long slot = slotByWorker.get(worker.getWorkerIdentity());
        if (slot == null) {
          LOG.info("Unknown slot for {}", worker.getWorkerIdentity());
          continue;
        }
        maxSlot = Math.max(maxSlot, slot);
        sorted.put(slot, worker);
      }

      if (consistentIndexes) {
        // Add dummy instances to all slots where LLAPs are MIA... I can haz insert_iterator?
        TreeMap<Long, ServiceInstance> dummies = new TreeMap<>();
        Iterator<Long> keyIter = sorted.keySet().iterator();
        long expected = 0;
        Long ts = null;
        while (keyIter.hasNext()) {
          Long slot = keyIter.next();
          assert slot >= expected;
          while (slot > expected) {
            if (ts == null) {
              ts = System.nanoTime(); // Inactive nodes restart every call!
            }
            dummies.put(expected, new InactiveServiceInstance("inactive-" + expected + "-" + ts));
            ++expected;
          }
          ++expected;
        }
        sorted.putAll(dummies);
      }
      return sorted.values();
    }

    // Linear scan by worker identity; returns null when not found.
    @Override
    public ServiceInstance getInstance(String name) {
      Collection<ServiceInstance> instances = getAll();
      for (ServiceInstance instance : instances) {
        if (instance.getWorkerIdentity().equals(name)) {
          return instance;
        }
      }
      return null;
    }

    @Override
    public Set<ServiceInstance> getByHost(String host) {
      Set<ServiceInstance> byHost = nodeToInstanceCache.get(host);
      byHost = (byHost == null) ? Sets.<ServiceInstance>newHashSet() : byHost;
      if (LOG.isDebugEnabled()) {
        LOG.debug("Returning " + byHost.size() + " hosts for locality allocation on " + host);
      }
      return byHost;
    }

    @Override
    public int size() {
      // not using the path child cache here as there could be more than 1 path per host
      // (worker and slot znodes)
      return nodeToInstanceCache.size();
    }
  }
  // TODO: make class static? fields leak
  private class InstanceStateChangeListener implements PathChildrenCacheListener {
    private final Logger LOG = LoggerFactory.getLogger(InstanceStateChangeListener.class);

    /**
     * Reacts to worker-znode add/update/remove events: keeps the local caches in sync and
     * notifies registered listeners. Slot-znode events are ignored here.
     */
    @Override
    public void childEvent(final CuratorFramework client, final PathChildrenCacheEvent event)
        throws Exception {
      Preconditions.checkArgument(client != null
          && client.getState() == CuratorFrameworkState.STARTED, "client is not started");

      synchronized (this) {
        ChildData childData = event.getData();
        if (childData == null) return;
        String nodeName = extractNodeName(childData);
        if (!nodeName.startsWith(WORKER_PREFIX)) return; // No need to propagate slot updates.
        LOG.info("{} for zknode {} in llap namespace", event.getType(), childData.getPath());
        // NOTE(review): extractServiceInstance can return null (null/undecodable payload);
        // the dereferences below would then NPE - confirm whether that can occur for these
        // event types.
        ServiceInstance instance = extractServiceInstance(event, childData);
        switch (event.getType()) {
        case CHILD_ADDED:
          addToCache(childData.getPath(), instance.getHost(), instance);
          for (ServiceInstanceStateChangeListener listener : stateChangeListeners) {
            listener.onCreate(instance);
          }
          break;
        case CHILD_UPDATED:
          addToCache(childData.getPath(), instance.getHost(), instance);
          for (ServiceInstanceStateChangeListener listener : stateChangeListeners) {
            listener.onUpdate(instance);
          }
          break;
        case CHILD_REMOVED:
          removeFromCache(childData.getPath(), instance.getHost());
          for (ServiceInstanceStateChangeListener listener : stateChangeListeners) {
            listener.onRemove(instance);
          }
          break;
        default:
          // Ignore all the other events; logged above.
        }
      }
    }
  }

  /** The slot znode payload is the owning worker's unique id. */
  private static String extractWorkerIdFromSlot(ChildData childData) {
    return new String(childData.getData(), SlotZnode.CHARSET);
  }

  /** Returns the last path component of the znode (e.g. "worker-0000001"). */
  private static String extractNodeName(ChildData childData) {
    String nodeName = childData.getPath();
    int ix = nodeName.lastIndexOf("/");
    if (ix >= 0) {
      nodeName = nodeName.substring(ix + 1);
    }
    return nodeName;
  }

  /** Decodes the znode payload into an instance; returns null if it cannot be decoded. */
  private ServiceInstance extractServiceInstance(
      PathChildrenCacheEvent event, ChildData childData) {
    byte[] data = childData.getData();
    if (data == null) return null;
    try {
      ServiceRecord srv = encoder.fromBytes(event.getData().getPath(), data);
      return new DynamicServiceInstance(srv);
    } catch (IOException e) {
      LOG.error("Unable to decode data for zknode: {}."
          + " Dropping notification of type: {}", childData.getPath(), event.getType());
      return null;
    }
  }

  /**
   * Returns the (lazily created) instance set, first ensuring the PathChildrenCache is up,
   * waiting up to clusterReadyTimeoutMs for the cluster directory to appear.
   */
  @Override
  public ServiceInstanceSet getInstances(
      String component, long clusterReadyTimeoutMs) throws IOException {
    checkPathChildrenCache(clusterReadyTimeoutMs);

    // lazily create instances
    if (instances == null) {
      this.instances = new DynamicServiceInstanceSet(instancesCache);
    }
    return instances;
  }

  @Override
  public ApplicationId getApplicationId() throws IOException {
    getInstances("LLAP", 0);
    return instances.getApplicationId();
  }

  @Override
  public synchronized void registerStateChangeListener(
      final ServiceInstanceStateChangeListener listener) throws IOException {
    checkPathChildrenCache(0);
    this.stateChangeListeners.add(listener);
  }

  /**
   * Lazily creates and starts the PathChildrenCache over workersPath. While the cluster
   * directory does not exist yet (which surfaces as InvalidACLException from the implicit
   * mkdir), retries with doubling sleeps within clusterReadyTimeoutMs; 0 means no retry.
   */
  private synchronized void checkPathChildrenCache(long clusterReadyTimeoutMs) throws IOException {
    Preconditions.checkArgument(zooKeeperClient != null
        && zooKeeperClient.getState() == CuratorFrameworkState.STARTED, "client is not started");

    // lazily create PathChildrenCache
    if (instancesCache != null) return;
    ExecutorService tp = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder()
        .setDaemon(true).setNameFormat("StateChangeNotificationHandler").build());
    long startTimeNs = System.nanoTime(), deltaNs = clusterReadyTimeoutMs * 1000000L;
    long sleepTimeMs = Math.min(16, clusterReadyTimeoutMs);
    while (true) {
      PathChildrenCache instancesCache = new PathChildrenCache(zooKeeperClient, workersPath, true);
      instancesCache.getListenable().addListener(new InstanceStateChangeListener(), tp);
      try {
        instancesCache.start(PathChildrenCache.StartMode.BUILD_INITIAL_CACHE);
        this.instancesCache = instancesCache;
        break;
      } catch (InvalidACLException e) {
        // PathChildrenCache tried to mkdir when the znode wasn't there, and failed.
        CloseableUtils.closeQuietly(instancesCache);
        long elapsedNs = System.nanoTime() - startTimeNs;
        if (deltaNs == 0 || deltaNs <= elapsedNs) {
          LOG.error("Unable to start curator PathChildrenCache", e);
          throw new IOException(e);
        }
        LOG.warn("The cluster is not started yet (InvalidACL); will retry");
        try {
          Thread.sleep(Math.min(sleepTimeMs, (deltaNs - elapsedNs) / 1000000L));
        } catch (InterruptedException e1) {
          LOG.error("Interrupted while retrying the PathChildrenCache startup");
          throw new IOException(e1);
        }
        sleepTimeMs = sleepTimeMs << 1; // exponential backoff
      } catch (Exception e) {
        CloseableUtils.closeQuietly(instancesCache);
        LOG.error("Unable to start curator PathChildrenCache", e);
        throw new IOException(e);
      }
    }
  }

  @Override
  public void start() throws IOException {
    if (zooKeeperClient != null) {
      setupZookeeperAuth(this.conf);
      zooKeeperClient.start();
    }
    // Init closeable utils in case register is not called (see HIVE-13322)
    CloseableUtils.class.getName();
  }

  @Override
  public void stop() throws IOException {
    CloseableUtils.closeQuietly(znode);
    CloseableUtils.closeQuietly(slotZnode);
    CloseableUtils.closeQuietly(instancesCache);
    CloseableUtils.closeQuietly(zooKeeperClient);
  }

  /**
   * Installs the JAAS/Kerberos configuration for the ZK client when running as a secure LLAP
   * daemon; otherwise does nothing.
   */
  private void setupZookeeperAuth(final Configuration conf) throws IOException {
    if (UserGroupInformation.isSecurityEnabled() && LlapProxy.isDaemon()) {
      LOG.info("UGI security is enabled. Setting up ZK auth.");

      String llapPrincipal = HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_PRINCIPAL);
      if (llapPrincipal == null || llapPrincipal.isEmpty()) {
        throw new IOException("Llap Kerberos principal is empty");
      }

      String llapKeytab = HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_KEYTAB_FILE);
      if (llapKeytab == null || llapKeytab.isEmpty()) {
        throw new IOException("Llap Kerberos keytab is empty");
      }

      // Install the JAAS Configuration for the runtime
      setZookeeperClientKerberosJaasConfig(llapPrincipal, llapKeytab);
    } else {
      LOG.info("UGI security is not enabled, or non-daemon environment. Skipping setting up ZK auth.");
    }
  }
  /**
   * Dynamically sets up the JAAS configuration that uses kerberos
   *
   * @param principal the LLAP principal (may contain a _HOST placeholder, resolved here)
   * @param keyTabFile path to the keytab file
   * @throws IOException if the principal cannot be resolved
   */
  private void setZookeeperClientKerberosJaasConfig(String principal, String keyTabFile)
      throws IOException {
    // ZooKeeper property name to pick the correct JAAS conf section
    final String SASL_LOGIN_CONTEXT_NAME = "LlapZooKeeperClient";
    System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY, SASL_LOGIN_CONTEXT_NAME);

    principal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
    // Remember the short user name - checkAndSetAcls() compares ACL ids against it.
    userNameFromPrincipal = LlapUtil.getUserNameFromPrincipal(principal);
    JaasConfiguration jaasConf = new JaasConfiguration(SASL_LOGIN_CONTEXT_NAME, principal,
        keyTabFile);

    // Install the Configuration in the runtime.
    javax.security.auth.login.Configuration.setConfiguration(jaasConf);
  }

  /**
   * A JAAS configuration for ZooKeeper clients intended to use for SASL
   * Kerberos. Serves a keytab-based login entry for our context name and
   * delegates every other lookup to the previously installed configuration.
   */
  private static class JaasConfiguration extends javax.security.auth.login.Configuration {
    // Current installed Configuration
    private final javax.security.auth.login.Configuration baseConfig =
        javax.security.auth.login.Configuration.getConfiguration();
    private final String loginContextName;
    private final String principal;
    private final String keyTabFile;

    public JaasConfiguration(String llapLoginContextName, String principal, String keyTabFile) {
      this.loginContextName = llapLoginContextName;
      this.principal = principal;
      this.keyTabFile = keyTabFile;
    }

    @Override
    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
      if (loginContextName.equals(appName)) {
        Map<String, String> krbOptions = new HashMap<String, String>();
        krbOptions.put("doNotPrompt", "true");
        krbOptions.put("storeKey", "true");
        krbOptions.put("useKeyTab", "true");
        krbOptions.put("principal", principal);
        krbOptions.put("keyTab", keyTabFile);
        krbOptions.put("refreshKrb5Config", "true");
        AppConfigurationEntry llapZooKeeperClientEntry = new AppConfigurationEntry(
            KerberosUtil.getKrb5LoginModuleName(),
            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, krbOptions);
        return new AppConfigurationEntry[]{llapZooKeeperClientEntry};
      }
      // Try the base config
      if (baseConfig != null) {
        return baseConfig.getAppConfigurationEntry(appName);
      }
      return null;
    }
  }
}
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.materials.svn; import com.thoughtworks.go.config.PasswordEncrypter; import com.thoughtworks.go.config.SecretParams; import com.thoughtworks.go.config.materials.PasswordAwareMaterial; import com.thoughtworks.go.config.materials.ScmMaterial; import com.thoughtworks.go.config.materials.ScmMaterialConfig; import com.thoughtworks.go.config.materials.SubprocessExecutionContext; import com.thoughtworks.go.domain.MaterialInstance; import com.thoughtworks.go.domain.materials.*; import com.thoughtworks.go.domain.materials.svn.*; import com.thoughtworks.go.security.CryptoException; import com.thoughtworks.go.security.GoCipher; import com.thoughtworks.go.util.GoConstants; import com.thoughtworks.go.util.command.ConsoleOutputStreamConsumer; import com.thoughtworks.go.util.command.UrlArgument; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.PostConstruct; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import static com.thoughtworks.go.util.ExceptionUtils.bomb; import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull; import static com.thoughtworks.go.util.FileUtil.createParentFolderIfNotExist; import static java.lang.String.format; /** * @understands 
configuration for subversion */
public class SvnMaterial extends ScmMaterial implements PasswordEncrypter, PasswordAwareMaterial {
    private static final Logger LOGGER = LoggerFactory.getLogger(SvnMaterial.class);

    private UrlArgument url;
    private String userName;
    // Transient clear-text password; nulled out once it has been encrypted (see setPasswordIfNotBlank).
    private String password;
    // Encrypted form of the password; this is the long-lived representation (see getPassword()).
    private String encryptedPassword;
    private boolean checkExternals;
    // Lazily-built SVN command wrapper; rebuilt when the URL changes (see svn()).
    private transient Subversion svnLazyLoaded;
    private final GoCipher goCipher;
    public static final String TYPE = "SvnMaterial";
    // Secret-param placeholders parsed out of the raw password; substituted in passwordForCommandLine().
    private SecretParams secretParamsForPassword;

    /** Minimal constructor: sets only the cipher (other fields left unset). */
    private SvnMaterial(GoCipher goCipher) {
        super("SvnMaterial");
        this.goCipher = goCipher;
    }

    public SvnMaterial(String url, String userName, String password, boolean checkExternals) {
        this(url, userName, password, checkExternals, new GoCipher());
    }

    /** Builds the material from an existing Subversion client and reuses it as the lazily-loaded instance. */
    public SvnMaterial(Subversion svn) {
        this(svn.getUrl().originalArgument(), svn.getUserName(), svn.getPassword(), svn.isCheckExternals());
        this.svnLazyLoaded = svn;
    }

    public SvnMaterial(String url, String userName, String password, boolean checkExternals, String folder) {
        this(url, userName, password, checkExternals);
        this.folder = folder;
    }

    /** Copies all relevant settings over from the config counterpart of this material. */
    public SvnMaterial(SvnMaterialConfig config) {
        this(config.getUrl(), config.getUserName(), config.getPassword(), config.isCheckExternals(), config.getGoCipher());
        this.autoUpdate = config.getAutoUpdate();
        this.filter = config.rawFilter();
        this.invertFilter = config.getInvertFilter();
        this.folder = config.getFolder();
        this.name = config.getName();
    }

    public SvnMaterial(String url, String userName, String password, boolean checkExternals, GoCipher goCipher) {
        super("SvnMaterial");
        this.goCipher = goCipher;
        bombIfNull(url, "null url");
        setUrl(url);
        this.userName = userName;
        // Goes through the encryption path so only encryptedPassword is retained.
        setPassword(password);
        this.checkExternals = checkExternals;
    }

    /** Converts this material back into its config representation. */
    @Override
    public MaterialConfig config() {
        return new SvnMaterialConfig(url, userName, getPassword(), checkExternals, goCipher, autoUpdate, filter, invertFilter, folder, name);
    }

    // Lazily creates the SVN command; recreated whenever the cached instance points at a different URL.
    private Subversion svn() {
        if (svnLazyLoaded == null || !svnLazyLoaded.getUrl().equals(url)) {
            svnLazyLoaded = new SvnCommand(getFingerprint(), url.forCommandLine(), userName, passwordForCommandLine(), checkExternals);
        }
        return svnLazyLoaded;
    }

    public List<Modification> latestModification(File baseDir, final SubprocessExecutionContext execCtx) {
        // baseDir/execCtx are unused here; the SVN client queries the remote repository directly.
        return svn().latestModification();
    }

    public List<Modification> modificationsSince(File workingDirectory, Revision revision, final SubprocessExecutionContext execCtx) {
        return svn().modificationsSince(new SubversionRevision(revision.getRevision()));
    }

    public MaterialInstance createMaterialInstance() {
        // A fresh random UUID is generated per instance (flyweight identity).
        return new SvnMaterialInstance(url.originalArgument(), userName, UUID.randomUUID().toString(), checkExternals);
    }

    // Criteria use the original (unredacted, unresolved) URL argument.
    @Override
    protected void appendCriteria(Map parameters) {
        parameters.put(ScmMaterialConfig.URL, url.originalArgument());
        parameters.put(ScmMaterialConfig.USERNAME, userName);
        parameters.put("checkExternals", checkExternals);
    }

    // Attributes carry the UrlArgument object itself, unlike appendCriteria above.
    @Override
    protected void appendAttributes(Map parameters) {
        parameters.put(ScmMaterialConfig.URL, url);
        parameters.put(ScmMaterialConfig.USERNAME, userName);
        parameters.put("checkExternals", checkExternals);
    }

    /**
     * Brings the working directory to the requested revision: does a fresh checkout if the
     * directory is missing or points at a different repository, otherwise cleans up and updates.
     */
    public void updateTo(ConsoleOutputStreamConsumer outputStreamConsumer, File baseDir, RevisionContext revisionContext, final SubprocessExecutionContext execCtx) {
        Revision revision = revisionContext.getLatestRevision();
        // On the server the baseDir is used directly; on agents the material's sub-folder is resolved.
        File workingDir = execCtx.isServer() ? baseDir : workingdir(baseDir);
        LOGGER.debug("Updating to revision: {} in workingdirectory {}", revision, workingDir);
        outputStreamConsumer.stdOutput(format("[%s] Start updating %s at revision %s from %s", GoConstants.PRODUCT_NAME, updatingTarget(), revision.getRevision(), url));

        boolean shouldDoFreshCheckout = !workingDir.isDirectory() || isRepositoryChanged(workingDir);
        if (shouldDoFreshCheckout) {
            freshCheckout(outputStreamConsumer, new SubversionRevision(revision), workingDir);
        } else {
            cleanupAndUpdate(outputStreamConsumer, new SubversionRevision(revision), workingDir);
        }
        LOGGER.debug("done with update");
        outputStreamConsumer.stdOutput(format("[%s] Done.\n", GoConstants.PRODUCT_NAME));
    }

    /**
     * Returns true when the working folder does not contain a usable checkout of this material's
     * URL (no .svn dir, URL mismatch, or the URL query failed).
     */
    public boolean isRepositoryChanged(File workingFolder) {
        try {
            File file = new File(workingFolder, ".svn");
            if (workingFolder.isDirectory() && file.exists() && file.isDirectory()) {
                String workingUrl = svn().workingRepositoryUrl(workingFolder);
                return !MaterialUrl.sameUrl(url.toString(), workingUrl);
            } else {
                return true;
            }
        } catch (IOException e) {
            // If the working copy cannot be inspected, treat it as changed and force a fresh checkout.
            return true;
        }
    }

    /** Deletes any existing working folder and checks out the given revision from scratch. */
    public void freshCheckout(ConsoleOutputStreamConsumer outputStreamConsumer, SubversionRevision revision, File workingFolder) {
        if (workingFolder.isDirectory()) {
            FileUtils.deleteQuietly(workingFolder);
        }
        LOGGER.trace("Checking out to revision {} in {}", revision, workingFolder);
        createParentFolderIfNotExist(workingFolder);
        svn().checkoutTo(outputStreamConsumer, workingFolder, revision);
    }

    /** Runs svn cleanup/revert (best-effort) and then updates the working folder to the revision. */
    public void cleanupAndUpdate(ConsoleOutputStreamConsumer outputStreamConsumer, SubversionRevision revision, File workingFolder) {
        try {
            svn().cleanupAndRevert(outputStreamConsumer, workingFolder);
        } catch (Exception e) {
            // Deliberately non-fatal: the subsequent updateTo is still attempted.
            String message = "Failed to do cleanup and revert in " + workingFolder.getAbsolutePath();
            LOGGER.error(message);
            LOGGER.debug(message, e);
        }
        LOGGER.trace("Updating to revision {} on {}", revision, workingFolder);
        svn().updateTo(outputStreamConsumer, workingFolder, revision);
    }

    // NOTE(review): equality covers url, userName and checkExternals (plus super), but not the
    // password fields — presumably deliberate (identity vs. credentials); confirm before relying on it.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }

        SvnMaterial that = (SvnMaterial) o;

        if (checkExternals != that.checkExternals) {
            return false;
        }
        if (url != null ? !url.equals(that.url) : that.url != null) {
            return false;
        }
        if (userName != null ? !userName.equals(that.userName) : that.userName != null) {
            return false;
        }

        return true;
    }

    // Kept consistent with equals(): same field set (url, userName, checkExternals).
    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (url != null ? url.hashCode() : 0);
        result = 31 * result + (userName != null ? userName.hashCode() : 0);
        result = 31 * result + (checkExternals ? 1 : 0);
        return result;
    }

    protected String getLocation() {
        return url == null ? null : url.forDisplay();
    }

    public String getTypeForDisplay() {
        return "Subversion";
    }

    /**
     * Builds the attribute map used for serialization/display. Secure fields (command-line URL
     * and decrypted password) are included only when addSecureFields is true.
     */
    @Override
    public Map<String, Object> getAttributes(boolean addSecureFields) {
        Map<String, Object> materialMap = new HashMap<>();
        materialMap.put("type", "svn");
        Map<String, Object> configurationMap = new HashMap<>();
        if (addSecureFields) {
            configurationMap.put("url", url.forCommandLine());
            configurationMap.put("password", getPassword());
        } else {
            configurationMap.put("url", url.forDisplay());
        }
        configurationMap.put("username", userName);
        configurationMap.put("check-externals", checkExternals);
        materialMap.put("svn-configuration", configurationMap);
        return materialMap;
    }

    public Class getInstanceType() {
        return SvnMaterialInstance.class;
    }

    public ValidationBean checkConnection(final SubprocessExecutionContext execCtx) {
        return svn().checkConnection();
    }

    @Override
    public String getUrl() {
        return url == null ? null : url.originalArgument();
    }

    @Override
    public String urlForCommandLine() {
        return url.forCommandLine();
    }

    @Override
    public UrlArgument getUrlArgument() {
        return url;
    }

    public String getLongDescription() {
        return String.format("URL: %s, Username: %s, CheckExternals: %s", url.forDisplay(), userName, checkExternals);
    }

    public String getUserName() {
        return userName;
    }

    public void setUrl(String url) {
        this.url = new UrlArgument(url);
    }

    public void setPassword(String password) {
        resetPassword(password);
    }

    public boolean isCheckExternals() {
        return checkExternals;
    }

    // Resolves an external's folder relative to this material's folder (if one is configured).
    private String folderFor(String folderForExternal) {
        return getFolder() == null ? folderForExternal : getFolder() + "/" + folderForExternal;
    }

    public void add(ConsoleOutputStreamConsumer outputStreamConsumer, File file) {
        svn().add(outputStreamConsumer, file);
    }

    public void commit(ConsoleOutputStreamConsumer outputStreamConsumer, File workingDir, String message) {
        svn().commit(outputStreamConsumer, workingDir, message);
    }

    // Matches the material name against the regex; the regex is anchored with a leading '/' if absent.
    @Override
    public boolean matches(String name, String regex) {
        if (!regex.startsWith("/")) {
            regex = "/" + regex;
        }
        return name.matches(regex);
    }

    @Override
    public String toString() {
        return "SvnMaterial{" +
                "url=" + url +
                ", userName='" + userName + '\'' +
                ", checkExternals=" + checkExternals +
                '}';
    }

    /**
     * @deprecated used only in tests - we need to disentangle this
     */
    public static SvnMaterial createSvnMaterialWithMock(Subversion svn) {
        return new SvnMaterial(svn);
    }

    // Clears the stored ciphertext when the new password is blank, then (re-)derives password state.
    // setPasswordIfNotBlank is a no-op for blank input, so a blank password leaves everything cleared.
    private void resetPassword(String passwordToSet) {
        if (StringUtils.isBlank(passwordToSet)) {
            encryptedPassword = null;
        }
        setPasswordIfNotBlank(passwordToSet);
    }

    // Encrypts a non-blank password and discards the clear-text copy afterwards.
    private void setPasswordIfNotBlank(String password) {
        this.password = StringUtils.stripToNull(password);
        this.secretParamsForPassword = SecretParams.parse(password);
        // Normalizes any blank ciphertext already on this instance to null before re-encrypting.
        this.encryptedPassword = StringUtils.stripToNull(encryptedPassword);

        if (this.password == null) {
            return;
        }
        try {
            this.encryptedPassword = this.goCipher.encrypt(password);
        } catch (Exception e) {
            bomb("Password encryption failed. Please verify your cipher key.", e);
        }
        // Clear-text password is never retained once ciphertext exists.
        this.password = null;
    }

    public String getEncryptedPassword() {
        return encryptedPassword;
    }

    // Runs after deserialization: normalizes the username, encrypts any clear-text password,
    // and migrates existing ciphertext where needed (see maybeReEncryptForPostConstructWithoutExceptions).
    @PostConstruct
    public void ensureEncrypted() {
        this.userName = StringUtils.stripToNull(this.userName);
        setPasswordIfNotBlank(password);
        if (encryptedPassword != null) {
            setEncryptedPassword(goCipher.maybeReEncryptForPostConstructWithoutExceptions(encryptedPassword));
        }
    }

    private void setEncryptedPassword(String encryptedPassword) {
        this.encryptedPassword = encryptedPassword;
    }

    /** Decrypts and returns the password, or null when none is stored. */
    @Override
    public String getPassword() {
        try {
            return StringUtils.isBlank(encryptedPassword) ? null : this.goCipher.decrypt(encryptedPassword);
        } catch (CryptoException e) {
            throw new RuntimeException("Could not decrypt the password to get the real password", e);
        }
    }

    // Resolves any secret-param placeholders before handing the password to the command line.
    @Override
    public String passwordForCommandLine() {
        return secretParamsForPassword.isEmpty() ? getPassword() : secretParamsForPassword.substitute(getPassword());
    }

    @Override
    public boolean hasSecretParams() {
        return (this.url != null && this.url.hasSecretParams())
                || (this.secretParamsForPassword != null && !this.secretParamsForPassword.isEmpty());
    }

    // NOTE(review): unlike hasSecretParams(), this dereferences url without a null check —
    // presumably url is always set by the time this is called; confirm against callers.
    @Override
    public SecretParams getSecretParams() {
        return SecretParams.union(url.getSecretParams(), secretParamsForPassword);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.rest.compatibility;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.rest.util.DocumentingDispatcherRestEndpoint;
import org.apache.flink.runtime.rest.util.DocumentingRestEndpoint;
import org.apache.flink.runtime.rest.versioning.RestAPIVersion;
import org.apache.flink.util.ConfigurationException;
import org.apache.flink.util.TestLogger;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.util.DefaultIndenter;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/** Stability test and snapshot generator for the REST API. */
@RunWith(Parameterized.class)
public final class RestAPIStabilityTest extends TestLogger {

    // System property that switches the test into snapshot-regeneration mode.
    private static final String REGENERATE_SNAPSHOT_PROPERTY = "generate-rest-snapshot";

    // One snapshot file per stable API version, e.g. rest_api_v1.snapshot.
    private static final String SNAPSHOT_RESOURCE_PATTERN = "rest_api_%s.snapshot";

    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // The test is parameterized over every stable REST API version.
    @Parameterized.Parameters(name = "version = {0}")
    public static Iterable<RestAPIVersion> getStableVersions() {
        return Arrays.stream(RestAPIVersion.values())
                .filter(RestAPIVersion::isStableVersion)
                .collect(Collectors.toList());
    }

    private final RestAPIVersion apiVersion;

    public RestAPIStabilityTest(final RestAPIVersion apiVersion) {
        this.apiVersion = apiVersion;
    }

    /**
     * Compares the current REST API against the committed snapshot for this version. When the
     * regeneration property is set, the snapshot file is (re)written before the comparison.
     */
    @Test
    public void testDispatcherRestAPIStability() throws IOException, ConfigurationException {
        final String versionedSnapshotFileName =
                String.format(SNAPSHOT_RESOURCE_PATTERN, apiVersion.getURLVersionPrefix());

        final RestAPISnapshot currentSnapshot =
                createSnapshot(new DocumentingDispatcherRestEndpoint());
        if (System.getProperty(REGENERATE_SNAPSHOT_PROPERTY) != null) {
            writeSnapshot(versionedSnapshotFileName, currentSnapshot);
        }

        final URL resource =
                RestAPIStabilityTest.class.getClassLoader().getResource(versionedSnapshotFileName);
        if (resource == null) {
            Assert.fail(
                    "Snapshot file does not exist. If you added a new version, re-run this test with"
                            + " -D"
                            + REGENERATE_SNAPSHOT_PROPERTY
                            + " being set.");
        }
        final RestAPISnapshot previousSnapshot =
                OBJECT_MAPPER.readValue(resource, RestAPISnapshot.class);

        assertCompatible(previousSnapshot, currentSnapshot);
    }

    // Writes the snapshot into src/test/resources so it can be committed alongside the change.
    private static void writeSnapshot(
            final String versionedSnapshotFileName, final RestAPISnapshot snapshot)
            throws IOException {
        OBJECT_MAPPER
                .writer(
                        // Unix line endings keep the generated file stable across platforms.
                        new DefaultPrettyPrinter()
                                .withObjectIndenter(new DefaultIndenter().withLinefeed("\n")))
                .writeValue(new File("src/test/resources/" + versionedSnapshotFileName), snapshot);
        System.out.println(
                "REST API snapshot "
                        + versionedSnapshotFileName
                        + " was updated, please remember to commit the snapshot.");
    }

    /**
     * Builds a snapshot of the endpoint: one JSON object per REST call supported in this API
     * version, with one entry per compatibility routine (URL, method, parameters, ...).
     */
    private RestAPISnapshot createSnapshot(final DocumentingRestEndpoint restEndpoint) {
        final List<JsonNode> calls =
                restEndpoint.getSpecs().stream()
                        // we only compare compatibility within the given version
                        .filter(spec -> spec.getSupportedAPIVersions().contains(apiVersion))
                        .map(
                                spec -> {
                                    final ObjectNode json = OBJECT_MAPPER.createObjectNode();

                                    for (final CompatibilityRoutine<?> routine :
                                            CompatibilityRoutines.ROUTINES) {
                                        final Object extract = routine.getContainer(spec);
                                        json.set(
                                                routine.getKey(),
                                                OBJECT_MAPPER.valueToTree(extract));
                                    }

                                    return json;
                                })
                        .collect(Collectors.toList());

        return new RestAPISnapshot(calls);
    }

    /**
     * Asserts that every call in the old snapshot still has a compatible counterpart in the
     * current API, and that every current call is already reflected in the snapshot. Two failure
     * modes per direction: fully incompatible (broken API), or compatible-but-modified (snapshot
     * needs regeneration).
     */
    private static void assertCompatible(final RestAPISnapshot old, final RestAPISnapshot cur) {
        for (final JsonNode oldCall : old.calls) {
            // Score the old call against every current call.
            final List<Tuple2<JsonNode, CompatibilityCheckResult>> compatibilityCheckResults =
                    cur.calls.stream()
                            .map(
                                    curCall ->
                                            Tuple2.of(
                                                    curCall, checkCompatibility(oldCall, curCall)))
                            .collect(Collectors.toList());

            if (compatibilityCheckResults.stream()
                    .allMatch(
                            result ->
                                    result.f1.getBackwardCompatibility()
                                            == Compatibility.INCOMPATIBLE)) {
                // No current call can serve the old one: the API was broken.
                fail(oldCall, compatibilityCheckResults);
            }

            if (compatibilityCheckResults.stream()
                    .noneMatch(
                            result ->
                                    result.f1.getBackwardCompatibility()
                                            == Compatibility.IDENTICAL)) {
                // A compatible counterpart exists but none is identical: snapshot is stale.
                Assert.fail(
                        "The API was modified in a compatible way, but the snapshot was not updated. "
                                + "To update the snapshot, re-run this test with -D"
                                + REGENERATE_SNAPSHOT_PROPERTY
                                + " being set. If you see this message in a CI pipeline, rerun the test locally and commit the generated changes.");
            }
        }

        // check for entirely new calls, for which the snapshot should be updated
        for (final JsonNode curCall : cur.calls) {
            final List<Tuple2<JsonNode, CompatibilityCheckResult>> compatibilityCheckResults =
                    old.calls.stream()
                            .map(
                                    oldCall ->
                                            Tuple2.of(
                                                    curCall, checkCompatibility(oldCall, curCall)))
                            .collect(Collectors.toList());

            if (compatibilityCheckResults.stream()
                    .noneMatch(
                            result ->
                                    result.f1.getBackwardCompatibility()
                                            == Compatibility.IDENTICAL)) {
                Assert.fail(
                        "The API was modified in a compatible way, but the snapshot was not updated. "
                                + "To update the snapshot, re-run this test with -D"
                                + REGENERATE_SNAPSHOT_PROPERTY
                                + " being set.");
            }
        }
    }

    // Builds a detailed failure report, listing rejecting candidates ordered by descending
    // compatibility grade so the closest match appears first.
    private static void fail(
            final JsonNode oldCall,
            final List<Tuple2<JsonNode, CompatibilityCheckResult>> compatibilityCheckResults) {
        final StringBuilder sb = new StringBuilder();
        sb.append("No compatible call could be found for " + oldCall + '.');
        compatibilityCheckResults.stream()
                .sorted(
                        Collections.reverseOrder(
                                Comparator.comparingInt(
                                        tuple -> tuple.f1.getBackwardCompatibilityGrade())))
                .forEachOrdered(
                        result -> {
                            sb.append(System.lineSeparator());
                            sb.append("\tRejected by candidate: " + result.f0 + '.');
                            sb.append(System.lineSeparator());
                            sb.append(
                                    "\tCompatibility grade: "
                                            + result.f1.getBackwardCompatibilityGrade()
                                            + '/'
                                            + CompatibilityRoutines.ROUTINES.size());
                            sb.append(System.lineSeparator());
                            sb.append("\tIncompatibilities: ");
                            for (AssertionError error : result.f1.getBackwardCompatibilityErrors()) {
                                sb.append(System.lineSeparator());
                                sb.append("\t\t" + error.getMessage());
                            }
                        });
        Assert.fail(sb.toString());
    }

    // Merges the per-routine results into a single verdict for the call pair.
    // NOTE(review): Optional.get() assumes ROUTINES is non-empty — confirm in CompatibilityRoutines.
    private static CompatibilityCheckResult checkCompatibility(
            final JsonNode oldCall, final JsonNode newCall) {
        return CompatibilityRoutines.ROUTINES.stream()
                .map(routine -> checkCompatibility(routine, oldCall, newCall))
                .reduce(CompatibilityCheckResult::merge)
                .get();
    }

    // Applies a single routine to the corresponding sub-documents of both calls.
    private static <X> CompatibilityCheckResult checkCompatibility(
            final CompatibilityRoutine<X> routine, final JsonNode oldCall, final JsonNode curCall) {
        final Optional<X> old = readJson(routine, oldCall);
        final Optional<X> cur = readJson(routine, curCall);

        return routine.checkCompatibility(old, cur);
    }

    // Extracts the routine's container from the call JSON; empty when the key is absent.
    private static <X> Optional<X> readJson(
            final CompatibilityRoutine<X> routine, final JsonNode call) {
        final Optional<JsonNode> jsonContainer = Optional.ofNullable(call.get(routine.getKey()));
        return jsonContainer.map(container -> jsonToObject(container, routine.getContainerClass()));
    }

    private static <X> X jsonToObject(final JsonNode jsonContainer, final Class<X> containerClass) {
        try {
            return OBJECT_MAPPER.treeToValue(jsonContainer, containerClass);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    // Serialized form of the snapshot file: a flat list of per-call JSON documents.
    static final class RestAPISnapshot {
        public List<JsonNode> calls;

        private RestAPISnapshot() {
            // required by jackson
        }

        RestAPISnapshot(List<JsonNode> calls) {
            this.calls = calls;
        }
    }
}
package id.bizdir.ui.fragment;

import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.internal.view.ContextThemeWrapper;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;

import com.afollestad.materialdialogs.MaterialDialog;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import id.bizdir.R;
import id.bizdir.modelhelper.NewsBusinessCategoryHelper;
import id.bizdir.modelhelper.NewsBusinessHelper;
import id.bizdir.model.NewsBusiness;
import id.bizdir.model.NewsBusinessCategory;
import id.bizdir.model.ResultObject;
import id.bizdir.model.ResultObjectHelper;
import id.bizdir.service.AllSync;
import id.bizdir.ui.activity.NewsBusinessDetailActivity;
import id.bizdir.ui.adapter.NewsBusinessAdapter;
import id.bizdir.util.Const;
import id.bizdir.util.Helpers;

/**
 * Created by Hendry on 02/05/2015.
 *
 * Fragment that shows a category-filtered list of business news, with pull-to-refresh
 * (network sync via AllSync) and a category picker dialog. The selected category index
 * survives configuration changes via the saved instance state.
 */
public class NewsBusinessFragment extends Fragment {
    // News items currently displayed; backs newsBusinessAdapter.
    private List<NewsBusiness> newsBusinessList;
    private ListView listNewsBusiness;
    // Index into newsBusinessCategoryList / charSequenceArray of the active category.
    private int selectedIndex;
    // Placeholder shown when the selected category has no news.
    private TextView textNoData;
    // Read-only field that opens the category picker on click/focus.
    private EditText editNewsCategory;
    private List<NewsBusinessCategory> newsBusinessCategoryList;
    private SwipeRefreshLayout mSwipeRefreshLayout;
    private NewsBusinessAdapter newsBusinessAdapter;
    // Category titles, cached for the picker dialog.
    private CharSequence[] charSequenceArray;

    // NOTE(review): conventionally newInstance() is static; this is an instance factory —
    // callers must already hold a fragment instance to use it.
    public NewsBusinessFragment newInstance() {
        NewsBusinessFragment fragment = new NewsBusinessFragment();
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Restore the selected category across configuration changes; default to the first one.
        if (savedInstanceState != null) {
            selectedIndex = savedInstanceState.getInt("selectedIndex");
        } else {
            selectedIndex = 0;
        }

        // Inflate with a themed context so the fragment picks up the drawer theme.
        final Context contextThemeWrapper = new ContextThemeWrapper(getActivity(), R.style.MaterialDrawerTheme_BizDir);
        LayoutInflater localInflater = inflater.cloneInContext(contextThemeWrapper);
        View view = localInflater.inflate(R.layout.fragment_news_business, container, false);

        // Advertising banner mounted as the list's header view.
        View headerLayout = localInflater.inflate(R.layout.advertising_view, null);
        ImageView imageButtonAds = (ImageView) headerLayout.findViewById(R.id.imageButtonAds);
        ImageView imageAds = (ImageView) headerLayout.findViewById(R.id.image);

        listNewsBusiness = (ListView) view.findViewById(R.id.listNewsBusiness);
        listNewsBusiness.setOnItemClickListener(setOnItemClickListener);
        listNewsBusiness.addHeaderView(headerLayout);

        textNoData = (TextView) view.findViewById(R.id.textNoData);
        editNewsCategory = (EditText) view.findViewById(R.id.editNewsCategory);
        // Both click and focus open the category picker, so keyboard navigation works too.
        editNewsCategory.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showNewsCategory();
            }
        });
        editNewsCategory.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            public void onFocusChange(View v, boolean hasFocus) {
                if (hasFocus) {
                    showNewsCategory();
                }
            }
        });

        mSwipeRefreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.activity_main_swipe_refresh_layout);
        mSwipeRefreshLayout.setColorSchemeResources(R.color.orange, R.color.green, R.color.blue, R.color.red);
        mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Posted via Handler so the refresh spinner renders before the work starts.
                new Handler().post(new Runnable() {
                    @Override
                    public void run() {
                        refreshData();
                    }
                });
            }
        });

        getData();

        // Local ads load first; remote ads may replace them when available.
        Helpers.getLocalAds(getActivity(), imageAds, imageButtonAds, Const.ADS_ZONE_ID_NEWS_ANTARA);
        Helpers.getRemoteAds(getActivity(), imageAds, imageButtonAds, Const.ADS_ZONE_ID_NEWS_ANTARA);

        return view;
    }

    AdapterView.OnItemClickListener setOnItemClickListener = new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            // position 0 is the ads header view; list items start at 1, hence the -1 offset.
            if (position > 0) {
                NewsBusiness newsBusiness = newsBusinessList.get(position - 1);
                if (newsBusiness != null) {
                    Intent intent = new Intent(getActivity(), NewsBusinessDetailActivity.class);
                    intent.putExtra(Const.OBJECT_INDEX, newsBusiness.getId());
                    startActivity(intent);
                }
            }
        }
    };

    // Loads categories and the news for the selected category from local storage,
    // rebinds the adapter, and refreshes the category label and empty-state view.
    // NOTE(review): assumes the category list is non-empty (get(selectedIndex)) — confirm
    // that categories are always synced before this fragment is shown.
    private void getData() {
        NewsBusinessCategoryHelper newsBusinessCategoryHelper = new NewsBusinessCategoryHelper();
        newsBusinessCategoryList = newsBusinessCategoryHelper.getAll();

        NewsBusinessHelper newsBusinessHelper = new NewsBusinessHelper();
        newsBusinessList = newsBusinessHelper.getAll(newsBusinessCategoryList.get(selectedIndex).getId());

        newsBusinessAdapter = new NewsBusinessAdapter(getActivity(), R.layout.item_menu_news, newsBusinessList);
        listNewsBusiness.setAdapter(newsBusinessAdapter);

        if (newsBusinessList.size() > 0) {
            textNoData.setVisibility(View.GONE);
        } else {
            textNoData.setVisibility(View.VISIBLE);
        }

        // Cache the category titles for the picker dialog.
        List<CharSequence> categoryNews = new ArrayList<>();
        for (NewsBusinessCategory category : newsBusinessCategoryList) {
            categoryNews.add(category.getTitle());
        }
        charSequenceArray = categoryNews.toArray(new CharSequence[categoryNews.size()]);

        String selectedCategory = charSequenceArray[selectedIndex].toString();
        if (!TextUtils.isEmpty(selectedCategory)) {
            editNewsCategory.setText(selectedCategory);
        }
    }

    // Single-choice dialog over the cached category titles; selection reloads the list.
    private void showNewsCategory() {
        new MaterialDialog.Builder(getActivity())
                .title(R.string.action_change_news)
                .items(charSequenceArray)
                .itemsCallbackSingleChoice(selectedIndex, new MaterialDialog.ListCallbackSingleChoice() {
                    @Override
                    public boolean onSelection(MaterialDialog dialog, View view, int index, CharSequence text) {
                        selectedIndex = index;
                        getData();
                        return true;
                    }
                })
                .positiveText(R.string.button_choose)
                .show();
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt("selectedIndex", selectedIndex);
    }

    // Stops the swipe-refresh spinner; posted so it runs after any pending layout pass.
    private void hideProgress() {
        mSwipeRefreshLayout.post(new Runnable() {
            @Override
            public void run() {
                mSwipeRefreshLayout.setRefreshing(false);
            }
        });
    }

    // Pull-to-refresh handler: syncs news from the network off the UI thread, then
    // inserts the result and reloads the list. Falls back to a "no connection" dialog
    // when the device is offline.
    private void refreshData() {
        class GetAllSyncTask extends AsyncTask<String, Void, String> {
            String response = "";

            @Override
            protected String doInBackground(String... param) {
                response = "";
                try {
                    response = AllSync.syncNewsAntara();
                } catch (IOException e) {
                    // On failure the exception message becomes the (unparseable) response.
                    response = e.getMessage();
                    e.printStackTrace();
                }
                return response;
            }

            @Override
            protected void onPostExecute(String resultJson) {
                if (!TextUtils.isEmpty(resultJson)) {
                    try {
                        ResultObject resultObject = ResultObjectHelper.getResult(resultJson);
                        int status = resultObject.getStatus();
                        if (status == 1) {
                            String jsonString = resultObject.getResult();
                            AllSync.insertNewsAntaraSync(jsonString);
                        }
                        getData();
                    } catch (Exception ignore) {
                        // Malformed/unexpected payload: keep the old data, just stop the spinner.
                        hideProgress();
                    }
                }
                hideProgress();
            }
        }

        boolean isavailable = Helpers.isInternetConnected(getActivity());
        if (isavailable) {
            GetAllSyncTask task = new GetAllSyncTask();
            task.execute();
        } else {
            new MaterialDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.MaterialDrawerTheme_Light_DarkToolbar))
                    .title(R.string.no_internet_connection_title)
                    .content(R.string.no_internet_connection)
                    .positiveText(R.string.button_ok)
                    .show();
            hideProgress();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a.auth; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.net.URI; import java.nio.file.AccessDeniedException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.services.securitytoken.model.AWSSecurityTokenServiceException; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.s3a.AWSBadRequestException; import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; import org.apache.hadoop.fs.s3a.MultipartUtils; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3ATestConstants; import org.apache.hadoop.fs.s3a.S3AUtils; import org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider; import org.apache.hadoop.fs.s3a.commit.CommitConstants; import 
org.apache.hadoop.fs.s3a.commit.CommitOperations; import org.apache.hadoop.fs.s3a.commit.files.PendingSet; import org.apache.hadoop.fs.s3a.commit.files.SinglePendingCommit; import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome; import static org.apache.hadoop.fs.contract.ContractTestUtils.touch; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.S3ATestUtils.*; import static org.apache.hadoop.fs.s3a.S3AUtils.*; import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.*; import static org.apache.hadoop.fs.s3a.auth.RoleModel.*; import static org.apache.hadoop.fs.s3a.auth.RolePolicies.*; import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.forbidden; import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; import static org.apache.hadoop.test.LambdaTestUtils.*; /** * Tests use of assumed roles. * Only run if an assumed role is provided. */ @SuppressWarnings({"IOResourceOpenedButNotSafelyClosed", "ThrowableNotThrown"}) public class ITestAssumeRole extends AbstractS3ATestBase { private static final Logger LOG = LoggerFactory.getLogger(ITestAssumeRole.class); private static final Path ROOT = new Path("/"); /** * test URI, built in setup. */ private URI uri; /** * A role FS; if non-null it is closed in teardown. */ private S3AFileSystem roleFS; /** * Error code from STS server. */ protected static final String VALIDATION_ERROR = "ValidationError"; @Override public void setup() throws Exception { super.setup(); assumeRoleTests(); uri = new URI(S3ATestConstants.DEFAULT_CSVTEST_FILE); } @Override public void teardown() throws Exception { S3AUtils.closeAll(LOG, roleFS); super.teardown(); } private void assumeRoleTests() { assume("No ARN for role tests", !getAssumedRoleARN().isEmpty()); } private String getAssumedRoleARN() { return getContract().getConf().getTrimmed(ASSUMED_ROLE_ARN, ""); } /** * Expect a filesystem to fail to instantiate. 
* @param conf config to use * @param clazz class of exception to expect * @param text text in exception * @param <E> type of exception as inferred from clazz * @return the caught exception if it was of the expected type and contents * @throws Exception if the exception was the wrong class */ private <E extends Throwable> E expectFileSystemCreateFailure( Configuration conf, Class<E> clazz, String text) throws Exception { return interceptClosing(clazz, text, () -> new Path(getFileSystem().getUri()).getFileSystem(conf)); } @Test public void testCreateCredentialProvider() throws IOException { describe("Create the credential provider"); String roleARN = getAssumedRoleARN(); Configuration conf = new Configuration(getContract().getConf()); conf.set(AWS_CREDENTIALS_PROVIDER, AssumedRoleCredentialProvider.NAME); conf.set(ASSUMED_ROLE_ARN, roleARN); conf.set(ASSUMED_ROLE_SESSION_NAME, "valid"); conf.set(ASSUMED_ROLE_SESSION_DURATION, "45m"); bindRolePolicy(conf, RESTRICTED_POLICY); try (AssumedRoleCredentialProvider provider = new AssumedRoleCredentialProvider(uri, conf)) { LOG.info("Provider is {}", provider); AWSCredentials credentials = provider.getCredentials(); assertNotNull("Null credentials from " + provider, credentials); } } @Test public void testAssumedInvalidRole() throws Throwable { Configuration conf = new Configuration(); conf.set(ASSUMED_ROLE_ARN, ROLE_ARN_EXAMPLE); interceptClosing(AWSSecurityTokenServiceException.class, "", () -> new AssumedRoleCredentialProvider(uri, conf)); } @Test public void testAssumeRoleFSBadARN() throws Exception { describe("Attemnpt to create the FS with an invalid ARN"); Configuration conf = createAssumedRoleConfig(); conf.set(ASSUMED_ROLE_ARN, ROLE_ARN_EXAMPLE); expectFileSystemCreateFailure(conf, AccessDeniedException.class, ""); } @Test public void testAssumeRoleNoARN() throws Exception { describe("Attemnpt to create the FS with no ARN"); Configuration conf = createAssumedRoleConfig(); conf.unset(ASSUMED_ROLE_ARN); 
expectFileSystemCreateFailure(conf, IOException.class, AssumedRoleCredentialProvider.E_NO_ROLE); } @Test public void testAssumeRoleFSBadPolicy() throws Exception { describe("Attemnpt to create the FS with malformed JSON"); Configuration conf = createAssumedRoleConfig(); // add some malformed JSON conf.set(ASSUMED_ROLE_POLICY, "}"); expectFileSystemCreateFailure(conf, AWSBadRequestException.class, "JSON"); } @Test public void testAssumeRoleFSBadPolicy2() throws Exception { describe("Attempt to create the FS with valid but non-compliant JSON"); Configuration conf = createAssumedRoleConfig(); // add some invalid JSON conf.set(ASSUMED_ROLE_POLICY, "{'json':'but not what AWS wants}"); expectFileSystemCreateFailure(conf, AWSBadRequestException.class, "Syntax errors in policy"); } @Test public void testAssumeRoleCannotAuthAssumedRole() throws Exception { describe("Assert that you can't use assumed roles to auth assumed roles"); Configuration conf = createAssumedRoleConfig(); conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER, AssumedRoleCredentialProvider.NAME); expectFileSystemCreateFailure(conf, IOException.class, AssumedRoleCredentialProvider.E_FORBIDDEN_PROVIDER); } @Test public void testAssumeRoleBadInnerAuth() throws Exception { describe("Try to authenticate with a keypair with spaces"); Configuration conf = createAssumedRoleConfig(); conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER, SimpleAWSCredentialsProvider.NAME); conf.set(ACCESS_KEY, "not valid"); conf.set(SECRET_KEY, "not secret"); expectFileSystemCreateFailure(conf, AWSBadRequestException.class, "not a valid " + "key=value pair (missing equal-sign) in Authorization header"); } @Test public void testAssumeRoleBadInnerAuth2() throws Exception { describe("Try to authenticate with an invalid keypair"); Configuration conf = createAssumedRoleConfig(); conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER, SimpleAWSCredentialsProvider.NAME); conf.set(ACCESS_KEY, "notvalid"); conf.set(SECRET_KEY, "notsecret"); 
expectFileSystemCreateFailure(conf, AccessDeniedException.class, "The security token included in the request is invalid"); } @Test public void testAssumeRoleBadSession() throws Exception { describe("Try to authenticate with an invalid session"); Configuration conf = createAssumedRoleConfig(); conf.set(ASSUMED_ROLE_SESSION_NAME, "Session names cannot hava spaces!"); expectFileSystemCreateFailure(conf, AWSBadRequestException.class, "Member must satisfy regular expression pattern"); } /** * A duration >1h is forbidden client-side in AWS SDK 1.11.271; * with the ability to extend durations deployed in March 2018, * duration checks will need to go server-side, and, presumably, * later SDKs will remove the client side checks. * This code exists to see when this happens. */ @Test public void testAssumeRoleThreeHourSessionDuration() throws Exception { describe("Try to authenticate with a long session duration"); Configuration conf = createAssumedRoleConfig(); // add a duration of three hours conf.setInt(ASSUMED_ROLE_SESSION_DURATION, 3 * 60 * 60); try { new Path(getFileSystem().getUri()).getFileSystem(conf).close(); LOG.info("Successfully created token of a duration >3h"); } catch (IOException ioe) { assertExceptionContains(VALIDATION_ERROR, ioe); } } /** * A duration >1h is forbidden client-side in AWS SDK 1.11.271; * with the ability to extend durations deployed in March 2018. * with the later SDKs, the checks go server-side and * later SDKs will remove the client side checks. * This test doesn't look into the details of the exception * to avoid being too brittle. */ @Test public void testAssumeRoleThirtySixHourSessionDuration() throws Exception { describe("Try to authenticate with a long session duration"); Configuration conf = createAssumedRoleConfig(); conf.setInt(ASSUMED_ROLE_SESSION_DURATION, 36 * 60 * 60); IOException ioe = expectFileSystemCreateFailure(conf, IOException.class, null); } /** * Create the assumed role configuration. 
* @return a config bonded to the ARN of the assumed role
   */
  public Configuration createAssumedRoleConfig() {
    return createAssumedRoleConfig(getAssumedRoleARN());
  }

  /**
   * Create a config for an assumed role; it also disables FS caching.
   * @param roleARN ARN of role
   * @return the new configuration
   */
  private Configuration createAssumedRoleConfig(String roleARN) {
    return newAssumedRoleConfig(getContract().getConf(), roleARN);
  }

  @Test
  public void testAssumeRoleUndefined() throws Throwable {
    describe("Verify that you cannot instantiate the"
        + " AssumedRoleCredentialProvider without a role ARN");
    Configuration conf = new Configuration();
    // an empty ARN must be rejected by the provider constructor
    conf.set(ASSUMED_ROLE_ARN, "");
    interceptClosing(IOException.class,
        AssumedRoleCredentialProvider.E_NO_ROLE,
        () -> new AssumedRoleCredentialProvider(uri, conf));
  }

  @Test
  public void testAssumedIllegalDuration() throws Throwable {
    describe("Expect the constructor to fail if the session is to short");
    Configuration conf = new Configuration();
    // 30 seconds is below the minimum the STS service accepts
    conf.set(ASSUMED_ROLE_SESSION_DURATION, "30s");
    interceptClosing(AWSSecurityTokenServiceException.class,
        "",
        () -> new AssumedRoleCredentialProvider(uri, conf));
  }

  @Test
  public void testAssumeRoleCreateFS() throws IOException {
    describe("Create an FS client with the role and do some basic IO");
    String roleARN = getAssumedRoleARN();
    Configuration conf = createAssumedRoleConfig(roleARN);
    Path path = new Path(getFileSystem().getUri());
    LOG.info("Creating test FS and user {} with assumed role {}",
        conf.get(ACCESS_KEY), roleARN);
    try (FileSystem fs = path.getFileSystem(conf)) {
      fs.getFileStatus(ROOT);
      fs.mkdirs(path("testAssumeRoleFS"));
    }
  }

  @Test
  public void testAssumeRoleRestrictedPolicyFS() throws Exception {
    describe("Restrict the policy for this session; verify that reads fail.");
    // there's some special handling of S3Guard here as operations
    // which only go to DDB don't fail the way S3 would reject them.
    Configuration conf = createAssumedRoleConfig();
    bindRolePolicy(conf, RESTRICTED_POLICY);
    Path path = new Path(getFileSystem().getUri());
    boolean guarded = getFileSystem().hasMetadataStore();
    try (FileSystem fs = path.getFileSystem(conf)) {
      if (!guarded) {
        // when S3Guard is enabled, the restricted policy still
        // permits S3Guard record lookup, so getFileStatus calls
        // will work iff the record is in the database.
        forbidden("getFileStatus", () -> fs.getFileStatus(ROOT));
      }
      forbidden("", () -> fs.listStatus(ROOT));
      forbidden("", () -> fs.mkdirs(path("testAssumeRoleFS")));
    }
  }

  /**
   * Tighten the extra policy on the assumed role call for torrent access,
   * and verify that it blocks all other operations.
   * That is: any non empty policy in the assumeRole API call overrides
   * all of the policies attached to the role before.
   * switches the role instance to only those policies in the request.
   */
  @Test
  public void testAssumeRolePoliciesOverrideRolePerms() throws Throwable {
    describe("extra policies in assumed roles need;"
        + " all required policies stated");
    Configuration conf = createAssumedRoleConfig();
    // only torrent reads (plus bucket location, S3Guard and KMS) are granted
    bindRolePolicy(conf,
        policy(
            statement(false, S3_ALL_BUCKETS, S3_GET_OBJECT_TORRENT),
            ALLOW_S3_GET_BUCKET_LOCATION,
            STATEMENT_S3GUARD_CLIENT,
            STATEMENT_ALLOW_SSE_KMS_RW));
    Path path = path("testAssumeRoleStillIncludesRolePerms");
    roleFS = (S3AFileSystem) path.getFileSystem(conf);
    assertTouchForbidden(roleFS, path);
  }

  /**
   * After blocking all write verbs used by S3A, try to write data (fail)
   * and read data (succeed).
   * For S3Guard: full DDB RW access is retained.
   * SSE-KMS key access is set to decrypt only.
*/ @Test public void testReadOnlyOperations() throws Throwable { describe("Restrict role to read only"); Configuration conf = createAssumedRoleConfig(); bindRolePolicy(conf, policy( statement(false, S3_ALL_BUCKETS, S3_PATH_WRITE_OPERATIONS), STATEMENT_ALL_S3, STATEMENT_S3GUARD_CLIENT, STATEMENT_ALLOW_SSE_KMS_READ)); Path path = methodPath(); roleFS = (S3AFileSystem) path.getFileSystem(conf); // list the root path, expect happy roleFS.listStatus(ROOT); // touch will fail assertTouchForbidden(roleFS, path); // you can delete it, because it's not there and getFileStatus() is allowed roleFS.delete(path, true); //create it with the full FS getFileSystem().mkdirs(path); // and delete will not assertDeleteForbidden(this.roleFS, path); // list multipart uploads. // This is part of the read policy. int counter = 0; MultipartUtils.UploadIterator iterator = roleFS.listUploads("/"); while (iterator.hasNext()) { counter++; iterator.next(); } LOG.info("Found {} outstanding MPUs", counter); } /** * Write successfully to the directory with full R/W access, * fail to write or delete data elsewhere. 
*/
  @SuppressWarnings("StringConcatenationMissingWhitespace")
  @Test
  public void testRestrictedWriteSubdir() throws Throwable {
    describe("Attempt writing to paths where a role only has"
        + " write access to a subdir of the bucket");
    Path restrictedDir = methodPath();
    Path child = new Path(restrictedDir, "child");
    // the full FS
    S3AFileSystem fs = getFileSystem();
    fs.delete(restrictedDir, true);

    Configuration conf = createAssumedRoleConfig();
    // read access everywhere, but full operations only under restrictedDir
    bindRolePolicyStatements(conf,
        STATEMENT_S3GUARD_CLIENT,
        statement(true, S3_ALL_BUCKETS, S3_ROOT_READ_OPERATIONS),
        STATEMENT_ALLOW_SSE_KMS_RW,
        new Statement(Effects.Allow)
            .addActions(S3_ALL_OPERATIONS)
            .addResources(directory(restrictedDir)));
    roleFS = (S3AFileSystem) restrictedDir.getFileSystem(conf);

    roleFS.getFileStatus(ROOT);
    roleFS.mkdirs(restrictedDir);
    assertIsDirectory(restrictedDir);
    // you can create an adjacent child
    touch(roleFS, child);
    assertIsFile(child);
    // child delete rights
    ContractTestUtils.assertDeleted(roleFS, child, true);
    // parent delete rights
    ContractTestUtils.assertDeleted(roleFS, restrictedDir, true);
    // delete will try to create an empty parent directory marker, and may fail
    roleFS.delete(restrictedDir, false);
    // this sibling path has the same prefix as restrictedDir, but is
    // adjacent. This verifies that a restrictedDir* pattern isn't matching
    // siblings, so granting broader rights
    Path sibling = new Path(restrictedDir.toUri() + "sibling");
    touch(fs, sibling);
    assertTouchForbidden(roleFS, sibling);
    assertDeleteForbidden(roleFS, sibling);
  }

  /**
   * Path for the current test case, derived from the method name.
   * @return a path unique to this test case
   * @throws IOException failure to build the path
   */
  public Path methodPath() throws IOException {
    return path(getMethodName());
  }

  @Test
  public void testRestrictedRename() throws Throwable {
    describe("rename with parent paths not writeable");
    executeRestrictedRename(createAssumedRoleConfig());
  }

  @Test
  public void testRestrictedSingleDeleteRename() throws Throwable {
    describe("rename with parent paths not writeable"
        + " and multi-object delete disabled");
    Configuration conf = createAssumedRoleConfig();
    conf.setBoolean(ENABLE_MULTI_DELETE, false);
    executeRestrictedRename(conf);
  }

  /**
   * Execute a sequence of rename operations with access locked down.
   * @param conf FS configuration
   */
  public void executeRestrictedRename(final Configuration conf)
      throws IOException {
    Path basePath = methodPath();
    Path restrictedDir = new Path(basePath, "renameSrc");
    Path destPath = new Path(basePath, "renameDest");
    Path child = new Path(restrictedDir, "child");
    // the full FS
    S3AFileSystem fs = getFileSystem();
    fs.delete(basePath, true);

    // RW operations only on the rename source and destination directories
    bindRolePolicyStatements(conf,
        STATEMENT_S3GUARD_CLIENT,
        STATEMENT_ALLOW_SSE_KMS_RW,
        statement(true, S3_ALL_BUCKETS, S3_ROOT_READ_OPERATIONS),
        new Statement(Effects.Allow)
            .addActions(S3_PATH_RW_OPERATIONS)
            .addResources(directory(restrictedDir))
            .addResources(directory(destPath))
    );
    roleFS = (S3AFileSystem) restrictedDir.getFileSystem(conf);

    roleFS.getFileStatus(ROOT);
    roleFS.mkdirs(restrictedDir);
    // you can create an adjacent child
    touch(roleFS, child);

    roleFS.delete(destPath, true);

    // as dest doesn't exist, this will map child -> dest
    assertRenameOutcome(roleFS, child, destPath, true);

    assertIsFile(destPath);
    assertIsDirectory(restrictedDir);
    Path renamedDestPath = new Path(restrictedDir, destPath.getName());
    assertRenameOutcome(roleFS,
destPath, restrictedDir, true); assertIsFile(renamedDestPath); roleFS.delete(restrictedDir, true); roleFS.delete(destPath, true); } @Test public void testRestrictedRenameReadOnlyData() throws Throwable { describe("rename with source read only, multidelete"); executeRenameReadOnlyData(createAssumedRoleConfig()); } @Test public void testRestrictedRenameReadOnlySingleDelete() throws Throwable { describe("rename with source read only single delete"); Configuration conf = createAssumedRoleConfig(); conf.setBoolean(ENABLE_MULTI_DELETE, false); executeRenameReadOnlyData(conf); } /** * Without simulation of STS failures, and with STS overload likely to * be very rare, there'll be no implicit test coverage of * {@link AssumedRoleCredentialProvider#operationRetried(String, Exception, int, boolean)}. * This test simply invokes the callback for both the first and second retry event. * * If the handler ever adds more than logging, this test ensures that things * don't break. */ @Test public void testAssumedRoleRetryHandler() throws Throwable { try(AssumedRoleCredentialProvider provider = new AssumedRoleCredentialProvider(getFileSystem().getUri(), createAssumedRoleConfig())) { provider.operationRetried("retry", new IOException("failure"), 0, true); provider.operationRetried("retry", new IOException("failure"), 1, true); } } /** * Execute a sequence of rename operations where the source * data is read only to the client calling rename(). * This will cause the inner delete() operations to fail, whose outcomes * are explored. * Multiple files are created (in parallel) for some renames, so exploring * the outcome on bulk delete calls, including verifying that a * MultiObjectDeleteException is translated to an AccessDeniedException. * <ol> * <li>The exception raised is AccessDeniedException, * from single and multi DELETE calls.</li> * <li>It happens after the COPY. 
Not ideal, but, well, we can't pretend
   *   it's a filesystem forever.</li>
   * </ol>
   * @param conf FS configuration
   */
  public void executeRenameReadOnlyData(final Configuration conf)
      throws Exception {
    assume("Does not work with S3Guard", !getFileSystem().hasMetadataStore());
    Path basePath = methodPath();
    Path destDir = new Path(basePath, "renameDest");
    Path readOnlyDir = new Path(basePath, "readonlyDir");
    Path readOnlyFile = new Path(readOnlyDir, "readonlyChild");

    // the full FS
    S3AFileSystem fs = getFileSystem();
    fs.delete(basePath, true);

    // this file is readable by the roleFS, but cannot be deleted
    touch(fs, readOnlyFile);

    // RW rights only under destDir; everywhere else is read-only
    bindRolePolicyStatements(conf,
        STATEMENT_S3GUARD_CLIENT,
        statement(true, S3_ALL_BUCKETS, S3_ROOT_READ_OPERATIONS),
        new Statement(Effects.Allow)
            .addActions(S3_PATH_RW_OPERATIONS)
            .addResources(directory(destDir))
    );
    roleFS = (S3AFileSystem) destDir.getFileSystem(conf);

    roleFS.delete(destDir, true);
    roleFS.mkdirs(destDir);
    // rename will fail in the delete phase
    forbidden(readOnlyFile.toString(),
        () -> roleFS.rename(readOnlyFile, destDir));

    // and the source file is still there
    assertIsFile(readOnlyFile);

    // but so is the copied version, because there's no attempt
    // at rollback, or preflight checking on the delete permissions
    Path renamedFile = new Path(destDir, readOnlyFile.getName());
    assertIsFile(renamedFile);

    ContractTestUtils.assertDeleted(roleFS, renamedFile, true);
    assertFileCount("Empty Dest Dir", roleFS, destDir, 0);

    // create a set of files
    // this is done in parallel as it is 10x faster on a long-haul test run.
    int range = 10;
    touchFiles(fs, readOnlyDir, range);
    // don't forget about that original file!
    final long createdFiles = range + 1;

    // are they all there?
    assertFileCount("files ready to rename", roleFS,
        readOnlyDir, createdFiles);

    // try to rename the directory
    LOG.info("Renaming readonly files {} to {}", readOnlyDir, destDir);
    AccessDeniedException ex = forbidden("",
        () -> roleFS.rename(readOnlyDir, destDir));
    LOG.info("Result of renaming read-only files is AccessDeniedException", ex);
    assertFileCount("files copied to the destination", roleFS,
        destDir, createdFiles);
    assertFileCount("files in the source directory", roleFS,
        readOnlyDir, createdFiles);

    // and finally (so as to avoid the delay of POSTing some more objects),
    // delete that r/o source
    forbidden("", () -> roleFS.delete(readOnlyDir, true));
  }

  /**
   * Parallel-touch a set of files in the destination directory.
   * @param fs filesystem
   * @param destDir destination
   * @param range range 1..range inclusive of files to create.
   */
  public void touchFiles(final S3AFileSystem fs,
      final Path destDir,
      final int range) {
    IntStream.rangeClosed(1, range).parallel().forEach(
        (i) -> eval(() -> touch(fs, new Path(destDir, "file-" + i))));
  }

  @Test
  public void testRestrictedCommitActions() throws Throwable {
    describe("Attempt commit operations against a path with restricted rights");
    Configuration conf = createAssumedRoleConfig();
    conf.setBoolean(CommitConstants.MAGIC_COMMITTER_ENABLED, true);
    final int uploadPartSize = 5 * 1024 * 1024;

    Path basePath = methodPath();
    Path readOnlyDir = new Path(basePath, "readOnlyDir");
    Path writeableDir = new Path(basePath, "writeableDir");
    // the full FS
    S3AFileSystem fs = getFileSystem();
    fs.delete(basePath, true);
    fs.mkdirs(readOnlyDir);

    // commit operations are only permitted under writeableDir
    bindRolePolicyStatements(conf,
        STATEMENT_S3GUARD_CLIENT,
        STATEMENT_ALLOW_SSE_KMS_RW,
        statement(true, S3_ALL_BUCKETS, S3_ROOT_READ_OPERATIONS),
        new Statement(Effects.Allow)
            .addActions(S3_PATH_RW_OPERATIONS)
            .addResources(directory(writeableDir))
    );
    roleFS = (S3AFileSystem) writeableDir.getFileSystem(conf);
    CommitOperations fullOperations = new CommitOperations(fs);
    CommitOperations operations = new
CommitOperations(roleFS); File localSrc = File.createTempFile("source", ""); writeCSVData(localSrc); Path uploadDest = new Path(readOnlyDir, "restricted.csv"); forbidden("initiate MultiPartUpload", () -> { return operations.uploadFileToPendingCommit(localSrc, uploadDest, "", uploadPartSize); }); // delete the file localSrc.delete(); // create a directory there localSrc.mkdirs(); // create some local files and upload them with permissions int range = 2; IntStream.rangeClosed(1, range) .parallel() .forEach((i) -> eval(() -> { String name = "part-000" + i; File src = new File(localSrc, name); Path dest = new Path(readOnlyDir, name); writeCSVData(src); SinglePendingCommit pending = fullOperations.uploadFileToPendingCommit(src, dest, "", uploadPartSize); pending.save(fs, new Path(readOnlyDir, name + CommitConstants.PENDING_SUFFIX), true); assertTrue(src.delete()); })); try { // we expect to be able to list all the files here Pair<PendingSet, List<Pair<LocatedFileStatus, IOException>>> pendingCommits = operations.loadSinglePendingCommits(readOnlyDir, true); // all those commits must fail List<SinglePendingCommit> commits = pendingCommits.getLeft().getCommits(); assertEquals(range, commits.size()); commits.parallelStream().forEach( (c) -> { CommitOperations.MaybeIOE maybeIOE = operations.commit(c, "origin"); Path path = c.destinationPath(); assertCommitAccessDenied(path, maybeIOE); }); // fail of all list and abort of .pending files. LOG.info("abortAllSinglePendingCommits({})", readOnlyDir); assertCommitAccessDenied(readOnlyDir, operations.abortAllSinglePendingCommits(readOnlyDir, true)); // try writing a magic file Path magicDestPath = new Path(readOnlyDir, CommitConstants.MAGIC + "/" + "magic.txt"); forbidden("", () -> { touch(roleFS, magicDestPath); // shouldn't get here; if we do: return the existence of the 0-byte // dest file. return fs.getFileStatus(magicDestPath); }); // a recursive list and abort is blocked. 
forbidden("", () -> operations.abortPendingUploadsUnderPath(readOnlyDir)); } finally { LOG.info("Cleanup"); fullOperations.abortPendingUploadsUnderPath(readOnlyDir); } } /** * Verifies that an operation returning a "MaybeIOE" failed * with an AccessDeniedException in the maybe instance. * @param path path operated on * @param maybeIOE result to inspect */ public void assertCommitAccessDenied(final Path path, final CommitOperations.MaybeIOE maybeIOE) { IOException ex = maybeIOE.getException(); assertNotNull("no IOE in " + maybeIOE + " for " + path, ex); if (!(ex instanceof AccessDeniedException)) { ContractTestUtils.fail("Wrong exception class for commit to " + path, ex); } } /** * Write some CSV data to a local file. * @param localSrc local file * @throws IOException failure */ public void writeCSVData(final File localSrc) throws IOException { try(FileOutputStream fo = new FileOutputStream(localSrc)) { fo.write("1, true".getBytes()); } } @Test public void testPartialDelete() throws Throwable { describe("delete with part of the child tree read only; multidelete"); executePartialDelete(createAssumedRoleConfig()); } @Test public void testPartialDeleteSingleDelete() throws Throwable { describe("delete with part of the child tree read only"); Configuration conf = createAssumedRoleConfig(); conf.setBoolean(ENABLE_MULTI_DELETE, false); executePartialDelete(conf); } /** * Have a directory with full R/W permissions, but then remove * write access underneath, and try to delete it. 
* @param conf FS configuration */ public void executePartialDelete(final Configuration conf) throws Exception { Path destDir = methodPath(); Path readOnlyDir = new Path(destDir, "readonlyDir"); // the full FS S3AFileSystem fs = getFileSystem(); fs.delete(destDir, true); bindRolePolicyStatements(conf, STATEMENT_S3GUARD_CLIENT, STATEMENT_ALLOW_SSE_KMS_RW, statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS), new Statement(Effects.Deny) .addActions(S3_PATH_WRITE_OPERATIONS) .addResources(directory(readOnlyDir)) ); roleFS = (S3AFileSystem) destDir.getFileSystem(conf); int range = 10; touchFiles(fs, readOnlyDir, range); touchFiles(roleFS, destDir, range); forbidden("", () -> roleFS.delete(readOnlyDir, true)); forbidden("", () -> roleFS.delete(destDir, true)); // and although you can't delete under the path, if the file doesn't // exist, the delete call fails fast. Path pathWhichDoesntExist = new Path(readOnlyDir, "no-such-path"); assertFalse("deleting " + pathWhichDoesntExist, roleFS.delete(pathWhichDoesntExist, true)); } /** * Assert that the number of files in a destination matches that expected. * @param text text to use in the message * @param fs filesystem * @param path path to list (recursively) * @param expected expected count * @throws IOException IO problem */ private static void assertFileCount(String text, FileSystem fs, Path path, long expected) throws IOException { List<String> files = new ArrayList<>(); applyLocatedFiles(fs.listFiles(path, true), (status) -> files.add(status.getPath().toString())); long actual = files.size(); if (actual != expected) { String ls = files.stream().collect(Collectors.joining("\n")); fail(text + ": expected " + expected + " files in " + path + " but got " + actual + "\n" + ls); } } }
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package org.apache.hadoop.hbase.shaded.com.google.protobuf; import java.io.IOException; import java.util.Arrays; /** * {@code UnknownFieldSetLite} is used to keep track of fields which were seen * when parsing a protocol message but whose field numbers or types are * unrecognized. 
This most frequently occurs when new fields are added to a
 * message type and then messages containing those fields are read by old
 * software that was compiled before the new types were added.
 *
 * <p>For use by generated code only.
 *
 * @author dweis@google.com (Daniel Weis)
 */
public final class UnknownFieldSetLite {

  // Arbitrarily chosen.
  // TODO(dweis): Tune this number?
  private static final int MIN_CAPACITY = 8;

  private static final UnknownFieldSetLite DEFAULT_INSTANCE =
      new UnknownFieldSetLite(0, new int[0], new Object[0], false /* isMutable */);

  /**
   * Get an empty {@code UnknownFieldSetLite}.
   *
   * <p>For use by generated code only.
   */
  public static UnknownFieldSetLite getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  /**
   * Returns a new mutable instance.
   */
  static UnknownFieldSetLite newInstance() {
    return new UnknownFieldSetLite();
  }

  /**
   * Returns a mutable {@code UnknownFieldSetLite} that is the composite of {@code first} and
   * {@code second}.
   */
  static UnknownFieldSetLite mutableCopyOf(UnknownFieldSetLite first, UnknownFieldSetLite second) {
    int count = first.count + second.count;
    int[] tags = Arrays.copyOf(first.tags, count);
    System.arraycopy(second.tags, 0, tags, first.count, second.count);
    Object[] objects = Arrays.copyOf(first.objects, count);
    System.arraycopy(second.objects, 0, objects, first.count, second.count);
    return new UnknownFieldSetLite(count, tags, objects, true /* isMutable */);
  }

  /**
   * The number of elements in the set.
   */
  private int count;

  /**
   * The tag numbers for the elements in the set.
   */
  private int[] tags;

  /**
   * The boxed values of the elements in the set.
   */
  private Object[] objects;

  /**
   * The lazily computed serialized size of the set.
   */
  private int memoizedSerializedSize = -1;

  /**
   * Indicates that this object is mutable.
   */
  private boolean isMutable;

  /**
   * Constructs a mutable {@code UnknownFieldSetLite}.
   */
  private UnknownFieldSetLite() {
    this(0, new int[MIN_CAPACITY], new Object[MIN_CAPACITY], true /* isMutable */);
  }

  /**
   * Constructs the {@code UnknownFieldSetLite}.
   */
  private UnknownFieldSetLite(int count, int[] tags, Object[] objects, boolean isMutable) {
    this.count = count;
    this.tags = tags;
    this.objects = objects;
    this.isMutable = isMutable;
  }

  /**
   * Marks this object as immutable.
   *
   * <p>Future calls to methods that attempt to modify this object will throw.
   */
  public void makeImmutable() {
    this.isMutable = false;
  }

  /**
   * Throws an {@link UnsupportedOperationException} if immutable.
   */
  void checkMutable() {
    if (!isMutable) {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Serializes the set and writes it to {@code output}.
   *
   * <p>For use by generated code only.
   */
  public void writeTo(CodedOutputStream output) throws IOException {
    for (int i = 0; i < count; i++) {
      int tag = tags[i];
      int fieldNumber = WireFormat.getTagFieldNumber(tag);
      switch (WireFormat.getTagWireType(tag)) {
        case WireFormat.WIRETYPE_VARINT:
          output.writeUInt64(fieldNumber, (Long) objects[i]);
          break;
        case WireFormat.WIRETYPE_FIXED32:
          output.writeFixed32(fieldNumber, (Integer) objects[i]);
          break;
        case WireFormat.WIRETYPE_FIXED64:
          output.writeFixed64(fieldNumber, (Long) objects[i]);
          break;
        case WireFormat.WIRETYPE_LENGTH_DELIMITED:
          output.writeBytes(fieldNumber, (ByteString) objects[i]);
          break;
        case WireFormat.WIRETYPE_START_GROUP:
          output.writeTag(fieldNumber, WireFormat.WIRETYPE_START_GROUP);
          ((UnknownFieldSetLite) objects[i]).writeTo(output);
          output.writeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP);
          break;
        default:
          throw InvalidProtocolBufferException.invalidWireType();
      }
    }
  }

  /**
   * Serializes the set and writes it to {@code output} using {@code MessageSet} wire format.
   *
   * <p>For use by generated code only.
   */
  public void writeAsMessageSetTo(CodedOutputStream output) throws IOException {
    for (int i = 0; i < count; i++) {
      int fieldNumber = WireFormat.getTagFieldNumber(tags[i]);
      output.writeRawMessageSetExtension(fieldNumber, (ByteString) objects[i]);
    }
  }

  /**
   * Get the number of bytes required to encode this field, including field number, using {@code
   * MessageSet} wire format.
   */
  public int getSerializedSizeAsMessageSet() {
    int size = memoizedSerializedSize;
    if (size != -1) {
      return size;
    }
    size = 0;
    for (int i = 0; i < count; i++) {
      int tag = tags[i];
      int fieldNumber = WireFormat.getTagFieldNumber(tag);
      size += CodedOutputStream.computeRawMessageSetExtensionSize(
          fieldNumber, (ByteString) objects[i]);
    }
    memoizedSerializedSize = size;
    return size;
  }

  /**
   * Get the number of bytes required to encode this set.
   *
   * <p>For use by generated code only.
   */
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) {
      return size;
    }
    size = 0;
    for (int i = 0; i < count; i++) {
      int tag = tags[i];
      int fieldNumber = WireFormat.getTagFieldNumber(tag);
      switch (WireFormat.getTagWireType(tag)) {
        case WireFormat.WIRETYPE_VARINT:
          size += CodedOutputStream.computeUInt64Size(fieldNumber, (Long) objects[i]);
          break;
        case WireFormat.WIRETYPE_FIXED32:
          size += CodedOutputStream.computeFixed32Size(fieldNumber, (Integer) objects[i]);
          break;
        case WireFormat.WIRETYPE_FIXED64:
          size += CodedOutputStream.computeFixed64Size(fieldNumber, (Long) objects[i]);
          break;
        case WireFormat.WIRETYPE_LENGTH_DELIMITED:
          size += CodedOutputStream.computeBytesSize(fieldNumber, (ByteString) objects[i]);
          break;
        case WireFormat.WIRETYPE_START_GROUP:
          size += CodedOutputStream.computeTagSize(fieldNumber) * 2
              + ((UnknownFieldSetLite) objects[i]).getSerializedSize();
          break;
        default:
          throw new IllegalStateException(InvalidProtocolBufferException.invalidWireType());
      }
    }
    memoizedSerializedSize = size;
    return size;
  }

  private static boolean equals(int[] tags1, int[] tags2, int count) {
    for (int i = 0; i < count; ++i) {
      if (tags1[i] != tags2[i]) {
        return false;
      }
    }
    return true;
  }

  private static boolean equals(Object[] objects1, Object[] objects2, int count) {
    for (int i = 0; i < count; ++i) {
      if (!objects1[i].equals(objects2[i])) {
        return false;
      }
    }
    return true;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (!(obj instanceof UnknownFieldSetLite)) {
      return false;
    }
    UnknownFieldSetLite other = (UnknownFieldSetLite) obj;
    if (count != other.count
        || !equals(tags, other.tags, count)
        || !equals(objects, other.objects, count)) {
      return false;
    }
    return true;
  }

  // Hashes only the first {@code count} tags, matching equals(int[], int[], int).
  private static int hashCode(int[] tags, int count) {
    int hashCode = 17;
    for (int i = 0; i < count; ++i) {
      hashCode = 31 * hashCode + tags[i];
    }
    return hashCode;
  }

  // Hashes only the first {@code count} objects, matching
  // equals(Object[], Object[], int).
  private static int hashCode(Object[] objects, int count) {
    int hashCode = 17;
    for (int i = 0; i < count; ++i) {
      hashCode = 31 * hashCode + objects[i].hashCode();
    }
    return hashCode;
  }

  @Override
  public int hashCode() {
    // FIX: the previous implementation hashed the ENTIRE backing arrays via
    // Arrays.hashCode/deepHashCode, while equals() compares only the first
    // {@code count} elements. Two sets that compare equal() but have
    // different backing-array capacities (e.g. one grown by ensureCapacity)
    // could therefore produce different hash codes, violating the
    // Object.hashCode() contract. Hash only the live elements instead.
    int hashCode = 17;
    hashCode = 31 * hashCode + count;
    hashCode = 31 * hashCode + hashCode(tags, count);
    hashCode = 31 * hashCode + hashCode(objects, count);
    return hashCode;
  }

  /**
   * Prints a String representation of the unknown field set.
   *
   * <p>For use by generated code only.
   *
   * @param buffer the buffer to write to
   * @param indent the number of spaces the fields should be indented by
   */
  final void printWithIndent(StringBuilder buffer, int indent) {
    for (int i = 0; i < count; i++) {
      int fieldNumber = WireFormat.getTagFieldNumber(tags[i]);
      MessageLiteToString.printField(buffer, indent, String.valueOf(fieldNumber), objects[i]);
    }
  }

  // Package private for unsafe experimental runtime.
  void storeField(int tag, Object value) {
    ensureCapacity();
    tags[count] = tag;
    objects[count] = value;
    count++;
  }

  /**
   * Ensures that our arrays are long enough to store more metadata.
   */
  private void ensureCapacity() {
    if (count == tags.length) {
      int increment = count < (MIN_CAPACITY / 2) ? MIN_CAPACITY : count >> 1;
      int newLength = count + increment;
      tags = Arrays.copyOf(tags, newLength);
      objects = Arrays.copyOf(objects, newLength);
    }
  }

  /**
   * Parse a single field from {@code input} and merge it into this set.
   *
   * <p>For use by generated code only.
   *
   * @param tag The field's tag number, which was already parsed.
   * @return {@code false} if the tag is an end group tag.
   */
  boolean mergeFieldFrom(final int tag, final CodedInputStream input) throws IOException {
    checkMutable();
    final int fieldNumber = WireFormat.getTagFieldNumber(tag);
    switch (WireFormat.getTagWireType(tag)) {
      case WireFormat.WIRETYPE_VARINT:
        storeField(tag, input.readInt64());
        return true;
      case WireFormat.WIRETYPE_FIXED32:
        storeField(tag, input.readFixed32());
        return true;
      case WireFormat.WIRETYPE_FIXED64:
        storeField(tag, input.readFixed64());
        return true;
      case WireFormat.WIRETYPE_LENGTH_DELIMITED:
        storeField(tag, input.readBytes());
        return true;
      case WireFormat.WIRETYPE_START_GROUP:
        final UnknownFieldSetLite subFieldSet = new UnknownFieldSetLite();
        subFieldSet.mergeFrom(input);
        input.checkLastTagWas(
            WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP));
        storeField(tag, subFieldSet);
        return true;
      case WireFormat.WIRETYPE_END_GROUP:
        return false;
      default:
        throw InvalidProtocolBufferException.invalidWireType();
    }
  }

  /**
   * Convenience method for merging a new field containing a single varint
   * value. This is used in particular when an unknown enum value is
   * encountered.
   *
   * <p>For use by generated code only.
   */
  UnknownFieldSetLite mergeVarintField(int fieldNumber, int value) {
    checkMutable();
    if (fieldNumber == 0) {
      throw new IllegalArgumentException("Zero is not a valid field number.");
    }

    storeField(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_VARINT), (long) value);

    return this;
  }

  /**
   * Convenience method for merging a length-delimited field.
   *
   * <p>For use by generated code only.
   */
  UnknownFieldSetLite mergeLengthDelimitedField(final int fieldNumber, final ByteString value) {
    checkMutable();
    if (fieldNumber == 0) {
      throw new IllegalArgumentException("Zero is not a valid field number.");
    }

    storeField(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED), value);

    return this;
  }

  /**
   * Parse an entire message from {@code input} and merge its fields into
   * this set.
   */
  private UnknownFieldSetLite mergeFrom(final CodedInputStream input) throws IOException {
    // Ensures initialization in mergeFieldFrom.
    while (true) {
      final int tag = input.readTag();
      if (tag == 0 || !mergeFieldFrom(tag, input)) {
        break;
      }
    }
    return this;
  }
}
/******************************************************************
 * File:        ProcessOpenID.java
 * Created by:  Dave Reynolds
 * Created on:  15 Jul 2014
 *
 * (c) Copyright 2014, Epimorphics Limited
 *
 *****************************************************************/

package com.epimorphics.appbase.security;

import java.util.List;
import java.util.Map;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.openid4java.consumer.ConsumerManager;
import org.openid4java.consumer.VerificationResult;
import org.openid4java.discovery.DiscoveryInformation;
import org.openid4java.discovery.Identifier;
import org.openid4java.message.AuthRequest;
import org.openid4java.message.AuthSuccess;
import org.openid4java.message.ParameterList;
import org.openid4java.message.ax.AxMessage;
import org.openid4java.message.ax.FetchRequest;
import org.openid4java.message.ax.FetchResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.epimorphics.util.EpiException;

/**
 * Helper that drives an OpenID login/registration round trip: an initial
 * redirect to the provider ({@link #processOpenID}) followed by verification
 * of the provider's response ({@link #verifyResponse}). State is carried
 * between the two calls in the HTTP session under the SA_* attribute names.
 */
public class ProcessOpenID {
    static final Logger log = LoggerFactory.getLogger( ProcessOpenID.class );

    public static final String DEFAULT_PROVIDER = "https://www.google.com/accounts/o8/id";
    public static final String PROVIDER_COOKIE = "appbase-login-provider";

    // Session attribute names
    public static final String SA_OPENID_DISC = "openid_disc";
    public static final String SA_OPENID_PROVIDER = "openid_provider";
    public static final String SA_REGISTRATION = "isRegistration";
    public static final String SA_RETURN_URL = "returnURL";

    // Attribute parameter names
    public static final String AP_EMAIL = "email";
    public static final String AP_FIRST_NAME = "firstName";
    public static final String AP_LAST_NAME = "lastName";
    public static final String AP_FULL_NAME = "fullname";

    // Velocity binding names
    public static final String VN_REGISTRATION_STATUS = "registrationStatus";
    public static final String RS_NEW = "new";
    public static final String RS_ALREADY_REGISTERED = "already";
    public static final String RS_LOGIN = "login";

    // Must be a single shared instance: verifyResponse has to use the same
    // (static) ConsumerManager that issued the original authentication request.
    private static ConsumerManager manager = null;
    static {
        try {
            manager = new ConsumerManager();
        } catch (Exception e) {
            log.error("Failed to initialize openid subsystem", e);
        }
    }

    /**
     * Perform a login or registration via OpenID. Stores the request state in
     * the session and redirects the user agent to the OpenID provider.
     * @throws EpiException if the request is malformed in some way.
     */
    @SuppressWarnings("rawtypes")
    static public void processOpenID(HttpServletRequest request, HttpServletResponse response, OpenidRequest oid) {
        HttpSession session = request.getSession();
        session.setAttribute(SA_REGISTRATION, oid.isRegister());
        session.setAttribute(SA_OPENID_PROVIDER, oid.getProvider());
        session.setAttribute(SA_RETURN_URL, oid.getReturnURL());
        log.info("Authentication request for " + oid.getProvider() + (oid.isRegister() ? " (registration)" : ""));

        try {
            // perform discovery on the user-supplied identifier
            List discoveries = manager.discover(oid.getProvider());

            // attempt to associate with the OpenID provider
            // and retrieve one service endpoint for authentication
            DiscoveryInformation discovered = manager.associate(discoveries);

            // store the discovery information in the user's session
            request.getSession().setAttribute(SA_OPENID_DISC, discovered);

            // obtain a AuthRequest message to be sent to the OpenID provider
            AuthRequest authReq = manager.authenticate(discovered, oid.getResponseURL());

            if (oid.isRegister()) {
                // Attribute Exchange: fetch name attributes; the schema URI
                // each provider honours differs, hence the per-provider cases.
                FetchRequest fetch = FetchRequest.createFetchRequest();
                if (oid.getProvider().contains("google.com")) {
//                    fetch.addAttribute(AP_EMAIL, "http://axschema.org/contact/email", false);
                    fetch.addAttribute(AP_FIRST_NAME, "http://axschema.org/namePerson/first", true);
                    fetch.addAttribute(AP_LAST_NAME, "http://axschema.org/namePerson/last", true);
                } else if (oid.getProvider().contains("yahoo.com")) {
//                    fetch.addAttribute(AP_EMAIL, "http://axschema.org/contact/email", false);
                    fetch.addAttribute(AP_FULL_NAME, "http://axschema.org/namePerson", true);
                } else { //works for myOpenID
//                    fetch.addAttribute(AP_EMAIL, "http://schema.openid.net/contact/email", false);
                    fetch.addAttribute(AP_FULL_NAME, "http://schema.openid.net/namePerson", true);
                }

                // attach the extension to the authentication request
                authReq.addExtension(fetch);
            }

            // For version2 endpoints can do a form-redirect but this is easier,
            // Relies on payload being less ~ 2k, currently ~ 800 bytes
            response.sendRedirect(authReq.getDestinationUrl(true));
        } catch (Exception e) {
            throw new EpiException("Login/registration action failed: " + e);
        }
    }

    /**
     * Process the verification response from the OpenID provider. This should be called
     * from a URL which is given as part of the original OpenIDRequest. If the verification
     * was successful it returns the URL to which the user should be redirected (specified
     * in the original call), otherwise an EpiException is thrown.
     */
    @SuppressWarnings({ "unchecked" })
    static public String verifyResponse(HttpServletRequest request, HttpServletResponse httpresponse, UserStore userstore) {
        try {
            HttpSession session = request.getSession();

            // extract the parameters from the authentication response
            // (which comes in as a HTTP request from the OpenID provider)
            ParameterList response = new ParameterList(request.getParameterMap());

            // retrieve the previously stored discovery information.
            // BUG FIX: processOpenID stores this under SA_OPENID_DISC
            // ("openid_disc"); the old literal "openid-disc" never matched,
            // so discovery info was silently lost between the two calls.
            DiscoveryInformation discovered = (DiscoveryInformation) session.getAttribute(SA_OPENID_DISC);

            // extract the receiving URL from the HTTP request
            StringBuffer receivingURL = request.getRequestURL();
            String queryString = request.getQueryString();
            if (queryString != null && queryString.length() > 0)
                receivingURL.append("?").append(request.getQueryString());

            // verify the response; ConsumerManager needs to be the same
            // (static) instance used to place the authentication request
            VerificationResult verification = manager.verify(
                    receivingURL.toString(), response, discovered);

            // examine the verification result and extract the verified identifier
            Identifier verified = verification.getVerifiedId();
            if (verified != null) {
                AuthSuccess authSuccess = (AuthSuccess) verification.getAuthResponse();
                String name = null;
                if (authSuccess.hasExtension(AxMessage.OPENID_NS_AX)) {
                    FetchResponse fetchResp = (FetchResponse) authSuccess
                            .getExtension(AxMessage.OPENID_NS_AX);
                    Map<String, List<String>> attributes = fetchResp.getAttributes();
                    // Prefer a single full-name attribute; otherwise build it
                    // from first/last (the google.com fetch in processOpenID).
                    if (attributes.containsKey(AP_FULL_NAME)) {
                        name = attributes.get(AP_FULL_NAME).get(0);
                    } else {
                        name = attributes.get(AP_FIRST_NAME).get(0) + " " + attributes.get(AP_LAST_NAME).get(0);
                    }
                }
                log.info(String.format("Verified identity %s = %s", verified.getIdentifier(), name));

                boolean isRegistration = ((Boolean)session.getAttribute(SA_REGISTRATION)).booleanValue();
                String registrationStatus = RS_LOGIN;
                if (isRegistration) {
                    UserInfo userinfo = new UserInfo(verified.getIdentifier(), name);
                    if (userstore.register( userinfo )) {
                        registrationStatus = RS_NEW;
                    } else {
                        registrationStatus = RS_ALREADY_REGISTERED;
                    }
                }

                AppRealmToken token = new AppRealmToken(verified.getIdentifier(), true);
                Subject subject = SecurityUtils.getSubject();
                try {
                    subject.login(token);
                    session.setAttribute(VN_REGISTRATION_STATUS, registrationStatus);
                    // Remember the provider so the next login can default to it.
                    String provider = (String)session.getAttribute(SA_OPENID_PROVIDER);
                    if (provider != null && !provider.isEmpty()) {
                        Cookie cookie = new Cookie(PROVIDER_COOKIE, provider);
                        cookie.setComment("Records the openid provider you last used to log in to an appbase application");
                        cookie.setMaxAge(60 * 60 * 24 * 30);
                        cookie.setHttpOnly(true);
                        cookie.setPath("/");
                        httpresponse.addCookie(cookie);
                    }
                    return session.getAttribute(SA_RETURN_URL).toString();
                } catch (Exception e) {
                    log.error("Authentication failure", e);
                    throw new EpiException("Could not find a registration.");
                }
            }
        } catch (Exception e) {
            throw new EpiException(e);
        }
        throw new EpiException("OpenID login failed");
    }

    /**
     * Packaged set of parameters for an OpenID login or registration request.
     *
     * @author <a href="mailto:dave@epimorphics.com">Dave Reynolds</a>
     */
    static public class OpenidRequest {
        String provider = DEFAULT_PROVIDER;
        String responseURL;
        String returnURL = "/";
        boolean isRegister = false;

        /**
         * Create a login or registration request
         * @param responseURL The URL to use for the OpenID response, this endpoint should invoke a verifyRequest call
         */
        public OpenidRequest(String responseURL) {
            this.responseURL = responseURL;
        }

        /**
         * Set the OpenID provider to use. The default is generic Google login (which is
         * distinct from a person-specific Google profile provider)
         */
        public void setProvider(String provider) {
            // Null means "use the default" rather than failing later in discovery.
            if (provider == null) {
                this.provider = DEFAULT_PROVIDER;
            } else {
                this.provider = provider;
            }
        }

        /**
         * Set the URL to which the user will be redirected after a successful login
         */
        public void setReturnURL(String returnURL) {
            this.returnURL = returnURL;
        }

        /**
         * Set to true if this is a registration rather than a login (default is login)
         */
        public void setRegister(boolean isRegister) {
            this.isRegister = isRegister;
        }

        public String getProvider() {
            return provider;
        }

        public String getResponseURL() {
            return responseURL;
        }

        public String getReturnURL() {
            return returnURL;
        }

        public boolean isRegister() {
            return isRegister;
        }
    }
}
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ /* * Copyright 2010 Mario Zechner (contact@badlogicgames.com), Nathan Sweet (admin@esotericsoftware.com) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. 
*/ package com.badlogic.gdx.tests.utils; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import com.badlogic.gdx.tests.*; import com.badlogic.gdx.tests.bench.TiledMapBench; import com.badlogic.gdx.tests.examples.MoveSpriteExample; import com.badlogic.gdx.tests.extensions.ControllersTest; import com.badlogic.gdx.tests.extensions.FreeTypeDisposeTest; import com.badlogic.gdx.tests.extensions.FreeTypeFontLoaderTest; import com.badlogic.gdx.tests.extensions.FreeTypeIncrementalTest; import com.badlogic.gdx.tests.extensions.FreeTypePackTest; import com.badlogic.gdx.tests.extensions.FreeTypeTest; import com.badlogic.gdx.tests.extensions.InternationalFontsTest; import com.badlogic.gdx.tests.g3d.Animation3DTest; import com.badlogic.gdx.tests.g3d.Basic3DSceneTest; import com.badlogic.gdx.tests.g3d.Basic3DTest; import com.badlogic.gdx.tests.g3d.Benchmark3DTest; import com.badlogic.gdx.tests.g3d.FogTest; import com.badlogic.gdx.tests.g3d.LightsTest; import com.badlogic.gdx.tests.g3d.MaterialTest; import com.badlogic.gdx.tests.g3d.MeshBuilderTest; import com.badlogic.gdx.tests.g3d.ModelTest; import com.badlogic.gdx.tests.g3d.ParticleControllerTest; import com.badlogic.gdx.tests.g3d.ShaderCollectionTest; import com.badlogic.gdx.tests.g3d.ShaderTest; import com.badlogic.gdx.tests.g3d.ShadowMappingTest; import com.badlogic.gdx.tests.g3d.SkeletonTest; import com.badlogic.gdx.tests.g3d.TextureRegion3DTest; import com.badlogic.gdx.tests.gles2.HelloTriangle; import com.badlogic.gdx.tests.gles2.SimpleVertexShader; import com.badlogic.gdx.tests.net.NetAPITest; import com.badlogic.gdx.tests.superkoalio.SuperKoalio; import com.badlogic.gdx.utils.ObjectMap; import com.badlogic.gdx.utils.StreamUtils; /** List of GdxTest classes. To be used by the test launchers. If you write your own test, add it in here! 
* * @author badlogicgames@gmail.com */ public class GdxTests { public static final List<Class<? extends GdxTest>> tests = new ArrayList<Class<? extends GdxTest>>(Arrays.asList( // @off IssueTest.class, AccelerometerTest.class, ActionSequenceTest.class, ActionTest.class, Affine2Test.class, AlphaTest.class, Animation3DTest.class, AnimationTest.class, AnnotationTest.class, AssetManagerTest.class, AtlasIssueTest.class, AudioDeviceTest.class, AudioRecorderTest.class, Basic3DSceneTest.class, Basic3DTest.class, Benchmark3DTest.class, BitmapFontAlignmentTest.class, BitmapFontDistanceFieldTest.class, BitmapFontFlipTest.class, BitmapFontMetricsTest.class, BitmapFontTest.class, BitmapFontAtlasRegionTest.class, BlitTest.class, Box2DTest.class, Box2DTestCollection.class, Bresenham2Test.class, BufferUtilsTest.class, BulletTestCollection.class, CpuSpriteBatchTest.class, ContainerTest.class, CullTest.class, DelaunayTriangulatorTest.class, DeltaTimeTest.class, DirtyRenderingTest.class, DragAndDropTest.class, ETC1Test.class, // EarClippingTriangulatorTest.class, EdgeDetectionTest.class, ExitTest.class, ExternalMusicTest.class, FilesTest.class, FilterPerformanceTest.class, FloatTextureTest.class, FogTest.class, FrameBufferTest.class, FramebufferToTextureTest.class, FullscreenTest.class, ControllersTest.class, Gdx2DTest.class, GestureDetectorTest.class, GroupCullingTest.class, GroupFadeTest.class, GroupTest.class, HelloTriangle.class, HexagonalTiledMapTest.class, I18NMessageTest.class, I18NSimpleMessageTest.class, ImageScaleTest.class, ImageTest.class, ImmediateModeRendererTest.class, IndexBufferObjectShaderTest.class, InputTest.class, IntegerBitmapFontTest.class, InterpolationTest.class, InverseKinematicsTest.class, IsometricTileTest.class, KinematicBodyTest.class, KTXTest.class, LabelScaleTest.class, LabelTest.class, LifeCycleTest.class, LightsTest.class, MaterialTest.class, MatrixJNITest.class, MeshBuilderTest.class, MeshShaderTest.class, MipMapTest.class, ModelTest.class, 
MoveSpriteExample.class, MultitouchTest.class, MusicTest.class, NetAPITest.class, NinePatchTest.class, OnscreenKeyboardTest.class, PathTest.class, ParallaxTest.class, ParticleControllerTest.class, ParticleEmitterTest.class, ParticleEmittersTest.class, PixelsPerInchTest.class, PixmapBlendingTest.class, PixmapPackerTest.class, PixmapTest.class, PolygonRegionTest.class, PolygonSpriteTest.class, PreferencesTest.class, ProjectTest.class, ProjectiveTextureTest.class, ReflectionTest.class, RotationTest.class, RunnablePostTest.class, StageDebugTest.class, Scene2dTest.class, ScrollPane2Test.class, ScrollPaneScrollBarsTest.class, ScrollPaneTest.class, SelectTest.class, ShaderCollectionTest.class, ShaderMultitextureTest.class, ShaderTest.class, ShadowMappingTest.class, ShapeRendererTest.class, SimpleAnimationTest.class, SimpleDecalTest.class, SimpleStageCullingTest.class, SimpleVertexShader.class, SkeletonTest.class, SoftKeyboardTest.class, SortedSpriteTest.class, SoundTest.class, SpriteBatchRotationTest.class, SpriteBatchShaderTest.class, SpriteBatchTest.class, SpriteCacheOffsetTest.class, SpriteCacheTest.class, StagePerformanceTest.class, StageTest.class, SuperKoalio.class, TableLayoutTest.class, TableTest.class, TextAreaTest.class, TextButtonTest.class, TextInputDialogTest.class, TextureAtlasTest.class, TextureDataTest.class, TextureDownloadTest.class, TextureFormatTest.class, TextureRegion3DTest.class, TideMapAssetManagerTest.class, TideMapDirectLoaderTest.class, TileTest.class, TiledMapAssetManagerTest.class, TiledMapAtlasAssetManagerTest.class, TiledMapDirectLoaderTest.class, TiledMapModifiedExternalTilesetTest.class, TiledMapObjectLoadingTest.class, TiledMapBench.class, TimerTest.class, TimeUtilsTest.class, TouchpadTest.class, TreeTest.class, UISimpleTest.class, UITest.class, Vector2dTest.class, VertexBufferObjectShaderTest.class, VibratorTest.class, ViewportTest1.class, ViewportTest2.class, ViewportTest3.class, YDownTest.class, FreeTypeFontLoaderTest.class, 
FreeTypeDisposeTest.class, FreeTypeIncrementalTest.class, FreeTypePackTest.class, FreeTypeTest.class, InternationalFontsTest.class, PngTest.class, JsonTest.class // @on // SoundTouchTest.class, Mpg123Test.class, WavTest.class, FreeTypeTest.class, // VorbisTest.class )); static final ObjectMap<String, String> obfuscatedToOriginal = new ObjectMap(); static final ObjectMap<String, String> originalToObfuscated = new ObjectMap(); static { InputStream mappingInput = GdxTests.class.getResourceAsStream("/mapping.txt"); if (mappingInput != null) { BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader(mappingInput), 512); while (true) { String line = reader.readLine(); if (line == null) break; if (line.startsWith(" ")) continue; String[] split = line.replace(":", "").split(" -> "); String original = split[0]; if (original.indexOf('.') != -1) original = original.substring(original.lastIndexOf('.') + 1); originalToObfuscated.put(original, split[1]); obfuscatedToOriginal.put(split[1], original); } reader.close(); } catch (Exception ex) { System.out.println("GdxTests: Error reading mapping file: mapping.txt"); ex.printStackTrace(); } finally { StreamUtils.closeQuietly(reader); } } } public static List<String> getNames () { List<String> names = new ArrayList<String>(tests.size()); for (Class clazz : tests) names.add(obfuscatedToOriginal.get(clazz.getSimpleName(), clazz.getSimpleName())); Collections.sort(names); return names; } private static Class<? extends GdxTest> forName (String name) { name = originalToObfuscated.get(name, name); for (Class clazz : tests) if (clazz.getSimpleName().equals(name)) return clazz; return null; } public static GdxTest newTest (String testName) { testName = originalToObfuscated.get(testName, testName); try { return forName(testName).newInstance(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } return null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.composite; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.plugins.tree.RootProvider; import org.apache.jackrabbit.oak.plugins.tree.TreeLocation; import org.apache.jackrabbit.oak.plugins.tree.TreeProvider; import org.apache.jackrabbit.oak.plugins.tree.TreeType; import org.apache.jackrabbit.oak.plugins.tree.TreeTypeProvider; import org.apache.jackrabbit.oak.security.authorization.composite.CompositeAuthorizationConfiguration.CompositionType; import org.apache.jackrabbit.oak.security.authorization.permission.PermissionUtil; import org.apache.jackrabbit.oak.spi.security.Context; import org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import org.apache.jackrabbit.oak.spi.security.authorization.permission.RepositoryPermission; import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission; 
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.Set;
import java.util.function.Function;

/**
 * Permission provider implementation that aggregates a list of different
 * provider implementations. Note, that the aggregated provider implementations
 * *must* implement the
 * {@link org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider}
 * interface.
 */
abstract class CompositePermissionProvider implements AggregatedPermissionProvider {

    // The (mutable) root this provider was built for; only used to recreate
    // the read-only snapshot in refresh().
    private final Root root;
    // The aggregated providers, in the order supplied to the constructor.
    private final AggregatedPermissionProvider[] pps;
    private final Context ctx;
    private final RootProvider rootProvider;
    private final TreeProvider treeProvider;

    // Created once by the subclass hook createRepositoryPermission().
    private final RepositoryPermission repositoryPermission;

    // Read-only view of 'root'; both fields are re-created on refresh().
    private Root immutableRoot;
    private PrivilegeBitsProvider privilegeBitsProvider;
    private final TreeTypeProvider typeProvider;

    /**
     * Creates the composite over the given aggregated providers. Subclasses
     * (AND/OR composition) are instantiated via {@link #create}.
     */
    CompositePermissionProvider(@NotNull Root root, @NotNull List<AggregatedPermissionProvider> pps,
                                @NotNull Context acContext, @NotNull RootProvider rootProvider,
                                @NotNull TreeProvider treeProvider) {
        this.root = root;
        this.pps = pps.toArray(new AggregatedPermissionProvider[0]);
        this.ctx = acContext;
        this.rootProvider = rootProvider;
        this.treeProvider = treeProvider;

        repositoryPermission = createRepositoryPermission();
        immutableRoot = rootProvider.createReadOnlyRoot(root);
        privilegeBitsProvider = new PrivilegeBitsProvider(immutableRoot);
        typeProvider = new TreeTypeProvider(ctx);
    }

    /**
     * Factory: picks the AND- or OR-composition subclass based on the
     * requested composition type.
     */
    static CompositePermissionProvider create(@NotNull Root root, @NotNull List<AggregatedPermissionProvider> pps,
                                              @NotNull Context acContext, @NotNull CompositionType compositionType,
                                              @NotNull RootProvider rootProvider, @NotNull TreeProvider treeProvider) {
        if (compositionType == CompositionType.AND) {
            return new CompositePermissionProviderAnd(root, pps, acContext, rootProvider, treeProvider);
        } else {
            return new CompositePermissionProviderOr(root, pps, acContext, rootProvider, treeProvider);
        }
    }

    // Composition type implemented by the concrete subclass (AND vs OR).
    @NotNull
    abstract CompositionType getCompositeType();

    // Subclass hook combining the aggregated repository-level permissions.
    @NotNull
    abstract RepositoryPermission createRepositoryPermission();

    // Subclass hook evaluating privileges across the aggregated providers;
    // implementations accumulate the bits they covered into 'coveredPrivileges'.
    abstract boolean hasPrivileges(@Nullable Tree immutableTree, @NotNull PrivilegeBitsProvider bitsProvider,
                                   @NotNull PrivilegeBits privilegeBits, @NotNull PrivilegeBits coveredPrivileges);

    @NotNull
    Root getImmutableRoot() {
        return immutableRoot;
    }

    @NotNull
    PrivilegeBitsProvider getBitsProvider() {
        return privilegeBitsProvider;
    }

    @NotNull
    AggregatedPermissionProvider[] getPermissionProviders() {
        return pps;
    }

    //-------------------------------------------------< PermissionProvider >---
    @Override
    public void refresh() {
        // Re-snapshot the root and propagate the refresh to every aggregate.
        immutableRoot = rootProvider.createReadOnlyRoot(root);
        privilegeBitsProvider = new PrivilegeBitsProvider(immutableRoot);
        for (PermissionProvider pp : pps) {
            pp.refresh();
        }
    }

    @Override
    public boolean hasPrivileges(@Nullable Tree tree, @NotNull String... privilegeNames) {
        Tree immutableTree = PermissionUtil.getReadOnlyTreeOrNull(tree, getImmutableRoot());
        PrivilegeBits privilegeBits = privilegeBitsProvider.getBits(privilegeNames);
        // No privileges requested -> trivially granted.
        if (privilegeBits.isEmpty()) {
            return true;
        }

        PrivilegeBits coveredPrivs = PrivilegeBits.getInstance();
        boolean hasPrivileges = hasPrivileges(immutableTree, privilegeBitsProvider, privilegeBits, coveredPrivs);
        // Granted only if the composition succeeded AND every requested bit
        // was actually covered by at least one aggregated provider.
        return hasPrivileges && coveredPrivs.includes(privilegeBits);
    }

    @NotNull
    @Override
    public RepositoryPermission getRepositoryPermission() {
        return repositoryPermission;
    }

    @NotNull
    @Override
    public TreePermission getTreePermission(@NotNull Tree tree, @NotNull TreePermission parentPermission) {
        Tree readOnlyTree = PermissionUtil.getReadOnlyTree(tree, immutableRoot);
        if (tree.isRoot()) {
            // Start a fresh composite evaluation at the root.
            return CompositeTreePermission.create(readOnlyTree, treeProvider, typeProvider, pps, getCompositeType());
        } else if (parentPermission instanceof CompositeTreePermission) {
            // Continue an existing composite chain.
            return CompositeTreePermission.create(readOnlyTree, treeProvider, ((CompositeTreePermission) parentPermission));
        } else {
            // Parent fell back to a single provider; delegate to it.
            return parentPermission.getChildPermission(readOnlyTree.getName(), treeProvider.asNodeState(readOnlyTree));
        }
    }

    @Override
    public boolean isGranted(@NotNull String oakPath, @NotNull String jcrActions) {
        TreeLocation location = TreeLocation.create(immutableRoot, oakPath);
        boolean isAcContent = ctx.definesLocation(location);

        long permissions = Permissions.getPermissions(jcrActions, location, isAcContent);
        // isGranted(TreeLocation, long) is implemented by the AND/OR subclass.
        return isGranted(location, permissions);
    }

    //---------------------------------------< AggregatedPermissionProvider >---
    @NotNull
    @Override
    public PrivilegeBits supportedPrivileges(@Nullable Tree tree, @Nullable PrivilegeBits privilegeBits) {
        // Union of the privilege bits supported by any aggregate.
        PrivilegeBits result = PrivilegeBits.getInstance();
        for (AggregatedPermissionProvider aggregatedPermissionProvider : pps) {
            PrivilegeBits supported = aggregatedPermissionProvider.supportedPrivileges(tree, privilegeBits);
            result.add(supported);
        }
        return result;
    }

    @Override
    public long supportedPermissions(@Nullable Tree tree, @Nullable PropertyState property, long permissions) {
        return supportedPermissions(aggregatedPermissionProvider -> aggregatedPermissionProvider.supportedPermissions(tree, property, permissions));
    }

    @Override
    public long supportedPermissions(@NotNull TreeLocation location, long permissions) {
        return supportedPermissions(aggregatedPermissionProvider -> aggregatedPermissionProvider.supportedPermissions(location, permissions));
    }

    @Override
    public long supportedPermissions(@NotNull TreePermission treePermission, @Nullable PropertyState property, long permissions) {
        return supportedPermissions(aggregatedPermissionProvider -> aggregatedPermissionProvider.supportedPermissions(treePermission, property, permissions));
    }

    // OR-combines the permission bits reported as supported by each aggregate.
    private long supportedPermissions(Function<AggregatedPermissionProvider, Long> supported) {
        long coveredPermissions = Permissions.NO_PERMISSION;
        for (AggregatedPermissionProvider aggregatedPermissionProvider : pps) {
            long supportedPermissions = supported.apply(aggregatedPermissionProvider);
            coveredPermissions |= supportedPermissions;
        }
        return coveredPermissions;
    }

    @NotNull
    @Override
    public TreePermission getTreePermission(@NotNull Tree tree, @NotNull TreeType type, @NotNull TreePermission parentPermission) {
        // Same dispatch as getTreePermission(Tree, TreePermission) but with an
        // explicit tree type carried through the composite chain.
        Tree immutableTree = PermissionUtil.getReadOnlyTree(tree, immutableRoot);
        if (tree.isRoot()) {
            return CompositeTreePermission.create(immutableTree, treeProvider, typeProvider, pps, getCompositeType());
        } else if (parentPermission instanceof CompositeTreePermission) {
            return CompositeTreePermission.create(immutableTree, treeProvider, ((CompositeTreePermission) parentPermission), type);
        } else {
            return parentPermission.getChildPermission(immutableTree.getName(), treeProvider.asNodeState(immutableTree));
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.component.dsl; import javax.annotation.Generated; import org.apache.camel.Component; import org.apache.camel.builder.component.AbstractComponentBuilder; import org.apache.camel.builder.component.ComponentBuilder; import org.apache.camel.component.aws2.sts.STS2Component; /** * Manage AWS STS cluster instances using AWS SDK version 2.x. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.ComponentDslMojo") public interface Aws2StsComponentBuilderFactory { /** * AWS Security Token Service (STS) (camel-aws2-sts) * Manage AWS STS cluster instances using AWS SDK version 2.x. * * Category: cloud,management * Since: 3.5 * Maven coordinates: org.apache.camel:camel-aws2-sts * * @return the dsl builder */ static Aws2StsComponentBuilder aws2Sts() { return new Aws2StsComponentBuilderImpl(); } /** * Builder for the AWS Security Token Service (STS) component. */ interface Aws2StsComponentBuilder extends ComponentBuilder<STS2Component> { /** * Component configuration. * * The option is a: * &lt;code&gt;org.apache.camel.component.aws2.sts.STS2Configuration&lt;/code&gt; type. 
* * Group: producer * * @param configuration the value to set * @return the dsl builder */ default Aws2StsComponentBuilder configuration( org.apache.camel.component.aws2.sts.STS2Configuration configuration) { doSetProperty("configuration", configuration); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default Aws2StsComponentBuilder lazyStartProducer( boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * The operation to perform. * * The option is a: * &lt;code&gt;org.apache.camel.component.aws2.sts.STS2Operations&lt;/code&gt; type. * * Default: assumeRole * Group: producer * * @param operation the value to set * @return the dsl builder */ default Aws2StsComponentBuilder operation( org.apache.camel.component.aws2.sts.STS2Operations operation) { doSetProperty("operation", operation); return this; } /** * Set the need for overidding the endpoint. This option needs to be * used in combination with uriEndpointOverride option. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. 
         *
         * Default: false
         * Group: producer
         *
         * @param overrideEndpoint the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder overrideEndpoint(
                boolean overrideEndpoint) {
            doSetProperty("overrideEndpoint", overrideEndpoint);
            return this;
        }

        /**
         * If we want to use a POJO request as body or not.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param pojoRequest the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder pojoRequest(boolean pojoRequest) {
            doSetProperty("pojoRequest", pojoRequest);
            return this;
        }

        /**
         * To define a proxy host when instantiating the STS client.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param proxyHost the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder proxyHost(java.lang.String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }

        /**
         * To define a proxy port when instantiating the STS client.
         *
         * The option is a: &lt;code&gt;java.lang.Integer&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder proxyPort(java.lang.Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }

        /**
         * To define a proxy protocol when instantiating the STS client.
         *
         * The option is a:
         * &lt;code&gt;software.amazon.awssdk.core.Protocol&lt;/code&gt; type.
         *
         * Default: HTTPS
         * Group: producer
         *
         * @param proxyProtocol the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder proxyProtocol(
                software.amazon.awssdk.core.Protocol proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }

        /**
         * The region in which STS client needs to work. When using this
         * parameter, the configuration will expect the lowercase name of the
         * region (for example ap-east-1) You'll need to use the name
         * Region.EU_WEST_1.id().
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Default: aws-global
         * Group: producer
         *
         * @param region the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder region(java.lang.String region) {
            doSetProperty("region", region);
            return this;
        }

        /**
         * To use a existing configured AWS STS as client.
         *
         * The option is a:
         * &lt;code&gt;software.amazon.awssdk.services.sts.StsClient&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param stsClient the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder stsClient(
                software.amazon.awssdk.services.sts.StsClient stsClient) {
            doSetProperty("stsClient", stsClient);
            return this;
        }

        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param trustAllCertificates the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder trustAllCertificates(
                boolean trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }

        /**
         * Set the overriding uri endpoint. This option needs to be used in
         * combination with overrideEndpoint option.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param uriEndpointOverride the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder uriEndpointOverride(
                java.lang.String uriEndpointOverride) {
            doSetProperty("uriEndpointOverride", uriEndpointOverride);
            return this;
        }

        /**
         * Set whether the STS client should expect to load credentials through
         * a default credentials provider or to expect static credentials to be
         * passed in.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param useDefaultCredentialsProvider the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder useDefaultCredentialsProvider(
                boolean useDefaultCredentialsProvider) {
            doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider);
            return this;
        }

        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder autowiredEnabled(
                boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }

        /**
         * Amazon AWS Access Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param accessKey the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder accessKey(java.lang.String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }

        /**
         * Amazon AWS Secret Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param secretKey the value to set
         * @return the dsl builder
         */
        default Aws2StsComponentBuilder secretKey(java.lang.String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }

    // NOTE(review): this looks like auto-generated Camel component-dsl builder
    // code — prefer regenerating over hand-editing. Each builder option is
    // routed either to the component itself or to its lazily-created
    // STS2Configuration via setPropertyOnComponent() below.
    class Aws2StsComponentBuilderImpl
            extends AbstractComponentBuilder<STS2Component>
            implements Aws2StsComponentBuilder {

        /** Creates the concrete component instance this builder configures. */
        @Override
        protected STS2Component buildConcreteComponent() {
            return new STS2Component();
        }

        /**
         * Returns the component's configuration, creating and attaching an
         * empty one on first use so option setters always have a target.
         */
        private org.apache.camel.component.aws2.sts.STS2Configuration getOrCreateConfiguration(
                org.apache.camel.component.aws2.sts.STS2Component component) {
            if (component.getConfiguration() == null) {
                component.setConfiguration(new org.apache.camel.component.aws2.sts.STS2Configuration());
            }
            return component.getConfiguration();
        }

        /**
         * Applies a named option to the component (or its configuration).
         *
         * @return true if the option name was recognized and applied
         */
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "configuration": ((STS2Component) component).setConfiguration((org.apache.camel.component.aws2.sts.STS2Configuration) value); return true;
            case "lazyStartProducer": ((STS2Component) component).setLazyStartProducer((boolean) value); return true;
            case "operation": getOrCreateConfiguration((STS2Component) component).setOperation((org.apache.camel.component.aws2.sts.STS2Operations) value); return true;
            case "overrideEndpoint": getOrCreateConfiguration((STS2Component) component).setOverrideEndpoint((boolean) value); return true;
            case "pojoRequest": getOrCreateConfiguration((STS2Component) component).setPojoRequest((boolean) value); return true;
            case "proxyHost": getOrCreateConfiguration((STS2Component) component).setProxyHost((java.lang.String) value); return true;
            case "proxyPort": getOrCreateConfiguration((STS2Component) component).setProxyPort((java.lang.Integer) value); return true;
            case "proxyProtocol": getOrCreateConfiguration((STS2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
            case "region": getOrCreateConfiguration((STS2Component) component).setRegion((java.lang.String) value); return true;
            case "stsClient": getOrCreateConfiguration((STS2Component) component).setStsClient((software.amazon.awssdk.services.sts.StsClient) value); return true;
            case "trustAllCertificates": getOrCreateConfiguration((STS2Component) component).setTrustAllCertificates((boolean) value); return true;
            case "uriEndpointOverride": getOrCreateConfiguration((STS2Component) component).setUriEndpointOverride((java.lang.String) value); return true;
            case "useDefaultCredentialsProvider": getOrCreateConfiguration((STS2Component) component).setUseDefaultCredentialsProvider((boolean) value); return true;
            case "autowiredEnabled": ((STS2Component) component).setAutowiredEnabled((boolean) value); return true;
            case "accessKey": getOrCreateConfiguration((STS2Component) component).setAccessKey((java.lang.String) value); return true;
            case "secretKey": getOrCreateConfiguration((STS2Component) component).setSecretKey((java.lang.String) value); return true;
            default: return false;
            }
        }
    }
}
/* * Copyright (C) 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.example.games.basegameutils; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Vector; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.Intent; import android.content.IntentSender.SendIntentException; import android.content.pm.PackageManager; import android.content.pm.Signature; import android.content.res.Resources; import android.os.Bundle; import android.util.Log; import android.view.Gravity; import com.google.android.gms.appstate.AppStateClient; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesClient; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.common.Scopes; import com.google.android.gms.games.GamesActivityResultCodes; import com.google.android.gms.games.GamesClient; import com.google.android.gms.games.multiplayer.Invitation; import com.google.android.gms.plus.PlusClient; import com.primalpond.hunt.R; public class GameHelper implements GooglePlayServicesClient.ConnectionCallbacks, GooglePlayServicesClient.OnConnectionFailedListener { /** Listener for sign-in success or failure events. */ public interface GameHelperListener { /** * Called when sign-in fails. 
As a result, a "Sign-In" button can be * shown to the user; when that button is clicked, call * @link{GamesHelper#beginUserInitiatedSignIn}. Note that not all calls to this * method mean an error; it may be a result of the fact that automatic * sign-in could not proceed because user interaction was required * (consent dialogs). So implementations of this method should NOT * display an error message unless a call to @link{GamesHelper#hasSignInError} * indicates that an error indeed occurred. */ void onSignInFailed(); /** Called when sign-in succeeds. */ void onSignInSucceeded(); } // States we can be in public static final int STATE_UNCONFIGURED = 0; public static final int STATE_DISCONNECTED = 1; public static final int STATE_CONNECTING = 2; public static final int STATE_CONNECTED = 3; // State names (for debug logging, etc) public static final String[] STATE_NAMES = { "UNCONFIGURED", "DISCONNECTED", "CONNECTING", "CONNECTED" }; // State we are in right now int mState = STATE_UNCONFIGURED; // Are we expecting the result of a resolution flow? boolean mExpectingResolution = false; /** * The Activity we are bound to. We need to keep a reference to the Activity * because some games methods require an Activity (a Context won't do). We * are careful not to leak these references: we release them on onStop(). */ Activity mActivity = null; // OAuth scopes required for the clients. Initialized in setup(). String mScopes[]; // Request code we use when invoking other Activities to complete the // sign-in flow. final static int RC_RESOLVE = 9001; // Request code when invoking Activities whose result we don't care about. final static int RC_UNUSED = 9002; // Client objects we manage. If a given client is not enabled, it is null. 
GamesClient mGamesClient = null; PlusClient mPlusClient = null; AppStateClient mAppStateClient = null; // What clients we manage (OR-able values, can be combined as flags) public final static int CLIENT_NONE = 0x00; public final static int CLIENT_GAMES = 0x01; public final static int CLIENT_PLUS = 0x02; public final static int CLIENT_APPSTATE = 0x04; public final static int CLIENT_ALL = CLIENT_GAMES | CLIENT_PLUS | CLIENT_APPSTATE; // What clients were requested? (bit flags) int mRequestedClients = CLIENT_NONE; // What clients are currently connected? (bit flags) int mConnectedClients = CLIENT_NONE; // What client are we currently connecting? int mClientCurrentlyConnecting = CLIENT_NONE; // Whether to automatically try to sign in on onStart(). boolean mAutoSignIn = true; /* * Whether user has specifically requested that the sign-in process begin. * If mUserInitiatedSignIn is false, we're in the automatic sign-in attempt * that we try once the Activity is started -- if true, then the user has * already clicked a "Sign-In" button or something similar */ boolean mUserInitiatedSignIn = false; // The connection result we got from our last attempt to sign-in. ConnectionResult mConnectionResult = null; // The error that happened during sign-in. SignInFailureReason mSignInFailureReason = null; // Print debug logs? boolean mDebugLog = true; String mDebugTag = "GameHelper"; /* * If we got an invitation id when we connected to the games client, it's * here. Otherwise, it's null. */ String mInvitationId; // Listener GameHelperListener mListener = null; /** * Construct a GameHelper object, initially tied to the given Activity. * After constructing this object, call @link{setup} from the onCreate() * method of your Activity. */ public GameHelper(Activity activity) { mActivity = activity; } static private final int TYPE_DEVELOPER_ERROR = 1001; static private final int TYPE_GAMEHELPER_BUG = 1002; boolean checkState(int type, String operation, String warning, int... 
expectedStates) { for (int expectedState : expectedStates) { if (mState == expectedState) { return true; } } StringBuilder sb = new StringBuilder(); if (type == TYPE_DEVELOPER_ERROR) { sb.append("GameHelper: you attempted an operation at an invalid. "); } else { sb.append("GameHelper: bug detected. Please report it at our bug tracker "); sb.append("https://github.com/playgameservices/android-samples/issues. "); sb.append("Please include the last couple hundred lines of logcat output "); sb.append("and describe the operation that caused this. "); } sb.append("Explanation: ").append(warning); sb.append("Operation: ").append(operation).append(". "); sb.append("State: ").append(STATE_NAMES[mState]).append(". "); if (expectedStates.length == 1) { sb.append("Expected state: ").append(STATE_NAMES[expectedStates[0]]).append("."); } else { sb.append("Expected states:"); for (int expectedState : expectedStates) { sb.append(" " ).append(STATE_NAMES[expectedState]); } sb.append("."); } logWarn(sb.toString()); return false; } void assertConfigured(String operation) { if (mState == STATE_UNCONFIGURED) { String error = "GameHelper error: Operation attempted without setup: " + operation + ". The setup() method must be called before attempting any other operation."; logError(error); throw new IllegalStateException(error); } } /** * Same as calling @link{setup(GameHelperListener, int)} requesting only the * CLIENT_GAMES client. */ public void setup(GameHelperListener listener) { setup(listener, CLIENT_GAMES); } /** * Performs setup on this GameHelper object. Call this from the onCreate() * method of your Activity. This will create the clients and do a few other * initialization tasks. Next, call @link{#onStart} from the onStart() * method of your Activity. * * @param listener The listener to be notified of sign-in events. * @param clientsToUse The clients to use. Use a combination of * CLIENT_GAMES, CLIENT_PLUS and CLIENT_APPSTATE, or CLIENT_ALL * to request all clients. 
     * @param additionalScopes Any scopes to be used that are outside of the ones defined
     *            in the Scopes class.
     *            I.E. for YouTube uploads one would add
     *            "https://www.googleapis.com/auth/youtube.upload"
     */
    public void setup(GameHelperListener listener, int clientsToUse,
            String... additionalScopes) {
        // setup() may only run once; a second call is a developer error.
        if (mState != STATE_UNCONFIGURED) {
            String error = "GameHelper: you called GameHelper.setup() twice. You can only call "
                    + "it once.";
            logError(error);
            throw new IllegalStateException(error);
        }
        mListener = listener;
        mRequestedClients = clientsToUse;

        debugLog("Setup: requested clients: " + mRequestedClients);

        // Build the OAuth scope list from the requested client flags, plus any
        // caller-supplied extra scopes.
        Vector<String> scopesVector = new Vector<String>();
        if (0 != (clientsToUse & CLIENT_GAMES)) {
            scopesVector.add(Scopes.GAMES);
        }
        if (0 != (clientsToUse & CLIENT_PLUS)) {
            scopesVector.add(Scopes.PLUS_LOGIN);
        }
        if (0 != (clientsToUse & CLIENT_APPSTATE)) {
            scopesVector.add(Scopes.APP_STATE);
        }
        if (null != additionalScopes) {
            for (String scope : additionalScopes) {
                scopesVector.add(scope);
            }
        }

        mScopes = new String[scopesVector.size()];
        scopesVector.copyInto(mScopes);

        debugLog("setup: scopes:");
        for (String scope : mScopes) {
            debugLog(" - " + scope);
        }

        // Create only the clients that were requested; unrequested clients
        // stay null (callers must check the flags before using them).
        if (0 != (clientsToUse & CLIENT_GAMES)) {
            debugLog("setup: creating GamesClient");
            mGamesClient = new GamesClient.Builder(getContext(), this, this)
                    .setGravityForPopups(Gravity.TOP | Gravity.CENTER_HORIZONTAL)
                    .setScopes(mScopes)
                    .create();
        }

        if (0 != (clientsToUse & CLIENT_PLUS)) {
            debugLog("setup: creating GamesPlusClient");
            mPlusClient = new PlusClient.Builder(getContext(), this, this)
                    .setScopes(mScopes)
                    .build();
        }

        if (0 != (clientsToUse & CLIENT_APPSTATE)) {
            debugLog("setup: creating AppStateClient");
            mAppStateClient = new AppStateClient.Builder(getContext(), this, this)
                    .setScopes(mScopes)
                    .create();
        }
        setState(STATE_DISCONNECTED);
    }

    /** Transitions the state machine to newState, logging the change. */
    void setState(int newState) {
        String oldStateName = STATE_NAMES[mState];
        String newStateName = STATE_NAMES[newState];
        mState = newState;
        debugLog("State change " + oldStateName + " -> " + newStateName);
    }

    /**
     * Returns the GamesClient object. In order to call this method, you must have
     * called @link{setup} with a set of clients that includes CLIENT_GAMES.
     */
    public GamesClient getGamesClient() {
        if (mGamesClient == null) {
            throw new IllegalStateException("No GamesClient. Did you request it at setup?");
        }
        return mGamesClient;
    }

    /**
     * Returns the AppStateClient object. In order to call this method, you must have
     * called @link{#setup} with a set of clients that includes CLIENT_APPSTATE.
     */
    public AppStateClient getAppStateClient() {
        if (mAppStateClient == null) {
            throw new IllegalStateException("No AppStateClient. Did you request it at setup?");
        }
        return mAppStateClient;
    }

    /**
     * Returns the PlusClient object. In order to call this method, you must have
     * called @link{#setup} with a set of clients that includes CLIENT_PLUS.
     */
    public PlusClient getPlusClient() {
        if (mPlusClient == null) {
            throw new IllegalStateException("No PlusClient. Did you request it at setup?");
        }
        return mPlusClient;
    }

    /** Returns whether or not the user is signed in. */
    public boolean isSignedIn() {
        return mState == STATE_CONNECTED;
    }

    /**
     * Returns whether or not there was a (non-recoverable) error during the
     * sign-in process.
     */
    public boolean hasSignInError() {
        return mSignInFailureReason != null;
    }

    /**
     * Returns the error that happened during the sign-in process, null if no
     * error occurred.
     */
    public SignInFailureReason getSignInError() {
        return mSignInFailureReason;
    }

    /** Call this method from your Activity's onStart.
     */
    public void onStart(Activity act) {
        // Re-acquire the Activity reference (it was released in onStop()).
        mActivity = act;

        debugLog("onStart, state = " + STATE_NAMES[mState]);
        assertConfigured("onStart");

        switch (mState) {
            case STATE_DISCONNECTED:
                // we are not connected, so attempt to connect
                if (mAutoSignIn) {
                    debugLog("onStart: Now connecting clients.");
                    startConnections();
                } else {
                    debugLog("onStart: Not connecting (user specifically signed out).");
                }
                break;
            case STATE_CONNECTING:
                // connection process is in progress; no action required
                debugLog("onStart: connection process in progress, no action taken.");
                break;
            case STATE_CONNECTED:
                // already connected (for some strange reason). No complaints :-)
                debugLog("onStart: already connected (unusual, but ok).");
                break;
            default:
                String msg = "onStart: BUG: unexpected state " + STATE_NAMES[mState];
                logError(msg);
                throw new IllegalStateException(msg);
        }
    }

    /** Call this method from your Activity's onStop(). */
    public void onStop() {
        debugLog("onStop, state = " + STATE_NAMES[mState]);
        assertConfigured("onStop");
        switch (mState) {
            case STATE_CONNECTED:
            case STATE_CONNECTING:
                // kill connections
                debugLog("onStop: Killing connections");
                killConnections();
                break;
            case STATE_DISCONNECTED:
                debugLog("onStop: not connected, so no action taken.");
                break;
            default:
                String msg = "onStop: BUG: unexpected state " + STATE_NAMES[mState];
                logError(msg);
                throw new IllegalStateException(msg);
        }

        // let go of the Activity reference (avoids leaking the Activity)
        mActivity = null;
    }

    /** Convenience method to show an alert dialog. */
    public void showAlert(String title, String message) {
        (new AlertDialog.Builder(getContext())).setTitle(title).setMessage(message)
                .setNeutralButton(android.R.string.ok, null).create().show();
    }

    /** Convenience method to show an alert dialog. */
    public void showAlert(String message) {
        (new AlertDialog.Builder(getContext())).setMessage(message)
                .setNeutralButton(android.R.string.ok, null).create().show();
    }

    /**
     * Returns the invitation ID received through an invitation notification.
     * This should be called from your GameHelperListener's
     *
     * @link{GameHelperListener#onSignInSucceeded} method, to check if there's an
     * invitation available. In that case, accept the invitation.
     * @return The id of the invitation, or null if none was received.
     */
    public String getInvitationId() {
        if (!checkState(TYPE_DEVELOPER_ERROR, "getInvitationId",
                "Invitation ID is only available when connected "
                + "(after getting the onSignInSucceeded callback).", STATE_CONNECTED)) {
            return null;
        }
        return mInvitationId;
    }

    /** Enables debug logging */
    public void enableDebugLog(boolean enabled, String tag) {
        mDebugLog = enabled;
        mDebugTag = tag;
        if (enabled) {
            debugLog("Debug log enabled, tag: " + tag);
        }
    }

    /**
     * Returns the current requested scopes. This is not valid until setup() has
     * been called.
     *
     * @return the requested scopes, including the oauth2: prefix
     */
    public String getScopes() {
        StringBuilder scopeStringBuilder = new StringBuilder();
        if (null != mScopes) {
            for (String scope : mScopes) {
                addToScope(scopeStringBuilder, scope);
            }
        }
        return scopeStringBuilder.toString();
    }

    /**
     * Returns an array of the current requested scopes. This is not valid until
     * setup() has been called
     *
     * @return the requested scopes, including the oauth2: prefix
     */
    public String[] getScopesArray() {
        return mScopes;
    }

    /** Sign out and disconnect from the APIs.
     */
    public void signOut() {
        if (mState == STATE_DISCONNECTED) {
            // nothing to do
            debugLog("signOut: state was already DISCONNECTED, ignoring.");
            return;
        }

        // for the PlusClient, "signing out" means clearing the default account and
        // then disconnecting
        if (mPlusClient != null && mPlusClient.isConnected()) {
            debugLog("Clearing default account on PlusClient.");
            mPlusClient.clearDefaultAccount();
        }

        // For the games client, signing out means calling signOut and disconnecting
        if (mGamesClient != null && mGamesClient.isConnected()) {
            debugLog("Signing out from GamesClient.");
            mGamesClient.signOut();
        }

        // Ready to disconnect
        debugLog("Proceeding with disconnection.");
        killConnections();
    }

    /**
     * Disconnects every connected client, clears pending connection state and
     * moves the state machine back to DISCONNECTED. Only legal while
     * CONNECTED or CONNECTING.
     */
    void killConnections() {
        if (!checkState(TYPE_GAMEHELPER_BUG, "killConnections", "killConnections() should only "
                + "get called while connected or connecting.", STATE_CONNECTED, STATE_CONNECTING)) {
            return;
        }
        debugLog("killConnections: killing connections.");

        mConnectionResult = null;
        mSignInFailureReason = null;

        if (mGamesClient != null && mGamesClient.isConnected()) {
            debugLog("Disconnecting GamesClient.");
            mGamesClient.disconnect();
        }
        if (mPlusClient != null && mPlusClient.isConnected()) {
            debugLog("Disconnecting PlusClient.");
            mPlusClient.disconnect();
        }
        if (mAppStateClient != null && mAppStateClient.isConnected()) {
            debugLog("Disconnecting AppStateClient.");
            mAppStateClient.disconnect();
        }
        mConnectedClients = CLIENT_NONE;
        debugLog("killConnections: all clients disconnected.");
        setState(STATE_DISCONNECTED);
    }

    /** Maps an Activity result code to a readable name for debug logging. */
    static String activityResponseCodeToString(int respCode) {
        switch (respCode) {
            case Activity.RESULT_OK:
                return "RESULT_OK";
            case Activity.RESULT_CANCELED:
                return "RESULT_CANCELED";
            case GamesActivityResultCodes.RESULT_APP_MISCONFIGURED:
                return "RESULT_APP_MISCONFIGURED";
            case GamesActivityResultCodes.RESULT_LEFT_ROOM:
                return "RESULT_LEFT_ROOM";
            case GamesActivityResultCodes.RESULT_LICENSE_FAILED:
                return "RESULT_LICENSE_FAILED";
            case GamesActivityResultCodes.RESULT_RECONNECT_REQUIRED:
                return "RESULT_RECONNECT_REQUIRED";
            case GamesActivityResultCodes.RESULT_SIGN_IN_FAILED:
                return "SIGN_IN_FAILED";
            default:
                return String.valueOf(respCode);
        }
    }

    /**
     * Handle activity result. Call this method from your Activity's
     * onActivityResult callback. If the activity result pertains to the sign-in
     * process, processes it appropriately.
     */
    public void onActivityResult(int requestCode, int responseCode, Intent intent) {
        debugLog("onActivityResult: req="
                + (requestCode == RC_RESOLVE ? "RC_RESOLVE" : String.valueOf(requestCode))
                + ", resp=" + activityResponseCodeToString(responseCode));
        if (requestCode != RC_RESOLVE) {
            debugLog("onActivityResult: request code not meant for us. Ignoring.");
            return;
        }

        // no longer expecting a resolution
        mExpectingResolution = false;

        if (mState != STATE_CONNECTING) {
            debugLog("onActivityResult: ignoring because state isn't STATE_CONNECTING ("
                    + "it's " + STATE_NAMES[mState] + ")");
            return;
        }

        // We're coming back from an activity that was launched to resolve a
        // connection problem. For example, the sign-in UI.
        if (responseCode == Activity.RESULT_OK) {
            // Ready to try to connect again.
            debugLog("onAR: Resolution was RESULT_OK, so connecting current client again.");
            connectCurrentClient();
        } else if (responseCode == GamesActivityResultCodes.RESULT_RECONNECT_REQUIRED) {
            debugLog("onAR: Resolution was RECONNECT_REQUIRED, so reconnecting.");
            connectCurrentClient();
        } else if (responseCode == Activity.RESULT_CANCELED) {
            // User cancelled.
            debugLog("onAR: Got a cancellation result, so disconnecting.");
            mAutoSignIn = false;
            mUserInitiatedSignIn = false;
            mSignInFailureReason = null; // cancelling is not a failure!
            killConnections();
            notifyListener(false);
        } else {
            // Whatever the problem we were trying to solve, it was not
            // solved. So give up and show an error message.
            debugLog("onAR: responseCode="
                    + activityResponseCodeToString(responseCode) + ", so giving up.");
            giveUp(new SignInFailureReason(mConnectionResult.getErrorCode(), responseCode));
        }
    }

    /** Delivers the sign-in outcome to the registered listener, if any. */
    void notifyListener(boolean success) {
        debugLog("Notifying LISTENER of sign-in "
                + (success ? "SUCCESS" : mSignInFailureReason != null ? "FAILURE (error)"
                        : "FAILURE (no error)"));
        if (mListener != null) {
            if (success) {
                mListener.onSignInSucceeded();
            } else {
                mListener.onSignInFailed();
            }
        }
    }

    /**
     * Starts a user-initiated sign-in flow. This should be called when the user
     * clicks on a "Sign In" button. As a result, authentication/consent dialogs
     * may show up. At the end of the process, the GameHelperListener's
     * onSignInSucceeded() or onSignInFailed() methods will be called.
     */
    public void beginUserInitiatedSignIn() {
        if (mState == STATE_CONNECTED) {
            // nothing to do
            logWarn("beginUserInitiatedSignIn() called when already connected. "
                    + "Calling listener directly to notify of success.");
            notifyListener(true);
            return;
        } else if (mState == STATE_CONNECTING) {
            logWarn("beginUserInitiatedSignIn() called when already connecting. "
                    + "Be patient! You can only call this method after you get an "
                    + "onSignInSucceeded() or onSignInFailed() callback. Suggestion: disable "
                    + "the sign-in button on startup and also when it's clicked, and re-enable "
                    + "when you get the callback.");
            // ignore call (listener will get a callback when the connection process finishes)
            return;
        }

        debugLog("Starting USER-INITIATED sign-in flow.");

        // sign in automatically on onStart()
        mAutoSignIn = true;

        // Is Google Play services available?
        int result = GooglePlayServicesUtil.isGooglePlayServicesAvailable(getContext());
        debugLog("isGooglePlayServicesAvailable returned " + result);
        if (result != ConnectionResult.SUCCESS) {
            // Google Play services is not available.
            debugLog("Google Play services not available. Show error dialog.");
            mSignInFailureReason = new SignInFailureReason(result, 0);
            showFailureDialog();
            notifyListener(false);
            return;
        }

        // indicate that user is actively trying to sign in (so we know to resolve
        // connection problems by showing dialogs)
        mUserInitiatedSignIn = true;

        if (mConnectionResult != null) {
            // We have a pending connection result from a previous failure, so
            // start with that.
            debugLog("beginUserInitiatedSignIn: continuing pending sign-in flow.");
            setState(STATE_CONNECTING);
            resolveConnectionResult();
        } else {
            // We don't have a pending connection result, so start anew.
            debugLog("beginUserInitiatedSignIn: starting new sign-in flow.");
            startConnections();
        }
    }

    /** Returns the Activity we are currently bound to (may be null after onStop()). */
    Context getContext() {
        return mActivity;
    }

    /** Appends a scope to the builder, adding the oauth2: prefix on the first one. */
    void addToScope(StringBuilder scopeStringBuilder, String scope) {
        if (scopeStringBuilder.length() == 0) {
            scopeStringBuilder.append("oauth2:");
        } else {
            scopeStringBuilder.append(" ");
        }
        scopeStringBuilder.append(scope);
    }

    /** Begins the connection process: connects requested clients one at a time. */
    void startConnections() {
        if (!checkState(TYPE_GAMEHELPER_BUG, "startConnections", "startConnections should "
                + "only get called when disconnected.", STATE_DISCONNECTED)) {
            return;
        }
        debugLog("Starting connections.");
        setState(STATE_CONNECTING);
        mInvitationId = null;
        connectNextClient();
    }

    /**
     * Connects the next requested-but-not-yet-connected client, or declares
     * sign-in successful when none remain.
     */
    void connectNextClient() {
        // do we already have all the clients we need?
        debugLog("connectNextClient: requested clients: " + mRequestedClients
                + ", connected clients: " + mConnectedClients);

        // failsafe, in case we somehow lost track of what clients are connected or not.
        if (mGamesClient != null && mGamesClient.isConnected()
                && (0 == (mConnectedClients & CLIENT_GAMES))) {
            logWarn("GamesClient was already connected. Fixing.");
            mConnectedClients |= CLIENT_GAMES;
        }
        if (mPlusClient != null && mPlusClient.isConnected()
                && (0 == (mConnectedClients & CLIENT_PLUS))) {
            logWarn("PlusClient was already connected. Fixing.");
            mConnectedClients |= CLIENT_PLUS;
        }
        if (mAppStateClient != null && mAppStateClient.isConnected()
                && (0 == (mConnectedClients & CLIENT_APPSTATE))) {
            logWarn("AppStateClient was already connected. Fixing");
            mConnectedClients |= CLIENT_APPSTATE;
        }

        int pendingClients = mRequestedClients & ~mConnectedClients;
        debugLog("Pending clients: " + pendingClients);

        if (pendingClients == 0) {
            debugLog("All clients now connected. Sign-in successful!");
            succeedSignIn();
            return;
        }

        // which client should be the next one to connect?
        if (mGamesClient != null && (0 != (pendingClients & CLIENT_GAMES))) {
            debugLog("Connecting GamesClient.");
            mClientCurrentlyConnecting = CLIENT_GAMES;
        } else if (mPlusClient != null && (0 != (pendingClients & CLIENT_PLUS))) {
            debugLog("Connecting PlusClient.");
            mClientCurrentlyConnecting = CLIENT_PLUS;
        } else if (mAppStateClient != null && (0 != (pendingClients & CLIENT_APPSTATE))) {
            debugLog("Connecting AppStateClient.");
            mClientCurrentlyConnecting = CLIENT_APPSTATE;
        } else {
            // hmmm, getting here would be a bug.
            throw new AssertionError("Not all clients connected, yet no one is next. R="
                    + mRequestedClients + ", C=" + mConnectedClients);
        }

        connectCurrentClient();
    }

    /** Issues connect() on the client selected by mClientCurrentlyConnecting. */
    void connectCurrentClient() {
        if (mState == STATE_DISCONNECTED) {
            // we got disconnected during the connection process, so abort
            logWarn("GameHelper got disconnected during connection process. Aborting.");
            return;
        }
        if (!checkState(TYPE_GAMEHELPER_BUG, "connectCurrentClient", "connectCurrentClient "
                + "should only get called when connecting.", STATE_CONNECTING)) {
            return;
        }

        switch (mClientCurrentlyConnecting) {
            case CLIENT_GAMES:
                mGamesClient.connect();
                break;
            case CLIENT_APPSTATE:
                mAppStateClient.connect();
                break;
            case CLIENT_PLUS:
                mPlusClient.connect();
                break;
        }
    }

    /**
     * Disconnects the indicated clients, then connects them again.
     * @param whatClients Indicates which clients to reconnect.
     */
    public void reconnectClients(int whatClients) {
        checkState(TYPE_DEVELOPER_ERROR, "reconnectClients", "reconnectClients should "
                + "only be called when connected. Proceeding anyway.", STATE_CONNECTED);
        boolean actuallyReconnecting = false;

        if ((whatClients & CLIENT_GAMES) != 0 && mGamesClient != null
                && mGamesClient.isConnected()) {
            debugLog("Reconnecting GamesClient.");
            actuallyReconnecting = true;
            mConnectedClients &= ~CLIENT_GAMES;
            mGamesClient.reconnect();
        }
        if ((whatClients & CLIENT_APPSTATE) != 0 && mAppStateClient != null
                && mAppStateClient.isConnected()) {
            debugLog("Reconnecting AppStateClient.");
            actuallyReconnecting = true;
            mConnectedClients &= ~CLIENT_APPSTATE;
            mAppStateClient.reconnect();
        }
        if ((whatClients & CLIENT_PLUS) != 0 && mPlusClient != null
                && mPlusClient.isConnected()) {
            // PlusClient doesn't need reconnections.
            logWarn("GameHelper is ignoring your request to reconnect "
                    + "PlusClient because this is unnecessary.");
        }

        if (actuallyReconnecting) {
            setState(STATE_CONNECTING);
        } else {
            // No reconnections are to take place, so for consistency we call the listener
            // as if sign in had just succeeded.
            debugLog("No reconnections needed, so behaving as if sign in just succeeded");
            notifyListener(true);
        }
    }

    /** Called when we successfully obtain a connection to a client. */
    public void onConnected(Bundle connectionHint) {
        debugLog("onConnected: connected! client=" + mClientCurrentlyConnecting);

        // Mark the current client as connected
        mConnectedClients |= mClientCurrentlyConnecting;
        debugLog("Connected clients updated to: " + mConnectedClients);

        // If this was the games client and it came with an invite, store it for
        // later retrieval.
        if (mClientCurrentlyConnecting == CLIENT_GAMES && connectionHint != null) {
            debugLog("onConnected: connection hint provided. Checking for invite.");
            Invitation inv = connectionHint.getParcelable(GamesClient.EXTRA_INVITATION);
            if (inv != null && inv.getInvitationId() != null) {
                // accept invitation
                debugLog("onConnected: connection hint has a room invite!");
                mInvitationId = inv.getInvitationId();
                debugLog("Invitation ID: " + mInvitationId);
            }
        }

        // connect the next client in line, if any.
        connectNextClient();
    }

    /** Marks sign-in as fully successful and notifies the listener. */
    void succeedSignIn() {
        checkState(TYPE_GAMEHELPER_BUG, "succeedSignIn", "succeedSignIn should only "
                + "get called in the connecting or connected state. Proceeding anyway.",
                STATE_CONNECTING, STATE_CONNECTED);
        debugLog("All requested clients connected. Sign-in succeeded!");
        setState(STATE_CONNECTED);
        mSignInFailureReason = null;
        mAutoSignIn = true;
        mUserInitiatedSignIn = false;
        notifyListener(true);
    }

    /** Handles a connection failure reported by a client. */
    public void onConnectionFailed(ConnectionResult result) {
        // save connection result for later reference
        debugLog("onConnectionFailed");

        mConnectionResult = result;
        debugLog("Connection failure:");
        debugLog(" - code: " + errorCodeToString(mConnectionResult.getErrorCode()));
        debugLog(" - resolvable: " + mConnectionResult.hasResolution());
        debugLog(" - details: " + mConnectionResult.toString());

        if (!mUserInitiatedSignIn) {
            // If the user didn't initiate the sign-in, we don't try to resolve
            // the connection problem automatically -- instead, we fail and wait
            // for the user to want to sign in. That way, they won't get an
            // authentication (or other) popup unless they are actively trying
            // to
            // sign in.
            debugLog("onConnectionFailed: since user didn't initiate sign-in, failing now.");
            // NOTE(review): redundant — mConnectionResult was already assigned
            // to result at the top of this method.
            mConnectionResult = result;
            setState(STATE_DISCONNECTED);
            notifyListener(false);
            return;
        }

        debugLog("onConnectionFailed: since user initiated sign-in, resolving problem.");

        // Resolve the connection result. This usually means showing a dialog or
        // starting an Activity that will allow the user to give the appropriate
        // consents so that sign-in can be successful.
        resolveConnectionResult();
    }

    /**
     * Attempts to resolve a connection failure. This will usually involve
     * starting a UI flow that lets the user give the appropriate consents
     * necessary for sign-in to work.
     */
    void resolveConnectionResult() {
        // Try to resolve the problem
        checkState(TYPE_GAMEHELPER_BUG, "resolveConnectionResult",
                "resolveConnectionResult should only be called when connecting. Proceeding anyway.",
                STATE_CONNECTING);

        if (mExpectingResolution) {
            debugLog("We're already expecting the result of a previous resolution.");
            return;
        }

        debugLog("resolveConnectionResult: trying to resolve result: " + mConnectionResult);
        if (mConnectionResult.hasResolution()) {
            // This problem can be fixed. So let's try to fix it.
            debugLog("Result has resolution. Starting it.");
            try {
                // launch appropriate UI flow (which might, for example, be the
                // sign-in flow)
                mExpectingResolution = true;
                mConnectionResult.startResolutionForResult(mActivity, RC_RESOLVE);
            } catch (SendIntentException e) {
                // Try connecting again
                debugLog("SendIntentException, so connecting again.");
                connectCurrentClient();
            }
        } else {
            // It's not a problem what we can solve, so give up and show an
            // error.
            debugLog("resolveConnectionResult: result has no resolution. Giving up.");
            giveUp(new SignInFailureReason(mConnectionResult.getErrorCode()));
        }
    }

    /**
     * Give up on signing in due to an error. Shows the appropriate error
     * message to the user, using a standard error dialog as appropriate to the
     * cause of the error. That dialog will indicate to the user how the problem
     * can be solved (for example, re-enable Google Play Services, upgrade to a
     * new version, etc).
     */
    void giveUp(SignInFailureReason reason) {
        checkState(TYPE_GAMEHELPER_BUG, "giveUp", "giveUp should only be called when "
                + "connecting. Proceeding anyway.", STATE_CONNECTING);
        mAutoSignIn = false;
        killConnections();
        mSignInFailureReason = reason;
        showFailureDialog();
        notifyListener(false);
    }

    /** Called when we are disconnected from a client. */
    public void onDisconnected() {
        debugLog("onDisconnected.");
        if (mState == STATE_DISCONNECTED) {
            // This is expected.
            debugLog("onDisconnected is expected, so no action taken.");
            return;
        }

        // Unexpected disconnect (rare!)
        logWarn("Unexpectedly disconnected. Severing remaining connections.");

        // kill the other connections too, and revert to DISCONNECTED state.
        killConnections();
        mSignInFailureReason = null;

        // call the sign in failure callback
        debugLog("Making extraordinary call to onSignInFailed callback");
        notifyListener(false);
    }

    /** Shows an error dialog that's appropriate for the failure reason. */
    void showFailureDialog() {
        Context ctx = getContext();
        if (ctx == null) {
            debugLog("*** No context. Can't show failure dialog.");
            return;
        }
        debugLog("Making error dialog for failure: " + mSignInFailureReason);
        Dialog errorDialog = null;
        int errorCode = mSignInFailureReason.getServiceErrorCode();
        int actResp = mSignInFailureReason.getActivityResultCode();

        switch (actResp) {
            case GamesActivityResultCodes.RESULT_APP_MISCONFIGURED:
                errorDialog = makeSimpleDialog(ctx.getString(
                        R.string.gamehelper_app_misconfigured));
                printMisconfiguredDebugInfo();
                break;
            case GamesActivityResultCodes.RESULT_SIGN_IN_FAILED:
                errorDialog = makeSimpleDialog(ctx.getString(
                        R.string.gamehelper_sign_in_failed));
                break;
            case GamesActivityResultCodes.RESULT_LICENSE_FAILED:
                errorDialog = makeSimpleDialog(ctx.getString(
                        R.string.gamehelper_license_failed));
                break;
            default:
                // No meaningful Activity response code, so generate default Google
                // Play services dialog
                errorDialog = GooglePlayServicesUtil.getErrorDialog(errorCode,
                        mActivity, RC_UNUSED, null);
                if (errorDialog == null) {
                    // get fallback dialog
                    debugLog("No standard error dialog available.
Making fallback dialog."); errorDialog = makeSimpleDialog(ctx.getString(R.string.gamehelper_unknown_error) + " " + errorCodeToString(errorCode)); } } debugLog("Showing error dialog."); errorDialog.show(); } Dialog makeSimpleDialog(String text) { return (new AlertDialog.Builder(getContext())).setMessage(text) .setNeutralButton(android.R.string.ok, null).create(); } void debugLog(String message) { if (mDebugLog) { Log.d(mDebugTag, "GameHelper: " + message); } } void logWarn(String message) { Log.w(mDebugTag, "!!! GameHelper WARNING: " + message); } void logError(String message) { Log.e(mDebugTag, "*** GameHelper ERROR: " + message); } static String errorCodeToString(int errorCode) { switch (errorCode) { case ConnectionResult.DEVELOPER_ERROR: return "DEVELOPER_ERROR(" + errorCode + ")"; case ConnectionResult.INTERNAL_ERROR: return "INTERNAL_ERROR(" + errorCode + ")"; case ConnectionResult.INVALID_ACCOUNT: return "INVALID_ACCOUNT(" + errorCode + ")"; case ConnectionResult.LICENSE_CHECK_FAILED: return "LICENSE_CHECK_FAILED(" + errorCode + ")"; case ConnectionResult.NETWORK_ERROR: return "NETWORK_ERROR(" + errorCode + ")"; case ConnectionResult.RESOLUTION_REQUIRED: return "RESOLUTION_REQUIRED(" + errorCode + ")"; case ConnectionResult.SERVICE_DISABLED: return "SERVICE_DISABLED(" + errorCode + ")"; case ConnectionResult.SERVICE_INVALID: return "SERVICE_INVALID(" + errorCode + ")"; case ConnectionResult.SERVICE_MISSING: return "SERVICE_MISSING(" + errorCode + ")"; case ConnectionResult.SERVICE_VERSION_UPDATE_REQUIRED: return "SERVICE_VERSION_UPDATE_REQUIRED(" + errorCode + ")"; case ConnectionResult.SIGN_IN_REQUIRED: return "SIGN_IN_REQUIRED(" + errorCode + ")"; case ConnectionResult.SUCCESS: return "SUCCESS(" + errorCode + ")"; default: return "Unknown error code " + errorCode; } } // Represents the reason for a sign-in failure public static class SignInFailureReason { public static final int NO_ACTIVITY_RESULT_CODE = -100; int mServiceErrorCode = 0; int 
mActivityResultCode = NO_ACTIVITY_RESULT_CODE; public int getServiceErrorCode() { return mServiceErrorCode; } public int getActivityResultCode() { return mActivityResultCode; } public SignInFailureReason(int serviceErrorCode, int activityResultCode) { mServiceErrorCode = serviceErrorCode; mActivityResultCode = activityResultCode; } public SignInFailureReason(int serviceErrorCode) { this(serviceErrorCode, NO_ACTIVITY_RESULT_CODE); } @Override public String toString() { return "SignInFailureReason(serviceErrorCode:" + errorCodeToString(mServiceErrorCode) + ((mActivityResultCode == NO_ACTIVITY_RESULT_CODE) ? ")" : (",activityResultCode:" + activityResponseCodeToString(mActivityResultCode) + ")")); } } void printMisconfiguredDebugInfo() { debugLog("****"); debugLog("****"); debugLog("**** APP NOT CORRECTLY CONFIGURED TO USE GOOGLE PLAY GAME SERVICES"); debugLog("**** This is usually caused by one of these reasons:"); debugLog("**** (1) Your package name and certificate fingerprint do not match"); debugLog("**** the client ID you registered in Developer Console."); debugLog("**** (2) Your App ID was incorrectly entered."); debugLog("**** (3) Your game settings have not been published and you are "); debugLog("**** trying to log in with an account that is not listed as"); debugLog("**** a test account."); debugLog("****"); Context ctx = getContext(); if (ctx == null) { debugLog("*** (no Context, so can't print more debug info)"); return; } debugLog("**** To help you debug, here is the information about this app"); debugLog("**** Package name : " + getContext().getPackageName()); debugLog("**** Cert SHA1 fingerprint: " + getSHA1CertFingerprint()); debugLog("**** App ID from : " + getAppIdFromResource()); debugLog("****"); debugLog("**** Check that the above information matches your setup in "); debugLog("**** Developer Console. 
Also, check that you're logging in with the"); debugLog("**** right account (it should be listed in the Testers section if"); debugLog("**** your project is not yet published)."); debugLog("****"); debugLog("**** For more information, refer to the troubleshooting guide:"); debugLog("**** http://developers.google.com/games/services/android/troubleshooting"); } String getAppIdFromResource() { try { Resources res = getContext().getResources(); String pkgName = getContext().getPackageName(); int res_id = res.getIdentifier("app_id", "string", pkgName); return res.getString(res_id); } catch (Exception ex) { ex.printStackTrace(); return "??? (failed to retrieve APP ID)"; } } String getSHA1CertFingerprint() { try { Signature[] sigs = getContext().getPackageManager().getPackageInfo( getContext().getPackageName(), PackageManager.GET_SIGNATURES).signatures; if (sigs.length == 0) { return "ERROR: NO SIGNATURE."; } else if (sigs.length > 1) { return "ERROR: MULTIPLE SIGNATURES"; } byte[] digest = MessageDigest.getInstance("SHA1").digest(sigs[0].toByteArray()); StringBuilder hexString = new StringBuilder(); for (int i = 0; i < digest.length; ++i) { if (i > 0) { hexString.append(":"); } byteToString(hexString, digest[i]); } return hexString.toString(); } catch (PackageManager.NameNotFoundException ex) { ex.printStackTrace(); return "(ERROR: package not found)"; } catch (NoSuchAlgorithmException ex) { ex.printStackTrace(); return "(ERROR: SHA1 algorithm not found)"; } } void byteToString(StringBuilder sb, byte b) { int unsigned_byte = b < 0 ? b + 256 : b; int hi = unsigned_byte / 16; int lo = unsigned_byte % 16; sb.append("0123456789ABCDEF".substring(hi, hi + 1)); sb.append("0123456789ABCDEF".substring(lo, lo + 1)); } }
package com.rafaskoberg.gdx.typinglabel; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.BitmapFont.Glyph; import com.badlogic.gdx.graphics.g2d.BitmapFontCache; import com.badlogic.gdx.graphics.g2d.GlyphLayout; import com.badlogic.gdx.graphics.g2d.GlyphLayout.GlyphRun; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.scenes.scene2d.ui.Label; import com.badlogic.gdx.scenes.scene2d.ui.Skin; import com.badlogic.gdx.scenes.scene2d.utils.Drawable; import com.badlogic.gdx.utils.Align; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.IntArray; import com.badlogic.gdx.utils.ObjectMap; import com.badlogic.gdx.utils.ObjectMap.Entry; import com.badlogic.gdx.utils.StringBuilder; import com.badlogic.gdx.utils.reflect.ClassReflection; /** * An extension of {@link Label} that progressively shows the text as if it was being typed in real time, and allows the * use of tokens in the following format: <tt>{TOKEN=PARAMETER}</tt>. 
*/ public class TypingLabel extends Label { /////////////////////// /// --- Members --- /// /////////////////////// // Collections private final ObjectMap<String, String> variables = new ObjectMap<String, String>(); protected final Array<TokenEntry> tokenEntries = new Array<TokenEntry>(); // Config private Color clearColor = new Color(TypingConfig.DEFAULT_CLEAR_COLOR); private TypingListener listener = null; boolean forceMarkupColor = TypingConfig.FORCE_COLOR_MARKUP_BY_DEFAULT; // Internal state private final StringBuilder originalText = new StringBuilder(); private final Array<TypingGlyph> glyphCache = new Array<TypingGlyph>(); private final IntArray glyphRunCapacities = new IntArray(); private final IntArray offsetCache = new IntArray(); private final IntArray layoutLineBreaks = new IntArray(); private final Array<Effect> activeEffects = new Array<Effect>(); private float textSpeed = TypingConfig.DEFAULT_SPEED_PER_CHAR; private float charCooldown = textSpeed; private int rawCharIndex = -2; // All chars, including color codes private int glyphCharIndex = -1; // Only renderable chars, excludes color codes private int glyphCharCompensation = 0; private int cachedGlyphCharIndex = -1; // Last glyphCharIndex sent to the cache private float lastLayoutX = 0; private float lastLayoutY = 0; private boolean parsed = false; private boolean paused = false; private boolean ended = false; private boolean skipping = false; private boolean ignoringEvents = false; private boolean ignoringEffects = false; private String defaultToken = ""; // Superclass mirroring boolean wrap; String ellipsis; float lastPrefHeight; boolean fontScaleChanged = false; //////////////////////////// /// --- Constructors --- /// //////////////////////////// public TypingLabel(CharSequence text, LabelStyle style) { super(text, style); saveOriginalText(); } public TypingLabel(CharSequence text, Skin skin, String fontName, Color color) { super(text, skin, fontName, color); saveOriginalText(); } public 
TypingLabel(CharSequence text, Skin skin, String fontName, String colorName) { super(text, skin, fontName, colorName); saveOriginalText(); } public TypingLabel(CharSequence text, Skin skin, String styleName) { super(text, skin, styleName); saveOriginalText(); } public TypingLabel(CharSequence text, Skin skin) { super(text, skin); saveOriginalText(); } ///////////////////////////// /// --- Text Handling --- /// ///////////////////////////// /** * Modifies the text of this label. If the char progression is already running, it's highly recommended to use * {@link #restart(CharSequence)} instead. */ @Override public void setText(CharSequence newText) { this.setText(newText, true); } /** * Sets the text of this label. * * @param modifyOriginalText Flag determining if the original text should be modified as well. If {@code false}, * only the display text is changed while the original text is untouched. * @see #restart(CharSequence) */ protected void setText(CharSequence newText, boolean modifyOriginalText) { setText(newText, modifyOriginalText, true); } /** * Sets the text of this label. * * @param modifyOriginalText Flag determining if the original text should be modified as well. If {@code false}, * only the display text is changed while the original text is untouched. * @param restart Whether or not this label should restart. Defaults to true. * @see #restart(CharSequence) */ protected void setText(CharSequence newText, boolean modifyOriginalText, boolean restart) { final boolean hasEnded = this.hasEnded(); super.setText(newText); if(modifyOriginalText) saveOriginalText(); if(restart) { this.restart(); } if(hasEnded) { this.skipToTheEnd(true, false); } } /** Similar to {@link #getText()}, but returns the original text with all the tokens unchanged. */ public StringBuilder getOriginalText() { return originalText; } /** * Copies the content of {@link #getText()} to the {@link StringBuilder} containing the original text with all * tokens unchanged. 
*/ protected void saveOriginalText() { originalText.setLength(0); originalText.insert(0, this.getText()); originalText.trimToSize(); } /** * Restores the original text with all tokens unchanged to this label. Make sure to call {@link #parseTokens()} to * parse the tokens again. */ protected void restoreOriginalText() { super.setText(originalText); this.parsed = false; } //////////////////////////// /// --- External API --- /// //////////////////////////// /** Returns the {@link TypingListener} associated with this label. May be {@code null}. */ public TypingListener getTypingListener() { return listener; } /** Sets the {@link TypingListener} associated with this label, or {@code null} to remove the current one. */ public void setTypingListener(TypingListener listener) { this.listener = listener; } /** * Returns a {@link Color} instance with the color to be used on {@code CLEARCOLOR} tokens. Modify this instance to * change the token color. Default value is specified by {@link TypingConfig}. * * @see TypingConfig#DEFAULT_CLEAR_COLOR */ public Color getClearColor() { return clearColor; } /** * Sets whether or not this instance should enable markup color by force. * * @see TypingConfig#FORCE_COLOR_MARKUP_BY_DEFAULT */ public void setForceMarkupColor(boolean forceMarkupColor) { this.forceMarkupColor = forceMarkupColor; } /** Returns the default token being used in this label. Defaults to empty string. */ public String getDefaultToken() { return defaultToken; } /** * Sets the default token being used in this label. This token will be used before the label's text, and after each * {RESET} call. Useful if you want a certain token to be active at all times without having to type it all the * time. */ public void setDefaultToken(String defaultToken) { this.defaultToken = defaultToken == null ? "" : defaultToken; this.parsed = false; } /** Parses all tokens of this label. Use this after setting the text and any variables that should be replaced. 
*/ public void parseTokens() { this.setText(getDefaultToken() + getText(), false, false); Parser.parseTokens(this); parsed = true; } /** * Skips the char progression to the end, showing the entire label. Useful for when users don't want to wait for too * long. Ignores all subsequent events by default. */ public void skipToTheEnd() { skipToTheEnd(true); } /** * Skips the char progression to the end, showing the entire label. Useful for when users don't want to wait for too * long. * * @param ignoreEvents If {@code true}, skipped events won't be reported to the listener. */ public void skipToTheEnd(boolean ignoreEvents) { skipToTheEnd(ignoreEvents, false); } /** * Skips the char progression to the end, showing the entire label. Useful for when users don't want to wait for too * long. * * @param ignoreEvents If {@code true}, skipped events won't be reported to the listener. * @param ignoreEffects If {@code true}, all text effects will be instantly cancelled. */ public void skipToTheEnd(boolean ignoreEvents, boolean ignoreEffects) { skipping = true; ignoringEvents = ignoreEvents; ignoringEffects = ignoreEffects; } /** * Cancels calls to {@link #skipToTheEnd()}. Useful if you need to restore the label's normal behavior at some event * after skipping. */ public void cancelSkipping() { if(skipping) { skipping = false; ignoringEvents = false; ignoringEffects = false; } } /** * Returns whether or not this label is currently skipping its typing progression all the way to the end. This is * only true if skipToTheEnd is called. */ public boolean isSkipping() { return skipping; } /** Returns whether or not this label is paused. */ public boolean isPaused() { return paused; } /** Pauses this label's character progression. */ public void pause() { paused = true; } /** Resumes this label's character progression. */ public void resume() { paused = false; } /** Returns whether or not this label's char progression has ended. 
*/ public boolean hasEnded() { return ended; } /** * Restarts this label with the original text and starts the char progression right away. All tokens are * automatically parsed. */ public void restart() { restart(getOriginalText()); } /** * Restarts this label with the given text and starts the char progression right away. All tokens are automatically * parsed. */ public void restart(CharSequence newText) { // Reset cache collections GlyphUtils.freeAll(glyphCache); glyphCache.clear(); glyphRunCapacities.clear(); offsetCache.clear(); layoutLineBreaks.clear(); activeEffects.clear(); // Reset state textSpeed = TypingConfig.DEFAULT_SPEED_PER_CHAR; charCooldown = textSpeed; rawCharIndex = -2; glyphCharIndex = -1; glyphCharCompensation = 0; cachedGlyphCharIndex = -1; lastLayoutX = 0; lastLayoutY = 0; parsed = false; paused = false; ended = false; skipping = false; ignoringEvents = false; ignoringEffects = false; // Set new text this.setText(newText, true, false); invalidate(); // Parse tokens tokenEntries.clear(); parseTokens(); } /** Returns an {@link ObjectMap} with all the variable names and their respective replacement values. */ public ObjectMap<String, String> getVariables() { return variables; } /** Registers a variable and its respective replacement value to this label. */ public void setVariable(String var, String value) { variables.put(var.toUpperCase(), value); } /** Registers a set of variables and their respective replacement values to this label. */ public void setVariables(ObjectMap<String, String> variableMap) { this.variables.clear(); for(Entry<String, String> entry : variableMap.entries()) { this.variables.put(entry.key.toUpperCase(), entry.value); } } /** Registers a set of variables and their respective replacement values to this label. 
*/ public void setVariables(java.util.Map<String, String> variableMap) { this.variables.clear(); for(java.util.Map.Entry<String, String> entry : variableMap.entrySet()) { this.variables.put(entry.getKey().toUpperCase(), entry.getValue()); } } /** Removes all variables from this label. */ public void clearVariables() { this.variables.clear(); } ////////////////////////////////// /// --- Core Functionality --- /// ////////////////////////////////// @Override public void act(float delta) { super.act(delta); // Force token parsing if(!parsed) { parseTokens(); } // Update cooldown and process char progression if(skipping || (!ended && !paused)) { if(skipping || (charCooldown -= delta) < 0.0f) { processCharProgression(); } } // Restore glyph offsets if(activeEffects.size > 0) { for(int i = 0; i < glyphCache.size; i++) { TypingGlyph glyph = glyphCache.get(i); glyph.xoffset = offsetCache.get(i * 2); glyph.yoffset = offsetCache.get(i * 2 + 1); } } // Apply effects if(!ignoringEffects) { for(int i = activeEffects.size - 1; i >= 0; i--) { Effect effect = activeEffects.get(i); effect.update(delta); int start = effect.indexStart; int end = effect.indexEnd >= 0 ? effect.indexEnd : glyphCharIndex; // If effect is finished, remove it if(effect.isFinished()) { activeEffects.removeIndex(i); continue; } // Apply effect to glyph for(int j = Math.max(0, start); j <= glyphCharIndex && j <= end && j < glyphCache.size; j++) { TypingGlyph glyph = glyphCache.get(j); effect.apply(glyph, j, delta); } } } } /** Proccess char progression according to current cooldown and process all tokens in the current index. */ private void processCharProgression() { // Keep a counter of how many chars we're processing in this tick. 
int charCounter = 0; // Process chars while there's room for it while(skipping || charCooldown < 0.0f) { // Apply compensation to glyph index, if any if(glyphCharCompensation != 0) { if(glyphCharCompensation > 0) { glyphCharIndex++; glyphCharCompensation--; } else { glyphCharIndex--; glyphCharCompensation++; } // Increment cooldown and wait for it charCooldown += textSpeed; continue; } // Increase raw char index rawCharIndex++; // Get next character and calculate cooldown increment int safeIndex = MathUtils.clamp(rawCharIndex, 0, getText().length - 1); char primitiveChar = '\u0000'; // Null character by default if(getText().length > 0) { primitiveChar = getText().charAt(safeIndex); float intervalMultiplier = TypingConfig.INTERVAL_MULTIPLIERS_BY_CHAR.get(primitiveChar, 1); charCooldown += textSpeed * intervalMultiplier; } // If char progression is finished, or if text is empty, notify listener and abort routine int textLen = getText().length; if(textLen == 0 || rawCharIndex >= textLen) { if(!ended) { ended = true; skipping = false; if(listener != null) listener.end(); } return; } // Detect layout line breaks boolean isLayoutLineBreak = false; if(layoutLineBreaks.contains(glyphCharIndex)) { layoutLineBreaks.removeValue(glyphCharIndex); isLayoutLineBreak = true; } // Increase glyph char index for all characters, except new lines. 
if(rawCharIndex >= 0 && primitiveChar != '\n' && primitiveChar != '\r' && !isLayoutLineBreak) glyphCharIndex++; // Process tokens according to the current index while(tokenEntries.size > 0 && tokenEntries.peek().index == rawCharIndex) { TokenEntry entry = tokenEntries.pop(); String token = entry.token; TokenCategory category = entry.category; // Process tokens switch(category) { case SPEED: { textSpeed = entry.floatValue; continue; } case WAIT: { glyphCharIndex--; glyphCharCompensation++; charCooldown += entry.floatValue; continue; } case SKIP: { if(entry.stringValue != null) { rawCharIndex += entry.stringValue.length(); } continue; } case EVENT: { if(this.listener != null && !ignoringEvents) { listener.event(entry.stringValue); } continue; } case EFFECT_START: case EFFECT_END: { // Get effect class boolean isStart = category == TokenCategory.EFFECT_START; Class<? extends Effect> effectClass = isStart ? TypingConfig.EFFECT_START_TOKENS.get(token) : TypingConfig.EFFECT_END_TOKENS.get(token); // End all effects of the same type for(int i = 0; i < activeEffects.size; i++) { Effect effect = activeEffects.get(i); if(effect.indexEnd < 0) { if(ClassReflection.isAssignableFrom(effectClass, effect.getClass())) { effect.indexEnd = glyphCharIndex - 1; } } } // Create new effect if necessary if(isStart) { entry.effect.indexStart = glyphCharIndex; activeEffects.add(entry.effect); } } } } // Notify listener about char progression int nextIndex = rawCharIndex == 0 ? 0 : MathUtils.clamp(rawCharIndex, 0, getText().length - 1); Character nextChar = nextIndex == 0 ? null : getText().charAt(nextIndex); if(nextChar != null && listener != null) { listener.onChar(nextChar); } // Increment char counter charCounter++; // Break loop if this was our first glyph to prevent glyph issues. 
if(glyphCharIndex == -1) { charCooldown = textSpeed; break; } // Break loop if enough chars were processed charCounter++; int charLimit = TypingConfig.CHAR_LIMIT_PER_FRAME; if(!skipping && charLimit > 0 && charCounter > charLimit) { charCooldown = Math.max(charCooldown, textSpeed); break; } } } @Override public boolean remove() { GlyphUtils.freeAll(glyphCache); glyphCache.clear(); return super.remove(); } //////////////////////////////////// /// --- Superclass Mirroring --- /// //////////////////////////////////// @Override public BitmapFontCache getBitmapFontCache() { return super.getBitmapFontCache(); } @Override public void setEllipsis(String ellipsis) { // Mimics superclass but keeps an accessible reference super.setEllipsis(ellipsis); this.ellipsis = ellipsis; } @Override public void setEllipsis(boolean ellipsis) { // Mimics superclass but keeps an accessible reference super.setEllipsis(ellipsis); if(ellipsis) this.ellipsis = "..."; else this.ellipsis = null; } @Override public void setWrap(boolean wrap) { // Mimics superclass but keeps an accessible reference super.setWrap(wrap); this.wrap = wrap; } @Override public void setFontScale(float fontScale) { super.setFontScale(fontScale); this.fontScaleChanged = true; } @Override public void setFontScale(float fontScaleX, float fontScaleY) { super.setFontScale(fontScaleX, fontScaleY); this.fontScaleChanged = true; } @Override public void setFontScaleX(float fontScaleX) { super.setFontScaleX(fontScaleX); this.fontScaleChanged = true; } @Override public void setFontScaleY(float fontScaleY) { super.setFontScaleY(fontScaleY); this.fontScaleChanged = true; } @Override public void layout() { // --- SUPERCLASS IMPLEMENTATION (but with accessible getters instead) --- BitmapFontCache cache = getBitmapFontCache(); StringBuilder text = getText(); GlyphLayout layout = super.getGlyphLayout(); int lineAlign = getLineAlign(); int labelAlign = getLabelAlign(); LabelStyle style = getStyle(); BitmapFont font = cache.getFont(); float 
oldScaleX = font.getScaleX(); float oldScaleY = font.getScaleY(); if(fontScaleChanged) font.getData().setScale(getFontScaleX(), getFontScaleY()); boolean wrap = this.wrap && ellipsis == null; if(wrap) { float prefHeight = getPrefHeight(); if(prefHeight != lastPrefHeight) { lastPrefHeight = prefHeight; invalidateHierarchy(); } } float width = getWidth(), height = getHeight(); Drawable background = style.background; float x = 0, y = 0; if(background != null) { x = background.getLeftWidth(); y = background.getBottomHeight(); width -= background.getLeftWidth() + background.getRightWidth(); height -= background.getBottomHeight() + background.getTopHeight(); } float textWidth, textHeight; if(wrap || text.indexOf("\n") != -1) { // If the text can span multiple lines, determine the text's actual size so it can be aligned within the label. layout.setText(font, text, 0, text.length, Color.WHITE, width, lineAlign, wrap, ellipsis); textWidth = layout.width; textHeight = layout.height; if((labelAlign & Align.left) == 0) { if((labelAlign & Align.right) != 0) x += width - textWidth; else x += (width - textWidth) / 2; } } else { textWidth = width; textHeight = font.getData().capHeight; } if((labelAlign & Align.top) != 0) { y += cache.getFont().isFlipped() ? 0 : height - textHeight; y += style.font.getDescent(); } else if((labelAlign & Align.bottom) != 0) { y += cache.getFont().isFlipped() ? 
height - textHeight : 0; y -= style.font.getDescent(); } else { y += (height - textHeight) / 2; } if(!cache.getFont().isFlipped()) y += textHeight; layout.setText(font, text, 0, text.length, Color.WHITE, textWidth, lineAlign, wrap, ellipsis); cache.setText(layout, x, y); if(fontScaleChanged) font.getData().setScale(oldScaleX, oldScaleY); // --- END OF SUPERCLASS IMPLEMENTATION --- // Store coordinates passed to BitmapFontCache lastLayoutX = x; lastLayoutY = y; // Perform cache layout operation, where the magic happens GlyphUtils.freeAll(glyphCache); glyphCache.clear(); layoutCache(); } /** * Reallocate glyph clones according to the updated {@link GlyphLayout}. This should only be called when the text or * the layout changes. */ private void layoutCache() { BitmapFontCache cache = getBitmapFontCache(); GlyphLayout layout = super.getGlyphLayout(); Array<GlyphRun> runs = layout.runs; // Reset layout line breaks layoutLineBreaks.clear(); // Store GlyphRun sizes and count how many glyphs we have int glyphCount = 0; glyphRunCapacities.setSize(runs.size); for(int i = 0; i < runs.size; i++) { Array<Glyph> glyphs = runs.get(i).glyphs; glyphRunCapacities.set(i, glyphs.size); glyphCount += glyphs.size; } // Make sure our cache array can hold all glyphs if(glyphCache.size < glyphCount) { glyphCache.setSize(glyphCount); offsetCache.setSize(glyphCount * 2); } // Clone original glyphs with independent instances int index = -1; float lastY = 0; for(int i = 0; i < runs.size; i++) { GlyphRun run = runs.get(i); Array<Glyph> glyphs = run.glyphs; for(int j = 0; j < glyphs.size; j++) { // Detect and store layout line breaks if(!MathUtils.isEqual(run.y, lastY)) { lastY = run.y; layoutLineBreaks.add(index); } // Increment index index++; // Get original glyph Glyph original = glyphs.get(j); // Get clone glyph TypingGlyph clone = null; if(index < glyphCache.size) { clone = glyphCache.get(index); } if(clone == null) { clone = GlyphUtils.obtain(); glyphCache.set(index, clone); } 
GlyphUtils.clone(original, clone); clone.width *= getFontScaleX(); clone.height *= getFontScaleY(); clone.xoffset *= getFontScaleX(); clone.yoffset *= getFontScaleY(); clone.run = run; // Store offset data offsetCache.set(index * 2, clone.xoffset); offsetCache.set(index * 2 + 1, clone.yoffset); // Replace glyph in original array glyphs.set(j, clone); } } // Remove exceeding glyphs from original array int glyphCountdown = glyphCharIndex; for(int i = 0; i < runs.size; i++) { Array<Glyph> glyphs = runs.get(i).glyphs; if(glyphs.size < glyphCountdown) { glyphCountdown -= glyphs.size; continue; } for(int j = 0; j < glyphs.size; j++) { if(glyphCountdown < 0) { glyphs.removeRange(j, glyphs.size - 1); break; } glyphCountdown--; } } // Pass new layout with custom glyphs to BitmapFontCache cache.setText(layout, lastLayoutX, lastLayoutY); } /** Adds cached glyphs to the active BitmapFontCache as the char index progresses. */ private void addMissingGlyphs() { // Add additional glyphs to layout array, if any int glyphLeft = glyphCharIndex - cachedGlyphCharIndex; if(glyphLeft < 1) return; // Get runs GlyphLayout layout = super.getGlyphLayout(); Array<GlyphRun> runs = layout.runs; // Iterate through GlyphRuns to find the next glyph spot int glyphCount = 0; for(int runIndex = 0; runIndex < glyphRunCapacities.size; runIndex++) { int runCapacity = glyphRunCapacities.get(runIndex); if((glyphCount + runCapacity) < cachedGlyphCharIndex) { glyphCount += runCapacity; continue; } // Get run and increase glyphCount up to its current size Array<Glyph> glyphs = runs.get(runIndex).glyphs; glyphCount += glyphs.size; // Next glyphs go here while(glyphLeft > 0) { // Skip run if this one is full int runSize = glyphs.size; if(runCapacity == runSize) { break; } // Put new glyph to this run cachedGlyphCharIndex++; TypingGlyph glyph = glyphCache.get(cachedGlyphCharIndex); glyphs.add(glyph); // Cache glyph's vertex index glyph.internalIndex = glyphCount; // Advance glyph count glyphCount++; 
glyphLeft--; } } } @Override public void draw(Batch batch, float parentAlpha) { super.validate(); addMissingGlyphs(); // Update cache with new glyphs BitmapFontCache bitmapFontCache = getBitmapFontCache(); getBitmapFontCache().setText(getGlyphLayout(), lastLayoutX, lastLayoutY); // Tint glyphs for(TypingGlyph glyph : glyphCache) { if(glyph.internalIndex >= 0 && glyph.color != null) { bitmapFontCache.setColors(glyph.color, glyph.internalIndex, glyph.internalIndex + 1); } } super.draw(batch, parentAlpha); } }
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */
package io.reactivex.internal.operators.flowable;

import java.util.concurrent.*;
import java.util.concurrent.atomic.*;

import org.reactivestreams.*;

import io.reactivex.*;
import io.reactivex.internal.disposables.SequentialDisposable;
import io.reactivex.internal.subscriptions.*;
import io.reactivex.plugins.RxJavaPlugins;

/**
 * Relays upstream items but signals a {@link TimeoutException} — or, when a fallback
 * {@link Publisher} is supplied, switches to that fallback — if the next item does not
 * arrive within {@code timeout} (measured per item, restarting after each emission).
 *
 * <p>The per-item deadline is implemented with a monotonically increasing index: every
 * onNext CAS-increments the index, and a scheduled {@link TimeoutTask} only fires if the
 * index it captured is still current. {@code Long.MAX_VALUE} is the terminal sentinel.
 *
 * @param <T> the element type
 */
public final class FlowableTimeoutTimed<T> extends AbstractFlowableWithUpstream<T, T> {

    /** Maximum allowed delay before the next upstream item. */
    final long timeout;
    /** Time unit of {@link #timeout}. */
    final TimeUnit unit;
    /** Scheduler whose worker runs the timeout tasks. */
    final Scheduler scheduler;
    /** Fallback to switch to on timeout; {@code null} means signal a TimeoutException instead. */
    final Publisher<? extends T> other;

    public FlowableTimeoutTimed(Flowable<T> source, long timeout, TimeUnit unit, Scheduler scheduler,
            Publisher<? extends T> other) {
        super(source);
        this.timeout = timeout;
        this.unit = unit;
        this.scheduler = scheduler;
        this.other = other;
    }

    @Override
    protected void subscribeActual(Subscriber<? super T> s) {
        if (other == null) {
            // No fallback: time out with an error.
            TimeoutSubscriber<T> parent = new TimeoutSubscriber<T>(s, timeout, unit, scheduler.createWorker());
            s.onSubscribe(parent);
            // Arm the first deadline (index 0) before subscribing so a slow/empty
            // upstream is still covered by the timeout.
            parent.startTimeout(0L);
            source.subscribe(parent);
        } else {
            // Fallback present: time out by switching to `other`.
            TimeoutFallbackSubscriber<T> parent = new TimeoutFallbackSubscriber<T>(s, timeout, unit, scheduler.createWorker(), other);
            s.onSubscribe(parent);
            parent.startTimeout(0L);
            source.subscribe(parent);
        }
    }

    /**
     * Subscriber for the no-fallback case. Extends {@link AtomicLong}: the inherited
     * value is the current item index; {@code Long.MAX_VALUE} marks a terminated stream.
     */
    static final class TimeoutSubscriber<T> extends AtomicLong
    implements FlowableSubscriber<T>, Subscription, TimeoutSupport {

        private static final long serialVersionUID = 3764492702657003550L;

        /** The downstream consumer. */
        final Subscriber<? super T> actual;
        final long timeout;
        final TimeUnit unit;
        /** Worker running the scheduled timeout tasks; disposed on termination/cancel. */
        final Scheduler.Worker worker;
        /** Holds the currently pending timeout task; replace() disposes the previous one. */
        final SequentialDisposable task;
        /** Upstream subscription, set once; also the cancellation target. */
        final AtomicReference<Subscription> upstream;
        /** Requests accumulated before the upstream subscription arrives. */
        final AtomicLong requested;

        TimeoutSubscriber(Subscriber<? super T> actual, long timeout, TimeUnit unit, Scheduler.Worker worker) {
            this.actual = actual;
            this.timeout = timeout;
            this.unit = unit;
            this.worker = worker;
            this.task = new SequentialDisposable();
            this.upstream = new AtomicReference<Subscription>();
            this.requested = new AtomicLong();
        }

        @Override
        public void onSubscribe(Subscription s) {
            // Replays any requests that were issued before the subscription arrived.
            SubscriptionHelper.deferredSetOnce(upstream, requested, s);
        }

        @Override
        public void onNext(T t) {
            long idx = get();
            // Already terminated, or a concurrent timeout/terminal won the CAS: drop the item.
            if (idx == Long.MAX_VALUE || !compareAndSet(idx, idx + 1)) {
                return;
            }
            // Cancel the deadline for the item that just arrived...
            task.get().dispose();
            actual.onNext(t);
            // ...and arm a fresh deadline for the next item.
            startTimeout(idx + 1);
        }

        void startTimeout(long nextIndex) {
            // replace() disposes the previously scheduled task, keeping at most one pending.
            task.replace(worker.schedule(new TimeoutTask(nextIndex, this), timeout, unit));
        }

        @Override
        public void onError(Throwable t) {
            if (getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
                task.dispose();
                actual.onError(t);
                worker.dispose();
            } else {
                // Stream already terminated: route the late error to the global handler.
                RxJavaPlugins.onError(t);
            }
        }

        @Override
        public void onComplete() {
            if (getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
                task.dispose();
                actual.onComplete();
                worker.dispose();
            }
        }

        @Override
        public void onTimeout(long idx) {
            // Only fire if the index the task captured is still the current one;
            // otherwise an item (or terminal event) arrived in time.
            if (compareAndSet(idx, Long.MAX_VALUE)) {
                SubscriptionHelper.cancel(upstream);
                actual.onError(new TimeoutException());
                worker.dispose();
            }
        }

        @Override
        public void request(long n) {
            SubscriptionHelper.deferredRequest(upstream, requested, n);
        }

        @Override
        public void cancel() {
            SubscriptionHelper.cancel(upstream);
            worker.dispose();
        }
    }

    /** Scheduled unit of work that notifies its parent which deadline (index) expired. */
    static final class TimeoutTask implements Runnable {
        final TimeoutSupport parent;
        /** Index of the item this deadline guards. */
        final long idx;

        TimeoutTask(long idx, TimeoutSupport parent) {
            this.idx = idx;
            this.parent = parent;
        }

        @Override
        public void run() {
            parent.onTimeout(idx);
        }
    }

    /**
     * Subscriber for the fallback case. Extends {@link SubscriptionArbiter} so the
     * downstream's requests carry over seamlessly when switching to the fallback source.
     */
    static final class TimeoutFallbackSubscriber<T> extends SubscriptionArbiter
    implements FlowableSubscriber<T>, TimeoutSupport {

        private static final long serialVersionUID = 3764492702657003550L;

        final Subscriber<? super T> actual;
        final long timeout;
        final TimeUnit unit;
        final Scheduler.Worker worker;
        /** Currently pending timeout task. */
        final SequentialDisposable task;
        final AtomicReference<Subscription> upstream;
        /** Current item index; Long.MAX_VALUE is the terminated sentinel. */
        final AtomicLong index;
        /** Items delivered from the primary source; credited via produced() on switch. */
        long consumed;
        /** Fallback source; nulled after use to allow GC. */
        Publisher<? extends T> fallback;

        TimeoutFallbackSubscriber(Subscriber<? super T> actual, long timeout, TimeUnit unit,
                Scheduler.Worker worker, Publisher<? extends T> fallback) {
            this.actual = actual;
            this.timeout = timeout;
            this.unit = unit;
            this.worker = worker;
            this.fallback = fallback;
            this.task = new SequentialDisposable();
            this.upstream = new AtomicReference<Subscription>();
            this.index = new AtomicLong();
        }

        @Override
        public void onSubscribe(Subscription s) {
            if (SubscriptionHelper.setOnce(upstream, s)) {
                // Hand the subscription to the arbiter so request accounting is unified.
                setSubscription(s);
            }
        }

        @Override
        public void onNext(T t) {
            long idx = index.get();
            if (idx == Long.MAX_VALUE || !index.compareAndSet(idx, idx + 1)) {
                return;
            }
            task.get().dispose();
            // Track delivered items so the arbiter can deduct them when switching sources.
            consumed++;
            actual.onNext(t);
            startTimeout(idx + 1);
        }

        void startTimeout(long nextIndex) {
            task.replace(worker.schedule(new TimeoutTask(nextIndex, this), timeout, unit));
        }

        @Override
        public void onError(Throwable t) {
            if (index.getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
                task.dispose();
                actual.onError(t);
                worker.dispose();
            } else {
                RxJavaPlugins.onError(t);
            }
        }

        @Override
        public void onComplete() {
            if (index.getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
                task.dispose();
                actual.onComplete();
                worker.dispose();
            }
        }

        @Override
        public void onTimeout(long idx) {
            if (index.compareAndSet(idx, Long.MAX_VALUE)) {
                SubscriptionHelper.cancel(upstream);
                // Credit items already delivered so the fallback only receives
                // the outstanding demand.
                long c = consumed;
                if (c != 0L) {
                    produced(c);
                }
                Publisher<? extends T> f = fallback;
                fallback = null; // release for GC; used exactly once
                f.subscribe(new FallbackSubscriber<T>(actual, this));
                worker.dispose();
            }
        }

        @Override
        public void cancel() {
            super.cancel();
            worker.dispose();
        }
    }

    /**
     * Plain pass-through subscriber for the fallback source; routes its subscription
     * into the shared arbiter so downstream demand continues uninterrupted.
     */
    static final class FallbackSubscriber<T> implements FlowableSubscriber<T> {

        final Subscriber<? super T> actual;
        final SubscriptionArbiter arbiter;

        FallbackSubscriber(Subscriber<? super T> actual, SubscriptionArbiter arbiter) {
            this.actual = actual;
            this.arbiter = arbiter;
        }

        @Override
        public void onSubscribe(Subscription s) {
            arbiter.setSubscription(s);
        }

        @Override
        public void onNext(T t) {
            actual.onNext(t);
        }

        @Override
        public void onError(Throwable t) {
            actual.onError(t);
        }

        @Override
        public void onComplete() {
            actual.onComplete();
        }
    }

    /** Callback contract used by {@link TimeoutTask} to report an expired deadline. */
    interface TimeoutSupport {
        void onTimeout(long idx);
    }
}
package restaurant;

import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.List;
import java.util.concurrent.Semaphore;

import restaurant.HostAgent;
import restaurant.Menu;
import restaurant.gui.*;
import agent.Agent;
import restaurant.interfaces.*;
import restaurant.test.mock.EventLog;

/**
 * Abstract restaurant waiter agent. Receives asynchronous messages from the host,
 * customers, cook and cashier, records each customer's progress in a per-waiter
 * {@link MyCustomer} state record, and acts on those states in its scheduler
 * ({@link #pickAndExecuteAnAction()}). Subclasses supply {@link #processOrder}
 * to decide how an order is handed to the cook.
 *
 * <p>Animation is synchronized via the {@code atTable} semaphore: each Do* action
 * blocks on {@code acquire()} until the GUI calls {@link #releaseSemaphore()}.
 */
public abstract class WaiterAgent extends Agent implements Waiter {
	public Restaurant r;
	public EventLog log = new EventLog();
	// Customers this waiter is responsible for. NOTE(review): a synchronizedList only
	// protects individual calls; the for-each loops below iterate without holding the
	// list's monitor, which is why the scheduler catches ConcurrentModificationException.
	private List<MyCustomer> customers = Collections.synchronizedList(new ArrayList<MyCustomer>());
	public Cook cook = null;
	public Host host = null;
	public Cashier cashier = null;
	public WaiterGui waiterGui = null;
	// Animation gate: released by the GUI when the waiter sprite reaches its target.
	private Semaphore atTable = new Semaphore(0, true);

	//The four booleans below are for Gui purposes, they have NOTHING to do with agent design
	boolean breakRequest = false, backRequest = false;//Two booleans from gui to tell whether to go on break or to
	boolean OnBreak = false, breakEnabled = true; //Two booleans to tell gui what to show and whether to enable
	private String name;

	public WaiterAgent(String name) {
		super();
		this.name = name;
	}

	public String getName() {
		return name;
	}

	public boolean getBreakStatus() {
		return OnBreak;
	}

	public boolean getBreakEnable() {
		return breakEnabled;
	}

	public void setHost(HostAgent h) {
		this.host = h;
	}

	public void setCook(CookAgent c) {
		this.cook = c;
	}

	public void setCashier(CashierAgent c) {
		this.cashier = c;
	}

	/** Called by the GUI when an animation finishes; unblocks the pending Do* action. */
	public void releaseSemaphore() {
		atTable.release();
		stateChanged();
	}

	// Messages

	/** From host: seat this customer at the given table. */
	public void msgSitAtTable(Customer cust, int tablenumber, int count) {
		customers.add(new MyCustomer(cust, tablenumber, MyCustomer.CustomerState.waiting, count));
		stateChanged();
	}

	/** From customer: cannot pay and is leaving without ordering. */
	public void msgNoMoneyAndLeaving(Customer cust) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.state = MyCustomer.CustomerState.noMoney;
				stateChanged();
			}
		}
	}

	/** From customer: ready to place an order. */
	public void msgReadyToOrder(Customer cust) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.state = MyCustomer.CustomerState.readyToOrder;
				stateChanged();
			}
		}
	}

	/** From customer: the chosen dish. Also releases the waiter blocked in askForChoice(). */
	public void msgHereIsTheChoice(Customer cust, String choice) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.choice = choice;
				c.state = MyCustomer.CustomerState.orderGiven;
				atTable.release();
				stateChanged();
				return;
			}
		}
	}

	/** From cook: the order for this table is ready for pickup. */
	public void msgOrderIsReady(String choice, int tableNumber) {
		for (MyCustomer c : customers) {
			if (c.tableNumber == tableNumber) {
				c.state = MyCustomer.CustomerState.orderReady;
				stateChanged();
			}
		}
	}

	/** From cook: the requested dish is out of stock; customer must reorder. */
	public void msgFoodRunsOut(String choice, int tableNumber) {
		for (MyCustomer c : customers) {
			if (c.tableNumber == tableNumber) {
				Do("Got msg " + choice + " is running out.");
				c.state = MyCustomer.CustomerState.noFood;
				stateChanged();
			}
		}
	}

	/** From customer: finished eating; the bill should be computed. */
	public void msgDoneEating(Customer cust) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.state = MyCustomer.CustomerState.finishedEating;
				stateChanged();
			}
		}
	}

	/** From cashier: the computed bill for this customer. */
	public void msgHereIsTheCheck(double money, Customer cust) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.state = MyCustomer.CustomerState.checkComputed;
				c.check = money;
				stateChanged();
			}
		}
	}

	/** From customer: leaving the restaurant; table can be freed. */
	public void msgLeavingRestaurant(Customer cust) {
		for (MyCustomer c : customers) {
			if (c.c == cust) {
				c.state = MyCustomer.CustomerState.leaving;
				stateChanged();
			}
		}
	}

	/** From GUI: user requested a break for this waiter. */
	public void msgAskForBreak() {
		breakRequest = true;
		OnBreak = true;// for gui purpose
		breakEnabled = false;//for gui purpose
		stateChanged();
	}

	/** From host: the break request was granted. */
	public void msgBreakGranted() {
		breakEnabled = true;//for gui purpose
		Do("Break request granted.");
		waiterGui.setButtonEnabled();
		stateChanged();
	}

	/** From GUI: user wants the waiter back from break. */
	public void msgAskToComeBack() {
		backRequest = true;
		OnBreak = false; // for gui purpose
		stateChanged();
	}

	/**
	 * Scheduler. Determine what action is called for, and do it.
	 */
	public boolean pickAndExecuteAnAction() {
		// The rule order below is a priority ordering: break/return requests first,
		// then billing, then clearing, then seating/ordering/serving.
		try {
			if (breakRequest) {
				Do("Tell host to break");
				host.msgWantToBreak(this);
				breakRequest = false;
				return true;
			}
			if (backRequest) {
				Do("Tell host I'm coming back");
				host.msgWantToComeBack(this);
				backRequest = false;
				return true;
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.finishedEating) {
					computeBill(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.noMoney) {
					clearCustomer(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.checkComputed) {
					giveCheck(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.leaving) {
					clearCustomer(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.waiting) {
					seatCustomer(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.readyToOrder) {
					askForChoice(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.noFood) {
					giveNewMenu(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.orderGiven) {
					processOrder(customer);
					return true;
				}
			}
			for (MyCustomer customer : customers) {
				if (customer.state == MyCustomer.CustomerState.orderReady) {
					giveOrderToCustomer(customer);
					return true;
				}
			}
		} catch (ConcurrentModificationException e) {
			// The list may be mutated by a message thread mid-iteration; bail out and
			// let the agent loop retry. NOTE(review): CME-as-control-flow is fragile —
			// synchronizing on `customers` around each loop would be the safer fix.
			return false;
		}
		DoGoHome();
		return false;
		//we have tried all our rules and found
		//nothing to do. So return false to main loop of abstract agent
		//and wait.
	}

	// Actions

	/** Fetch the waiting customer, walk them to their table and hand over a menu. */
	private void seatCustomer(MyCustomer customer) {
		waiterGui.DoFetchCustomer(customer.count);
		try {
			atTable.acquire(); // wait for the GUI to reach the customer
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		customer.state = MyCustomer.CustomerState.none;
		customer.c.msgFollowMe(this, customer.tableNumber, new Menu());
		DoSeatCustomer(customer.c, customer.tableNumber);
	}

	/** Walk to the table and take the customer's order (blocks until the choice arrives). */
	private void askForChoice(MyCustomer customer) {
		customer.state = MyCustomer.CustomerState.none;
		DoGoToCustomer(customer.c, customer.tableNumber);
		customer.c.msgWhatWouldYouLike();
		try {
			// Released by msgHereIsTheChoice() once the customer has chosen.
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	/** Bring a replacement menu with the out-of-stock dish removed. */
	private void giveNewMenu(MyCustomer customer) {
		Do("give new menu");
		DoGoToCustomer(customer.c, customer.tableNumber);
		customer.state = MyCustomer.CustomerState.none;
		Menu m = new Menu();
		m.remove(customer.choice);
		customer.c.msgNoFood(m);
	}

	/** Subclass hook: deliver the taken order to the cook (direct call, stand, etc.). */
	protected abstract void processOrder(MyCustomer customer);

	/** Pick up the ready plate and deliver it to the customer's table. */
	private void giveOrderToCustomer(MyCustomer customer) {
		DoFetchPlate();
		Do("Give order to customer");
		customer.state = MyCustomer.CustomerState.none;
		DoGiveFoodToCustomer(customer.c, customer.tableNumber, customer.choice);
		customer.c.msgHereIsYourFood(customer.choice);
	}

	/** Ask the cashier to compute the bill for what the customer ate. */
	private void computeBill(MyCustomer customer) {
		Do("Ask Cashier to compute bill");
		cashier.msgComputeBill(this, customer.c, customer.choice);
		customer.state = MyCustomer.CustomerState.none;
	}

	/** Hand the computed check to the customer, pointing them at the cashier. */
	private void giveCheck(MyCustomer customer) {
		Do("Give Customer the bill");
		customer.c.msgHereIsTheCheck(customer.check, cashier);
		customer.state = MyCustomer.CustomerState.none;
	}

	/** Tell the host the table is free and drop the customer from our list. */
	private void clearCustomer(MyCustomer customer) {
		Do("Clear customer");
		host.msgTableIsFree(customer.c, customer.tableNumber);
		customers.remove(customer);
	}

	// Animation helpers: each one starts a GUI move and blocks until releaseSemaphore().

	private void DoSeatCustomer(Customer customer, int table) {
		print("Seating " + customer + " at " + table);
		waiterGui.DoGoToTable(table);
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	private void DoGoToCustomer(Customer customer, int table) {
		print("Going to " + customer + " at " + table);
		waiterGui.DoGoToTable(table);
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	private void DoGoHome() {
		waiterGui.DoLeaveCustomer();
	}

	protected void DoGoToCook() {
		print("Going to cook");
		waiterGui.DoGoToCook();
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	protected void DoFetchPlate() {
		print("Fetching the food.");
		waiterGui.DoFetchDish();
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	protected void DoGoToRevolvingStand() {
		print("Putting on the revolving stand.");
		waiterGui.DoGoToRevolvingStand();
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	private void DoGiveFoodToCustomer(Customer customer, int table, String food) {
		print("Giving food to " + customer + " at " + table);
		waiterGui.DoBringFood(table, food);
		try {
			atTable.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	//utilities

	public void setGui(WaiterGui gui) {
		waiterGui = gui;
	}

	public WaiterGui getGui() {
		return waiterGui;
	}

	/**
	 * Per-customer bookkeeping record: the customer, their table, their current
	 * state in this waiter's workflow, their menu choice and computed check.
	 */
	protected static class MyCustomer {
		Customer c;
		// count: position/index passed in by the host with msgSitAtTable —
		// used by the GUI to locate the waiting customer. TODO confirm semantics.
		int tableNumber, count;
		String choice = "";
		double check = 0;

		public enum CustomerState {none, waiting, noMoney, readyToOrder, orderGiven, orderReady, noFood, finishedEating, checkComputed, leaving};

		public CustomerState state = CustomerState.none;

		MyCustomer(Customer c, int tableNumber, CustomerState s, int count) {
			this.c = c;
			this.tableNumber = tableNumber;
			this.state = s;
			this.count = count;
		}

		public String toString() {
			return "table " + tableNumber;
		}
	}
}
package com.codahale.metrics.graphite;

import com.codahale.metrics.*;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;

import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link GraphiteReporter}: verifies, per metric type, exactly which
 * name/value/timestamp triples are pushed to the (mocked) {@link Graphite} client,
 * and that each report is bracketed by connect()/close() in order.
 */
public class GraphiteReporterTest {
    // Seconds-precision timestamp the reporter is expected to attach to every send.
    private final long timestamp = 1000198;
    private final Clock clock = mock(Clock.class);
    private final Graphite graphite = mock(Graphite.class);
    private final MetricRegistry registry = mock(MetricRegistry.class);
    private final GraphiteReporter reporter = GraphiteReporter.forRegistry(registry)
                                                              .withClock(clock)
                                                              .prefixedWith("prefix")
                                                              .convertRatesTo(TimeUnit.SECONDS)
                                                              .convertDurationsTo(TimeUnit.MILLISECONDS)
                                                              .filter(MetricFilter.ALL)
                                                              .build(graphite);

    @Before
    public void setUp() throws Exception {
        // Clock reports milliseconds; the reporter divides down to the seconds value above.
        when(clock.getTime()).thenReturn(timestamp * 1000);
    }

    @Test
    public void doesNotReportStringGaugeValues() throws Exception {
        // Non-numeric gauge values cannot be graphed, so nothing is sent for them.
        reporter.report(map("gauge", gauge("value")),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite, never()).send("prefix.gauge", "value", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsByteGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((byte) 1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsShortGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((short) 1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsIntegerGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsLongGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1L)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsFloatGaugeValues() throws Exception {
        // Floating-point gauges are formatted with two decimal places.
        reporter.report(map("gauge", gauge(1.1f)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsDoubleGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1.1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsCounters() throws Exception {
        final Counter counter = mock(Counter.class);
        when(counter.getCount()).thenReturn(100L);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map("counter", counter),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.counter.count", "100", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsHistograms() throws Exception {
        // Each snapshot statistic is stubbed with a distinct value so the
        // expected send below is unambiguous.
        final Histogram histogram = mock(Histogram.class);
        when(histogram.getCount()).thenReturn(1L);

        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(2L);
        when(snapshot.getMean()).thenReturn(3.0);
        when(snapshot.getMin()).thenReturn(4L);
        when(snapshot.getStdDev()).thenReturn(5.0);
        when(snapshot.getMedian()).thenReturn(6.0);
        when(snapshot.get75thPercentile()).thenReturn(7.0);
        when(snapshot.get95thPercentile()).thenReturn(8.0);
        when(snapshot.get98thPercentile()).thenReturn(9.0);
        when(snapshot.get99thPercentile()).thenReturn(10.0);
        when(snapshot.get999thPercentile()).thenReturn(11.0);

        when(histogram.getSnapshot()).thenReturn(snapshot);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map("histogram", histogram),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.histogram.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.max", "2", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.mean", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.min", "4", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.stddev", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p50", "6.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p75", "7.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p95", "8.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p98", "9.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p99", "10.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p999", "11.00", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsMeters() throws Exception {
        final Meter meter = mock(Meter.class);
        when(meter.getCount()).thenReturn(1L);
        when(meter.getOneMinuteRate()).thenReturn(2.0);
        when(meter.getFiveMinuteRate()).thenReturn(3.0);
        when(meter.getFifteenMinuteRate()).thenReturn(4.0);
        when(meter.getMeanRate()).thenReturn(5.0);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map("meter", meter),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.meter.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m1_rate", "2.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m5_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m15_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.mean_rate", "5.00", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsTimers() throws Exception {
        final Timer timer = mock(Timer.class);
        when(timer.getCount()).thenReturn(1L);
        when(timer.getMeanRate()).thenReturn(2.0);
        when(timer.getOneMinuteRate()).thenReturn(3.0);
        when(timer.getFiveMinuteRate()).thenReturn(4.0);
        when(timer.getFifteenMinuteRate()).thenReturn(5.0);

        // Snapshot values are in nanoseconds; the reporter converts them to the
        // configured duration unit (milliseconds), hence "100.00" etc. below.
        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
        when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
        when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
        when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
        when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
        when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
        when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
        when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
        when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
        when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(1000));

        when(timer.getSnapshot()).thenReturn(snapshot);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        map("timer", timer));

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.timer.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m1_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m5_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m15_rate", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean_rate", "2.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.max", "100.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean", "200.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.min", "300.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.stddev", "400.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p50", "500.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p75", "600.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p95", "700.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p98", "800.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p99", "900.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p999", "1000.00", timestamp);
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    /** Empty sorted metric map (reporter expects SortedMap inputs). */
    private <T> SortedMap<String, T> map() {
        return new TreeMap<String, T>();
    }

    /** Single-entry sorted metric map. */
    private <T> SortedMap<String, T> map(String name, T metric) {
        final TreeMap<String, T> map = new TreeMap<String, T>();
        map.put(name, metric);
        return map;
    }

    /** Mock gauge returning a fixed value. */
    private <T> Gauge gauge(T value) {
        final Gauge gauge = mock(Gauge.class);
        when(gauge.getValue()).thenReturn(value);
        return gauge;
    }
}
/*
 * Copyright 2014 Real Logic Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.co.real_logic.aeron.common.command;

import uk.co.real_logic.aeron.common.Flyweight;

import java.nio.ByteOrder;

import static java.nio.ByteOrder.LITTLE_ENDIAN;
import static uk.co.real_logic.agrona.BitUtil.SIZE_OF_INT;
import static uk.co.real_logic.agrona.BitUtil.SIZE_OF_LONG;

/**
 * Message to denote that new buffers have been setup for a publication.
 *
 * <p>All fields, including the length prefixes of the two variable-length strings,
 * are encoded little-endian regardless of the native byte order.
 *
 * @see uk.co.real_logic.aeron.common.command.ControlProtocolEvents
 *
 * 0                   1                   2                   3
 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 * |                         Correlation ID                        |
 * |                                                               |
 * +---------------------------------------------------------------+
 * |                           Session ID                          |
 * +---------------------------------------------------------------+
 * |                           Stream ID                           |
 * +---------------------------------------------------------------+
 * |                       Position Counter ID                     |
 * +---------------------------------------------------------------+
 * |                           MTU Length                          |
 * +---------------------------------------------------------------+
 * |                         Channel Length                        |
 * +---------------------------------------------------------------+
 * |                          Channel    ...
 * ...                                                             |
 * +---------------------------------------------------------------+
 * |                        Log File Length                        |
 * +---------------------------------------------------------------+
 * |                        Log File Name ...
 * ...                                                             |
 * +---------------------------------------------------------------+
 */
public class PublicationBuffersReadyFlyweight extends Flyweight
{
    // Fixed-size field offsets, laid out in declaration order.
    private static final int CORRELATION_ID_OFFSET = 0;
    private static final int SESSION_ID_OFFSET = CORRELATION_ID_OFFSET + SIZE_OF_LONG;
    private static final int STREAM_ID_FIELD_OFFSET = SESSION_ID_OFFSET + SIZE_OF_INT;
    private static final int POSITION_COUNTER_ID_OFFSET = STREAM_ID_FIELD_OFFSET + SIZE_OF_INT;
    private static final int MTU_LENGTH_OFFSET = POSITION_COUNTER_ID_OFFSET + SIZE_OF_INT;
    private static final int CHANNEL_FIELD_OFFSET = MTU_LENGTH_OFFSET + SIZE_OF_INT;

    /**
     * return correlation id field
     *
     * @return correlation id field
     */
    public long correlationId()
    {
        return buffer().getLong(offset() + CORRELATION_ID_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set correlation id field
     *
     * @param correlationId field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight correlationId(final long correlationId)
    {
        buffer().putLong(offset() + CORRELATION_ID_OFFSET, correlationId, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return session id field
     *
     * @return session id field
     */
    public int sessionId()
    {
        return buffer().getInt(offset() + SESSION_ID_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set session id field
     *
     * @param sessionId field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight sessionId(final int sessionId)
    {
        buffer().putInt(offset() + SESSION_ID_OFFSET, sessionId, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return stream id field
     *
     * @return stream id field
     */
    public int streamId()
    {
        return buffer().getInt(offset() + STREAM_ID_FIELD_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set stream id field
     *
     * @param streamId field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight streamId(final int streamId)
    {
        buffer().putInt(offset() + STREAM_ID_FIELD_OFFSET, streamId, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return position counter id field
     *
     * @return position counter id field
     */
    public int positionCounterId()
    {
        return buffer().getInt(offset() + POSITION_COUNTER_ID_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set position counter id field
     *
     * @param positionCounterId field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight positionCounterId(final int positionCounterId)
    {
        buffer().putInt(offset() + POSITION_COUNTER_ID_OFFSET, positionCounterId, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return mtu length field
     *
     * @return mtu length field
     */
    public int mtuLength()
    {
        return buffer().getInt(offset() + MTU_LENGTH_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set mtu length field
     *
     * @param mtuLength field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight mtuLength(final int mtuLength)
    {
        buffer().putInt(offset() + MTU_LENGTH_OFFSET, mtuLength, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return channel field (length-prefixed UTF-8 string)
     *
     * @return channel field
     */
    public String channel()
    {
        return buffer().getStringUtf8(offset() + CHANNEL_FIELD_OFFSET, LITTLE_ENDIAN);
    }

    /**
     * set channel field
     *
     * @param channel field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight channel(final String channel)
    {
        buffer().putStringUtf8(offset() + CHANNEL_FIELD_OFFSET, channel, LITTLE_ENDIAN);

        return this;
    }

    /**
     * return log file name field (length-prefixed UTF-8 string following the channel)
     *
     * @return log file name field
     */
    public String logFileName()
    {
        return buffer().getStringUtf8(logFileNameOffset(), LITTLE_ENDIAN);
    }

    /**
     * set log file name field; the channel must already be written so the
     * variable offset can be computed.
     *
     * @param logFileName field value
     * @return flyweight
     */
    public PublicationBuffersReadyFlyweight logFileName(final String logFileName)
    {
        buffer().putStringUtf8(logFileNameOffset(), logFileName, LITTLE_ENDIAN);

        return this;
    }

    private int logFileNameOffset()
    {
        final int channelStart = offset() + CHANNEL_FIELD_OFFSET;

        // Fix: the channel length prefix is written little-endian by putStringUtf8,
        // so it must also be read little-endian (previously the native-order
        // getInt(index) overload was used, which breaks on big-endian platforms).
        return buffer().getInt(channelStart, LITTLE_ENDIAN) + channelStart + SIZE_OF_INT;
    }

    /**
     * Get the length of the current message
     *
     * NB: must be called after the data is written in order to be accurate.
     *
     * @return the length of the current message
     */
    public int length()
    {
        final int logFileNameOffset = logFileNameOffset();

        // Fix: read the log file name length prefix little-endian for the same
        // reason as in logFileNameOffset().
        return logFileNameOffset + buffer().getInt(logFileNameOffset, LITTLE_ENDIAN) + SIZE_OF_INT;
    }
}
package jenkins.security;

import com.gargoylesoftware.htmlunit.Page;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import hudson.ExtensionList;
import hudson.FilePath;
import hudson.model.DirectoryBrowserSupport;
import hudson.model.FreeStyleProject;
import hudson.model.Item;
import hudson.model.UnprotectedRootAction;
import jenkins.model.Jenkins;
import jenkins.model.JenkinsLocationConfiguration;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.CreateFileBuilder;
import org.jvnet.hudson.test.For;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.MockAuthorizationStrategy;
import org.jvnet.hudson.test.TestExtension;
import org.kohsuke.stapler.HttpResponse;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import java.net.URL;
import java.time.Instant;
import java.util.UUID;

/**
 * Integration tests for the "resource root URL" feature: user-created content
 * (workspace files, userContent, DBS output) is served from a second domain
 * ({@link #RESOURCE_DOMAIN}) via {@code /static-files/} URLs instead of the
 * Jenkins UI domain, so that Content-Security-Policy headers can be omitted
 * without exposing the main domain to XSS from user-controlled files.
 */
@Issue("JENKINS-41891")
@For({ ResourceDomainRootAction.class, ResourceDomainFilter.class, ResourceDomainConfiguration.class })
public class ResourceDomainTest {

    @Rule
    public JenkinsRule j = new JenkinsRule();

    // Second (resource) domain. JenkinsRule serves on "localhost", so 127.0.0.1
    // reaches the same server while looking like a different host name.
    private static final String RESOURCE_DOMAIN = "127.0.0.1";

    /**
     * Points the resource root URL at the same Jenkins instance under the
     * 127.0.0.1 host name before each test.
     */
    @Before
    public void prepare() throws Exception {
        String resourceRoot;
        URL root = j.getURL(); // which always will use "localhost", see JenkinsRule#getURL()
        Assert.assertTrue(root.toString().contains("localhost")); // to be safe
        resourceRoot = root.toString().replace("localhost", RESOURCE_DOMAIN);
        ResourceDomainConfiguration configuration = ExtensionList.lookupSingleton(ResourceDomainConfiguration.class);
        configuration.setUrl(resourceRoot);
    }

    /**
     * End-to-end walk through the basic behaviors: directory listings stay on
     * the primary domain, file requests redirect to the resource domain (no CSP
     * headers there), invalid resource URLs 404, and resource URLs respect
     * Overall/Read permission changes made after the URL was issued.
     */
    @Test
    public void secondDomainBasics() throws Exception {
        JenkinsRule.WebClient webClient = j.createWebClient();
        {
            // DBS directory listing is shown as always
            Page page = webClient.goTo("userContent");
            Assert.assertEquals("successful request", 200, page.getWebResponse().getStatusCode());
            Assert.assertTrue("still on the original URL", page.getUrl().toString().contains("/userContent"));
            Assert.assertTrue("web page", page.isHtmlPage());
            Assert.assertTrue("complex web page", page.getWebResponse().getContentAsString().contains("javascript"));
        }
        String resourceResponseUrl;
        {
            // DBS on primary domain forwards to second domain when trying to access a file URL
            webClient.setRedirectEnabled(true);
            Page page = webClient.goTo("userContent/readme.txt", "text/plain");
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
            Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
            Assert.assertTrue("Served from resource action", resourceResponseUrl.contains("static-files"));
        }
        {
            // direct access to resource URL works
            Page page = webClient.getPage(resourceResponseUrl);
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
            Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
            Assert.assertTrue("Served from resource action", resourceResponseUrl.contains("static-files"));
        }
        {
            // show directory index
            webClient.setRedirectEnabled(false);
            webClient.setThrowExceptionOnFailingStatusCode(false);
            Page page = webClient.getPage(resourceResponseUrl.replace("readme.txt", ""));
            Assert.assertEquals("directory listing response", 200, page.getWebResponse().getStatusCode());
            String responseContent = page.getWebResponse().getContentAsString();
            Assert.assertTrue("directory listing shown", responseContent.contains("readme.txt"));
            Assert.assertTrue("is HTML", responseContent.contains("href="));
        }
        String resourceRootUrl = ResourceDomainConfiguration.get().getUrl();
        {
            // The resource domain's root URL itself serves nothing.
            webClient.setThrowExceptionOnFailingStatusCode(false);
            Page page = webClient.getPage(resourceRootUrl);
            Assert.assertEquals("resource root URL response is 404", 404, page.getWebResponse().getStatusCode());
        }
        {
            // Nor does the action's index page without a valid token path.
            webClient.setThrowExceptionOnFailingStatusCode(false);
            Page page = webClient.getPage(resourceRootUrl + "/static-files/");
            Assert.assertEquals("resource action index page response is 404", 404, page.getWebResponse().getStatusCode());
        }
        {
            // second domain invalid URL gets 404
            webClient.setThrowExceptionOnFailingStatusCode(false);
            String uuid = UUID.randomUUID().toString();
            Page page = webClient.getPage(resourceRootUrl + "static-files/" + uuid);
            Assert.assertEquals("resource response is 404", 404, page.getWebResponse().getStatusCode());
            Assert.assertTrue("response URL is still the same", page.getUrl().toString().contains(uuid));
        }
        // Lock the instance down to test permission checks on resource URLs.
        j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
        MockAuthorizationStrategy a = new MockAuthorizationStrategy();
        j.jenkins.setAuthorizationStrategy(a);
        {
            // fails without Overall/Read
            webClient.withRedirectEnabled(false).withThrowExceptionOnFailingStatusCode(false);
            Page page = webClient.getPage(resourceResponseUrl);
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response failed", 403, page.getWebResponse().getStatusCode());
            Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
        }
        a.grant(Jenkins.READ).onRoot().to("anonymous");
        {
            // now it works again
            Page page = webClient.getPage(resourceResponseUrl);
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
            Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
            Assert.assertTrue("Served from resource action", resourceResponseUrl.contains("static-files"));
        }
    }

    /**
     * Without a configured Jenkins root URL the feature is effectively disabled:
     * files are served from the primary domain with CSP headers intact.
     */
    @Test
    public void clearRootUrl() throws Exception {
        JenkinsLocationConfiguration.get().setUrl(null);
        JenkinsRule.WebClient webClient = j.createWebClient();
        String resourceResponseUrl;
        {
            webClient.setRedirectEnabled(true);
            Page page = webClient.goTo("userContent/readme.txt", "text/plain");
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
            Assert.assertNotNull("CSP headers set", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertFalse("Not served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
            Assert.assertFalse("Not served from resource action", resourceResponseUrl.contains("static-files"));
            Assert.assertTrue("Original URL", resourceResponseUrl.contains("userContent/readme.txt"));
        }
    }

    /**
     * A resource URL with a tampered token (invalid HMAC) must be rejected with
     * a 404 rather than serving content.
     */
    @Test
    public void secondDomainCannotBeFaked() throws Exception {
        JenkinsRule.WebClient webClient = j.createWebClient();
        String resourceResponseUrl;
        {
            // first, obtain a resource response URL
            webClient.setRedirectEnabled(true);
            webClient.setThrowExceptionOnFailingStatusCode(false);
            Page page = webClient.goTo("userContent/readme.txt", "text/plain");
            resourceResponseUrl = page.getUrl().toString();
            Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
            Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
            Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
            Assert.assertTrue("Served from resource action", resourceResponseUrl.contains("static-files"));
        }
        {
            // now, modify its prefix to have an invalid HMAC
            String modifiedUrl = resourceResponseUrl.replaceAll("static[-]files[/]....", "static-files/aaaa");
            Page page = webClient.getPage(modifiedUrl);
            Assert.assertEquals("resource not found", 404, page.getWebResponse().getStatusCode());
            assertThat("resource not found", page.getWebResponse().getContentAsString(), containsString(ResourceDomainFilter.ERROR_RESPONSE));
        }
    }

    /**
     * Permissions are re-checked on every resource request: removing
     * Job/Workspace (or Job/Read) after a URL was issued turns it into a 403
     * with a message naming the failed check.
     */
    @Test
    public void missingPermissionsCause403() throws Exception {
        // setup: A job that creates a file in its workspace
        FreeStyleProject project = j.createFreeStyleProject();
        project.getBuildersList().add(new CreateFileBuilder("file.html", "<html><body>the content</body></html>"));
        project.save();
        // setup: Everyone has permission to Jenkins and the job
        j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
        MockAuthorizationStrategy a = new MockAuthorizationStrategy();
        a.grant(Jenkins.READ).everywhere().toEveryone();
        a.grant(Item.READ, Item.WORKSPACE).onItems(project).toEveryone();
        j.jenkins.setAuthorizationStrategy(a);
        j.buildAndAssertSuccess(project);
        JenkinsRule.WebClient webClient = j.createWebClient();
        webClient.setThrowExceptionOnFailingStatusCode(false);
        webClient.setRedirectEnabled(true);
        // basics work
        HtmlPage page = webClient.getPage(project, "ws/file.html");
        Assert.assertEquals("page is found", 200, page.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is as expected", page.getWebResponse().getContentAsString().contains("the content"));
        URL anonUrl = page.getUrl();
        Assert.assertTrue("page is served by resource domain", anonUrl.toString().contains("/static-files/"));
        // now remove workspace permission from all users
        a = new MockAuthorizationStrategy();
        a.grant(Jenkins.READ).everywhere().toEveryone();
        a.grant(Item.READ).onItems(project).toEveryone();
        j.jenkins.setAuthorizationStrategy(a);
        // and we get a 403 response
        page = webClient.getPage(anonUrl);
        Assert.assertEquals("page is not found", 403, page.getWebResponse().getStatusCode());
        assertThat("Response mentions workspace permission", page.getWebResponse().getContentAsString(),
                containsString("Failed permission check: anonymous is missing the Job/Workspace permission"));
        // now remove Job/Read permission from all users (but grant Discover)
        a = new MockAuthorizationStrategy();
        a.grant(Jenkins.READ).everywhere().toEveryone();
        a.grant(Item.DISCOVER).onItems(project).toEveryone();
        j.jenkins.setAuthorizationStrategy(a);
        // and we get a 403 response asking to log in (Job/Discover is basically meant to be granted to anonymous only)
        page = webClient.getPage(anonUrl);
        Assert.assertEquals("page is not found", 403, page.getWebResponse().getStatusCode());
        assertThat("Response mentions workspace permission", page.getWebResponse().getContentAsString(),
                containsString("Failed permission check: Please login to access job"));
    }

    /**
     * A previously-issued resource URL turns into a 404 once the job it points
     * to no longer exists under that name.
     */
    @Test
    public void projectWasRenamedCauses404() throws Exception {
        // setup: A job that creates a file in its workspace
        FreeStyleProject project = j.createFreeStyleProject();
        project.getBuildersList().add(new CreateFileBuilder("file.html", "<html><body>the content</body></html>"));
        project.save();
        // setup: Everyone has permission to Jenkins and the job
        j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
        MockAuthorizationStrategy a = new MockAuthorizationStrategy();
        a.grant(Jenkins.READ, Item.READ, Item.WORKSPACE).everywhere().toEveryone();
        j.jenkins.setAuthorizationStrategy(a);
        j.buildAndAssertSuccess(project);
        JenkinsRule.WebClient webClient = j.createWebClient();
        webClient.setThrowExceptionOnFailingStatusCode(false);
        webClient.setRedirectEnabled(true);
        HtmlPage page = webClient.getPage(project, "ws/file.html");
        Assert.assertEquals("page is found", 200, page.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is as expected", page.getWebResponse().getContentAsString().contains("the content"));
        URL url = page.getUrl();
        Assert.assertTrue("page is served by resource domain", url.toString().contains("/static-files/"));
        project.renameTo("new-job-name"); // or delete, doesn't really matter
        Page failedPage = webClient.getPage(url);
        Assert.assertEquals("page is not found", 404, failedPage.getWebResponse().getStatusCode());
        Assert.assertEquals("page is not found", "Not Found",
                failedPage.getWebResponse().getStatusMessage()); // TODO Is this not done through our exception handler?
    }

    // Placeholder, deliberately not annotated with @Test yet.
    // @Test
    public void indexFileIsUsedIfDefined() throws Exception {
        // TODO Test with DBS with and without directory index file
    }

    /**
     * The admin monitor recommending a resource root URL activates only when
     * the DirectoryBrowserSupport CSP has been overridden via system property
     * AND no resource root URL is configured.
     */
    @Test
    public void adminMonitorShowsUpWithOverriddenCSP() throws Exception {
        ResourceDomainRecommendation monitor = ExtensionList.lookupSingleton(ResourceDomainRecommendation.class);
        Assert.assertFalse(monitor.isActivated());
        System.setProperty(DirectoryBrowserSupport.class.getName() + ".CSP", "");
        try {
            Assert.assertFalse(monitor.isActivated());
            ResourceDomainConfiguration.get().setUrl(null);
            Assert.assertTrue(monitor.isActivated());
        } finally {
            System.clearProperty(DirectoryBrowserSupport.class.getName() + ".CSP");
        }
        Assert.assertFalse(monitor.isActivated());
    }

    /**
     * A user name containing ':' (the separator inside the resource URL token)
     * must not break resource URL generation or authentication.
     */
    @Test
    public void testColonUserName() throws Exception {
        j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
        MockAuthorizationStrategy a = new MockAuthorizationStrategy();
        a.grant(Jenkins.READ).everywhere().toEveryone();
        j.jenkins.setAuthorizationStrategy(a);
        JenkinsRule.WebClient webClient = j.createWebClient();
        webClient.setRedirectEnabled(true);
        webClient.login("foo:bar");
        Page page = webClient.goTo("userContent/readme.txt", "text/plain");
        String resourceResponseUrl = page.getUrl().toString();
        Assert.assertEquals("resource response success", 200, page.getWebResponse().getStatusCode());
        Assert.assertNull("no CSP headers", page.getWebResponse().getResponseHeaderValue("Content-Security-Policy"));
        Assert.assertTrue("Served from resource domain", resourceResponseUrl.contains(RESOURCE_DOMAIN));
        Assert.assertTrue("Served from resource action", resourceResponseUrl.contains("static-files"));
    }

    /**
     * Redirect URLs built by the root action must be URL-encoded (no raw spaces).
     */
    @Test
    public void testRedirectUrls() throws Exception {
        ResourceDomainRootAction rootAction = ResourceDomainRootAction.get();
        String url = rootAction.getRedirectUrl(new ResourceDomainRootAction.Token("foo", "bar", Instant.now()), "foo bar baz");
        Assert.assertFalse("urlencoded", url.contains(" "));
    }

    /**
     * File names needing percent-encoding (spaces, '%') survive the round trip
     * through the resource domain redirect.
     */
    @Test
    @Issue("JENKINS-59849")
    public void testUrlEncoding() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject();
        project.getBuildersList().add(new CreateFileBuilder("This has spaces and is 100% evil.html", "<html><body>the content</body></html>"));
        project.save();
        j.buildAndAssertSuccess(project);
        JenkinsRule.WebClient webClient = j.createWebClient();
        webClient.setThrowExceptionOnFailingStatusCode(false);
        webClient.setRedirectEnabled(true);
        HtmlPage page = webClient.getPage(project, "ws/This%20has%20spaces%20and%20is%20100%25%20evil.html");
        Assert.assertEquals("page is found", 200, page.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is as expected", page.getWebResponse().getContentAsString().contains("the content"));
        URL url = page.getUrl();
        Assert.assertTrue("page is served by resource domain", url.toString().contains("/static-files/"));
    }

    /**
     * Same as {@link #testUrlEncoding()} but via a root action (see
     * {@link RootActionImpl}) whose URL name and directory entries all need
     * encoding, including directory-index pages at two levels.
     */
    @Test
    @Issue("JENKINS-59849")
    public void testMoreUrlEncoding() throws Exception {
        JenkinsRule.WebClient webClient = j.createWebClient();
        webClient.setThrowExceptionOnFailingStatusCode(false);
        webClient.setRedirectEnabled(true);
        Page page = webClient.goTo("100%25%20evil/%20100%25%20evil%20dir%20name%20%20%20/%20100%25%20evil%20content%20.html");
        Assert.assertEquals("page is found", 200, page.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is as expected", page.getWebResponse().getContentAsString().contains("this is the content"));
        URL url = page.getUrl();
        Assert.assertTrue("page is served by resource domain", url.toString().contains("/static-files/"));
        URL dirUrl = new URL(url.toString().replace("%20100%25%20evil%20content%20.html", ""));
        Page dirPage = webClient.getPage(dirUrl);
        Assert.assertEquals("page is found", 200, dirPage.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is HTML", dirPage.getWebResponse().getContentAsString().contains("href"));
        Assert.assertTrue("page content references file", dirPage.getWebResponse().getContentAsString().contains("evil content"));
        URL topDirUrl = new URL(url.toString().replace("%20100%25%20evil%20dir%20name%20%20%20/%20100%25%20evil%20content%20.html", ""));
        Page topDirPage = webClient.getPage(topDirUrl);
        Assert.assertEquals("page is found", 200, topDirPage.getWebResponse().getStatusCode());
        Assert.assertTrue("page content is HTML", topDirPage.getWebResponse().getContentAsString().contains("href"));
        Assert.assertTrue("page content references directory", topDirPage.getWebResponse().getContentAsString().contains("evil dir name"));
    }

    /**
     * Root action whose URL name ("100% evil") and served files contain
     * characters that require URL encoding; serves a freshly-created temp
     * directory through DirectoryBrowserSupport.
     */
    @TestExtension
    public static class RootActionImpl implements UnprotectedRootAction {
        @CheckForNull
        @Override
        public String getIconFileName() {
            return null;
        }

        @CheckForNull
        @Override
        public String getDisplayName() {
            return null;
        }

        @CheckForNull
        @Override
        public String getUrlName() {
            return "100% evil";
        }

        public HttpResponse doDynamic() throws Exception {
            Jenkins jenkins = Jenkins.get();
            FilePath tempDir = jenkins.getRootPath().createTempDir("root", "tmp");
            tempDir.child(" 100% evil dir name ").child(" 100% evil content .html").write("this is the content", "UTF-8");
            return new DirectoryBrowserSupport(jenkins, tempDir, "title", "", true);
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.watcher.notification.email;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.ssl.SslConfiguration;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.notification.NotificationService;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.mail.MessagingException;
import javax.net.ssl.SSLSocketFactory;

import static org.elasticsearch.xpack.core.watcher.WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX;

/**
 * A component to store email credentials and handle sending email notifications.
 *
 * <p>Accounts are defined under the {@code xpack.notification.email.account.*}
 * affix namespace; all account settings are dynamic and re-{@link #reload(Settings)}ed
 * on cluster-settings changes via the {@link NotificationService} base class.
 */
public class EmailService extends NotificationService<Account> {

    private static final Setting<String> SETTING_DEFAULT_ACCOUNT = Setting.simpleString(
        "xpack.notification.email.default_account",
        Property.Dynamic,
        Property.NodeScope
    );

    private static final Setting.AffixSetting<String> SETTING_PROFILE = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "profile",
        (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Settings> SETTING_EMAIL_DEFAULTS = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "email_defaults",
        (key) -> Setting.groupSetting(key + ".", Property.Dynamic, Property.NodeScope)
    );

    // settings that can be configured as smtp properties
    private static final Setting.AffixSetting<Boolean> SETTING_SMTP_AUTH = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.auth",
        (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_ENABLE = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.starttls.enable",
        (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_REQUIRED = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.starttls.required",
        (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<String> SETTING_SMTP_HOST = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.host",
        (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Integer> SETTING_SMTP_PORT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.port",
        (key) -> Setting.intSetting(key, 587, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<String> SETTING_SMTP_USER = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.user",
        (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<SecureString> SETTING_SECURE_PASSWORD = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.secure_password",
        (key) -> SecureSetting.secureString(key, null)
    );

    private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_TIMEOUT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.timeout",
        (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_CONNECTION_TIMEOUT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.connection_timeout",
        (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_WRITE_TIMEOUT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.write_timeout",
        (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<String> SETTING_SMTP_LOCAL_ADDRESS = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.local_address",
        (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<String> SETTING_SMTP_SSL_TRUST_ADDRESS = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.ssl.trust",
        (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Integer> SETTING_SMTP_LOCAL_PORT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.local_port",
        (key) -> Setting.intSetting(key, 25, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Boolean> SETTING_SMTP_SEND_PARTIAL = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.send_partial",
        (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)
    );

    private static final Setting.AffixSetting<Boolean> SETTING_SMTP_WAIT_ON_QUIT = Setting.affixKeySetting(
        "xpack.notification.email.account.",
        "smtp.wait_on_quit",
        (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)
    );

    private static final SSLConfigurationSettings SSL_SETTINGS = SSLConfigurationSettings.withPrefix(EMAIL_NOTIFICATION_SSL_PREFIX, true);

    private static final Logger logger = LogManager.getLogger(EmailService.class);

    private final CryptoService cryptoService;
    private final SSLService sslService;

    /**
     * Creates the service and registers no-op update consumers for every
     * dynamic setting so that setting changes are logged by the cluster
     * settings infrastructure; then performs the initial account load.
     *
     * @param settings        the node's initial settings
     * @param cryptoService   optional service for decrypting sensitive values; may be {@code null}
     * @param sslService      used to build the SMTP SSL socket factory when configured
     * @param clusterSettings registry the dynamic settings are subscribed with
     */
    public EmailService(Settings settings, @Nullable CryptoService cryptoService, SSLService sslService, ClusterSettings clusterSettings) {
        super("email", settings, clusterSettings, EmailService.getDynamicSettings(), EmailService.getSecureSettings());
        this.cryptoService = cryptoService;
        this.sslService = sslService;
        // ensure logging of setting changes
        clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_EMAIL_DEFAULTS, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_AUTH, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_ENABLE, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_REQUIRED, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_HOST, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SSL_TRUST_ADDRESS, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_ADDRESS, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_PORT, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {});
        clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WAIT_ON_QUIT, (s, o) -> {}, (s, o) -> {});
        // do an initial load
        reload(settings);
    }

    /** Builds an {@link Account} from its per-account settings namespace. */
    @Override
    protected Account createAccount(String name, Settings accountSettings) {
        Account.Config config = new Account.Config(name, accountSettings, getSmtpSslSocketFactory(), logger);
        return new Account(config, cryptoService, logger);
    }

    /**
     * Returns the SSL socket factory for SMTP connections, or {@code null}
     * when no explicit SSL configuration was provided (letting javax.mail use
     * its defaults).
     */
    @Nullable
    private SSLSocketFactory getSmtpSslSocketFactory() {
        final SslConfiguration sslConfiguration = sslService.getSSLConfiguration(EMAIL_NOTIFICATION_SSL_PREFIX);
        if (sslConfiguration == null || sslConfiguration.isExplicitlyConfigured() == false) {
            return null;
        }
        return sslService.sslSocketFactory(sslConfiguration);
    }

    /**
     * Sends an email via the named account.
     *
     * @param accountName the configured account to use; resolved via {@link #getAccount(String)}
     * @return the account used and the email as actually sent (defaults applied)
     * @throws IllegalArgumentException if no account with the given name exists
     * @throws MessagingException       if the underlying send fails
     */
    public EmailSent send(Email email, Authentication auth, Profile profile, String accountName) throws MessagingException {
        Account account = getAccount(accountName);
        if (account == null) {
            throw new IllegalArgumentException(
                "failed to send email with subject [" + email.subject() + "] via account [" + accountName + "]. account does not exist"
            );
        }
        return send(email, auth, profile, account);
    }

    // Performs the actual send, wrapping failures with account/subject context.
    private EmailSent send(Email email, Authentication auth, Profile profile, Account account) throws MessagingException {
        assert account != null;
        try {
            email = account.send(email, auth, profile);
        } catch (MessagingException me) {
            throw new MessagingException(
                "failed to send email with subject [" + email.subject() + "] via account [" + account.name() + "]",
                me
            );
        }
        return new EmailSent(account.name(), email);
    }

    /** Result of a successful send: the account used and the email that went out. */
    public static class EmailSent {

        private final String account;
        private final Email email;

        public EmailSent(String account, Email email) {
            this.account = account;
            this.email = email;
        }

        public String account() {
            return account;
        }

        public Email email() {
            return email;
        }
    }

    private static List<Setting<?>> getDynamicSettings() {
        return Arrays.asList(
            SETTING_DEFAULT_ACCOUNT,
            SETTING_PROFILE,
            SETTING_EMAIL_DEFAULTS,
            SETTING_SMTP_AUTH,
            SETTING_SMTP_HOST,
            SETTING_SMTP_PORT,
            SETTING_SMTP_STARTTLS_ENABLE,
            SETTING_SMTP_USER,
            SETTING_SMTP_STARTTLS_REQUIRED,
            SETTING_SMTP_TIMEOUT,
            SETTING_SMTP_CONNECTION_TIMEOUT,
            SETTING_SMTP_WRITE_TIMEOUT,
            SETTING_SMTP_LOCAL_ADDRESS,
            SETTING_SMTP_LOCAL_PORT,
            SETTING_SMTP_SEND_PARTIAL,
            SETTING_SMTP_WAIT_ON_QUIT,
            SETTING_SMTP_SSL_TRUST_ADDRESS
        );
    }

    private static List<Setting<?>> getSecureSettings() {
        return Arrays.asList(SETTING_SECURE_PASSWORD);
    }

    /** All settings this service understands: dynamic + secure + SSL. */
    public static List<Setting<?>> getSettings() {
        // Diamond operator instead of the redundant explicit type argument.
        List<Setting<?>> allSettings = new ArrayList<>(EmailService.getDynamicSettings());
        allSettings.addAll(EmailService.getSecureSettings());
        allSettings.addAll(SSL_SETTINGS.getEnabledSettings());
        return allSettings;
    }
}
/*
 * Copyright (C) 2015-2016 Neo Visionaries Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.neovisionaries.ws.client;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.util.List;
import java.util.Map;

/**
 * Performs the HTTP CONNECT handshake with a proxy server: sends a
 * {@code CONNECT host:port HTTP/1.1} request (with optional extra headers and
 * Basic proxy authentication) over the given socket, then reads and validates
 * the proxy's response, leaving the socket tunneled to the target host.
 */
class ProxyHandshaker {

    // CRLF line terminator used throughout the HTTP request.
    private static final String RN = "\r\n";

    // Target host and port the proxy is asked to connect to.
    private final String mHost;
    private final int mPort;
    // Proxy configuration: extra headers and optional id/password credentials.
    private final ProxySettings mSettings;

    public ProxyHandshaker(String host, int port, ProxySettings settings) {
        mHost = host;
        mPort = port;
        mSettings = settings;
    }

    /**
     * Runs the full handshake on the given socket.
     *
     * @throws IOException if the request cannot be sent or the proxy's
     *         response is missing, malformed, or not a 200 status.
     */
    public void perform(Socket socket) throws IOException {
        // Send a CONNECT request to the proxy server.
        sendRequest(socket);

        // Receive a response.
        receiveResponse(socket);
    }

    private void sendRequest(Socket socket) throws IOException {
        // Build a CONNECT request.
        String request = buildRequest();

        // Convert the request to a byte array.
        byte[] requestBytes = Misc.getBytesUTF8(request);

        // Get the stream to send data to the proxy server.
        OutputStream output = socket.getOutputStream();

        // Send the request to the proxy server.
        output.write(requestBytes);
        output.flush();
    }

    // Assembles the CONNECT request text, including the Host header, any
    // user-supplied headers, and a Proxy-Authorization header when credentials
    // are configured. The request ends with the mandatory blank line.
    private String buildRequest() {
        String host = String.format("%s:%d", mHost, mPort);

        // CONNECT
        StringBuilder builder = new StringBuilder()
            .append("CONNECT ").append(host).append(" HTTP/1.1").append(RN)
            .append("Host: ").append(host).append(RN);

        // Additional headers
        addHeaders(builder);

        // Proxy-Authorization
        addProxyAuthorization(builder);

        // The entire request.
        return builder.append(RN).toString();
    }

    private void addHeaders(StringBuilder builder) {
        // For each additional header.
        for (Map.Entry<String, List<String>> header : mSettings.getHeaders().entrySet()) {
            // Header name.
            String name = header.getKey();

            // For each header value.
            for (String value : header.getValue()) {
                if (value == null) {
                    value = "";
                }

                builder.append(name).append(": ").append(value).append(RN);
            }
        }
    }

    private void addProxyAuthorization(StringBuilder builder) {
        String id = mSettings.getId();

        // No credentials configured; skip the header entirely.
        if (id == null || id.length() == 0) {
            return;
        }

        String password = mSettings.getPassword();

        if (password == null) {
            password = "";
        }

        // {id}:{password}
        String credentials = String.format("%s:%s", id, password);

        // The current implementation always uses Basic Authentication.
        builder
            .append("Proxy-Authorization: Basic ")
            .append(Base64.encode(credentials))
            .append(RN);
    }

    private void receiveResponse(Socket socket) throws IOException {
        // Get the stream to read data from the proxy server.
        InputStream input = socket.getInputStream();

        // Read the status line.
        readStatusLine(input);

        // Skip HTTP headers, including an empty line (= the separator
        // between the header part and the body part).
        skipHeaders(input);
    }

    // Reads and validates the HTTP status line; anything other than a
    // well-formed "HTTP/1.1 200 ..." line is reported as an IOException.
    private void readStatusLine(InputStream input) throws IOException {
        // Read the status line.
        String statusLine = Misc.readLine(input, "UTF-8");

        // If the response from the proxy server does not contain a status line.
        if (statusLine == null || statusLine.length() == 0) {
            throw new IOException("The response from the proxy server does not contain a status line.");
        }

        // Expect "HTTP/1.1 200 Connection established"
        String[] elements = statusLine.split(" +", 3);

        if (elements.length < 2) {
            throw new IOException(
                "The status line in the response from the proxy server is badly formatted. " +
                "The status line is: " + statusLine);
        }

        // If the status code is not "200".
        if ("200".equals(elements[1]) == false) {
            throw new IOException(
                "The status code in the response from the proxy server is not '200 Connection established'. " +
                "The status line is: " + statusLine);
        }

        // OK. A connection was established.
    }

    // Consumes header lines byte-by-byte until the empty line separating the
    // headers from the body. Tolerates both "\n" and "\r\n" line endings; a
    // lone '\r' not followed by '\n' is treated as ordinary content.
    private void skipHeaders(InputStream input) throws IOException {
        // The number of normal letters in a line.
        int count = 0;

        while (true) {
            // Read a byte from the stream.
            int ch = input.read();

            // If the end of the stream was reached.
            if (ch == -1) {
                // Unexpected EOF.
                throw new EOFException("The end of the stream from the proxy server was reached unexpectedly.");
            }

            // If the end of the line was reached.
            if (ch == '\n') {
                // If there is no normal byte in the line.
                if (count == 0) {
                    // An empty line (the separator) was found.
                    return;
                }

                // Reset the counter and go to the next line.
                count = 0;
                continue;
            }

            // If the read byte is not a carriage return.
            if (ch != '\r') {
                // Increment the number of normal bytes on the line.
                ++count;
                continue;
            }

            // Read the next byte.
            ch = input.read();

            // If the end of the stream was reached.
            if (ch == -1) {
                // Unexpected EOF.
                throw new EOFException("The end of the stream from the proxy server was reached unexpectedly after a carriage return.");
            }

            if (ch != '\n') {
                // Regard the last '\r' as a normal byte as well as the current 'ch'.
                count += 2;
                continue;
            }

            // '\r\n' was detected.

            // If there is no normal byte in the line.
            if (count == 0) {
                // An empty line (the separator) was found.
                return;
            }

            // Reset the counter and go to the next line.
            count = 0;
        }
    }

    /**
     * To be able to verify the hostname of the certificate received
     * if a connection is made to an https/wss endpoint, access to this
     * hostname is required.
     *
     * @return the hostname of the server the proxy is asked to connect to.
     */
    String getProxiedHostname() {
        return mHost;
    }
}
/*
 * Copyright 2019 The gRPC Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.grpc.xds;

import static com.google.common.base.Preconditions.checkNotNull;

import com.github.udpa.udpa.data.orca.v1.OrcaLoadReport;
import com.google.common.annotations.VisibleForTesting;
import io.grpc.CallOptions;
import io.grpc.ClientStreamTracer;
import io.grpc.ClientStreamTracer.StreamInfo;
import io.grpc.ExperimentalApi;
import io.grpc.LoadBalancer;
import io.grpc.Metadata;
import io.grpc.protobuf.ProtoUtils;
import io.grpc.util.ForwardingClientStreamTracer;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility class that provides method for {@link LoadBalancer} to install listeners to receive
 * per-request backend cost metrics in the format of Open Request Cost Aggregation (ORCA).
 */
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/5790")
public abstract class OrcaPerRequestUtil {
  // Shared stateless no-op tracer; safe to reuse across streams because it overrides nothing.
  private static final ClientStreamTracer NOOP_CLIENT_STREAM_TRACER =
      new ClientStreamTracer() {};
  // Used as the delegate when the caller installs a listener without supplying its own factory.
  private static final ClientStreamTracer.Factory NOOP_CLIENT_STREAM_TRACER_FACTORY =
      new ClientStreamTracer.Factory() {
        @Override
        public ClientStreamTracer newClientStreamTracer(StreamInfo info, Metadata headers) {
          return NOOP_CLIENT_STREAM_TRACER;
        }
      };
  private static final OrcaPerRequestUtil DEFAULT_INSTANCE =
      new OrcaPerRequestUtil() {
        @Override
        public ClientStreamTracer.Factory newOrcaClientStreamTracerFactory(
            OrcaPerRequestReportListener listener) {
          return newOrcaClientStreamTracerFactory(NOOP_CLIENT_STREAM_TRACER_FACTORY, listener);
        }

        @Override
        public ClientStreamTracer.Factory newOrcaClientStreamTracerFactory(
            ClientStreamTracer.Factory delegate, OrcaPerRequestReportListener listener) {
          return new OrcaReportingTracerFactory(delegate, listener);
        }
      };

  /**
   * Gets an {@code OrcaPerRequestUtil} instance that provides actual implementation of
   * {@link #newOrcaClientStreamTracerFactory}.
   */
  public static OrcaPerRequestUtil getInstance() {
    return DEFAULT_INSTANCE;
  }

  /**
   * Creates a new {@link ClientStreamTracer.Factory} with provided {@link
   * OrcaPerRequestReportListener} installed to receive callback when a per-request ORCA report is
   * received.
   *
   * <p>Example usages for leaf level policy (e.g., WRR policy)
   *
   * <pre>
   * {@code
   * class WrrPicker extends SubchannelPicker {
   *
   *   public PickResult pickSubchannel(PickSubchannelArgs args) {
   *     Subchannel subchannel = ...  // WRR picking logic
   *     return PickResult.withSubchannel(
   *         subchannel,
   *         OrcaPerRequestReportUtil.getInstance().newOrcaClientStreamTracerFactory(listener));
   *   }
   * }
   * }
   * </pre>
   *
   * @param listener contains the callback to be invoked when a per-request ORCA report is received.
   */
  public abstract ClientStreamTracer.Factory newOrcaClientStreamTracerFactory(
      OrcaPerRequestReportListener listener);

  /**
   * Creates a new {@link ClientStreamTracer.Factory} with provided {@link
   * OrcaPerRequestReportListener} installed to receive callback when a per-request ORCA report is
   * received.
   *
   * <p>Example usages:
   *
   * <ul>
   *   <li> Delegating policy (e.g., xDS)
   *     <pre>
   *       {@code
   *       class XdsPicker extends SubchannelPicker {
   *
   *         public PickResult pickSubchannel(PickSubchannelArgs args) {
   *           SubchannelPicker perLocalityPicker = ...  // locality picking logic
   *           Result result = perLocalityPicker.pickSubchannel(args);
   *           return PickResult.withSubchannel(
   *               result.getSubchannel(),
   *               OrcaPerRequestReportUtil.getInstance().newOrcaClientTracerFactory(
   *                   result.getStreamTracerFactory(), listener));
   *
   *         }
   *       }
   *       }
   *     </pre>
   *   </li>
   *   <li> Delegating policy with additional tracing logic
   *     <pre>
   *       {@code
   *       class WrappingPicker extends SubchannelPicker {
   *
   *         public PickResult pickSubchannel(PickSubchannelArgs args) {
   *           Result result = delegate.pickSubchannel(args);
   *           return PickResult.withSubchannel(
   *               result.getSubchannel(),
   *               new ClientStreamTracer.Factory() {
   *                 public ClientStreamTracer newClientStreamTracer(
   *                     StreamInfo info, Metadata metadata) {
   *                   ClientStreamTracer.Factory orcaTracerFactory =
   *                       OrcaPerRequestReportUtil.getInstance().newOrcaClientStreamTracerFactory(
   *                           result.getStreamTracerFactory(), listener);
   *
   *                   // Wrap the tracer from the delegate factory if you need to trace the
   *                   // stream for your own.
   *                   final ClientStreamTracer orcaTracer =
   *                       orcaTracerFactory.newClientStreamTracer(info, metadata);
   *
   *                   return ForwardingClientStreamTracer() {
   *                     protected ClientStreamTracer delegate() {
   *                       return orcaTracer;
   *                     }
   *
   *                     public void inboundMessage(int seqNo) {
   *                       // Handle this event.
   *                       ...
   *                     }
   *                   };
   *                 }
   *               });
   *         }
   *       }
   *       }
   *     </pre>
   *   </li>
   * </ul>
   *
   * @param delegate the delegate factory to produce other client stream tracing.
   * @param listener contains the callback to be invoked when a per-request ORCA report is received.
   */
  public abstract ClientStreamTracer.Factory newOrcaClientStreamTracerFactory(
      ClientStreamTracer.Factory delegate, OrcaPerRequestReportListener listener);

  /**
   * The listener interface for receiving per-request ORCA reports from backends. The class that is
   * interested in processing backend cost metrics implements this interface, and the object created
   * with that class is registered with a component, using methods in {@link OrcaPerRequestUtil}.
   * When an ORCA report is received, that object's {@code onLoadReport} method is invoked.
   */
  public interface OrcaPerRequestReportListener {

    /**
     * Invoked when an per-request ORCA report is received.
     *
     * <p>Note this callback will be invoked from the network thread as the RPC finishes,
     * implementations should not block.
     *
     * @param report load report in the format of ORCA format.
     */
    void onLoadReport(OrcaLoadReport report);
  }

  /**
   * An {@link OrcaReportingTracerFactory} wraps a delegated {@link ClientStreamTracer.Factory} with
   * additional functionality to produce {@link ClientStreamTracer} instances that extract
   * per-request ORCA reports and push to registered listeners for calls they trace.
   */
  @VisibleForTesting
  static final class OrcaReportingTracerFactory extends ClientStreamTracer.Factory {

    // Trailer key under which the backend serializes the per-request OrcaLoadReport proto.
    @VisibleForTesting
    static final Metadata.Key<OrcaLoadReport> ORCA_ENDPOINT_LOAD_METRICS_KEY =
        Metadata.Key.of(
            "x-endpoint-load-metrics-bin",
            ProtoUtils.metadataMarshaller(OrcaLoadReport.getDefaultInstance()));

    // CallOptions key used to share a single OrcaReportBroker among all
    // OrcaReportingTracerFactory instances participating in the same call.
    private static final CallOptions.Key<OrcaReportBroker> ORCA_REPORT_BROKER_KEY =
        CallOptions.Key.create("internal-orca-report-broker");

    private final ClientStreamTracer.Factory delegate;
    private final OrcaPerRequestReportListener listener;

    OrcaReportingTracerFactory(
        ClientStreamTracer.Factory delegate, OrcaPerRequestReportListener listener) {
      this.delegate = checkNotNull(delegate, "delegate");
      this.listener = checkNotNull(listener, "listener");
    }

    @Override
    public ClientStreamTracer newClientStreamTracer(StreamInfo info, Metadata headers) {
      // The first OrcaReportingTracerFactory invoked for a call creates the broker and
      // stores it in the CallOptions; factories invoked later (deeper in the delegation
      // chain) find it there and only register their listener with it.
      OrcaReportBroker broker = info.getCallOptions().getOption(ORCA_REPORT_BROKER_KEY);
      boolean augmented = false;
      if (broker == null) {
        broker = new OrcaReportBroker();
        info =
            info.toBuilder()
                .setCallOptions(info.getCallOptions().withOption(ORCA_REPORT_BROKER_KEY, broker))
                .build();
        augmented = true;
      }
      broker.addListener(listener);
      ClientStreamTracer tracer = delegate.newClientStreamTracer(info, headers);
      if (augmented) {
        final ClientStreamTracer currTracer = tracer;
        final OrcaReportBroker currBroker = broker;
        // The actual tracer that performs ORCA report deserialization. Only the factory
        // that created the broker installs this wrapper, so the trailer is parsed at most
        // once per call even when multiple factories are chained; the broker then fans the
        // report out to every registered listener.
        tracer =
            new ForwardingClientStreamTracer() {
              @Override
              protected ClientStreamTracer delegate() {
                return currTracer;
              }

              @Override
              public void inboundTrailers(Metadata trailers) {
                OrcaLoadReport report = trailers.get(ORCA_ENDPOINT_LOAD_METRICS_KEY);
                if (report != null) {
                  currBroker.onReport(report);
                }
                delegate().inboundTrailers(trailers);
              }
            };
      }
      return tracer;
    }
  }

  /**
   * A container class to hold registered {@link OrcaPerRequestReportListener}s and invoke all of
   * them when an {@link OrcaLoadReport} is received.
   */
  private static final class OrcaReportBroker {

    private final List<OrcaPerRequestReportListener> listeners = new ArrayList<>();

    void addListener(OrcaPerRequestReportListener listener) {
      listeners.add(listener);
    }

    void onReport(OrcaLoadReport report) {
      for (OrcaPerRequestReportListener listener : listeners) {
        listener.onLoadReport(report);
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.wink.json4j.compat.tests;

import java.io.InputStreamReader;
import java.io.Reader;

import junit.framework.TestCase;

import org.apache.wink.json4j.compat.JSONArray;
import org.apache.wink.json4j.compat.JSONException;
import org.apache.wink.json4j.compat.JSONFactory;
import org.apache.wink.json4j.compat.JSONObject;

/**
 * Tests for the basic Java JSONArray model.
 */
public class ApacheJSONArrayTest extends TestCase {

    /** System property consulted by {@link JSONFactory#newInstance()} to select the implementation. */
    private static final String FACTORY_PROPERTY =
        "org.apache.wink.common.model.json.factory.impl";

    /** The Apache JSON4J factory implementation exercised by this test class. */
    private static final String APACHE_FACTORY_IMPL =
        "org.apache.wink.json4j.compat.impl.ApacheJSONFactory";

    /**
     * Installs the Apache factory implementation and returns a fresh factory.
     * Extracted because every test previously duplicated these two lines verbatim.
     */
    private static JSONFactory newFactory() {
        System.setProperty(FACTORY_PROPERTY, APACHE_FACTORY_IMPL);
        return JSONFactory.newInstance();
    }

    /**
     * Test the noargs constructor.
     */
    public void test_new() {
        JSONFactory factory = newFactory();
        JSONArray jArray = factory.createJSONArray();
        assertTrue(jArray != null);
        assertTrue(jArray.length() == 0);
    }

    /**
     * Test the String empty object constructor.
     */
    public void test_newFromEmptyObjectString() {
        JSONArray jArray = null;
        Exception ex = null;
        // Load from empty object string.
        try {
            jArray = newFactory().createJSONArray("[]");
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
        assertTrue(jArray != null);
        assertTrue(jArray.length() == 0);
    }

    /**
     * Test the String non-empty object constructor.
     */
    public void test_newFromString() {
        JSONArray jArray = null;
        Exception ex = null;
        // Load a basic JSON string
        try {
            jArray = newFactory().createJSONArray("[\"foo\", \"bar\", \"bool\", true]");
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
        assertTrue(jArray != null);
        assertTrue(jArray.length() == 4);
    }

    /**
     * Test the construction from a reader.
     */
    public void test_newFromReader() {
        JSONArray jArray = null;
        Exception ex = null;
        // read in a basic JSON file of a toplevel array that has all the various types in it.
        try {
            Reader rdr = new InputStreamReader(
                this.getClass().getClassLoader().getResourceAsStream("utf8_basic_array.json"), "UTF-8");
            jArray = newFactory().createJSONArray(rdr);
            rdr.close();
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(jArray != null);
        assertTrue(jArray.length() == 7);
        assertTrue(ex == null);
    }

    /**
     * Test the String non-empty object constructor parse failure.
     */
    public void test_newFromStringFailure() {
        JSONArray jArray = null;
        Exception ex = null;
        // Load a basic JSON string that's corrupt
        try {
            jArray = newFactory().createJSONArray("[\"foo\", bar}, \"bool\", true]");
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex != null);
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putLong() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put((long)1);
            Long l = (Long)jArray.get(0);
            assertTrue(l != null);
            assertTrue(l instanceof java.lang.Long);
            assertTrue(jArray.getLong(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putInt() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put(1);
            Integer i = (Integer)jArray.get(0);
            assertTrue(i != null);
            assertTrue(i instanceof java.lang.Integer);
            assertTrue(jArray.getInt(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putShort() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put((short)1);
            Short s = (Short)jArray.get(0);
            assertTrue(s != null);
            assertTrue(s instanceof java.lang.Short);
            assertTrue(jArray.getShort(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putDouble() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put((double)1.123);
            Double d = (Double)jArray.get(0);
            assertTrue(d != null);
            assertTrue(d instanceof java.lang.Double);
            assertTrue(jArray.getDouble(0) == 1.123);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putBoolean() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put(true);
            Boolean b = (Boolean)jArray.get(0);
            assertTrue(b != null);
            assertTrue(b instanceof java.lang.Boolean);
            assertTrue(jArray.getBoolean(0) == true);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putString() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put("Hello World.");
            String s = (String)jArray.get(0);
            assertTrue(s != null);
            assertTrue(s instanceof java.lang.String);
            assertTrue(jArray.getString(0).equals("Hello World."));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray();
            jArray.put((Object)null);
            String s = (String)jArray.get(0);
            assertTrue(s == null);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putJSONObject() {
        Exception ex = null;
        try {
            JSONFactory factory = newFactory();
            JSONArray jArray = factory.createJSONArray();
            jArray.put(factory.createJSONObject());
            JSONObject obj = (JSONObject)jArray.get(0);
            assertTrue(obj != null);
            assertTrue(obj instanceof JSONObject);
            assertTrue(((JSONObject)jArray.get(0)).toString().equals("{}"));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'put' function.
     */
    public void test_putJSONArray() {
        Exception ex = null;
        try {
            JSONFactory factory = newFactory();
            JSONArray jArray = factory.createJSONArray();
            jArray.put(factory.createJSONArray());
            JSONArray obj = (JSONArray)jArray.get(0);
            assertTrue(obj != null);
            assertTrue(obj instanceof JSONArray);
            assertTrue(((JSONArray)jArray.get(0)).toString().equals("[]"));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getLong() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[1]");
            assertTrue(jArray.getLong(0) == (long)1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     * (Note: method name keeps its historical typo — renaming a JUnit 3
     * test method would change which named test is reported.)
     */
    public void test_getLongNgative() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[-1]");
            assertTrue(jArray.getLong(0) == (long)-1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getInt() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[1]");
            assertTrue(jArray.getInt(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getIntNegative() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[-1]");
            assertTrue(jArray.getInt(0) == -1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDouble() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[1]");
            assertTrue(jArray.getDouble(0) == (double)1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDoubleNegative() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[-1]");
            assertTrue(jArray.getDouble(0) == (double)-1);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDoubleWithDecimal() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[100.959]");
            assertTrue(jArray.getDouble(0) == (double)100.959);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDoubleNegativeWithDecimal() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[-100.959]");
            assertTrue(jArray.getDouble(0) == (double)-100.959);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDoubleWithExponential() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[100959e-3]");
            assertTrue(jArray.getDouble(0) == (double)100.959);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getDoubleNegativeWithExponential() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[-100959e-3]");
            assertTrue(jArray.getDouble(0) == (double)-100.959);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getString() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"some string\"]");
            assertTrue(jArray.getString(0).equals("some string"));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getBoolean() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[true]");
            assertTrue(jArray.getBoolean(0));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getBoolean_StringValue() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"true\"]");
            assertTrue(jArray.getBoolean(0));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function.
     */
    public void test_getNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.get(0) == null);
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**************************************************************************/
    /* The following tests all test failure scenarios due to type mismatching.*/
    /**************************************************************************/

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getLong_typeMisMatch() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"1\"]");
            assertTrue(jArray.getLong(0) == (long)1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getDouble_typeMisMatch() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"1\"]");
            assertTrue(jArray.getDouble(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getInt_typeMisMatch() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"1\"]");
            // BUG FIX: this test previously called getLong(0), so the getInt
            // mismatch path was never exercised.
            assertTrue(jArray.getInt(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getString_typeMisMatch() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.getString(0) == "null");
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getBoolean_typeMisMatch() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[\"1\"]");
            assertTrue(jArray.getBoolean(0) == true);
        } catch (Exception ex1) {
            ex = ex1;
        }
        // (Removed leftover debug println of the caught exception.)
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getLong_typeMisMatchNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.getLong(0) == (long)1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getInt_typeMisMatchNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            // BUG FIX: this test previously called getLong(0), so the getInt
            // mismatch path was never exercised.
            assertTrue(jArray.getInt(0) == 1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getDouble_typeMisMatchNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.getDouble(0) == (double)1);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getString_typeMisMatchNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.getString(0) == "1");
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a basic JSON Array construction and helper 'get' function failure due to type mismatch.
     */
    public void test_getBoolean_typeMisMatchNull() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray("[null]");
            assertTrue(jArray.getBoolean(0) == true);
        } catch (Exception ex1) {
            ex = ex1;
        }
        assertTrue(ex instanceof JSONException);
    }

    /**
     * Test a 'join' of a JSONArray.
     */
    public void test_JoinNoDelimiter() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray(
                "[1, true, false, null, \"My String\", [1,2,3], {\"foo\":\"bar\"}]");
            String joined = jArray.join("");
            assertTrue(joined.equals("1truefalsenullMy String[1,2,3]{\"foo\":\"bar\"}"));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }

    /**
     * Test a 'join' of a JSONArray.
     */
    public void test_JoinDelimiter() {
        Exception ex = null;
        try {
            JSONArray jArray = newFactory().createJSONArray(
                "[1, true, false, null, \"My String\", [1,2,3], {\"foo\":\"bar\"}]");
            String joined = jArray.join("|");
            assertTrue(joined.equals("1|true|false|null|My String|[1,2,3]|{\"foo\":\"bar\"}"));
        } catch (Exception ex1) {
            ex = ex1;
            ex.printStackTrace();
        }
        assertTrue(ex == null);
    }
}
/*-------------------------------------------------------------------------------------------------------------------*\
|  Copyright (C) 2014 eBay Software Foundation                                                                        |
|                                                                                                                     |
|  Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance     |
|  with the License.                                                                                                  |
|                                                                                                                     |
|  You may obtain a copy of the License at                                                                            |
|                                                                                                                     |
|       http://www.apache.org/licenses/LICENSE-2.0                                                                    |
|                                                                                                                     |
|  Unless required by applicable law or agreed to in writing, software distributed under the License is distributed   |
|  on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for |
|  the specific language governing permissions and limitations under the License.                                     |
\*-------------------------------------------------------------------------------------------------------------------*/

package com.paypal.selion.platform.dataprovider;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;

import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import com.paypal.selion.platform.dataprovider.DefaultCustomType;
import com.paypal.selion.platform.dataprovider.ExcelDataProviderException;
import com.paypal.selion.platform.dataprovider.SimpleExcelDataProvider;
import com.paypal.selion.platform.dataprovider.pojos.excel.AREA_CODE;
import com.paypal.selion.platform.dataprovider.pojos.excel.USER;

/**
 * Unit tests for {@link SimpleExcelDataProvider}, exercising row retrieval by
 * index, key, ranges and full-sheet reads against the User.xlsx test fixture.
 */
public class SimpleExcelDataProviderTest {

    // Deliberately has no default constructor so prepareObject() cannot
    // instantiate it — used by the negative test below.
    public static class MyCustomClass {
        private String name = "";

        public MyCustomClass(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }
    }

    private static String pathName = "src/test/resources/";
    private static String fileName = "User.xlsx";
    private static final String assertFailedMsg = "Assert condition failed.";
    // Shared provider over the User.xlsx fixture, initialized once per class.
    private SimpleExcelDataProvider dataSource = null;

    @BeforeClass(alwaysRun = true)
    public void init() throws IOException {
        dataSource = new SimpleExcelDataProvider(pathName, fileName);
    }

    // POJO whose 'whatColor' field requires the custom-type conversion
    // registered in testInjectCustomData().
    public static class ColorsData {
        /**
         * @return the productName
         */
        public String getProductName() {
            return productName;
        }

        /**
         * @param productName
         *            the productName to set
         */
        public void setProductName(String productName) {
            this.productName = productName;
        }

        /**
         * @return the whatColor
         */
        public Colors getWhatColor() {
            return whatColor;
        }

        /**
         * @param whatColor
         *            the whatColor to set
         */
        public void setWhatColor(Colors whatColor) {
            this.whatColor = whatColor;
        }

        private String productName;
        private Colors whatColor;
    }

    // Variant whose field is typed as an interface (List) — the provider is
    // expected to reject POJOs with interface-typed members.
    public static class TweakedColorsData {
        /**
         * @return the productName
         */
        public String getProductName() {
            return productName;
        }

        /**
         * @param productName
         *            the productName to set
         */
        public void setProductName(String productName) {
            this.productName = productName;
        }

        /**
         * @return the whatColor
         */
        public List<String> getWhatColor() {
            return whatColor;
        }

        /**
         * @param whatColor
         *            the whatColor to set
         */
        public void setWhatColor(List<String> whatColor) {
            this.whatColor = whatColor;
        }

        private String productName;
        private List<String> whatColor;
    }

    @Test(groups = "unit")
    public void testInjectCustomData() throws IOException, NoSuchMethodException, SecurityException,
            ExcelDataProviderException {
        SimpleExcelDataProvider provider = new SimpleExcelDataProvider("src/test/resources/sampleData.xlsx");
        DefaultCustomType type = new DefaultCustomType(Colors.class, Colors.class.getMethod("whatColor", String.class));
        provider.addCustomTypes(type);
        Object[][] data = provider.getAllExcelRows(new ColorsData());
        List<Colors> expectedValues = Arrays.asList(Colors.values());
        // The sampleData.xlsx fixture is expected to contain exactly 3 data rows.
        assertTrue(data.length == 3);
        for (Object[] eachObjectRow : data) {
            ColorsData tData = (ColorsData) eachObjectRow[0];
            assertTrue(expectedValues.contains(tData.whatColor));
        }
    }

    @Test(groups = "unit", expectedExceptions = { IllegalArgumentException.class })
    public void testBehaviorWhenPojoClassHasInterfaces() throws IOException, ExcelDataProviderException {
        SimpleExcelDataProvider provider = new SimpleExcelDataProvider("src/test/resources/sampleData.xlsx");
        provider.getAllExcelRows(new TweakedColorsData());
    }

    @Test(groups = "unit")
    public void testGetSingleExcelRowWithIndexFirstRowCondition() throws ExcelDataProviderException {
        Object[][] allUsers = new Object[][] { { dataSource.getSingleExcelRow(new USER(), 1) } };
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "Thomas" }, fetchedNames.toArray()), assertFailedMsg);
    }

    @Test(groups = "unit", expectedExceptions = { ExcelDataProviderException.class }, expectedExceptionsMessageRegExp = "Unable to instantiate an object of class .*")
    public void testPrepareObject() throws IOException, IllegalAccessException, ExcelDataProviderException,
            SecurityException {
        // MyCustomClass lacks a no-arg constructor, so prepareObject must fail.
        MyCustomClass foo = new MyCustomClass("foo");
        dataSource.prepareObject(foo, foo.getClass().getDeclaredFields(), new ArrayList<String>());
    }

    @Test(groups = "unit")
    public void testGetSingleExcelRowWithIndex() throws ExcelDataProviderException {
        Object[][] allUsers = new Object[][] { { dataSource.getSingleExcelRow(new USER(), 4) } };
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "suri" }, fetchedNames.toArray()), assertFailedMsg);
    }

    @Test(groups = "unit")
    public void testGetSingleExcelRowWithKeyFirstRowCondition() throws ExcelDataProviderException {
        Object[][] allUsers = new Object[][] { { dataSource.getSingleExcelRow(new USER(), "tom") } };
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "Thomas" }, fetchedNames.toArray()), assertFailedMsg);
    }

    @Test(groups = "unit")
    public void testGetSingleExcelRowWithKey() throws ExcelDataProviderException {
        Object[][] allUsers = new Object[][] { { dataSource.getSingleExcelRow(new USER(), "3") } };
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "suri" }, fetchedNames.toArray()), assertFailedMsg);
    }

    @Test(expectedExceptions = { ExcelDataProviderException.class }, groups = "unit")
    public void testGetSingleExcelRowWithInvalidKey() throws ExcelDataProviderException {
        dataSource.getSingleExcelRow(new USER(), "selion");
    }

    @Test(groups = "unit", expectedExceptions = { ExcelDataProviderException.class })
    public void testGetSingleExcelRowWithInvalidIndex() throws ExcelDataProviderException {
        // The assert is unreachable on the expected path (exception thrown).
        assertNull(dataSource.getSingleExcelRow(new USER(), 100), "Returned data should have been null");
    }

    @Test(expectedExceptions = { ExcelDataProviderException.class }, groups = "unit")
    public void testGetExcelRowsNegativeConditions() throws ExcelDataProviderException {
        // '~' is not a supported range separator.
        dataSource.getExcelRows(new USER(), "2~3");
    }

    @Test(groups = "unit")
    public void testGetExcelRowsWithKeys() throws ExcelDataProviderException {
        Object[][] allUsers = dataSource.getExcelRows(new USER(), new String[] { "tom", "binh" });
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "Thomas", "binh" }, fetchedNames.toArray()), assertFailedMsg);
    }

    @Test(expectedExceptions = { ExcelDataProviderException.class }, groups = "unit")
    public void testGetExcelRowsWithInvalidKeys() throws ExcelDataProviderException {
        dataSource.getExcelRows(new USER(), new String[] { "selion" });
    }

    @Test(groups = "unit")
    public void testGetExcelRowsWithIndividualIndexes() throws ExcelDataProviderException {
        Object[][] allUsers = dataSource.getExcelRows(new USER(), "2,3");
        List<String> fetchedNames = transformExcelDataIntoList(allUsers);
        assertTrue(arrayComparer(new String[] { "rama", "binh" }, fetchedNames.toArray()), assertFailedMsg);
    }
public synchronized List<String> transformExcelDataIntoList(Object[][] allUsers) { List<String> fetchedNames = new ArrayList<String>(); for (Object[] object : allUsers) { USER user = (USER) object[0]; fetchedNames.add(user.getName()); } return fetchedNames; } @Test(groups = "unit") public void testGetExcelRowsWithRangeOfIndexes() throws ExcelDataProviderException { Object[][] allUsers = dataSource.getExcelRows(new USER(), "1-2"); List<String> fetchedNames = transformExcelDataIntoList(allUsers); assertTrue(arrayComparer(new String[] { "Thomas", "rama" }, fetchedNames.toArray()), assertFailedMsg); } @Test(groups = "unit") public void testGetExcelRowsWithIndividualAndRangeOfIndexes() throws ExcelDataProviderException { Object[][] allUsers = dataSource.getExcelRows(new USER(), "1-2,4,6"); List<String> fetchedNames = transformExcelDataIntoList(allUsers); assertTrue(arrayComparer(new String[] { "Thomas", "rama", "suri", "suri" }, fetchedNames.toArray()), assertFailedMsg); } @Test(groups = "unit", expectedExceptions = { ExcelDataProviderException.class }) public void testGetExcelRowsWhereRowIsNull() throws ExcelDataProviderException { Object[][] allUsers = dataSource.getExcelRows(new USER(), "5"); assertNull(allUsers[0][0], assertFailedMsg); } private synchronized boolean arrayComparer(String[] expected, Object[] actual) { boolean isSame = false; for (int i = 0; i < expected.length; i++) { isSame = expected[i].matches((String) actual[i]); } return isSame; } @Test(groups = "unit") public void testGetAllExcelRows() throws ExcelDataProviderException { Object[][] allUsers = dataSource.getAllExcelRows(new USER()); assertNotNull(allUsers, "Data read from excel sheet failed"); // Reduce 2 from the actual count, since the test excel sheet has 1 blank row // and 1 row for header assertEquals(allUsers.length, getRowCountFromSheet(USER.class.getSimpleName()) - 1, "Failed reading all rows from spreadsheet"); } @Test(groups = "unit") public void testGetAllRowsAsHashTable() throws 
ExcelDataProviderException { Hashtable<String, Object> allValues = dataSource.getAllRowsAsHashTable(new USER()); assertNotNull(allValues, "Data read from excel sheet failed"); assertEquals(allValues.size(), getRowCountFromSheet(USER.class.getSimpleName()) - 2, "Failed reading all rows from spreadsheet"); } @Test(expectedExceptions = { IllegalArgumentException.class }, groups = "unit") public void testGetAllRowsAsHashTableInvalidSheetName() throws ExcelDataProviderException { Student student = new SimpleExcelDataProviderTest().new Student(); dataSource.getAllRowsAsHashTable(student); } @Test(expectedExceptions = { IllegalArgumentException.class }, groups = "unit") public void testGetallExcelRowsInvalidSheetName() throws ExcelDataProviderException { Student student = new SimpleExcelDataProviderTest().new Student(); dataSource.getAllExcelRows(student); } @Test(expectedExceptions = { IllegalArgumentException.class }, groups = "unit") public void negativeTestsWithExcelDataProviderConstructor() throws IOException { new SimpleExcelDataProvider(null); } @Test(expectedExceptions = { IOException.class }, groups = "unit") public void negativeTestsInvalidFileName() throws IOException { new SimpleExcelDataProvider(null, "IdontExist.xls"); } @Test(groups = "unit") public void getAllRowsAsHash() throws ExcelDataProviderException { assertNotNull(dataSource.getAllRowsAsHashTable(new USER())); } @Test(groups = "unit") public void getSheetAsHashByKeyTest1() throws ExcelDataProviderException { USER user = (USER) dataSource.getAllRowsAsHashTable(new USER()).get("binh"); assertData(user); } @Test(groups = "unit") public void getSheetAsHashByKeyTest2() throws ExcelDataProviderException { USER user = (USER) dataSource.getAllRowsAsHashTable(new USER()).get("1"); assertData(user); } @DataProvider(parallel = true) public Object[][] getExcelDataRowsByKeys() throws Exception { return dataSource.getExcelRows(new USER(), new String[] { "1", "binh" }); } @Test(dataProvider = 
"getExcelDataRowsByKeys", groups = "unit") public void getExcelDataRowsByKeys(USER myData) { assertData(myData); for (AREA_CODE eachArea : myData.getAreaCode()) { assertNotNull(eachArea.getAreaCode(), "Area code should not have been null"); } } @DataProvider(parallel = true) public Object[][] getExcelDataRowsByIndexes() throws ExcelDataProviderException { return dataSource.getExcelRows(new USER(), "2, 3-4"); } @Test(dataProvider = "getExcelDataRowsByIndexes", groups = "unit") public void getExcelDataRowsByIndexes(USER myData) { assertData(myData); for (AREA_CODE eachArea : myData.getAreaCode()) { assertNotNull(eachArea.getAreaCode(), "Area code should not have been null"); } } @DataProvider(parallel = true) public Object[][] getAllExcelRows() throws ExcelDataProviderException { return dataSource.getAllExcelRows(new USER()); } @Test(dataProvider = "getAllExcelRows", groups = "unit") public void getAllExcelRows(USER myData) { assertData(myData); for (AREA_CODE eachArea : myData.getAreaCode()) { assertNotNull(eachArea.getAreaCode(), "Area code should not have been null"); } } private void assertData(USER data) { assertNotNull(data); assertNotNull(data.getName()); assertNotNull(data.getPassword()); assertNotNull(data.getAreaCode()[0].getAreaCode()); } private int getRowCountFromSheet(String sheetName) { int rowCount = 0; try { XSSFWorkbook workBook = new XSSFWorkbook(pathName + fileName); rowCount = workBook.getSheet(sheetName).getPhysicalNumberOfRows(); } catch (IOException e) { // do nothing with the exception here } return rowCount; } public class Student { private String studentName; public void setStudentName(String name) { this.studentName = name; } public String getStudentName() { return this.studentName; } } @Test(groups = "unit") public void testGetRowContents() { Field[] fields = USER.class.getDeclaredFields(); List<String> rowContents = dataSource.getRowContents("User", 3, fields.length); assertNotNull(rowContents); assertTrue("[rama, abc123, 123456, 100.00, 
ph1,ph2,ph3, bnk1, 1-408-225-8040, 12, true, 12.5, 167045, 12.5, 2]" .equals(rowContents.toString())); } @Test(groups = "unit") public void testGetAllRawExcelRows() { List<Row> rows = dataSource.getAllRawExcelRows("User", true); assertNotNull(rows); Row singleRow = rows.get(3); assertTrue(singleRow.getCell(1).getStringCellValue().equals("binh")); assertTrue(singleRow.getCell(2).getStringCellValue().equals("abc124")); } }
/*
 * The MIT License
 *
 * Copyright 2018 Martin van Zijl.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.kohsuke.github;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.Locale;

import static org.kohsuke.github.internal.Previews.INERTIA;

/**
 * A GitHub project.
 *
 * @author Martin van Zijl
 * @see <a href="https://developer.github.com/v3/projects/">Projects</a>
 */
public class GHProject extends GHObject {
    protected GHObject owner;
    private String owner_url;
    private String html_url;
    private String name;
    private String body;
    private int number;
    private String state;
    private GHUser creator;

    @Override
    public URL getHtmlUrl() throws IOException {
        return GitHubClient.parseURL(html_url);
    }

    /**
     * Gets owner. The owner is resolved lazily from {@code owner_url}: an
     * organization, a user, or a repository depending on the URL shape.
     *
     * @return the owner, or null if the owner cannot be found (404)
     * @throws IOException
     *             the io exception
     */
    @SuppressFBWarnings(value = { "EI_EXPOSE_REP" }, justification = "Expected behavior")
    public GHObject getOwner() throws IOException {
        if (owner == null) {
            try {
                if (owner_url.contains("/orgs/")) {
                    owner = root().createRequest().withUrlPath(getOwnerUrl().getPath()).fetch(GHOrganization.class);
                } else if (owner_url.contains("/users/")) {
                    owner = root().createRequest().withUrlPath(getOwnerUrl().getPath()).fetch(GHUser.class);
                } else if (owner_url.contains("/repos/")) {
                    // path is "/repos/{owner}/{name}" -> elements [1]=owner, [2]=name
                    String[] pathElements = getOwnerUrl().getPath().split("/");
                    owner = GHRepository.read(root(), pathElements[1], pathElements[2]);
                }
            } catch (FileNotFoundException e) {
                // owner no longer exists; report as absent rather than failing
                return null;
            }
        }
        return owner;
    }

    /**
     * Gets owner url.
     *
     * @return the owner url
     */
    public URL getOwnerUrl() {
        return GitHubClient.parseURL(owner_url);
    }

    /**
     * Gets node id.
     *
     * @deprecated Use {@link GHObject#getNodeId()}
     * @return the node id
     */
    @Deprecated
    public String getNode_id() {
        return getNodeId();
    }

    /**
     * Gets name.
     *
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * Gets body.
     *
     * @return the body
     */
    public String getBody() {
        return body;
    }

    /**
     * Gets number.
     *
     * @return the number
     */
    public int getNumber() {
        return number;
    }

    /**
     * Gets state.
     *
     * @return the state
     */
    public ProjectState getState() {
        return Enum.valueOf(ProjectState.class, state.toUpperCase(Locale.ENGLISH));
    }

    /**
     * Gets creator.
     *
     * @return the creator
     */
    @SuppressFBWarnings(value = { "EI_EXPOSE_REP" }, justification = "Expected behavior")
    public GHUser getCreator() {
        return creator;
    }

    /**
     * Wrap gh project.
     *
     * @param root
     *            the root
     * @return the gh project
     */
    @Deprecated
    public GHProject wrap(GitHub root) {
        throw new RuntimeException("Do not use this method.");
    }

    /**
     * Wrap gh project.
     *
     * @param repo
     *            the repo
     * @return the gh project
     */
    @Deprecated
    public GHProject wrap(GHRepository repo) {
        throw new RuntimeException("Do not use this method.");
    }

    /**
     * Binds this project to its owning repository.
     *
     * @param repo
     *            the repo
     * @return the gh project
     */
    GHProject lateBind(GHRepository repo) {
        this.owner = repo;
        return this;
    }

    /** Issues a PATCH for a single field of this project. */
    private void edit(String key, Object value) throws IOException {
        root().createRequest().method("PATCH").withPreview(INERTIA).with(key, value).withUrlPath(getApiRoute()).send();
    }

    /**
     * Gets api route.
     *
     * @return the api route
     */
    protected String getApiRoute() {
        return "/projects/" + getId();
    }

    /**
     * Sets name.
     *
     * @param name
     *            the name
     * @throws IOException
     *             the io exception
     */
    public void setName(String name) throws IOException {
        edit("name", name);
    }

    /**
     * Sets body.
     *
     * @param body
     *            the body
     * @throws IOException
     *             the io exception
     */
    public void setBody(String body) throws IOException {
        edit("body", body);
    }

    /**
     * The enum ProjectState.
     */
    public enum ProjectState {
        OPEN, CLOSED
    }

    /**
     * Sets state.
     *
     * @param state
     *            the state
     * @throws IOException
     *             the io exception
     */
    public void setState(ProjectState state) throws IOException {
        // FIX: use an explicit Locale for case mapping, matching getState();
        // the default locale (e.g. Turkish dotless-i) could corrupt the value.
        edit("state", state.toString().toLowerCase(Locale.ENGLISH));
    }

    /**
     * The enum ProjectStateFilter.
     */
    public static enum ProjectStateFilter {
        ALL, OPEN, CLOSED
    }

    /**
     * Set the permission level that all members of the project's organization will have on this project. Only
     * applicable for organization-owned projects.
     *
     * @param permission
     *            the permission
     * @throws IOException
     *             the io exception
     */
    public void setOrganizationPermission(GHPermissionType permission) throws IOException {
        // FIX: locale-independent lowercasing, consistent with setState().
        edit("organization_permission", permission.toString().toLowerCase(Locale.ENGLISH));
    }

    /**
     * Sets visibility of the project within the organization. Only applicable for organization-owned projects.
     *
     * @param isPublic
     *            the is public
     * @throws IOException
     *             the io exception
     */
    public void setPublic(boolean isPublic) throws IOException {
        edit("public", isPublic);
    }

    /**
     * Delete.
     *
     * @throws IOException
     *             the io exception
     */
    public void delete() throws IOException {
        root().createRequest().withPreview(INERTIA).method("DELETE").withUrlPath(getApiRoute()).send();
    }

    /**
     * List columns paged iterable.
     *
     * @return the paged iterable
     * @throws IOException
     *             the io exception
     */
    public PagedIterable<GHProjectColumn> listColumns() throws IOException {
        final GHProject project = this;
        return root().createRequest()
                .withPreview(INERTIA)
                .withUrlPath(String.format("/projects/%d/columns", getId()))
                .toIterable(GHProjectColumn[].class, item -> item.lateBind(project));
    }

    /**
     * Create column gh project column.
     *
     * @param name
     *            the name
     * @return the gh project column
     * @throws IOException
     *             the io exception
     */
    public GHProjectColumn createColumn(String name) throws IOException {
        return root().createRequest()
                .method("POST")
                .withPreview(INERTIA)
                .with("name", name)
                .withUrlPath(String.format("/projects/%d/columns", getId()))
                .fetch(GHProjectColumn.class)
                .lateBind(this);
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.plugins;

import com.google.common.hash.Hashing;
import com.google.common.io.Files;
import com.intellij.ide.IdeBundle;
import com.intellij.idea.IdeaApplication;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import com.intellij.openapi.util.BuildNumber;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.HttpRequests;
import com.intellij.util.io.RequestBuilder;
import com.intellij.util.io.URLUtil;
import org.apache.http.client.utils.URIBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URLConnection;
import java.util.*;

/**
 * Fetches and parses plugin descriptor lists from the main plugin repository
 * and from user-configured custom repositories, caching the main repository's
 * list on disk ({@code availables.xml}).
 *
 * @author stathik
 * @since Mar 28, 2003
 */
public class RepositoryHelper {
  private static final Logger LOG = Logger.getInstance(RepositoryHelper.class);
  @SuppressWarnings("SpellCheckingInspection") private static final String PLUGIN_LIST_FILE = "availables.xml";

  /**
   * Returns a list of configured plugin hosts.
   * Note that the list always ends with {@code null} element denoting a main plugin repository.
   */
  @NotNull
  public static List<String> getPluginHosts() {
    List<String> hosts = ContainerUtil.newArrayList(UpdateSettings.getInstance().getPluginHosts());
    ContainerUtil.addIfNotNull(hosts, ApplicationInfoEx.getInstanceEx().getBuiltinPluginsUrl());
    hosts.add(null);  // main plugin repository
    return hosts;
  }

  /**
   * Loads list of plugins, compatible with a current build, from all configured repositories.
   * Duplicate plugin IDs are dropped: the first repository that provides an ID wins.
   */
  @NotNull
  public static List<IdeaPluginDescriptor> loadPluginsFromAllRepositories(@Nullable ProgressIndicator indicator) throws IOException {
    List<IdeaPluginDescriptor> result = new ArrayList<IdeaPluginDescriptor>();
    Set<String> addedPluginIds = new HashSet<String>();
    for (String host : getPluginHosts()) {
      List<IdeaPluginDescriptor> plugins = loadPlugins(host, null, indicator);
      for (IdeaPluginDescriptor plugin : plugins) {
        if (addedPluginIds.add(plugin.getPluginId().getIdString())) {
          result.add(plugin);
        }
      }
    }
    return result;
  }

  /**
   * Loads list of plugins, compatible with a current build, from a main plugin repository.
   */
  @NotNull
  public static List<IdeaPluginDescriptor> loadPlugins(@Nullable ProgressIndicator indicator) throws IOException {
    return loadPlugins(null, null, indicator);
  }

  /**
   * Loads list of plugins, compatible with a given build, from a given plugin repository (main repository if null).
   */
  @NotNull
  public static List<IdeaPluginDescriptor> loadPlugins(@Nullable String repositoryUrl,
                                                       @Nullable BuildNumber buildnumber,
                                                       @Nullable final ProgressIndicator indicator) throws IOException {
    // HTTPS is forced only for the main repository, only when the IDE is fully
    // loaded and the user's update settings allow a secure connection.
    boolean forceHttps = repositoryUrl == null &&
                         IdeaApplication.isLoaded() &&
                         UpdateSettings.getInstance().canUseSecureConnection();
    return loadPlugins(repositoryUrl, buildnumber, forceHttps, indicator);
  }

  /**
   * Loads the plugin list from the given repository (main if {@code repositoryUrl} is null).
   * For the main repository a CRC32 of the cached list is sent so the server can answer
   * 304 Not Modified, in which case the cached file is reused.
   */
  @NotNull
  public static List<IdeaPluginDescriptor> loadPlugins(@Nullable String repositoryUrl,
                                                       @Nullable BuildNumber buildnumber,
                                                       boolean forceHttps,
                                                       @Nullable final ProgressIndicator indicator) throws IOException {
    final URIBuilder uriBuilder;
    final File pluginListFile;  // non-null only for the main repository (cache file)
    try {
      if (repositoryUrl == null) {
        uriBuilder = new URIBuilder(ApplicationInfoImpl.getShadowInstance().getPluginsListUrl());
        pluginListFile = new File(PathManager.getPluginsPath(), PLUGIN_LIST_FILE);
        if (pluginListFile.length() > 0) {
          // checksum of the cached copy lets the server reply 304
          uriBuilder.addParameter("crc32", Files.hash(pluginListFile, Hashing.crc32()).toString());
        }
      }
      else {
        uriBuilder = new URIBuilder(repositoryUrl);
        pluginListFile = null;
      }
    }
    catch (URISyntaxException e) {
      throw new IOException(e);
    }

    // local file:// repositories get no build parameter
    if (!URLUtil.FILE_PROTOCOL.equals(uriBuilder.getScheme())) {
      uriBuilder.addParameter("build", (buildnumber != null ? buildnumber.asString() : ApplicationInfoImpl.getShadowInstance().getApiVersion()));
    }

    if (indicator != null) {
      indicator.setText2(IdeBundle.message("progress.connecting.to.plugin.manager", uriBuilder.getHost()));
    }

    RequestBuilder request = HttpRequests.request(uriBuilder.toString()).forceHttps(forceHttps);
    return process(repositoryUrl, request.connect(new HttpRequests.RequestProcessor<List<IdeaPluginDescriptor>>() {
      @Override
      public List<IdeaPluginDescriptor> process(@NotNull HttpRequests.Request request) throws IOException {
        if (indicator != null) {
          indicator.checkCanceled();
        }

        // server says our cached list is current -> parse the cached file
        URLConnection connection = request.getConnection();
        if (pluginListFile != null &&
            pluginListFile.length() > 0 &&
            connection instanceof HttpURLConnection &&
            ((HttpURLConnection)connection).getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) {
          return loadPluginList(pluginListFile);
        }

        if (indicator != null) {
          indicator.checkCanceled();
          indicator.setText2(IdeBundle.message("progress.downloading.list.of.plugins", uriBuilder.getHost()));
        }

        if (pluginListFile != null) {
          // serialize cache-file writers across threads
          synchronized (RepositoryHelper.class) {
            FileUtil.ensureExists(pluginListFile.getParentFile());
            request.saveToFile(pluginListFile, indicator);
            return loadPluginList(pluginListFile);
          }
        }
        else {
          return parsePluginList(request.getReader());
        }
      }
    }));
  }

  /**
   * Reads cached plugin descriptors from a file. Returns null if cache file does not exist.
   */
  @Nullable
  public static List<IdeaPluginDescriptor> loadCachedPlugins() throws IOException {
    File file = new File(PathManager.getPluginsPath(), PLUGIN_LIST_FILE);
    return file.length() == 0 ? null : loadPluginList(file);
  }

  // Parses the cached list file as UTF-8.
  private static List<IdeaPluginDescriptor> loadPluginList(@NotNull File file) throws IOException {
    return parsePluginList(new InputStreamReader(new FileInputStream(file), CharsetToolkit.UTF8_CHARSET));
  }

  // SAX-parses the repository XML into descriptors; always closes the reader.
  private static List<IdeaPluginDescriptor> parsePluginList(@NotNull Reader reader) throws IOException {
    try {
      SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
      RepositoryContentHandler handler = new RepositoryContentHandler();
      parser.parse(new InputSource(reader), handler);
      return handler.getPluginsList();
    }
    catch (ParserConfigurationException e) {
      throw new IOException(e);
    }
    catch (SAXException e) {
      throw new IOException(e);
    }
    finally {
      reader.close();
    }
  }

  // Post-processes parsed nodes: drops malformed records, tags each node with
  // its repository, and derives a name from the download URL when missing.
  private static List<IdeaPluginDescriptor> process(@Nullable String repositoryUrl, List<IdeaPluginDescriptor> list) {
    for (Iterator<IdeaPluginDescriptor> i = list.iterator(); i.hasNext(); ) {
      PluginNode node = (PluginNode)i.next();

      if (node.getPluginId() == null || repositoryUrl != null && node.getDownloadUrl() == null) {
        LOG.warn("Malformed plugin record (id:" + node.getPluginId() + " repository:" + repositoryUrl + ")");
        i.remove();
        continue;
      }

      if (repositoryUrl != null) {
        node.setRepositoryName(repositoryUrl);
      }
      if (node.getName() == null) {
        String url = node.getDownloadUrl();
        String name = FileUtil.getNameWithoutExtension(url.substring(url.lastIndexOf('/') + 1));
        node.setName(name);
      }
    }

    return list;
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.globalaccelerator.model;

import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;

/**
 * Result shape of the {@code ListCustomRoutingPortMappingsByDestination} API call.
 *
 * @see <a
 *      href="http://docs.aws.amazon.com/goto/WebAPI/globalaccelerator-2018-08-08/ListCustomRoutingPortMappingsByDestination"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListCustomRoutingPortMappingsByDestinationResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements
        Serializable, Cloneable {

    /**
     * <p>
     * The port mappings for the endpoint IP address that you specified in the request.
     * </p>
     */
    private java.util.List<DestinationPortMapping> destinationPortMappings;

    /**
     * <p>
     * The token for the next set of results. You receive this token from a previous call.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The port mappings for the endpoint IP address that you specified in the request.
     * </p>
     *
     * @return The port mappings for the endpoint IP address that you specified in the request.
     */
    public java.util.List<DestinationPortMapping> getDestinationPortMappings() {
        return destinationPortMappings;
    }

    /**
     * <p>
     * The port mappings for the endpoint IP address that you specified in the request.
     * </p>
     *
     * @param destinationPortMappings
     *        The port mappings for the endpoint IP address that you specified in the request.
     */
    public void setDestinationPortMappings(java.util.Collection<DestinationPortMapping> destinationPortMappings) {
        if (destinationPortMappings == null) {
            this.destinationPortMappings = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection cannot leak in.
        this.destinationPortMappings = new java.util.ArrayList<DestinationPortMapping>(destinationPortMappings);
    }

    /**
     * <p>
     * The port mappings for the endpoint IP address that you specified in the request.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDestinationPortMappings(java.util.Collection)} or
     * {@link #withDestinationPortMappings(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param destinationPortMappings
     *        The port mappings for the endpoint IP address that you specified in the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListCustomRoutingPortMappingsByDestinationResult withDestinationPortMappings(DestinationPortMapping... destinationPortMappings) {
        if (this.destinationPortMappings == null) {
            setDestinationPortMappings(new java.util.ArrayList<DestinationPortMapping>(destinationPortMappings.length));
        }
        for (DestinationPortMapping ele : destinationPortMappings) {
            this.destinationPortMappings.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The port mappings for the endpoint IP address that you specified in the request.
     * </p>
     *
     * @param destinationPortMappings
     *        The port mappings for the endpoint IP address that you specified in the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListCustomRoutingPortMappingsByDestinationResult withDestinationPortMappings(java.util.Collection<DestinationPortMapping> destinationPortMappings) {
        setDestinationPortMappings(destinationPortMappings);
        return this;
    }

    /**
     * <p>
     * The token for the next set of results. You receive this token from a previous call.
     * </p>
     *
     * @param nextToken
     *        The token for the next set of results. You receive this token from a previous call.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The token for the next set of results. You receive this token from a previous call.
     * </p>
     *
     * @return The token for the next set of results. You receive this token from a previous call.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The token for the next set of results. You receive this token from a previous call.
     * </p>
     *
     * @param nextToken
     *        The token for the next set of results. You receive this token from a previous call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListCustomRoutingPortMappingsByDestinationResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDestinationPortMappings() != null)
            sb.append("DestinationPortMappings: ").append(getDestinationPortMappings()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof (rather than getClass()) preserves the original generated-SDK
        // behavior of accepting subclass instances; it also rejects null.
        if (!(obj instanceof ListCustomRoutingPortMappingsByDestinationResult))
            return false;
        ListCustomRoutingPortMappingsByDestinationResult other = (ListCustomRoutingPortMappingsByDestinationResult) obj;
        // Objects.equals collapses the original XOR-null-check + equals dance
        // into the equivalent null-safe comparison.
        return Objects.equals(getDestinationPortMappings(), other.getDestinationPortMappings())
                && Objects.equals(getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the identical 31-based accumulation (seed 1, null -> 0)
        // that the original hand-rolled loop computed, so hash values are unchanged.
        return Objects.hash(getDestinationPortMappings(), getNextToken());
    }

    @Override
    public ListCustomRoutingPortMappingsByDestinationResult clone() {
        try {
            return (ListCustomRoutingPortMappingsByDestinationResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/** * Most of the code in the Qalingo project is copyrighted Hoteia and licensed * under the Apache License Version 2.0 (release version 0.8.0) * http://www.apache.org/licenses/LICENSE-2.0 * * Copyright (c) Hoteia, 2012-2014 * http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com * */ package org.hoteia.qalingo.core.web.mvc.controller; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.hoteia.qalingo.core.Constants; import org.hoteia.qalingo.core.ModelConstants; import org.hoteia.qalingo.core.domain.EngineSetting; import org.hoteia.qalingo.core.domain.EngineSettingValue; import org.hoteia.qalingo.core.i18n.enumtype.ScopeCommonMessage; import org.hoteia.qalingo.core.i18n.enumtype.ScopeReferenceDataMessage; import org.hoteia.qalingo.core.i18n.message.CoreMessageSource; import org.hoteia.qalingo.core.pojo.RequestData; import org.hoteia.qalingo.core.service.EngineSettingService; import org.hoteia.qalingo.core.service.ReferentialDataService; import org.hoteia.qalingo.core.service.UrlService; import org.hoteia.qalingo.core.web.mvc.viewbean.MonitoringViewBean; import org.hoteia.qalingo.core.web.mvc.viewbean.TrackingViewBean; import org.hoteia.qalingo.core.web.mvc.viewbean.ValueBean; import org.hoteia.qalingo.core.web.util.RequestUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.validation.FieldError; import org.springframework.web.bind.annotation.ModelAttribute; /** * * <p> * <a href="AbstractQalingoController.java.html"><i>View Source</i></a> * </p> * * @author Denis Gosset <a 
href="http://www.hoteia.com"><i>Hoteia.com</i></a> * */ public abstract class AbstractQalingoController { protected final Logger logger = LoggerFactory.getLogger(getClass()); @Autowired protected CoreMessageSource coreMessageSource; @Autowired protected EngineSettingService engineSettingService; @Autowired protected UrlService urlService; @Autowired protected ReferentialDataService referentialDataService; @Autowired protected RequestUtil requestUtil; /** * */ @ModelAttribute protected void initVelocityLayout(final HttpServletRequest request, final Model model) throws Exception { // Velocity layout mandatory attributes model.addAttribute(Constants.VELOCITY_LAYOUT_ATTRIBUTE_HEAD_META, "../_include/head-common-empty-content.vm"); model.addAttribute(Constants.VELOCITY_LAYOUT_ATTRIBUTE_HEAD_CSS_META, "../_include/head-common-empty-content.vm"); model.addAttribute(Constants.VELOCITY_LAYOUT_ATTRIBUTE_HEAD_CONTENT, "../_include/head-common-empty-content.vm"); model.addAttribute(Constants.VELOCITY_LAYOUT_ATTRIBUTE_FOOTER_SCRIPT_CONTENT, "../_include/body-footer-empty-script-content.vm"); } /** * */ @ModelAttribute protected void handleMessages(final HttpServletRequest request, final Model model) throws Exception { // WE USE SESSION FOR MESSAGES BECAUSE REDIRECT CLEAN REQUEST // ERROR MESSAGE String errorMessage = (String) request.getSession().getAttribute(Constants.ERROR_MESSAGE); if(StringUtils.isNotEmpty(errorMessage)){ model.addAttribute(Constants.ERROR_MESSAGE, errorMessage); request.getSession().removeAttribute(Constants.ERROR_MESSAGE); } // WARNING MESSAGE String warningMessage = (String) request.getSession().getAttribute(Constants.WARNING_MESSAGE); if(StringUtils.isNotEmpty(warningMessage)){ model.addAttribute(Constants.WARNING_MESSAGE, warningMessage); request.getSession().removeAttribute(Constants.WARNING_MESSAGE); } // INFO MESSAGE String infoMessage = (String) request.getSession().getAttribute(Constants.INFO_MESSAGE); if(StringUtils.isNotEmpty(infoMessage)){ 
model.addAttribute(Constants.INFO_MESSAGE, infoMessage); request.getSession().removeAttribute(Constants.INFO_MESSAGE); } // SUCCESS MESSAGE String successMessage = (String) request.getSession().getAttribute(Constants.SUCCESS_MESSAGE); if(StringUtils.isNotEmpty(successMessage)){ model.addAttribute(Constants.SUCCESS_MESSAGE, successMessage); request.getSession().removeAttribute(Constants.SUCCESS_MESSAGE); } } /** * */ @ModelAttribute(ModelConstants.TRACKING_VIEW_BEAN) protected TrackingViewBean initTracking(final HttpServletRequest request, final Model model) throws Exception { TrackingViewBean trackingViewBean = null; final String contextValue = requestUtil.getCurrentContextNameValue(); EngineSetting webTrackingNumberEngineSetting = engineSettingService.getSettingWebTrackingNumber(); if(webTrackingNumberEngineSetting != null){ EngineSettingValue webTrackingNumberEngineSettingValue = webTrackingNumberEngineSetting.getEngineSettingValue(contextValue); if(webTrackingNumberEngineSettingValue != null && StringUtils.isNotEmpty(webTrackingNumberEngineSettingValue.getValue())){ trackingViewBean = new TrackingViewBean(); trackingViewBean.setTrackingNumber(webTrackingNumberEngineSettingValue.getValue()); EngineSetting webTrackingNameEngineSetting = engineSettingService.getSettingWebTrackingName(); if(webTrackingNameEngineSetting != null){ EngineSettingValue webTrackingNameEngineSettingValue = webTrackingNameEngineSetting.getEngineSettingValue(contextValue); if(webTrackingNameEngineSettingValue != null){ trackingViewBean.setTrackingName(webTrackingNameEngineSettingValue.getValue()); } } } } return trackingViewBean; } /** * */ @ModelAttribute(ModelConstants.URL_BACK) protected String initBackUrl(final HttpServletRequest request, final Model model) throws Exception { String url = requestUtil.getCurrentRequestUrl(request); List<String> excludedPatterns = requestUtil.getCommonUrlExcludedPatterns(); excludedPatterns.add(url); return requestUtil.getLastRequestUrl(request, 
excludedPatterns); } /** * */ @ModelAttribute(ModelConstants.MONITORING_VIEW_BEAN) protected MonitoringViewBean initMonitoring(final HttpServletRequest request, final Model model) throws Exception { MonitoringViewBean monitoringViewBean = new MonitoringViewBean(); final String contextValue = requestUtil.getCurrentContextNameValue(); EngineSetting webMonitoringNumberEngineSetting = engineSettingService.getSettingWebMonitoringNumber(); if(webMonitoringNumberEngineSetting != null){ EngineSettingValue webMonitoringNumberEngineSettingValue = webMonitoringNumberEngineSetting.getEngineSettingValue(contextValue); if(webMonitoringNumberEngineSettingValue != null && StringUtils.isNotEmpty(webMonitoringNumberEngineSettingValue.getValue())){ monitoringViewBean = new MonitoringViewBean(); monitoringViewBean.setMonitoringNumber(webMonitoringNumberEngineSettingValue.getValue()); EngineSetting webMonitoringNameEngineSetting = engineSettingService.getSettingWebMonitoringName(); EngineSettingValue webMonitoringNameEngineSettingValue = webMonitoringNameEngineSetting.getEngineSettingValue(contextValue); if(webMonitoringNameEngineSettingValue != null){ monitoringViewBean.setMonitoringName(webMonitoringNameEngineSettingValue.getValue()); } } } return monitoringViewBean; } protected List<ValueBean> getCountries(final RequestData requestData) throws Exception { List<ValueBean> countriesValues = new ArrayList<ValueBean>(); try { final Locale locale = requestData.getLocale(); final Map<String, String> countries = referentialDataService.getCountriesByLocale(locale); Set<String> countriesKey = countries.keySet(); for (Iterator<String> iterator = countriesKey.iterator(); iterator.hasNext();) { final String countryKey = (String) iterator.next(); countriesValues.add(new ValueBean(countryKey.replace(Constants.COUNTRY_MESSAGE_PREFIX, ""), countries.get(countryKey))); } Collections.sort(countriesValues, new Comparator<ValueBean>() { @Override public int compare(ValueBean o1, ValueBean o2) { return 
o1.getValue().compareTo(o2.getValue()); } }); } catch (Exception e) { logger.error("", e); } return countriesValues; } /** * @throws Exception * */ protected String getCurrentVelocityPath(HttpServletRequest request) throws Exception { final RequestData requestData = requestUtil.getRequestData(request); return requestUtil.getCurrentVelocityWebPrefix(requestData); } protected void addMessageError(BindingResult result, Exception e, String formKey, String fieldKey, String errorMessage){ if(StringUtils.isEmpty(errorMessage)){ errorMessage = ""; // EMPTY VALUE TO EVENT VELOCITY MethodInvocationException } FieldError error = new FieldError(formKey, fieldKey, errorMessage); result.addError(error); result.rejectValue(error.getField(), ""); if(e != null){ logger.error(errorMessage, e); } else { logger.warn(errorMessage); } } /** * @throws Exception * */ protected void addSessionErrorMessage(HttpServletRequest request, String message) throws Exception { request.getSession().setAttribute(Constants.ERROR_MESSAGE, message); } /** * @throws Exception * */ protected void addSessionWarningMessage(HttpServletRequest request, String message) throws Exception { request.getSession().setAttribute(Constants.WARNING_MESSAGE, message); } /** * @throws Exception * */ protected void addSessionInfoMessage(HttpServletRequest request, String message) throws Exception { request.getSession().setAttribute(Constants.INFO_MESSAGE, message); } /** * @throws Exception * */ protected void addSessionSuccessMessage(HttpServletRequest request, String message) throws Exception { request.getSession().setAttribute(Constants.SUCCESS_MESSAGE, message); } /** * @throws Exception * */ protected void addRequestErrorMessage(HttpServletRequest request, String message) throws Exception { request.setAttribute(Constants.ERROR_MESSAGE, message); } /** * @throws Exception * */ protected void addRequestWarningMessage(HttpServletRequest request, String message) throws Exception { 
request.setAttribute(Constants.WARNING_MESSAGE, message); } /** * @throws Exception * */ protected void addRequestInfoMessage(HttpServletRequest request, String message) throws Exception { request.setAttribute(Constants.INFO_MESSAGE, message); } /** * @throws Exception * */ protected void addRequestSuccessMessage(HttpServletRequest request, String message) throws Exception { request.setAttribute(Constants.SUCCESS_MESSAGE, message); } protected String getCommonMessage(ScopeCommonMessage scope, String key, Locale locale) { return getCommonMessage(scope.getPropertyKey(), key, locale); } protected String getCommonMessage(ScopeCommonMessage scope, String key, Object[] params, Locale locale) { return getCommonMessage(scope.getPropertyKey(), key, params, locale); } protected String getCommonMessage(String scope, String key, Locale locale) { return coreMessageSource.getCommonMessage(scope, key, locale); } protected String getCommonMessage(String scope, String key, Object[] params, Locale locale) { return coreMessageSource.getCommonMessage(scope, key, params, locale); } protected String getReferenceData(ScopeReferenceDataMessage scope, String key, Locale locale) { return getReferenceData(scope.getPropertyKey(), key, locale); } protected String getReferenceData(ScopeReferenceDataMessage scope, String key, Object[] params, Locale locale) { return getReferenceData(scope.getPropertyKey(), key, params, locale); } protected String getReferenceData(String scope, String key, Locale locale) { return coreMessageSource.getReferenceData(scope, key, locale); } protected String getReferenceData(String scope, String key, Object[] params, Locale locale) { return coreMessageSource.getReferenceData(scope, key, params, locale); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server; import java.io.IOException; import java.net.ConnectException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.StandbyException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.yarn.api.ApplicationClientProtocol; import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.FailApplicationAttemptRequest; import org.apache.hadoop.yarn.api.protocolrecords.FailApplicationAttemptResponse; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import 
org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; import 
org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest; import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationListRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationListResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse; import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest; import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse; import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest; import 
org.apache.hadoop.yarn.api.protocolrecords.SignalContainerResponse; import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityRequest; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityResponse; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest; import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsResponse; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SignalContainerResponsePBImpl; import org.apache.hadoop.yarn.api.records.AMCommand; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.NMToken; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.ReservationAllocationState; import org.apache.hadoop.yarn.api.records.ReservationId; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.UpdatedContainer; import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.exceptions.ApplicationMasterNotRegisteredException; import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; import 
org.apache.hadoop.yarn.exceptions.InvalidApplicationMasterRequestException; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.CheckForDecommissioningNodesRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.CheckForDecommissioningNodesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshClusterMaxPriorityRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshClusterMaxPriorityResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResourcesRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResourcesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshQueuesRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshQueuesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshServiceAclsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsConfigurationResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsRequest; import 
org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
import org.apache.hadoop.yarn.server.utils.AMRMClientUtils;
import org.apache.hadoop.yarn.util.Records;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Strings;

/**
 * Mock Resource Manager facade implementation that exposes all the methods
 * implemented by the YARN RM. The behavior and the values returned by this mock
 * implementation is expected by the Router/AMRMProxy unit test cases. So please
 * change the implementation with care.
 */
public class MockResourceManagerFacade implements ApplicationClientProtocol,
    ApplicationMasterProtocol, ResourceManagerAdministrationProtocol {

  private static final Logger LOG =
      LoggerFactory.getLogger(MockResourceManagerFacade.class);

  // Applications submitted via submitApplication() and not yet force-killed.
  private HashSet<ApplicationId> applicationMap = new HashSet<>();

  // AMRM token identifier (application attempt id string) -> ids of the
  // containers currently allocated to that attempt. Guarded by locking the
  // map itself.
  private HashMap<String, List<ContainerId>> applicationContainerIdMap =
      new HashMap<String, List<ContainerId>>();

  // ContainerId -> Container record for every container handed out by
  // allocate(); entries are removed when the attempt finishes.
  private HashMap<ContainerId, Container> allocatedContainerMap =
      new HashMap<ContainerId, Container>();

  // Monotonically increasing source for fake container ids.
  private AtomicInteger containerIndex = new AtomicInteger(0);
  private Configuration conf;
  private int subClusterId;
  final private AtomicInteger applicationCounter = new AtomicInteger(0);

  // True if the Mock RM is running, false otherwise.
  // This property allows us to write tests for specific scenario as Yarn RM
  // down e.g. network issue, failover.
  private boolean isRunning;

  // When set, the next AM protocol call fails with
  // ApplicationMasterNotRegisteredException to force an AM re-register.
  private boolean shouldReRegisterNext = false;

  // For unit test synchronization
  private static Object syncObj = new Object();

  /** Returns the monitor used by tests to coordinate with this mock RM. */
  public static Object getSyncObj() {
    return syncObj;
  }

  public MockResourceManagerFacade(Configuration conf,
      int startContainerIndex) {
    this(conf, startContainerIndex, 0, true);
  }

  public MockResourceManagerFacade(Configuration conf, int startContainerIndex,
      int subClusterId, boolean isRunning) {
    this.conf = conf;
    this.containerIndex.set(startContainerIndex);
    this.subClusterId = subClusterId;
    this.isRunning = isRunning;
  }

  /** Arms a one-shot "AM must re-register" failure for the next AM call. */
  public void setShouldReRegisterNext() {
    shouldReRegisterNext = true;
  }

  /** Starts (true) or stops (false) the mock RM; see validateRunning(). */
  public void setRunningMode(boolean mode) {
    this.isRunning = mode;
  }

  // Extracts the application attempt id from the AMRM token attached to the
  // current UGI; returns the empty string when no AMRM token is present.
  private static String getAppIdentifier() throws IOException {
    AMRMTokenIdentifier result = null;
    UserGroupInformation remoteUgi = UserGroupInformation.getCurrentUser();
    Set<TokenIdentifier> tokenIds = remoteUgi.getTokenIdentifiers();
    for (TokenIdentifier tokenId : tokenIds) {
      if (tokenId instanceof AMRMTokenIdentifier) {
        result = (AMRMTokenIdentifier) tokenId;
        break;
      }
    }
    return result != null ? result.getApplicationAttemptId().toString() : "";
  }

  // Simulates an unreachable RM (e.g. network issue, failover) when the mock
  // has been stopped via setRunningMode(false).
  private void validateRunning() throws ConnectException {
    if (!isRunning) {
      throw new ConnectException("RM is stopped");
    }
  }

  @Override
  public RegisterApplicationMasterResponse registerApplicationMaster(
      RegisterApplicationMasterRequest request)
      throws YarnException, IOException {
    validateRunning();

    String amrmToken = getAppIdentifier();
    LOG.info("Registering application attempt: " + amrmToken);

    shouldReRegisterNext = false;

    synchronized (applicationContainerIdMap) {
      if (applicationContainerIdMap.containsKey(amrmToken)) {
        throw new InvalidApplicationMasterRequestException(
            AMRMClientUtils.APP_ALREADY_REGISTERED_MESSAGE);
      }
      // Keep track of the containers that are returned to this application
      applicationContainerIdMap.put(amrmToken, new ArrayList<ContainerId>());
    }

    // Make sure we wait for certain test cases last in the method
    synchronized (syncObj) {
      syncObj.notifyAll();
      // We reuse the port number to indicate whether the unit test want us to
      // wait here
      if (request.getRpcPort() > 1000) {
        LOG.info("Register call in RM start waiting");
        try {
          syncObj.wait();
          LOG.info("Register call in RM wait finished");
        } catch (InterruptedException e) {
          LOG.info("Register call in RM wait interrupted", e);
        }
      }
    }

    return RegisterApplicationMasterResponse.newInstance(null, null, null,
        null, null, request.getHost(), null);
  }

  @Override
  public FinishApplicationMasterResponse finishApplicationMaster(
      FinishApplicationMasterRequest request)
      throws YarnException, IOException {
    validateRunning();

    String amrmToken = getAppIdentifier();
    LOG.info("Finishing application attempt: " + amrmToken);

    if (shouldReRegisterNext) {
      String message = "AM is not registered, should re-register.";
      LOG.warn(message);
      throw new ApplicationMasterNotRegisteredException(message);
    }

    synchronized (applicationContainerIdMap) {
      // Remove the containers that were being tracked for this application
      Assert.assertTrue("The application id is NOT registered: " + amrmToken,
          applicationContainerIdMap.containsKey(amrmToken));
      List<ContainerId> ids = applicationContainerIdMap.remove(amrmToken);
      for (ContainerId c : ids) {
        allocatedContainerMap.remove(c);
      }
    }

    return FinishApplicationMasterResponse.newInstance(
        request.getFinalApplicationStatus() == FinalApplicationStatus.SUCCEEDED
            ? true : false);
  }

  // Builds a deterministic fake ApplicationId (fixed cluster timestamp).
  protected ApplicationId getApplicationId(int id) {
    return ApplicationId.newInstance(12345, id);
  }

  // Builds a fake attempt id (always attempt 1) for the given application.
  protected ApplicationAttemptId getApplicationAttemptId(int id) {
    return ApplicationAttemptId.newInstance(getApplicationId(id), 1);
  }

  @SuppressWarnings("deprecation")
  @Override
  public AllocateResponse allocate(AllocateRequest request)
      throws YarnException, IOException {
    validateRunning();

    // The mock handles either an askList or a releaseList per heartbeat,
    // never both.
    if (request.getAskList() != null && request.getAskList().size() > 0
        && request.getReleaseList() != null
        && request.getReleaseList().size() > 0) {
      Assert.fail("The mock RM implementation does not support receiving "
          + "askList and releaseList in the same heartbeat");
    }

    String amrmToken = getAppIdentifier();
    LOG.info("Allocate from application attempt: " + amrmToken);

    if (shouldReRegisterNext) {
      String message = "AM is not registered, should re-register.";
      LOG.warn(message);
      throw new ApplicationMasterNotRegisteredException(message);
    }

    // Grant every ask immediately: one fake container per requested count.
    ArrayList<Container> containerList = new ArrayList<Container>();
    if (request.getAskList() != null) {
      for (ResourceRequest rr : request.getAskList()) {
        for (int i = 0; i < rr.getNumContainers(); i++) {
          ContainerId containerId = ContainerId.newInstance(
              getApplicationAttemptId(1), containerIndex.incrementAndGet());
          Container container = Records.newRecord(Container.class);
          container.setId(containerId);
          container.setPriority(rr.getPriority());

          // We don't use the node for running containers in the test cases. So
          // it is OK to hard code it to some dummy value
          NodeId nodeId = NodeId.newInstance(
              !Strings.isNullOrEmpty(rr.getResourceName())
                  ? rr.getResourceName() : "dummy", 1000);
          container.setNodeId(nodeId);
          container.setResource(rr.getCapability());
          containerList.add(container);

          synchronized (applicationContainerIdMap) {
            // Keep track of the containers returned to this application. We
            // will need it in future
            Assert.assertTrue(
                "The application id is Not registered before allocate(): "
                    + amrmToken,
                applicationContainerIdMap.containsKey(amrmToken));
            List<ContainerId> ids = applicationContainerIdMap.get(amrmToken);
            ids.add(containerId);
            this.allocatedContainerMap.put(containerId, container);
          }
        }
      }
    }

    if (request.getReleaseList() != null
        && request.getReleaseList().size() > 0) {
      LOG.info("Releasing containers: " + request.getReleaseList().size());
      synchronized (applicationContainerIdMap) {
        Assert.assertTrue(
            "The application id is not registered before allocate(): "
                + amrmToken,
            applicationContainerIdMap.containsKey(amrmToken));
        List<ContainerId> ids = applicationContainerIdMap.get(amrmToken);

        for (ContainerId id : request.getReleaseList()) {
          boolean found = false;
          for (ContainerId c : ids) {
            if (c.equals(id)) {
              found = true;
              break;
            }
          }

          Assert.assertTrue("ContainerId " + id
              + " being released is not valid for application: "
              + conf.get("AMRMTOKEN"), found);

          ids.remove(id);

          // Return the released container back to the AM with new fake Ids.
          // The test case does not care about the IDs. The IDs are faked
          // because otherwise the LRM will throw duplication identifier
          // exception. This returning of fake containers is ONLY done for
          // testing purpose - for the test code to get confirmation that the
          // sub-cluster resource managers received the release request
          ContainerId fakeContainerId = ContainerId.newInstance(
              getApplicationAttemptId(1), containerIndex.incrementAndGet());
          Container fakeContainer = allocatedContainerMap.get(id);
          fakeContainer.setId(fakeContainerId);
          containerList.add(fakeContainer);
        }
      }
    }

    LOG.info("Allocating containers: " + containerList.size()
        + " for application attempt: " + conf.get("AMRMTOKEN"));

    // Always issue a new AMRMToken as if RM rolled master key
    Token newAMRMToken = Token.newInstance(new byte[0], "", new byte[0], "");

    return AllocateResponse.newInstance(0, new ArrayList<ContainerStatus>(),
        containerList, new ArrayList<NodeReport>(), null, AMCommand.AM_RESYNC,
        1, null, new ArrayList<NMToken>(), newAMRMToken,
        new ArrayList<UpdatedContainer>());
  }

  @Override
  public GetApplicationReportResponse getApplicationReport(
      GetApplicationReportRequest request) throws YarnException, IOException {
    validateRunning();

    // Every application is reported as ACCEPTED with a single attempt.
    GetApplicationReportResponse response =
        Records.newRecord(GetApplicationReportResponse.class);
    ApplicationReport report = Records.newRecord(ApplicationReport.class);
    report.setYarnApplicationState(YarnApplicationState.ACCEPTED);
    report.setApplicationId(request.getApplicationId());
    report.setCurrentApplicationAttemptId(
        ApplicationAttemptId.newInstance(request.getApplicationId(), 1));
    response.setApplicationReport(report);
    return response;
  }

  @Override
  public GetApplicationAttemptReportResponse getApplicationAttemptReport(
      GetApplicationAttemptReportRequest request)
      throws YarnException, IOException {
    validateRunning();

    // Every attempt is reported as LAUNCHED.
    GetApplicationAttemptReportResponse response =
        Records.newRecord(GetApplicationAttemptReportResponse.class);
    ApplicationAttemptReport report =
        Records.newRecord(ApplicationAttemptReport.class);
    report.setApplicationAttemptId(request.getApplicationAttemptId());
    report.setYarnApplicationAttemptState(
        YarnApplicationAttemptState.LAUNCHED);
    response.setApplicationAttemptReport(report);
    return response;
  }

  @Override
  public GetNewApplicationResponse getNewApplication(
      GetNewApplicationRequest request) throws YarnException, IOException {
    validateRunning();

    // The sub-cluster id doubles as the cluster timestamp so ids from
    // different mock sub-clusters never collide.
    return GetNewApplicationResponse.newInstance(ApplicationId.newInstance(
        subClusterId, applicationCounter.incrementAndGet()), null, null);
  }

  @Override
  public SubmitApplicationResponse submitApplication(
      SubmitApplicationRequest request) throws YarnException, IOException {
    validateRunning();

    ApplicationId appId = null;
    if (request.getApplicationSubmissionContext() != null) {
      appId = request.getApplicationSubmissionContext().getApplicationId();
    }
    LOG.info("Application submitted: " + appId);
    applicationMap.add(appId);
    return SubmitApplicationResponse.newInstance();
  }

  @Override
  public KillApplicationResponse forceKillApplication(
      KillApplicationRequest request) throws YarnException, IOException {
    validateRunning();

    ApplicationId appId = null;
    if (request.getApplicationId() != null) {
      appId = request.getApplicationId();
      // Killing an application that was never submitted is an error the
      // tests rely on.
      if (!applicationMap.remove(appId)) {
        throw new ApplicationNotFoundException(
            "Trying to kill an absent application: " + appId);
      }
    }
    LOG.info("Force killing application: " + appId);
    return KillApplicationResponse.newInstance(true);
  }

  // ---------------------------------------------------------------------
  // The remaining protocol methods are stubs: they only check that the mock
  // RM is running and then return an empty/default response.
  // ---------------------------------------------------------------------

  @Override
  public GetClusterMetricsResponse getClusterMetrics(
      GetClusterMetricsRequest request) throws YarnException, IOException {
    validateRunning();

    return GetClusterMetricsResponse.newInstance(null);
  }

  @Override
  public GetApplicationsResponse getApplications(
      GetApplicationsRequest request) throws YarnException, IOException {
    validateRunning();

    return GetApplicationsResponse.newInstance(null);
  }

  @Override
  public GetClusterNodesResponse getClusterNodes(
      GetClusterNodesRequest request) throws YarnException, IOException {
    validateRunning();

    return GetClusterNodesResponse.newInstance(null);
  }

  @Override
  public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
      throws YarnException, IOException {
    validateRunning();

    return GetQueueInfoResponse.newInstance(null);
  }

  @Override
  public GetQueueUserAclsInfoResponse getQueueUserAcls(
      GetQueueUserAclsInfoRequest request) throws YarnException, IOException {
    validateRunning();

    return GetQueueUserAclsInfoResponse.newInstance(null);
  }

  @Override
  public GetDelegationTokenResponse getDelegationToken(
      GetDelegationTokenRequest request) throws YarnException, IOException {
    validateRunning();

    return GetDelegationTokenResponse.newInstance(null);
  }

  @Override
  public RenewDelegationTokenResponse renewDelegationToken(
      RenewDelegationTokenRequest request) throws YarnException, IOException {
    validateRunning();

    return RenewDelegationTokenResponse.newInstance(0);
  }

  @Override
  public CancelDelegationTokenResponse cancelDelegationToken(
      CancelDelegationTokenRequest request) throws YarnException, IOException {
    validateRunning();

    return CancelDelegationTokenResponse.newInstance();
  }

  @Override
  public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(
      MoveApplicationAcrossQueuesRequest request)
      throws YarnException, IOException {
    validateRunning();

    return MoveApplicationAcrossQueuesResponse.newInstance();
  }

  @Override
  public GetApplicationAttemptsResponse getApplicationAttempts(
      GetApplicationAttemptsRequest request)
      throws YarnException, IOException {
    validateRunning();

    return GetApplicationAttemptsResponse.newInstance(null);
  }

  @Override
  public GetContainerReportResponse getContainerReport(
      GetContainerReportRequest request) throws YarnException, IOException {
    validateRunning();

    return GetContainerReportResponse.newInstance(null);
  }

  @Override
  public GetContainersResponse getContainers(GetContainersRequest request)
      throws YarnException, IOException {
    validateRunning();

    return GetContainersResponse.newInstance(null);
  }

  @Override
  public ReservationSubmissionResponse submitReservation(
      ReservationSubmissionRequest request) throws YarnException, IOException {
    validateRunning();

    return ReservationSubmissionResponse.newInstance();
  }

  @Override
  public ReservationListResponse listReservations(
      ReservationListRequest request) throws YarnException, IOException {
    validateRunning();

    return ReservationListResponse
        .newInstance(new ArrayList<ReservationAllocationState>());
  }

  @Override
  public ReservationUpdateResponse updateReservation(
      ReservationUpdateRequest request) throws YarnException, IOException {
    validateRunning();

    return ReservationUpdateResponse.newInstance();
  }

  @Override
  public ReservationDeleteResponse deleteReservation(
      ReservationDeleteRequest request) throws YarnException, IOException {
    validateRunning();

    return ReservationDeleteResponse.newInstance();
  }

  @Override
  public GetNodesToLabelsResponse getNodeToLabels(
      GetNodesToLabelsRequest request) throws YarnException, IOException {
    validateRunning();

    return GetNodesToLabelsResponse
        .newInstance(new HashMap<NodeId, Set<String>>());
  }

  @Override
  public GetClusterNodeLabelsResponse getClusterNodeLabels(
      GetClusterNodeLabelsRequest request) throws YarnException, IOException {
    validateRunning();

    return GetClusterNodeLabelsResponse
        .newInstance(new ArrayList<NodeLabel>());
  }

  @Override
  public GetLabelsToNodesResponse getLabelsToNodes(
      GetLabelsToNodesRequest request) throws YarnException, IOException {
    validateRunning();

    return GetLabelsToNodesResponse.newInstance(null);
  }

  @Override
  public GetNewReservationResponse getNewReservation(
      GetNewReservationRequest request) throws YarnException, IOException {
    validateRunning();

    return GetNewReservationResponse
        .newInstance(ReservationId.newInstance(0, 0));
  }

  @Override
  public FailApplicationAttemptResponse failApplicationAttempt(
      FailApplicationAttemptRequest request)
      throws YarnException, IOException {
    validateRunning();

    return FailApplicationAttemptResponse.newInstance();
  }

  @Override
  public UpdateApplicationPriorityResponse updateApplicationPriority(
      UpdateApplicationPriorityRequest request)
      throws YarnException, IOException {
    validateRunning();

    return UpdateApplicationPriorityResponse.newInstance(null);
  }

  @Override
  public SignalContainerResponse signalToContainer(
      SignalContainerRequest request) throws YarnException, IOException {
    validateRunning();

    return new SignalContainerResponsePBImpl();
  }

  @Override
  public UpdateApplicationTimeoutsResponse updateApplicationTimeouts(
      UpdateApplicationTimeoutsRequest request)
      throws YarnException, IOException {
    validateRunning();

    return UpdateApplicationTimeoutsResponse.newInstance();
  }

  @Override
  public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request)
      throws StandbyException, YarnException, IOException {
    validateRunning();

    return RefreshQueuesResponse.newInstance();
  }

  @Override
  public RefreshNodesResponse refreshNodes(RefreshNodesRequest request)
      throws StandbyException, YarnException, IOException {
    validateRunning();

    return RefreshNodesResponse.newInstance();
  }

  @Override
  public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration(
      RefreshSuperUserGroupsConfigurationRequest request)
      throws StandbyException, YarnException, IOException {
    validateRunning();

    return RefreshSuperUserGroupsConfigurationResponse.newInstance();
  }

  @Override
  public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings(
      RefreshUserToGroupsMappingsRequest request)
      throws StandbyException, YarnException, IOException {
    validateRunning();

    return RefreshUserToGroupsMappingsResponse.newInstance();
  }

  @Override
  public RefreshAdminAclsResponse refreshAdminAcls(
      RefreshAdminAclsRequest request) throws YarnException, IOException {
    validateRunning();

    return RefreshAdminAclsResponse.newInstance();
  }

  @Override
  public RefreshServiceAclsResponse refreshServiceAcls(
      RefreshServiceAclsRequest request) throws YarnException, IOException {
    validateRunning();

    return RefreshServiceAclsResponse.newInstance();
  }

  @Override
  public UpdateNodeResourceResponse updateNodeResource(
      UpdateNodeResourceRequest request) throws YarnException, IOException {
    validateRunning();

    return UpdateNodeResourceResponse.newInstance();
  }

  @Override
  public RefreshNodesResourcesResponse refreshNodesResources(
      RefreshNodesResourcesRequest request)
      throws YarnException, IOException {
    validateRunning();

    return RefreshNodesResourcesResponse.newInstance();
  }

  @Override
  public AddToClusterNodeLabelsResponse addToClusterNodeLabels(
      AddToClusterNodeLabelsRequest request)
      throws YarnException, IOException {
    validateRunning();

    return AddToClusterNodeLabelsResponse.newInstance();
  }

  @Override
  public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels(
      RemoveFromClusterNodeLabelsRequest request)
      throws YarnException, IOException {
    validateRunning();

    return RemoveFromClusterNodeLabelsResponse.newInstance();
  }

  @Override
  public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
      ReplaceLabelsOnNodeRequest request) throws YarnException, IOException {
    validateRunning();

    return ReplaceLabelsOnNodeResponse.newInstance();
  }

  @Override
  public CheckForDecommissioningNodesResponse checkForDecommissioningNodes(
      CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest)
      throws YarnException, IOException {
    validateRunning();

    return CheckForDecommissioningNodesResponse.newInstance(null);
  }

  @Override
  public RefreshClusterMaxPriorityResponse refreshClusterMaxPriority(
      RefreshClusterMaxPriorityRequest request)
      throws YarnException, IOException {
    validateRunning();

    return RefreshClusterMaxPriorityResponse.newInstance();
  }

  @Override
  public String[] getGroupsForUser(String user) throws IOException {
    validateRunning();

    return new String[0];
  }
}
package net.lightbody.bmp.util;

import com.google.common.io.BaseEncoding;
import com.google.common.net.HostAndPort;
import com.google.common.net.MediaType;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import net.lightbody.bmp.exception.DecompressionException;
import net.lightbody.bmp.exception.UnsupportedCharsetException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;

/**
 * Utility class with static methods for processing HTTP requests and responses.
 */
public class BrowserMobHttpUtil {
    private static final Logger log = LoggerFactory.getLogger(BrowserMobHttpUtil.class);

    /**
     * Default MIME content type if no Content-Type header is present. According to the HTTP 1.1 spec, section 7.2.1:
     * <pre>
     *     Any HTTP/1.1 message containing an entity-body SHOULD include a Content-Type header field defining the media
     *     type of that body. If and only if the media type is not given by a Content-Type field, the recipient MAY
     *     attempt to guess the media type via inspection of its content and/or the name extension(s) of the URI used to
     *     identify the resource. If the media type remains unknown, the recipient SHOULD treat it as
     *     type "application/octet-stream".
     * </pre>
     */
    public static final String UNKNOWN_CONTENT_TYPE = "application/octet-stream";

    /**
     * The default charset when the Content-Type header does not specify a charset. According to RFC 7231 Appendix B:
     * <pre>
     *     The default charset of ISO-8859-1 for text media types has been
     *     removed; the default is now whatever the media type definition says.
     *     Likewise, special treatment of ISO-8859-1 has been removed from the
     *     Accept-Charset header field.
     * </pre>
     *
     * Technically, we would have to determine the charset on a per-content-type basis, but generally speaking, UTF-8 is a
     * pretty safe default. (NOTE: In the previous HTTP/1.1 spec, section 3.7.1, the default charset was defined as ISO-8859-1.)
     */
    public static final Charset DEFAULT_HTTP_CHARSET = StandardCharsets.UTF_8;

    /**
     * Buffer size when decompressing content.
     * NOTE(review): 16192 looks like it may have been intended as 16384 (16 KiB);
     * the value is kept as-is for compatibility since it is a public constant.
     */
    public static final int DECOMPRESS_BUFFER_SIZE = 16192;

    /**
     * Returns the size of the headers, including the 2 CRLFs at the end of the header block.
     *
     * @param headers headers to size
     * @return length of the headers, in bytes
     */
    public static long getHeaderSize(HttpHeaders headers) {
        long headersSize = 0;
        for (Map.Entry<String, String> header : headers.entries()) {
            // +2 for ': ', +2 for new line
            headersSize += header.getKey().length() + header.getValue().length() + 4;
        }
        return headersSize;
    }

    /**
     * Decompresses the gzipped byte stream.
     *
     * @param fullMessage gzipped byte stream to decompress
     * @return decompressed bytes
     * @throws DecompressionException thrown if the fullMessage cannot be read or decompressed for any reason
     */
    public static byte[] decompressContents(byte[] fullMessage) throws DecompressionException {
        // try-with-resources guarantees the gzip stream is closed even when decompression
        // fails, replacing the previous manual finally/close block. The stream is backed by
        // an in-memory ByteArrayInputStream, so close() itself cannot meaningfully fail.
        try (InflaterInputStream gzipReader = new GZIPInputStream(new ByteArrayInputStream(fullMessage))) {
            ByteArrayOutputStream uncompressed = new ByteArrayOutputStream(fullMessage.length);

            byte[] decompressBuffer = new byte[DECOMPRESS_BUFFER_SIZE];
            int bytesRead;
            while ((bytesRead = gzipReader.read(decompressBuffer)) > -1) {
                uncompressed.write(decompressBuffer, 0, bytesRead);
            }

            return uncompressed.toByteArray();
        } catch (IOException e) {
            throw new DecompressionException("Unable to decompress response", e);
        }
    }

    /**
     * Returns true if the content type string indicates textual content. Currently these are any Content-Types that start with one of the
     * following:
     * <pre>
     *     text/
     *     application/x-javascript
     *     application/javascript
     *     application/json
     *     application/xml
     *     application/xhtml+xml
     * </pre>
     *
     * @param contentType contentType string to parse
     * @return true if the content type is textual
     */
    public static boolean hasTextualContent(String contentType) {
        return contentType != null &&
                (contentType.startsWith("text/") ||
                contentType.startsWith("application/x-javascript") ||
                contentType.startsWith("application/javascript") ||
                contentType.startsWith("application/json") ||
                contentType.startsWith("application/xml") ||
                contentType.startsWith("application/xhtml+xml")
                );
    }

    /**
     * Extracts all readable bytes from the ByteBuf as a byte array.
     * The ByteBuf's reader index is saved and restored, so this method does not consume the buffer.
     *
     * @param content ByteBuf to read
     * @return byte array containing the readable bytes from the ByteBuf
     */
    public static byte[] extractReadableBytes(ByteBuf content) {
        byte[] binaryContent = new byte[content.readableBytes()];

        // mark/reset the reader index so the caller sees the buffer unchanged
        content.markReaderIndex();
        content.readBytes(binaryContent);
        content.resetReaderIndex();

        return binaryContent;
    }

    /**
     * Converts the byte array into a String based on the specified charset. The charset cannot be null.
     *
     * @param content bytes to convert to a String
     * @param charset the character set of the content
     * @return String containing the converted content
     * @throws IllegalArgumentException if charset is null
     */
    public static String getContentAsString(byte[] content, Charset charset) {
        if (charset == null) {
            throw new IllegalArgumentException("Charset cannot be null");
        }

        return new String(content, charset);
    }

    /**
     * Reads the charset directly from the Content-Type header string. If the Content-Type header does not contain a charset,
     * is malformed or unparsable, or if the header is null or empty, this method returns null.
     *
     * @param contentTypeHeader the Content-Type header string; can be null or empty
     * @return the character set indicated in the contentTypeHeader, or null if the charset is not present or is not parsable
     * @throws UnsupportedCharsetException if there is a charset specified in the content-type header, but it is not supported on this platform
     */
    public static Charset readCharsetInContentTypeHeader(String contentTypeHeader) throws UnsupportedCharsetException {
        if (contentTypeHeader == null || contentTypeHeader.isEmpty()) {
            return null;
        }

        MediaType mediaType;
        try {
             mediaType = MediaType.parse(contentTypeHeader);
        } catch (IllegalArgumentException e) {
            // an unparsable header is ignored rather than propagated, per the method contract
            log.info("Unable to parse Content-Type header: {}. Content-Type header will be ignored.", contentTypeHeader, e);
            return null;
        }

        try {
            return mediaType.charset().orNull();
        } catch (java.nio.charset.UnsupportedCharsetException e) {
            // wrap the JDK exception in the project's (checked) equivalent
            throw new UnsupportedCharsetException(e);
        }
    }

    /**
     * Retrieves the raw (unescaped) path + query string from the specified request. The returned path will not include
     * the scheme, host, or port.
     *
     * @param httpRequest HTTP request
     * @return the unescaped path + query string from the HTTP request
     * @throws URISyntaxException if the path could not be parsed (due to invalid characters in the URI, etc.)
     */
    public static String getRawPathAndParamsFromRequest(HttpRequest httpRequest) throws URISyntaxException {
        // if this request's URI contains a full URI (including scheme, host, etc.), strip away the non-path components
        if (HttpUtil.startsWithHttpOrHttps(httpRequest.getUri())) {
            return getRawPathAndParamsFromUri(httpRequest.getUri());
        } else {
            // to provide consistent validation behavior for URIs that contain a scheme and those that don't, attempt to parse
            // the URI, even though we discard the parsed URI object
            new URI(httpRequest.getUri());

            return httpRequest.getUri();
        }
    }

    /**
     * Retrieves the raw (unescaped) path and query parameters from the URI, stripping out the scheme, host, and port.
     * The path will begin with a leading '/'. For example, 'http://example.com/some/resource?param%20name=param%20value'
     * would return '/some/resource?param%20name=param%20value'.
     *
     * @param uriString the URI to parse, containing a scheme, host, port, path, and query parameters
     * @return the unescaped path and query parameters from the URI
     * @throws URISyntaxException if the specified URI is invalid or cannot be parsed
     */
    public static String getRawPathAndParamsFromUri(String uriString) throws URISyntaxException {
        URI uri = new URI(uriString);
        String path = uri.getRawPath();
        String query = uri.getRawQuery();

        if (query != null) {
            return path + '?' + query;
        } else {
            return path;
        }
    }

    /**
     * Returns true if the specified response is an HTTP redirect response, i.e. a 300, 301, 302, 303, or 307.
     * NOTE(review): 308 (Permanent Redirect, RFC 7538) is not treated as a redirect here; confirm whether callers rely on that.
     *
     * @param httpResponse HTTP response
     * @return true if the response is a redirect, otherwise false
     */
    public static boolean isRedirect(HttpResponse httpResponse) {
        switch (httpResponse.getStatus().code()) {
            case 300:
            case 301:
            case 302:
            case 303:
            case 307:
                return true;

            default:
                return false;
        }
    }

    /**
     * Removes a port from a host+port if the string contains the specified port. If the host+port does not contain
     * a port, or contains another port, the string is returned unaltered. For example, if hostWithPort is the
     * string {@code www.website.com:443}, this method will return {@code www.website.com}.
     *
     * <b>Note:</b> The hostWithPort string is not a URI and should not contain a scheme or resource. This method does
     * not attempt to validate the specified host; it <i>might</i> throw IllegalArgumentException if there was a problem
     * parsing the hostname, but makes no guarantees. In general, it should be validated externally, if necessary.
     *
     * @param hostWithPort string containing a hostname and optional port
     * @param portNumber port to remove from the string
     * @return string with the specified port removed, or the original string if it did not contain the portNumber
     */
    public static String removeMatchingPort(String hostWithPort, int portNumber) {
        HostAndPort parsedHostAndPort = HostAndPort.fromString(hostWithPort);
        if (parsedHostAndPort.hasPort() && parsedHostAndPort.getPort() == portNumber) {
            // HostAndPort.getHostText() strips brackets from ipv6 addresses, so reparse using fromHost
            return HostAndPort.fromHost(parsedHostAndPort.getHostText()).toString();
        } else {
            return hostWithPort;
        }
    }

    /**
     * Base64-encodes the specified username and password for Basic Authorization for HTTP requests or upstream proxy
     * authorization. The format of Basic auth is "username:password" as a base64 string.
     *
     * @param username username to encode
     * @param password password to encode
     * @return a base-64 encoded string containing <code>username:password</code>
     */
    public static String base64EncodeBasicCredentials(String username, String password) {
        String credentialsToEncode = username + ':' + password;
        // using UTF-8, which is the modern de facto standard, and which retains compatibility with US_ASCII for ASCII characters,
        // as required by RFC 7617, section 3: http://tools.ietf.org/html/rfc7617#section-3
        byte[] credentialsAsUtf8Bytes = credentialsToEncode.getBytes(StandardCharsets.UTF_8);
        return BaseEncoding.base64().encode(credentialsAsUtf8Bytes);
    }
}