/*******************************************************************************
* Copyright 2021 Cognizant Technology Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.cognizant.devops.platformregressiontest.test.ui.reportmanagement;
import java.io.File;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Action;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.PageFactory;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.testng.SkipException;
import com.cognizant.devops.platformregressiontest.test.common.ConfigOptionsTest;
import com.cognizant.devops.platformregressiontest.test.common.LoginAndSelectModule;
/**
 * @author Ankita
 */
public class KpiConfigurationPage extends KPIObjectRepository {
WebDriverWait wait = new WebDriverWait(driver, 10);
private static final Logger log = LogManager.getLogger(KpiConfigurationPage.class);
public static String uploadFilePath = System.getenv().get(ConfigOptionsTest.INSIGHTS_HOME) + File.separator
+ ConfigOptionsTest.AUTO_DIR + File.separator + ConfigOptionsTest.REPORT_MANAGEMENT_DIR + File.separator;
public KpiConfigurationPage() {
PageFactory.initElements(driver, this);
}
public boolean navigateToKPIConfigurationLandingPage() {
return landingPage.isDisplayed();
}
private void clickAddButton() {
Actions actions = new Actions(driver);
wait.until(ExpectedConditions.elementToBeClickable(addNewKPIButton));
actions.moveToElement(addNewKPIButton).click();
Action action = actions.build();
action.perform();
}
/**
 * This method creates a new KPI with the given details and saves it.
 *
 * @param kpiId       unique identifier of the KPI
 * @param kpiName     display name of the KPI
 * @param toolName    tool associated with the KPI
 * @param category    KPI category
 * @param resultField result field used by the KPI
 * @param groupName   group the KPI belongs to
 * @param datasource  datasource the KPI query runs against
 * @param dbQuery     query used to compute the KPI
 * @param isActive    active flag from the test data
 * @return the id of the saved KPI
 */
public String saveKPI(String kpiId, String kpiName, String toolName, String category, String resultField,
String groupName, String datasource, String dbQuery, String isActive) {
clickAddButton();
kpiIdEl.sendKeys(kpiId);
kpiNameEl.sendKeys(kpiName);
resultFieldEl.sendKeys(resultField);
toolNameEl.sendKeys(toolName);
categoryNameEl.sendKeys(category);
groupNameEl.sendKeys(groupName);
datasourceEl.sendKeys(datasource);
dbQueryEl.sendKeys(dbQuery);
wait.until(ExpectedConditions.elementToBeClickable(isActiveEl));
isActiveEl.click();
wait.until(ExpectedConditions.elementToBeClickable(saveEl)).click();
wait.until(ExpectedConditions.elementToBeClickable(yesBtnEl)).click();
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
try {
if (kpiExistsEl.isDisplayed()) {
btnOKEl.click();
navigateToKPILandingPage();
log.debug("Skipping test case as KPI : {} already exists", kpiId);
throw new SkipException("Skipping test case as KPI : " + kpiId + " already exists");
}
} catch (NoSuchElementException e) {
log.info("Something went wrong while saving KPI : {} exception : {}", kpiId, e.getMessage());
}
btnOKEl.click();
return kpiId;
}
/**
 * This method validates the add KPI screen using the given KPI details.
 *
 * @param kpiId       unique identifier of the KPI
 * @param kpiName     display name of the KPI
 * @param toolName    tool associated with the KPI
 * @param category    KPI category
 * @param resultField result field used by the KPI
 * @param groupName   group the KPI belongs to
 * @param datasource  datasource the KPI query runs against
 * @param dbQuery     query used to compute the KPI
 * @param isActive    active flag from the test data
 * @return true if the KPI details are validated on the add screen, false otherwise
 */
public boolean validateKPI(String kpiId, String kpiName, String toolName, String category, String resultField,
String groupName, String datasource, String dbQuery, String isActive) {
clickAddButton();
kpiIdEl.sendKeys(kpiId);
kpiNameEl.sendKeys(kpiName);
resultFieldEl.sendKeys(resultField);
toolNameEl.sendKeys(toolName);
categoryNameEl.sendKeys(category);
groupNameEl.sendKeys(groupName);
datasourceEl.sendKeys(datasource);
dbQueryEl.sendKeys(dbQuery);
wait.until(ExpectedConditions.elementToBeClickable(isActiveEl));
isActiveEl.click();
wait.until(ExpectedConditions.elementToBeClickable(saveEl));
saveEl.click();
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
try {
if (kpiValidateEl.isDisplayed()) {
btnOKEl.click();
navigateToKPILandingPage();
log.info("add screen KPI : {} validated successfully", kpiId);
return true;
}
} catch (NoSuchElementException e) {
log.error("unable to validate add screen kpi {}", kpiId);
return true;
}
btnOKEl.click();
return false;
}
/**
* This method handles KPI screen navigation.
*
*/
public void navigateToKPILandingPage() {
Actions actions = new Actions(driver);
WebElement kpiElement = driver.findElement(By.xpath("//a[@title='Kpi Creation']"));
wait.until(ExpectedConditions.elementToBeClickable(kpiElement));
actions.moveToElement(kpiElement).click();
Action action = actions.build();
action.perform();
}
/**
* This method checks whether a KPI with the given id already exists.
*
* @param kpiId
* @return
*/
public boolean isKpiExists(String kpiId) {
for (WebElement we : kpiListEl) {
wait.until(ExpectedConditions.visibilityOf(we));
if (we.getText().equals(kpiId)) {
return true;
}
}
return false;
}
/**
* This method handles edit kpi functionality.
*
* @param kpiId
* @param category
* @return
*/
public boolean editKPI(String kpiId, String category) {
selectKPI(kpiId);
wait.until(ExpectedConditions.visibilityOf(btnEditEl));
btnEditEl.click();
categoryNameEl.sendKeys(category);
wait.until(ExpectedConditions.elementToBeClickable(saveEl));
saveEl.click();
wait.until(ExpectedConditions.elementToBeClickable(yesBtnEl));
yesBtnEl.click();
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
try {
if (kpiUpdateEl.isDisplayed()) {
btnOKEl.click();
log.info(" kpiId {} updated successfully ", kpiId);
return true;
}
} catch (NoSuchElementException e) {
log.error("Unable to edit kpiId {} ", kpiId);
return true;
}
btnOKEl.click();
return false;
}
/**
* This method handles edit screen validation.
*
* @param kpiId
* @param category
* @param resultField
* @param dbQuery
* @param datasource
* @return
*/
public boolean editValidateKPI(String kpiId, String category, String resultField, String dbQuery,
String datasource) {
selectKPI(kpiId);
wait.until(ExpectedConditions.visibilityOf(btnEditEl));
btnEditEl.click();
categoryNameEl.sendKeys(category);
resultFieldEl.sendKeys(Keys.chord(Keys.CONTROL, "a", Keys.DELETE));
resultFieldEl.sendKeys(resultField);
dbQueryEl.sendKeys(Keys.chord(Keys.CONTROL, "a", Keys.DELETE));
dbQueryEl.sendKeys(dbQuery);
wait.until(ExpectedConditions.elementToBeClickable(saveEl));
saveEl.click();
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
try {
if (kpiValidateEl.isDisplayed()) {
btnOKEl.click();
navigateToKPILandingPage();
log.info(" edit screen kpiId {} validated successfully ", kpiId);
return true;
}
} catch (NoSuchElementException e) {
log.error(" edit screen kpiId {} validation unsuccessful ", kpiId);
return false;
}
btnOKEl.click();
return false;
}
/**
* This method checks for non-editable fields.
*
* @param kpiId
* @return
*/
public boolean nonEditableFields(String kpiId) {
selectKPI(kpiId);
wait.until(ExpectedConditions.elementToBeClickable(btnEditEl));
btnEditEl.click();
driver.manage().timeouts().implicitlyWait(5, TimeUnit.SECONDS);
try {
if (kpiIdEl.getAttribute("ng-reflect-is-disabled").equals("true")
&& kpiNameEl.getAttribute("ng-reflect-is-disabled").equals("true")
&& groupNameEl.getAttribute("ng-reflect-is-disabled").equals("true")
&& toolNameEl.getAttribute("ng-reflect-is-disabled").equals("true")) {
navigateToKPILandingPage();
return true;
}
} catch (Exception e) {
log.info(e.getMessage());
navigateToKPILandingPage();
return true;
}
navigateToKPILandingPage();
return false;
}
/**
* This method handles KPI deletion.
*
* @param kpiId
* @return
*/
public boolean deleteKPI(String kpiId) {
selectKPI(kpiId);
wait.until(ExpectedConditions.visibilityOf(btnDeleteEl));
btnDeleteEl.click();
wait.until(ExpectedConditions.elementToBeClickable(yesBtnEl));
yesBtnEl.click();
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
try {
wait.until(ExpectedConditions.visibilityOf(kpiDeletedEl));
if (kpiDeletedEl.isDisplayed()) {
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
btnOKEl.click();
log.info("kpiId {} deleted successfully ", kpiId);
return true;
}
} catch (NoSuchElementException e) {
log.error(" unable to deletge kpiId {} with exception {} :", kpiId, e.getMessage());
return true;
}
btnOKEl.click();
return false;
}
/**
* This method handles KPI selection.
*
* @param kpiId
*/
public void selectKPI(String kpiId) {
for (int i = 0; i < kpiListEl.size(); i++) {
if (kpiListEl.get(i).getText().equals(kpiId)) {
List<WebElement> radioButtons = kpiListEl.get(i)
.findElements(By.xpath(".//preceding::span[contains(@class, 'mat-radio-container')]"));
driver.manage().timeouts().implicitlyWait(6, TimeUnit.SECONDS);
radioButtons.get(i).click();
break;
}
}
}
/**
* This method handles refresh button functionality
*
* @param kpiId
* @return
*/
public boolean checkRefreshButton(String kpiId) {
try {
selectKPI(kpiId);
driver.manage().timeouts().implicitlyWait(3, TimeUnit.SECONDS);
wait.until(ExpectedConditions.elementToBeClickable(refreshBtnE1));
refreshBtnE1.click();
driver.manage().timeouts().implicitlyWait(5, TimeUnit.SECONDS);
for (int i = 0; i < kpiListEl.size(); i++) {
WebElement radioButton = kpiListEl.get(i).findElement(By.xpath(".//preceding::mat-radio-button"));
if (radioButton.isSelected()) {
log.info("KPI is in selected mode ");
return false;
} else {
log.info("Not-Selected");
}
}
return true;
} catch (Exception e) {
log.error("Exception while verifying refresh button state for KPI {} : {}", kpiId, e.getMessage());
}
return true;
}
/**
 * This method uploads a KPI configuration JSON file.
 *
 * @param fileName name of the JSON file placed under the report management upload directory
 * @return true once the upload flow completes
 */
public boolean uploadJson(String fileName) {
String path = uploadFilePath + fileName;
uploadBtnE1.click();
driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
chooseFileBtnE1.sendKeys(path);
wait.until(ExpectedConditions.elementToBeClickable(uploadJsonBtnE1));
uploadJsonBtnE1.click();
wait.until(ExpectedConditions.elementToBeClickable(btnOKEl));
btnOKEl.click();
log.info("upload json successful");
return true;
}
/**
* This method handles KPI search.
*
* @param kpiId
* @return
*/
public boolean searchKPI(String kpiId) {
Actions actions = new Actions(driver);
actions.moveToElement(searchKPIEl).click();
actions.sendKeys(kpiId);
Action action = actions.build();
action.perform();
if (kpiListEl.size() == 1) {
searchKPIEl.sendKeys(Keys.chord(Keys.CONTROL, "a", Keys.DELETE));
navigateToKPILandingPage();
log.info("Kpi search box test successful");
return true;
}
log.info("Kpi search box test unsuccessful");
return false;
}
}
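/*
 * Illustrative sketch only (not part of the original source): one way a TestNG
 * test might drive KpiConfigurationPage. The test data values, the test class
 * name, and the assumption that LoginAndSelectModule logs in and initialises
 * the WebDriver are all hypothetical; only the KpiConfigurationPage methods
 * themselves come from the page object above.
 */
package com.cognizant.devops.platformregressiontest.test.ui.reportmanagement;

import org.testng.Assert;
import org.testng.annotations.Test;

import com.cognizant.devops.platformregressiontest.test.common.LoginAndSelectModule;

public class KpiConfigurationSketchTest extends LoginAndSelectModule {

    @Test
    public void createAndDeleteKpi() {
        KpiConfigurationPage kpiPage = new KpiConfigurationPage();

        // The KPI configuration landing page must be visible before any action.
        Assert.assertTrue(kpiPage.navigateToKPIConfigurationLandingPage());

        // Save a KPI (parameter order: id, name, tool, category, result field,
        // group, datasource, query, isActive) and confirm it appears in the list.
        String kpiId = kpiPage.saveKPI("100200", "Avg Build Duration", "jenkins",
                "STANDARD", "duration", "Jenkins KPIs", "neo4j",
                "MATCH (n:JENKINS) RETURN avg(n.duration)", "true");
        Assert.assertTrue(kpiPage.isKpiExists(kpiId));

        // Clean up so the scenario can be re-run.
        Assert.assertTrue(kpiPage.deleteKPI(kpiId));
    }
}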
/*
* Copyright (C) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.RandomAccess;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import javax.annotation.Nullable;
/**
* Synchronized collection views. The returned synchronized collection views are
* serializable if the backing collection and the mutex are serializable.
*
* <p>If a {@code null} is passed as the {@code mutex} parameter to any of this
* class's top-level methods or inner class constructors, the created object
* uses itself as the synchronization mutex.
*
* <p>This class should be used by other collection classes only.
*
* @author Mike Bostock
* @author Jared Levy
*/
@GwtCompatible(emulated = true)
final class Synchronized {
private Synchronized() {}
static class SynchronizedObject implements Serializable {
final Object delegate;
final Object mutex;
SynchronizedObject(Object delegate, @Nullable Object mutex) {
this.delegate = checkNotNull(delegate);
this.mutex = (mutex == null) ? this : mutex;
}
Object delegate() {
return delegate;
}
// No equals and hashCode; see ForwardingObject for details.
@Override public String toString() {
synchronized (mutex) {
return delegate.toString();
}
}
// Serialization invokes writeObject only when it's private.
// The SynchronizedObject subclasses don't need a writeObject method since
// they don't contain any non-transient member variables, while the
// following writeObject() handles the SynchronizedObject members.
}
private static <E> Collection<E> collection(
Collection<E> collection, @Nullable Object mutex) {
return new SynchronizedCollection<E>(collection, mutex);
}
@VisibleForTesting static class SynchronizedCollection<E>
extends SynchronizedObject implements Collection<E> {
private SynchronizedCollection(
Collection<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked")
@Override Collection<E> delegate() {
return (Collection<E>) super.delegate();
}
public boolean add(E e) {
synchronized (mutex) {
return delegate().add(e);
}
}
public boolean addAll(Collection<? extends E> c) {
synchronized (mutex) {
return delegate().addAll(c);
}
}
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
public boolean contains(Object o) {
synchronized (mutex) {
return delegate().contains(o);
}
}
public boolean containsAll(Collection<?> c) {
synchronized (mutex) {
return delegate().containsAll(c);
}
}
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
public Iterator<E> iterator() {
return delegate().iterator(); // manually synchronized
}
public boolean remove(Object o) {
synchronized (mutex) {
return delegate().remove(o);
}
}
public boolean removeAll(Collection<?> c) {
synchronized (mutex) {
return delegate().removeAll(c);
}
}
public boolean retainAll(Collection<?> c) {
synchronized (mutex) {
return delegate().retainAll(c);
}
}
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
public Object[] toArray() {
synchronized (mutex) {
return delegate().toArray();
}
}
public <T> T[] toArray(T[] a) {
synchronized (mutex) {
return delegate().toArray(a);
}
}
private static final long serialVersionUID = 0;
}
@VisibleForTesting static <E> Set<E> set(Set<E> set, @Nullable Object mutex) {
return new SynchronizedSet<E>(set, mutex);
}
static class SynchronizedSet<E>
extends SynchronizedCollection<E> implements Set<E> {
SynchronizedSet(Set<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override Set<E> delegate() {
return (Set<E>) super.delegate();
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
private static <E> SortedSet<E> sortedSet(
SortedSet<E> set, @Nullable Object mutex) {
return new SynchronizedSortedSet<E>(set, mutex);
}
static class SynchronizedSortedSet<E> extends SynchronizedSet<E>
implements SortedSet<E> {
SynchronizedSortedSet(SortedSet<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override SortedSet<E> delegate() {
return (SortedSet<E>) super.delegate();
}
public Comparator<? super E> comparator() {
synchronized (mutex) {
return delegate().comparator();
}
}
public SortedSet<E> subSet(E fromElement, E toElement) {
synchronized (mutex) {
return sortedSet(delegate().subSet(fromElement, toElement), mutex);
}
}
public SortedSet<E> headSet(E toElement) {
synchronized (mutex) {
return sortedSet(delegate().headSet(toElement), mutex);
}
}
public SortedSet<E> tailSet(E fromElement) {
synchronized (mutex) {
return sortedSet(delegate().tailSet(fromElement), mutex);
}
}
public E first() {
synchronized (mutex) {
return delegate().first();
}
}
public E last() {
synchronized (mutex) {
return delegate().last();
}
}
private static final long serialVersionUID = 0;
}
private static <E> List<E> list(List<E> list, @Nullable Object mutex) {
return (list instanceof RandomAccess)
? new SynchronizedRandomAccessList<E>(list, mutex)
: new SynchronizedList<E>(list, mutex);
}
private static class SynchronizedList<E> extends SynchronizedCollection<E>
implements List<E> {
SynchronizedList(List<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override List<E> delegate() {
return (List<E>) super.delegate();
}
public void add(int index, E element) {
synchronized (mutex) {
delegate().add(index, element);
}
}
public boolean addAll(int index, Collection<? extends E> c) {
synchronized (mutex) {
return delegate().addAll(index, c);
}
}
public E get(int index) {
synchronized (mutex) {
return delegate().get(index);
}
}
public int indexOf(Object o) {
synchronized (mutex) {
return delegate().indexOf(o);
}
}
public int lastIndexOf(Object o) {
synchronized (mutex) {
return delegate().lastIndexOf(o);
}
}
public ListIterator<E> listIterator() {
return delegate().listIterator(); // manually synchronized
}
public ListIterator<E> listIterator(int index) {
return delegate().listIterator(index); // manually synchronized
}
public E remove(int index) {
synchronized (mutex) {
return delegate().remove(index);
}
}
public E set(int index, E element) {
synchronized (mutex) {
return delegate().set(index, element);
}
}
public List<E> subList(int fromIndex, int toIndex) {
synchronized (mutex) {
return list(delegate().subList(fromIndex, toIndex), mutex);
}
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
private static class SynchronizedRandomAccessList<E>
extends SynchronizedList<E> implements RandomAccess {
SynchronizedRandomAccessList(List<E> list, @Nullable Object mutex) {
super(list, mutex);
}
private static final long serialVersionUID = 0;
}
static <E> Multiset<E> multiset(
Multiset<E> multiset, @Nullable Object mutex) {
return new SynchronizedMultiset<E>(multiset, mutex);
}
private static class SynchronizedMultiset<E> extends SynchronizedCollection<E>
implements Multiset<E> {
transient Set<E> elementSet;
transient Set<Entry<E>> entrySet;
SynchronizedMultiset(Multiset<E> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override Multiset<E> delegate() {
return (Multiset<E>) super.delegate();
}
public int count(Object o) {
synchronized (mutex) {
return delegate().count(o);
}
}
public int add(E e, int n) {
synchronized (mutex) {
return delegate().add(e, n);
}
}
public int remove(Object o, int n) {
synchronized (mutex) {
return delegate().remove(o, n);
}
}
public int setCount(E element, int count) {
synchronized (mutex) {
return delegate().setCount(element, count);
}
}
public boolean setCount(E element, int oldCount, int newCount) {
synchronized (mutex) {
return delegate().setCount(element, oldCount, newCount);
}
}
public Set<E> elementSet() {
synchronized (mutex) {
if (elementSet == null) {
elementSet = typePreservingSet(delegate().elementSet(), mutex);
}
return elementSet;
}
}
public Set<Entry<E>> entrySet() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = typePreservingSet(delegate().entrySet(), mutex);
}
return entrySet;
}
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K, V> Multimap<K, V> multimap(
Multimap<K, V> multimap, @Nullable Object mutex) {
return new SynchronizedMultimap<K, V>(multimap, mutex);
}
private static class SynchronizedMultimap<K, V> extends SynchronizedObject
implements Multimap<K, V> {
transient Set<K> keySet;
transient Collection<V> valuesCollection;
transient Collection<Map.Entry<K, V>> entries;
transient Map<K, Collection<V>> asMap;
transient Multiset<K> keys;
@SuppressWarnings("unchecked")
@Override Multimap<K, V> delegate() {
return (Multimap<K, V>) super.delegate();
}
SynchronizedMultimap(Multimap<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
public boolean containsKey(Object key) {
synchronized (mutex) {
return delegate().containsKey(key);
}
}
public boolean containsValue(Object value) {
synchronized (mutex) {
return delegate().containsValue(value);
}
}
public boolean containsEntry(Object key, Object value) {
synchronized (mutex) {
return delegate().containsEntry(key, value);
}
}
public Collection<V> get(K key) {
synchronized (mutex) {
return typePreservingCollection(delegate().get(key), mutex);
}
}
public boolean put(K key, V value) {
synchronized (mutex) {
return delegate().put(key, value);
}
}
public boolean putAll(K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().putAll(key, values);
}
}
public boolean putAll(Multimap<? extends K, ? extends V> multimap) {
synchronized (mutex) {
return delegate().putAll(multimap);
}
}
public Collection<V> replaceValues(K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
public boolean remove(Object key, Object value) {
synchronized (mutex) {
return delegate().remove(key, value);
}
}
public Collection<V> removeAll(Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
public Set<K> keySet() {
synchronized (mutex) {
if (keySet == null) {
keySet = typePreservingSet(delegate().keySet(), mutex);
}
return keySet;
}
}
public Collection<V> values() {
synchronized (mutex) {
if (valuesCollection == null) {
valuesCollection = collection(delegate().values(), mutex);
}
return valuesCollection;
}
}
public Collection<Map.Entry<K, V>> entries() {
synchronized (mutex) {
if (entries == null) {
entries = typePreservingCollection(delegate().entries(), mutex);
}
return entries;
}
}
public Map<K, Collection<V>> asMap() {
synchronized (mutex) {
if (asMap == null) {
asMap = new SynchronizedAsMap<K, V>(delegate().asMap(), mutex);
}
return asMap;
}
}
public Multiset<K> keys() {
synchronized (mutex) {
if (keys == null) {
keys = multiset(delegate().keys(), mutex);
}
return keys;
}
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K, V> ListMultimap<K, V> listMultimap(
ListMultimap<K, V> multimap, @Nullable Object mutex) {
return new SynchronizedListMultimap<K, V>(multimap, mutex);
}
private static class SynchronizedListMultimap<K, V>
extends SynchronizedMultimap<K, V> implements ListMultimap<K, V> {
SynchronizedListMultimap(
ListMultimap<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override ListMultimap<K, V> delegate() {
return (ListMultimap<K, V>) super.delegate();
}
@Override public List<V> get(K key) {
synchronized (mutex) {
return list(delegate().get(key), mutex);
}
}
@Override public List<V> removeAll(Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override public List<V> replaceValues(
K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
private static final long serialVersionUID = 0;
}
static <K, V> SetMultimap<K, V> setMultimap(
SetMultimap<K, V> multimap, @Nullable Object mutex) {
return new SynchronizedSetMultimap<K, V>(multimap, mutex);
}
private static class SynchronizedSetMultimap<K, V>
extends SynchronizedMultimap<K, V> implements SetMultimap<K, V> {
transient Set<Map.Entry<K, V>> entrySet;
SynchronizedSetMultimap(
SetMultimap<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override SetMultimap<K, V> delegate() {
return (SetMultimap<K, V>) super.delegate();
}
@Override public Set<V> get(K key) {
synchronized (mutex) {
return set(delegate().get(key), mutex);
}
}
@Override public Set<V> removeAll(Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override public Set<V> replaceValues(
K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
@Override public Set<Map.Entry<K, V>> entries() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = set(delegate().entries(), mutex);
}
return entrySet;
}
}
private static final long serialVersionUID = 0;
}
static <K, V> SortedSetMultimap<K, V> sortedSetMultimap(
SortedSetMultimap<K, V> multimap, @Nullable Object mutex) {
return new SynchronizedSortedSetMultimap<K, V>(multimap, mutex);
}
private static class SynchronizedSortedSetMultimap<K, V>
extends SynchronizedSetMultimap<K, V> implements SortedSetMultimap<K, V> {
SynchronizedSortedSetMultimap(
SortedSetMultimap<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override SortedSetMultimap<K, V> delegate() {
return (SortedSetMultimap<K, V>) super.delegate();
}
@Override public SortedSet<V> get(K key) {
synchronized (mutex) {
return sortedSet(delegate().get(key), mutex);
}
}
@Override public SortedSet<V> removeAll(Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override public SortedSet<V> replaceValues(
K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
public Comparator<? super V> valueComparator() {
synchronized (mutex) {
return delegate().valueComparator();
}
}
private static final long serialVersionUID = 0;
}
private static <E> Collection<E> typePreservingCollection(
Collection<E> collection, @Nullable Object mutex) {
if (collection instanceof SortedSet) {
return sortedSet((SortedSet<E>) collection, mutex);
}
if (collection instanceof Set) {
return set((Set<E>) collection, mutex);
}
if (collection instanceof List) {
return list((List<E>) collection, mutex);
}
return collection(collection, mutex);
}
private static <E> Set<E> typePreservingSet(
Set<E> set, @Nullable Object mutex) {
if (set instanceof SortedSet) {
return sortedSet((SortedSet<E>) set, mutex);
} else {
return set(set, mutex);
}
}
private static class SynchronizedAsMapEntries<K, V>
extends SynchronizedSet<Map.Entry<K, Collection<V>>> {
SynchronizedAsMapEntries(
Set<Map.Entry<K, Collection<V>>> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override public Iterator<Map.Entry<K, Collection<V>>> iterator() {
// Must be manually synchronized.
final Iterator<Map.Entry<K, Collection<V>>> iterator = super.iterator();
return new ForwardingIterator<Map.Entry<K, Collection<V>>>() {
@Override protected Iterator<Map.Entry<K, Collection<V>>> delegate() {
return iterator;
}
@Override public Map.Entry<K, Collection<V>> next() {
final Map.Entry<K, Collection<V>> entry = iterator.next();
return new ForwardingMapEntry<K, Collection<V>>() {
@Override protected Map.Entry<K, Collection<V>> delegate() {
return entry;
}
@Override public Collection<V> getValue() {
return typePreservingCollection(entry.getValue(), mutex);
}
};
}
};
}
// See Collections.CheckedMap.CheckedEntrySet for details on attacks.
@Override public Object[] toArray() {
synchronized (mutex) {
return ObjectArrays.toArrayImpl(delegate());
}
}
@Override public <T> T[] toArray(T[] array) {
synchronized (mutex) {
return ObjectArrays.toArrayImpl(delegate(), array);
}
}
@Override public boolean contains(Object o) {
synchronized (mutex) {
return Maps.containsEntryImpl(delegate(), o);
}
}
@Override public boolean containsAll(Collection<?> c) {
synchronized (mutex) {
return Collections2.containsAllImpl(delegate(), c);
}
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return Sets.equalsImpl(delegate(), o);
}
}
@Override public boolean remove(Object o) {
synchronized (mutex) {
return Maps.removeEntryImpl(delegate(), o);
}
}
@Override public boolean removeAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.removeAll(delegate().iterator(), c);
}
}
@Override public boolean retainAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.retainAll(delegate().iterator(), c);
}
}
private static final long serialVersionUID = 0;
}
@VisibleForTesting
static <K, V> Map<K, V> map(Map<K, V> map, @Nullable Object mutex) {
return new SynchronizedMap<K, V>(map, mutex);
}
private static class SynchronizedMap<K, V> extends SynchronizedObject
implements Map<K, V> {
transient Set<K> keySet;
transient Collection<V> values;
transient Set<Map.Entry<K, V>> entrySet;
SynchronizedMap(Map<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked")
@Override Map<K, V> delegate() {
return (Map<K, V>) super.delegate();
}
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
public boolean containsKey(Object key) {
synchronized (mutex) {
return delegate().containsKey(key);
}
}
public boolean containsValue(Object value) {
synchronized (mutex) {
return delegate().containsValue(value);
}
}
public Set<Map.Entry<K, V>> entrySet() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = set(delegate().entrySet(), mutex);
}
return entrySet;
}
}
public V get(Object key) {
synchronized (mutex) {
return delegate().get(key);
}
}
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
public Set<K> keySet() {
synchronized (mutex) {
if (keySet == null) {
keySet = set(delegate().keySet(), mutex);
}
return keySet;
}
}
public V put(K key, V value) {
synchronized (mutex) {
return delegate().put(key, value);
}
}
public void putAll(Map<? extends K, ? extends V> map) {
synchronized (mutex) {
delegate().putAll(map);
}
}
public V remove(Object key) {
synchronized (mutex) {
return delegate().remove(key);
}
}
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
public Collection<V> values() {
synchronized (mutex) {
if (values == null) {
values = collection(delegate().values(), mutex);
}
return values;
}
}
@Override public boolean equals(Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K, V> SortedMap<K, V> sortedMap(
SortedMap<K, V> sortedMap, @Nullable Object mutex) {
return new SynchronizedSortedMap<K, V>(sortedMap, mutex);
}
static class SynchronizedSortedMap<K, V> extends SynchronizedMap<K, V>
implements SortedMap<K, V> {
SynchronizedSortedMap(SortedMap<K, V> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override SortedMap<K, V> delegate() {
return (SortedMap<K, V>) super.delegate();
}
@Override public Comparator<? super K> comparator() {
synchronized (mutex) {
return delegate().comparator();
}
}
@Override public K firstKey() {
synchronized (mutex) {
return delegate().firstKey();
}
}
@Override public SortedMap<K, V> headMap(K toKey) {
synchronized (mutex) {
return sortedMap(delegate().headMap(toKey), mutex);
}
}
@Override public K lastKey() {
synchronized (mutex) {
return delegate().lastKey();
}
}
@Override public SortedMap<K, V> subMap(K fromKey, K toKey) {
synchronized (mutex) {
return sortedMap(delegate().subMap(fromKey, toKey), mutex);
}
}
@Override public SortedMap<K, V> tailMap(K fromKey) {
synchronized (mutex) {
return sortedMap(delegate().tailMap(fromKey), mutex);
}
}
private static final long serialVersionUID = 0;
}
static <K, V> BiMap<K, V> biMap(BiMap<K, V> bimap, @Nullable Object mutex) {
return new SynchronizedBiMap<K, V>(bimap, mutex, null);
}
@VisibleForTesting static class SynchronizedBiMap<K, V>
extends SynchronizedMap<K, V> implements BiMap<K, V>, Serializable {
private transient Set<V> valueSet;
private transient BiMap<V, K> inverse;
private SynchronizedBiMap(BiMap<K, V> delegate, @Nullable Object mutex,
@Nullable BiMap<V, K> inverse) {
super(delegate, mutex);
this.inverse = inverse;
}
@Override BiMap<K, V> delegate() {
return (BiMap<K, V>) super.delegate();
}
@Override public Set<V> values() {
synchronized (mutex) {
if (valueSet == null) {
valueSet = set(delegate().values(), mutex);
}
return valueSet;
}
}
public V forcePut(K key, V value) {
synchronized (mutex) {
return delegate().forcePut(key, value);
}
}
public BiMap<V, K> inverse() {
synchronized (mutex) {
if (inverse == null) {
inverse
= new SynchronizedBiMap<V, K>(delegate().inverse(), mutex, this);
}
return inverse;
}
}
private static final long serialVersionUID = 0;
}
private static class SynchronizedAsMap<K, V>
extends SynchronizedMap<K, Collection<V>> {
transient Set<Map.Entry<K, Collection<V>>> asMapEntrySet;
transient Collection<Collection<V>> asMapValues;
SynchronizedAsMap(Map<K, Collection<V>> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override public Collection<V> get(Object key) {
synchronized (mutex) {
Collection<V> collection = super.get(key);
return (collection == null) ? null
: typePreservingCollection(collection, mutex);
}
}
@Override public Set<Map.Entry<K, Collection<V>>> entrySet() {
synchronized (mutex) {
if (asMapEntrySet == null) {
asMapEntrySet = new SynchronizedAsMapEntries<K, V>(
delegate().entrySet(), mutex);
}
return asMapEntrySet;
}
}
@Override public Collection<Collection<V>> values() {
synchronized (mutex) {
if (asMapValues == null) {
asMapValues
= new SynchronizedAsMapValues<V>(delegate().values(), mutex);
}
return asMapValues;
}
}
@Override public boolean containsValue(Object o) {
// values() and its contains() method are both synchronized.
return values().contains(o);
}
private static final long serialVersionUID = 0;
}
private static class SynchronizedAsMapValues<V>
extends SynchronizedCollection<Collection<V>> {
SynchronizedAsMapValues(
Collection<Collection<V>> delegate, @Nullable Object mutex) {
super(delegate, mutex);
}
@Override public Iterator<Collection<V>> iterator() {
// Must be manually synchronized.
final Iterator<Collection<V>> iterator = super.iterator();
return new ForwardingIterator<Collection<V>>() {
@Override protected Iterator<Collection<V>> delegate() {
return iterator;
}
@Override public Collection<V> next() {
return typePreservingCollection(iterator.next(), mutex);
}
};
}
private static final long serialVersionUID = 0;
}
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for InterconnectAttachments.Insert. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InsertInterconnectAttachmentRequest}
*/
public final class InsertInterconnectAttachmentRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InsertInterconnectAttachmentRequest)
InsertInterconnectAttachmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use InsertInterconnectAttachmentRequest.newBuilder() to construct.
private InsertInterconnectAttachmentRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InsertInterconnectAttachmentRequest() {
project_ = "";
region_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InsertInterconnectAttachmentRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private InsertInterconnectAttachmentRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 296879706:
{
java.lang.String s = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
requestId_ = s;
break;
}
case 1111570338:
{
java.lang.String s = input.readStringRequireUtf8();
region_ = s;
break;
}
case 1698730954:
{
com.google.cloud.compute.v1.InterconnectAttachment.Builder subBuilder = null;
if (interconnectAttachmentResource_ != null) {
subBuilder = interconnectAttachmentResource_.toBuilder();
}
interconnectAttachmentResource_ =
input.readMessage(
com.google.cloud.compute.v1.InterconnectAttachment.parser(),
extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(interconnectAttachmentResource_);
interconnectAttachmentResource_ = subBuilder.buildPartial();
}
break;
}
case 1820481738:
{
java.lang.String s = input.readStringRequireUtf8();
project_ = s;
break;
}
case 1941957032:
{
bitField0_ |= 0x00000002;
validateOnly_ = input.readBool();
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InsertInterconnectAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InsertInterconnectAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.class,
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.Builder.class);
}
private int bitField0_;
public static final int INTERCONNECT_ATTACHMENT_RESOURCE_FIELD_NUMBER = 212341369;
private com.google.cloud.compute.v1.InterconnectAttachment interconnectAttachmentResource_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the interconnectAttachmentResource field is set.
*/
@java.lang.Override
public boolean hasInterconnectAttachmentResource() {
return interconnectAttachmentResource_ != null;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The interconnectAttachmentResource.
*/
@java.lang.Override
public com.google.cloud.compute.v1.InterconnectAttachment getInterconnectAttachmentResource() {
return interconnectAttachmentResource_ == null
? com.google.cloud.compute.v1.InterconnectAttachment.getDefaultInstance()
: interconnectAttachmentResource_;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.InterconnectAttachmentOrBuilder
getInterconnectAttachmentResourceOrBuilder() {
return getInterconnectAttachmentResource();
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
private volatile java.lang.Object project_;
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
private volatile java.lang.Object region_;
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @return The region.
*/
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @return The bytes for region.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 37109963;
private volatile java.lang.Object requestId_;
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
@java.lang.Override
public boolean hasRequestId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 242744629;
private boolean validateOnly_;
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @return Whether the validateOnly field is set.
*/
@java.lang.Override
public boolean hasValidateOnly() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
}
if (interconnectAttachmentResource_ != null) {
output.writeMessage(212341369, getInterconnectAttachmentResource());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeBool(242744629, validateOnly_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
}
if (interconnectAttachmentResource_ != null) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
212341369, getInterconnectAttachmentResource());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(242744629, validateOnly_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest other =
(com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest) obj;
if (hasInterconnectAttachmentResource() != other.hasInterconnectAttachmentResource())
return false;
if (hasInterconnectAttachmentResource()) {
if (!getInterconnectAttachmentResource().equals(other.getInterconnectAttachmentResource()))
return false;
}
if (!getProject().equals(other.getProject())) return false;
if (!getRegion().equals(other.getRegion())) return false;
if (hasRequestId() != other.hasRequestId()) return false;
if (hasRequestId()) {
if (!getRequestId().equals(other.getRequestId())) return false;
}
if (hasValidateOnly() != other.hasValidateOnly()) return false;
if (hasValidateOnly()) {
if (getValidateOnly() != other.getValidateOnly()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInterconnectAttachmentResource()) {
hash = (37 * hash) + INTERCONNECT_ATTACHMENT_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getInterconnectAttachmentResource().hashCode();
}
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
if (hasRequestId()) {
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
}
if (hasValidateOnly()) {
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for InterconnectAttachments.Insert. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InsertInterconnectAttachmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InsertInterconnectAttachmentRequest)
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InsertInterconnectAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InsertInterconnectAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.class,
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (interconnectAttachmentResourceBuilder_ == null) {
interconnectAttachmentResource_ = null;
} else {
interconnectAttachmentResource_ = null;
interconnectAttachmentResourceBuilder_ = null;
}
project_ = "";
region_ = "";
requestId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
validateOnly_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InsertInterconnectAttachmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest build() {
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest buildPartial() {
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest result =
new com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (interconnectAttachmentResourceBuilder_ == null) {
result.interconnectAttachmentResource_ = interconnectAttachmentResource_;
} else {
result.interconnectAttachmentResource_ = interconnectAttachmentResourceBuilder_.build();
}
result.project_ = project_;
result.region_ = region_;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.requestId_ = requestId_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.validateOnly_ = validateOnly_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest) {
return mergeFrom((com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest other) {
if (other
== com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest.getDefaultInstance())
return this;
if (other.hasInterconnectAttachmentResource()) {
mergeInterconnectAttachmentResource(other.getInterconnectAttachmentResource());
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
onChanged();
}
if (!other.getRegion().isEmpty()) {
region_ = other.region_;
onChanged();
}
if (other.hasRequestId()) {
bitField0_ |= 0x00000001;
requestId_ = other.requestId_;
onChanged();
}
if (other.hasValidateOnly()) {
setValidateOnly(other.getValidateOnly());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.cloud.compute.v1.InterconnectAttachment interconnectAttachmentResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.InterconnectAttachment,
com.google.cloud.compute.v1.InterconnectAttachment.Builder,
com.google.cloud.compute.v1.InterconnectAttachmentOrBuilder>
interconnectAttachmentResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the interconnectAttachmentResource field is set.
*/
public boolean hasInterconnectAttachmentResource() {
return interconnectAttachmentResourceBuilder_ != null
|| interconnectAttachmentResource_ != null;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The interconnectAttachmentResource.
*/
public com.google.cloud.compute.v1.InterconnectAttachment getInterconnectAttachmentResource() {
if (interconnectAttachmentResourceBuilder_ == null) {
return interconnectAttachmentResource_ == null
? com.google.cloud.compute.v1.InterconnectAttachment.getDefaultInstance()
: interconnectAttachmentResource_;
} else {
return interconnectAttachmentResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInterconnectAttachmentResource(
com.google.cloud.compute.v1.InterconnectAttachment value) {
if (interconnectAttachmentResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
interconnectAttachmentResource_ = value;
onChanged();
} else {
interconnectAttachmentResourceBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInterconnectAttachmentResource(
com.google.cloud.compute.v1.InterconnectAttachment.Builder builderForValue) {
if (interconnectAttachmentResourceBuilder_ == null) {
interconnectAttachmentResource_ = builderForValue.build();
onChanged();
} else {
interconnectAttachmentResourceBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInterconnectAttachmentResource(
com.google.cloud.compute.v1.InterconnectAttachment value) {
if (interconnectAttachmentResourceBuilder_ == null) {
if (interconnectAttachmentResource_ != null) {
interconnectAttachmentResource_ =
com.google.cloud.compute.v1.InterconnectAttachment.newBuilder(
interconnectAttachmentResource_)
.mergeFrom(value)
.buildPartial();
} else {
interconnectAttachmentResource_ = value;
}
onChanged();
} else {
interconnectAttachmentResourceBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInterconnectAttachmentResource() {
if (interconnectAttachmentResourceBuilder_ == null) {
interconnectAttachmentResource_ = null;
onChanged();
} else {
interconnectAttachmentResource_ = null;
interconnectAttachmentResourceBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.InterconnectAttachment.Builder
getInterconnectAttachmentResourceBuilder() {
onChanged();
return getInterconnectAttachmentResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.InterconnectAttachmentOrBuilder
getInterconnectAttachmentResourceOrBuilder() {
if (interconnectAttachmentResourceBuilder_ != null) {
return interconnectAttachmentResourceBuilder_.getMessageOrBuilder();
} else {
return interconnectAttachmentResource_ == null
? com.google.cloud.compute.v1.InterconnectAttachment.getDefaultInstance()
: interconnectAttachmentResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.InterconnectAttachment interconnect_attachment_resource = 212341369 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.InterconnectAttachment,
com.google.cloud.compute.v1.InterconnectAttachment.Builder,
com.google.cloud.compute.v1.InterconnectAttachmentOrBuilder>
getInterconnectAttachmentResourceFieldBuilder() {
if (interconnectAttachmentResourceBuilder_ == null) {
interconnectAttachmentResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.InterconnectAttachment,
com.google.cloud.compute.v1.InterconnectAttachment.Builder,
com.google.cloud.compute.v1.InterconnectAttachmentOrBuilder>(
getInterconnectAttachmentResource(), getParentForChildren(), isClean());
interconnectAttachmentResource_ = null;
}
return interconnectAttachmentResourceBuilder_;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
onChanged();
return this;
}
private java.lang.Object region_ = "";
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @return The region.
*/
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @return The bytes for region.
*/
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @param value The region to set.
* @return This builder for chaining.
*/
public Builder setRegion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRegion() {
region_ = getDefaultInstance().getRegion();
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the region for this request.
* </pre>
*
* <code>
* string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
* </code>
*
* @param value The bytes for region to set.
* @return This builder for chaining.
*/
public Builder setRegionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
region_ = value;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
public boolean hasRequestId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
requestId_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
bitField0_ = (bitField0_ & ~0x00000001);
requestId_ = getDefaultInstance().getRequestId();
onChanged();
return this;
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
bitField0_ |= 0x00000001;
requestId_ = value;
onChanged();
return this;
}
private boolean validateOnly_;
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @return Whether the validateOnly field is set.
*/
@java.lang.Override
public boolean hasValidateOnly() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
bitField0_ |= 0x00000002;
validateOnly_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* If true, the request will not be committed.
* </pre>
*
* <code>optional bool validate_only = 242744629;</code>
*
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
bitField0_ = (bitField0_ & ~0x00000002);
validateOnly_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InsertInterconnectAttachmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InsertInterconnectAttachmentRequest)
private static final com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest();
}
public static com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<InsertInterconnectAttachmentRequest> PARSER =
new com.google.protobuf.AbstractParser<InsertInterconnectAttachmentRequest>() {
@java.lang.Override
public InsertInterconnectAttachmentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new InsertInterconnectAttachmentRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<InsertInterconnectAttachmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InsertInterconnectAttachmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.InsertInterconnectAttachmentRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
package org.sagebionetworks.bridge.dynamodb;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Resource;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.sagebionetworks.bridge.TestUtils;
import org.sagebionetworks.bridge.exceptions.EntityAlreadyExistsException;
import org.sagebionetworks.bridge.exceptions.EntityNotFoundException;
import org.sagebionetworks.bridge.exceptions.UnauthorizedException;
import org.sagebionetworks.bridge.models.OperatingSystem;
import org.sagebionetworks.bridge.models.studies.Study;
@ContextConfiguration("classpath:test-context.xml")
@RunWith(SpringJUnit4ClassRunner.class)
public class DynamoStudyDaoTest {
private static final Logger LOG = LoggerFactory.getLogger(DynamoStudyDaoTest.class);
private final Set<String> USER_PROFILE_ATTRIBUTES = Sets.newHashSet("can-publish", "can-recontact");
private final Set<String> TASK_IDENTIFIERS = Sets.newHashSet("task1", "task2");
private final Set<String> ACTIVITY_EVENT_KEYS = Sets.newHashSet("event1", "event2");
private final Set<String> DATA_GROUPS = Sets.newHashSet("beta_users", "production_users");
private Set<String> studyIdsToDelete;
@Resource
DynamoStudyDao studyDao;
@Before
public void before() {
        // Re-initialize the set before each test so each test tracks only the studies it creates.
studyIdsToDelete = new HashSet<>();
}
@After
public void after() {
for (String oneStudyId : studyIdsToDelete) {
try {
Study study = studyDao.getStudy(oneStudyId);
studyDao.deleteStudy(study);
} catch (RuntimeException ex) {
LOG.error("Error deleting study " + oneStudyId + ": " + ex.getMessage(), ex);
}
}
}
@Test
public void crudOneStudy() {
Study study = TestUtils.getValidStudy(DynamoStudyDaoTest.class);
// Verify these values are persisted in a map
String androidARN = study.getPushNotificationARNs().get(OperatingSystem.ANDROID);
String iosARN = study.getPushNotificationARNs().get(OperatingSystem.IOS);
assertNotNull(androidARN);
assertNotNull(iosARN);
study.setUserProfileAttributes(USER_PROFILE_ATTRIBUTES);
study.setTaskIdentifiers(TASK_IDENTIFIERS);
        study.setDataGroups(DATA_GROUPS);
        // Set explicitly so the assertEquals on activity event keys below compares against a known value.
        study.setActivityEventKeys(ACTIVITY_EVENT_KEYS);
study = createStudy(study);
assertNotNull("Study was assigned a version", study.getVersion());
assertNotNull("Study has an identifier", study.getIdentifier());
study.setName("This is a test name");
study = studyDao.updateStudy(study);
study = studyDao.getStudy(study.getIdentifier());
assertEquals("Name was set", "This is a test name", study.getName());
assertEquals("bridge-testing+support@sagebase.org", study.getSupportEmail());
assertEquals("bridge-testing+consent@sagebase.org", study.getConsentNotificationEmail());
assertEquals(USER_PROFILE_ATTRIBUTES, study.getUserProfileAttributes());
assertTrue(study.getUsesCustomExportSchedule());
assertEquals(TASK_IDENTIFIERS, study.getTaskIdentifiers());
assertEquals(ACTIVITY_EVENT_KEYS, study.getActivityEventKeys());
assertEquals(DATA_GROUPS, study.getDataGroups());
assertEquals(androidARN, study.getPushNotificationARNs().get(OperatingSystem.ANDROID));
assertEquals(iosARN, study.getPushNotificationARNs().get(OperatingSystem.IOS));
assertFalse(study.getDisableExport());
String identifier = study.getIdentifier();
studyDao.deleteStudy(study);
try {
studyDao.getStudy(identifier);
fail("Should have thrown EntityNotFoundException");
} catch (EntityNotFoundException e) {
// expected
}
}
@Test
public void deactivateStudy() {
Study study = TestUtils.getValidStudy(DynamoStudyDaoTest.class);
createStudy(study);
studyDao.deactivateStudy(study.getIdentifier());
        // verify that the study record still exists in DynamoDB; deactivation must not delete it
assertTrue(studyDao.doesIdentifierExist(study.getIdentifier()));
}
@Test
public void stringSetsCanBeEmpty() throws Exception {
Study study = TestUtils.getValidStudy(DynamoStudyDaoTest.class);
study = createStudy(study);
// This triggers an error without the JSON serializer annotations because DDB doesn't support empty sets
study.setTaskIdentifiers(Sets.newHashSet());
study.setActivityEventKeys(Sets.newHashSet());
studyDao.updateStudy(study);
// We get what we want here because it deserializes the empty array
study = studyDao.getStudy(study.getIdentifier());
assertEquals(0, study.getTaskIdentifiers().size());
assertEquals(0, study.getActivityEventKeys().size());
        // Setting these to null is now equivalent to setting an empty set: no error is thrown and the getters never return null
study.setTaskIdentifiers(null);
study.setActivityEventKeys(null);
studyDao.updateStudy(study);
// We get what we want here because we set the field to an empty set in the constructor. It's never null.
study = studyDao.getStudy(study.getIdentifier());
assertEquals(0, study.getTaskIdentifiers().size());
assertEquals(0, study.getActivityEventKeys().size());
}
@Test
public void canRetrieveAllStudies() throws InterruptedException {
// create studies
Study study1 = createStudy(TestUtils.getValidStudy(DynamoStudyDaoTest.class));
String study1Id = study1.getIdentifier();
Study study2 = createStudy(TestUtils.getValidStudy(DynamoStudyDaoTest.class));
String study2Id = study2.getIdentifier();
// verify that they exist
{
List<Study> savedStudies = studyDao.getStudies();
boolean foundStudy1 = false, foundStudy2 = false;
for (Study oneStudy : savedStudies) {
if (study1Id.equals(oneStudy.getIdentifier())) {
foundStudy1 = true;
} else if (study2Id.equals(oneStudy.getIdentifier())) {
foundStudy2 = true;
}
}
assertTrue(foundStudy1);
assertTrue(foundStudy2);
}
// delete studies
studyDao.deleteStudy(study1);
studyDao.deleteStudy(study2);
// verify that they don't exist
{
List<Study> savedStudies = studyDao.getStudies();
for (Study oneStudy : savedStudies) {
if (study1Id.equals(oneStudy.getIdentifier())) {
fail("study " + study1Id + " shouldn't exist");
} else if (study2Id.equals(oneStudy.getIdentifier())) {
fail("study " + study2Id + " shouldn't exist");
}
}
}
}
@Test
public void willNotSaveTwoStudiesWithSameIdentifier() {
Study study;
try {
study = TestUtils.getValidStudy(DynamoStudyDaoTest.class);
study = createStudy(study);
study.setVersion(null);
createStudy(study);
fail("Should have thrown entity exists exception");
} catch (EntityAlreadyExistsException e) {
// expected exception
}
}
@Test(expected = EntityAlreadyExistsException.class)
public void identifierUniquenessEnforcedByVersionChecks() throws Exception {
Study study = TestUtils.getValidStudy(DynamoStudyDaoTest.class);
createStudy(study);
study.setVersion(null); // This is now a "new study"
createStudy(study);
}
@Test(expected = UnauthorizedException.class)
public void cantDeleteApiStudy() {
Study apiStudy = studyDao.getStudy("api");
studyDao.deleteStudy(apiStudy);
}
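    // Helper: creates the study through the DAO and records its identifier so after() can
    // delete it even if the test fails before doing its own cleanup.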
private Study createStudy(Study study) {
Study createdStudy = studyDao.createStudy(study);
studyIdsToDelete.add(createdStudy.getIdentifier());
return createdStudy;
}
}
|
|
package com.darkkeeper.minecraft.mods;
import android.app.ActionBar;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Parcelable;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.widget.NestedScrollView;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.appodeal.ads.Appodeal;
/*import com.google.android.gms.analytics.GoogleAnalytics;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;*/
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Random;
//import com.google.firebase.analytics.FirebaseAnalytics;
import com.appodeal.ads.BannerCallbacks;
import com.appodeal.ads.BannerView;
import com.appodeal.ads.InterstitialCallbacks;
import com.backendless.Backendless;
import com.backendless.exceptions.BackendlessFault;
import com.darkkeeper.minecraft.mods.entity.DatabaseManager;
import com.google.android.gms.analytics.GoogleAnalytics;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
/**
* Created by Dark Keeper on 29.03.2016.
*/
public class BaseActivity extends AppCompatActivity {
private static String TAG = "LOG_TAG";
/* private Tracker globalTracker;*/
public static String BACKENDLESS_ID = "918BBE49-41A3-F430-FF7A-C08FC9404A00";
public static String BACKENDLESS_SECRET_KEY = "B5480EA7-C95B-9DBF-FF73-9F6F5AAC0700";
public static String BACKENDLESS_REST_KEY = "0E032D19-9B84-69D0-FF72-83D19A8F1D00";
public final static String DEFAULT_LANGUAGE = "en";
public static String CURRENT_LANGUAGE = "en";
private String[] backendlessIds = {
"918BBE49-41A3-F430-FF7A-C08FC9404A00",
"37C19C4A-242C-21F9-FF9E-45EED2745000",
"0C25C203-286D-A5CB-FFA7-095D6B013600",
"3E0E08A1-732F-8207-FF48-94A648FA6200",
"B9084ACE-574D-4115-FF7B-09AC734A6100",
"8627312C-DDDF-62A4-FF06-79B9A1F81E00",
"692CA5F1-D000-A744-FFA9-6D63EBD51100",
"A401EF37-F400-7D79-FFC7-BAE81514C200",
"7681EF14-B8F9-AFB6-FF39-301C2A8ACA00",
"ADCCB67A-19B1-65E9-FF45-B3F74807B100",
"D20CB563-E214-92DE-FF98-5A8E0CB21800",
"47A3BC54-4EA8-B408-FF80-69B582F8F300",
"BEBCBC7D-D6D2-69CD-FFE4-E7DEDDBB6E00",
"01408E9F-E96C-EBCB-FF7E-C5CE9B80EF00",
"C06B6F1F-5C53-4AA3-FFFB-D57AD4B66600",
"57F372F2-7BA8-8D55-FFEE-81413854AD00",
"A5219442-D316-2D84-FF8C-773D9E267D00",
"4D29A7AF-92CF-CE3C-FF15-23A9DB1A2200",
"96F4F7EB-090C-8B8E-FFD2-413FA8670F00",
"65DB385D-C465-AAC4-FF18-E6CF5E0F5000",
"6D486926-7E8B-AC39-FF16-43008763DD00",
"33322429-2901-5E8D-FF3F-859462C6D000",
"FE890419-1F2A-A566-FF1C-27B8F46AD300",
"2CDA2541-CE57-6775-FF8A-F7E6A6FE7900",
"D205CDE0-2236-79FB-FFCF-953BF73B4B00",
"BCD4F6DE-06C1-4741-FF0B-5E8C2FA67900",
"43BF92EE-7A3D-ADE2-FF31-5B81CAD1CD00",
"4C112E3D-45DF-4A4B-FFC9-9F8DB2412D00",
"16D8B079-86A1-A7AA-FF0F-FD7AD1604C00",
};
protected Tracker globalTracker;
public final static String INTENT_UPDATE = "UPDATE_APP";
protected boolean canShowCommercial = false;
// private static FirebaseAnalytics mFirebaseAnalytics;
// private static int backPressedCount = 1;
private Toast toast = null;
private static boolean isActivityVisible;
private List<DatabaseManager> databaseManagers;
private int currentDatabaseManager;
@Override
protected void onResume() {
super.onResume();
isActivityVisible = true;
/* boolean isBannerShowing = false;
try {
BannerView bannerView1 = (BannerView) findViewById( R.id.appodealBannerView );
bannerView1.setVisibility(View.GONE);
} catch (Exception e){
}
try {
BannerView bannerView2 = (BannerView) findViewById( R.id.appodealBannerView2 );
bannerView2.setVisibility(View.GONE);
} catch (Exception e){
}
showBanner(this);*/
}
@Override
protected void onPause() {
super.onPause();
Appodeal.hide(this,Appodeal.BANNER_BOTTOM);
isActivityVisible = false;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
switch ( item.getItemId() ){
case android.R.id.home:
logFirebaseEvent("Back");
onBackPressed();
return true;
case R.id.action_share:
logFirebaseEvent( "Share" );
canShowCommercial = true;
showInterstitial( this );
share( this );
return true;
case R.id.action_rate:
logFirebaseEvent( "Rate" );
canShowCommercial = true;
showInterstitial( this );
rate( this );
return true;
case R.id.action_help:
logFirebaseEvent( "Help" );
/* canShowCommercial = true;
showInterestial( this );*/
help( this );
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onBackPressed(){
super.onBackPressed();
canShowCommercial = true;
showInterstitial( this );
/* backPressedCount++;
if ( (3 + backPressedCount)%3 == 0 ){
canShowCommercial = true;
showInterestial( this );
}
super.onBackPressed();*/
}
protected boolean isOnline() {
ConnectivityManager cm =
(ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo netInfo = cm.getActiveNetworkInfo();
return netInfo != null && netInfo.isConnectedOrConnecting();
}
protected void getSystemLanguage (){
CURRENT_LANGUAGE = Locale.getDefault().getLanguage();
}
protected void setDatabaseManagers() {
        databaseManagers = new ArrayList<>(backendlessIds.length);
for (String backendlessId : backendlessIds) {
databaseManagers.add(new DatabaseManager(backendlessId, BACKENDLESS_SECRET_KEY));
}
Random r = new Random();
        currentDatabaseManager = r.nextInt(databaseManagers.size()) - 1; // random value in [-1, size() - 2], so the first initNextDatabase() call lands on a random database
}
protected void initNextDatabase (){
if ( currentDatabaseManager < (databaseManagers.size()-1) ) {
currentDatabaseManager++;
BACKENDLESS_ID = databaseManagers.get(currentDatabaseManager).getDatabaseID();
BACKENDLESS_SECRET_KEY = databaseManagers.get(currentDatabaseManager).getDatabaseSecretKey();
Backendless.initApp(this, BACKENDLESS_ID, BACKENDLESS_SECRET_KEY);
} else {
            currentDatabaseManager = 0; // wrap around to the first database
BACKENDLESS_ID = databaseManagers.get(currentDatabaseManager).getDatabaseID();
BACKENDLESS_SECRET_KEY = databaseManagers.get(currentDatabaseManager).getDatabaseSecretKey();
Backendless.initApp(this, BACKENDLESS_ID, BACKENDLESS_SECRET_KEY);
}
// Log.d("MY_LOGS", "ChangeDatabase to " + currentDatabaseManager );
}
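    /* How the database rotation fits together (descriptive note only, no behavior change):
     * setDatabaseManagers() builds one DatabaseManager per Backendless app id and picks a random
     * start index one slot below a valid position, so the first initNextDatabase() call lands on a
     * random database; every later call advances round-robin and re-runs Backendless.initApp()
     * with that database's id and secret key. Typical call order:
     *   setDatabaseManagers();
     *   initNextDatabase(); // Backendless is now initialized against a randomly chosen database
     */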
/* protected void initDatabase () {
Backendless.initApp(this, BACKENDLESS_ID, BACKENDLESS_SECRET_KEY, APP_VERSION);
}*/
protected void initAds () {
String appKey = getResources().getString(R.string.appodeal_id);
//Appodeal.disableLocationPermissionCheck();
//Appodeal.setAutoCache(Appodeal.INTERSTITIAL, false);
Appodeal.setBannerViewId(R.id.appodealBannerView);
// Appodeal.confirm(Appodeal.SKIPPABLE_VIDEO);
// Appodeal.disableNetwork(this, "cheetah");
/* Appodeal.disableNetwork(this, "yandex");
Appodeal.disableNetwork(this, "unity_ads");
Appodeal.disableNetwork(this, "chartboost");*/
// Appodeal.disableNetwork(this, "adcolony");
/* Appodeal.disableNetwork(this, "ogury");
Appodeal.disableNetwork(this, "mmedia");
Appodeal.disableNetwork(this, "inmobi");*/
Appodeal.initialize(this, appKey, Appodeal.BANNER_BOTTOM | Appodeal.INTERSTITIAL );
}
protected void cacheInterestial(){
Appodeal.cache(this, Appodeal.INTERSTITIAL);
}
protected void initGoogleAnalytics ( Context context ) {
// mFirebaseAnalytics = FirebaseAnalytics.getInstance( context );
GoogleAnalytics analytics = GoogleAnalytics.getInstance(context);
globalTracker = analytics.newTracker( R.xml.global_tracker );
globalTracker.setScreenName(getPackageName());
globalTracker.send(new HitBuilders.ScreenViewBuilder().build());
/* globalTracker.send(new HitBuilders.EventBuilder()
.setCategory("BackendlessFault")
.setAction( "test" )
.setLabel( "test" )
.build());*/
}
private void logFirebaseEvent ( String event ){
globalTracker.send(new HitBuilders.EventBuilder()
.setCategory(event)
.setAction( "Toolbar Button Clicked" )
.setLabel(event + " Clicked from Toolbar")
.build());
/* Bundle bundle = new Bundle();
bundle.putString(FirebaseAnalytics.Param.ITEM_ID, event);
// bundle.putString(FirebaseAnalytics.Param.ITEM_NAME, event);
mFirebaseAnalytics.logEvent(FirebaseAnalytics.Event.SELECT_CONTENT, bundle);*/
}
protected void showInterstitial ( Context context ) {
Log.d(TAG, "CAN_SHOW = " + canShowCommercial );
// Appodeal.show((Activity) context, Appodeal.INTERSTITIAL );
if (canShowCommercial) {
Appodeal.show((Activity) context, Appodeal.INTERSTITIAL );
}
}
protected void showBanner ( Context context ) {
// Log.d("MY_LOGS2", "CAN_SHOW = " + canShowCommercial );
// isBannerShowing = false;
Appodeal.show((Activity) context, Appodeal.BANNER_BOTTOM);
}
protected void hideBanner( Context context ){
Appodeal.hide((Activity) context, Appodeal.BANNER_BOTTOM);
}
protected void showMyCommercial(){
Intent intent = new Intent(this, MyCommercialActivity.class);
startActivity(intent);
}
protected void setAppodealCallbacks ( final Context context ) {
Appodeal.setInterstitialCallbacks(new InterstitialCallbacks() {
//private Toast mToast;
@Override
public void onInterstitialLoaded(boolean isPrecache) {
canShowCommercial = false;
//Log.d(TAG, "onInterstitialLoaded: ");
// Log.d("LOG_D", "CanShowCommercial = " + canShowCommercial);
}
@Override
public void onInterstitialFailedToLoad() {
canShowCommercial = false;
showMyCommercial();
//Log.d(TAG, "onInterstitialFailedToLoad: ");
// Log.d("LOG_D", "CanShowCommercial = " + canShowCommercial);
}
@Override
public void onInterstitialShown() {
canShowCommercial = false;
//Log.d(TAG, "onInterstitialShown: ");
// Log.d("LOG_D", "CanShowCommercial = " + canShowCommercial);
}
@Override
public void onInterstitialClicked() {
canShowCommercial = false;
//Log.d(TAG, "onInterstitialClicked: ");
// Log.d("LOG_D", "CanShowCommercial = " + canShowCommercial);
}
@Override
public void onInterstitialClosed() {
canShowCommercial = false;
//Log.d(TAG, "onInterstitialClosed: ");
// Log.d("LOG_D", "CanShowCommercial = " + canShowCommercial);
}
});
Appodeal.setBannerCallbacks(new BannerCallbacks() {
//private Toast mToast;
@Override
public void onBannerLoaded(int height, boolean isPrecache) {
// showToast(String.format("onBannerLoaded, %ddp" + isBannerShowing, height));
/* if ( !isBannerShowing && Appodeal.isLoaded(Appodeal.BANNER_BOTTOM)){
Appodeal.show((Activity) context, Appodeal.BANNER_BOTTOM);
isBannerShowing = true;
}*/
}
@Override
public void onBannerFailedToLoad() {
// showToast("onBannerFailedToLoad");
}
@Override
public void onBannerShown() {
/* try {
BannerView bannerView1 = (BannerView) findViewById( R.id.appodealBannerView );
bannerView1.setVisibility(View.VISIBLE);
} catch (Exception e){
}
try {
BannerView bannerView2 = (BannerView) findViewById( R.id.appodealBannerView2 );
bannerView2.setVisibility(View.VISIBLE);
NestedScrollView nestedScrollView = (NestedScrollView) findViewById(R.id.nestedScrollView2);
Log.d("MY_LOGS", "heights = " + nestedScrollView.getLayoutParams().height);
nestedScrollView.getLayoutParams().height += 50;
Log.d("MY_LOGS", "heights = " + nestedScrollView.getLayoutParams().height);
nestedScrollView.invalidate();
} catch (Exception e){
Log.d("MY_LOGS", "ERROR = " + e.toString());
e.printStackTrace();
}*/
// showToast("onBannerShown");
}
@Override
public void onBannerClicked() {
// showToast("onBannerClicked");
}
/* void showToast(final String text) {
if (mToast == null) {
mToast = Toast.makeText(context, text, Toast.LENGTH_SHORT);
}
mToast.setText(text);
mToast.setDuration(Toast.LENGTH_SHORT);
mToast.show();
}*/
});
}
/* protected void showAdsOnStart ( Context context ) {
Appodeal.show((Activity) context, Appodeal.INTERSTITIAL);
Appodeal.show((Activity) context, Appodeal.BANNER_BOTTOM);
}*/
protected void showPermissionDialog ( final Context context ) {
AlertDialog.Builder permissions = new AlertDialog.Builder( context );
permissions.setMessage(R.string.showPermissionMessage)
.setTitle(R.string.notificationMessage)
.setCancelable(false)
.setPositiveButton(R.string.answerOk,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
                                ActivityCompat.requestPermissions((Activity) context, new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE, android.Manifest.permission.READ_EXTERNAL_STORAGE}, 1); // cast to Activity so the dialog is not tied to MainActivity
return;
}
}
);
AlertDialog alert = permissions.create();
alert.show();
}
protected void showInetRequirementMessage ( final Context context ) {
/* if ()
AlertDialog.Builder permissions = new AlertDialog.Builder( context );
permissions.setMessage("You need Internet Connection to use this application. Enable your Internet and try again!")
.setTitle("Notification")
.setCancelable(false)
.setPositiveButton("Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
System.exit(0);
return;
}
}
);
AlertDialog alert = permissions.create();
alert.show();*/
if (isActivityVisible) {
try {
                // Throws when no toast exists yet; the catch block below then creates one.
                toast.getView().isShown();
toast.setText(R.string.networkReq);
} catch (Exception e) {
toast = Toast.makeText(context, R.string.networkReq, Toast.LENGTH_SHORT);
}
toast.show();
}
/* if ( toast==null || toast.getView().getWindowVisibility() != View.GONE ) {
toast = Toast.makeText(context, "Network is Unnavailable", Toast.LENGTH_SHORT);
toast.show();
Log.d("LOGS", "" + toast + " VISIBILITY = " + toast.getView().getWindowVisibility() + " isShown = " + toast.getView().isShown() + " getWindowToken = " + toast.getView().getWindowToken());
}*/
}
protected void showErrorDialog ( final Context context ) {
AlertDialog.Builder builder = new AlertDialog.Builder( context );
builder.setMessage(R.string.errorMessage)
.setTitle(R.string.errorTitle)
.setCancelable(false)
.setPositiveButton(R.string.answerOk,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
return;
}
}
);
AlertDialog alert = builder.create();
alert.show();
}
protected void showExitDialog ( Context context ) {
Appodeal.show((Activity) context, Appodeal.INTERSTITIAL );
AlertDialog.Builder exit = new AlertDialog.Builder( context );
exit.setMessage(R.string.exitText)
.setTitle(R.string.exitQuestion)
.setCancelable(false)
.setPositiveButton(R.string.answerYes,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
System.exit(0);
}
}
)
.setNegativeButton(R.string.answerNo,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
return;
}
}
);
AlertDialog alert = exit.create();
alert.show();
}
/* protected void showUpdateDialog ( final Context context ) {
AlertDialog.Builder alert = new AlertDialog.Builder( context );
alert.setMessage(R.string.updateMessage)
.setTitle(R.string.updateTitle)
.setCancelable(false)
.setPositiveButton(R.string.answerInstallNow,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
Intent i = new Intent( Intent.ACTION_VIEW );
i.setData(Uri.parse("https://play.google.com/store/apps/details?id=" + context.getPackageName()));
context.startActivity(i);
}
}
)
.setNegativeButton(R.string.answerLater,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
}
);
AlertDialog alertDialog = alert.create();
alertDialog.show();
}*/
protected void rate ( Context context ) {
Intent i = new Intent( Intent.ACTION_VIEW );
i.setData(Uri.parse("https://play.google.com/store/apps/details?id=" + context.getPackageName()));
context.startActivity(i);
}
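    // share() below builds a targeted chooser: it first restricts the chooser to social apps
    // (Facebook, Twitter, VK, Kate Mobile) and, if none of those are installed, falls back to
    // offering every installed ACTION_SEND handler.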
protected void share ( Context context ) {
PackageManager pm = context.getPackageManager();
Intent sendIntent = new Intent(Intent.ACTION_SEND);
sendIntent.setType("text/plain");
List<Intent> targetedShareIntents = new ArrayList<>();
List<ResolveInfo> resInfo = pm.queryIntentActivities(sendIntent, 0);
String urlToShare = context.getString( context.getApplicationInfo().labelRes) + getString(R.string.shareMessage) + "https://play.google.com/store/apps/details?id=" + context.getPackageName();
Intent chooserIntent;
boolean isTargetsFound = false;
if (!resInfo.isEmpty()) {
for (ResolveInfo info : resInfo) {
Intent targetedShare = new Intent(android.content.Intent.ACTION_SEND);
targetedShare.setType("text/plain");
                String pkg = info.activityInfo.packageName.toLowerCase();
                String name = info.activityInfo.name.toLowerCase();
                if (pkg.contains("facebook") || name.contains("facebook")
                        || pkg.contains("twitter") || name.contains("twitter")
                        || pkg.contains("vk") || name.contains("vk")
                        || pkg.contains("kate") || name.contains("kate")) {
targetedShare.putExtra(Intent.EXTRA_TEXT, urlToShare );
targetedShare.setPackage(info.activityInfo.packageName);
targetedShareIntents.add(targetedShare);
isTargetsFound = true;
}
}
if ( isTargetsFound ) {
chooserIntent = Intent.createChooser(targetedShareIntents.remove(0), getString(R.string.sharePickApp));
chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS, targetedShareIntents.toArray(new Parcelable[]{}));
} else
{
/*String sharerUrl = "https://www.facebook.com/sharer/sharer.php?u=" + urlToShare;
chooserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(sharerUrl));*/
for (ResolveInfo info : resInfo) {
Intent targetedShare = new Intent(android.content.Intent.ACTION_SEND);
targetedShare.setType("text/plain");
targetedShare.putExtra(Intent.EXTRA_TEXT, urlToShare );
targetedShare.setPackage(info.activityInfo.packageName);
targetedShareIntents.add(targetedShare);
}
chooserIntent = Intent.createChooser(targetedShareIntents.remove(0), getString(R.string.sharePickApp));
chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS, targetedShareIntents.toArray(new Parcelable[]{}));
}
context.startActivity(chooserIntent);
}
}
protected void help ( final Context context ) {
Intent i = new Intent( context, HelpActivity.class );
context.startActivity(i);
/* AlertDialog.Builder rate = new AlertDialog.Builder( context );
String helpMessage = "Thank you for downloading " + context.getResources().getString( R.string.app_name ) + "!" + "\n" + "\n" +
"Report any bug to apiatosin@gmail.com and leave five stars on the store! " + "\n" + "\n" +
"Have fun!" + "\n" + "\n" +
"Developer: Andrei Piatosin"
;
rate.setMessage( helpMessage )
.setTitle("Help")
.setCancelable(false)
.setNegativeButton("Rate me!",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Intent i = new Intent( Intent.ACTION_VIEW );
i.setData( Uri.parse( "https://play.google.com/store/apps/details?id=" + context.getPackageName() ) );
context.startActivity(i);
}
}
)
.setNeutralButton("Report",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
sendEmail( context );
return;
}
}
)
.setPositiveButton("OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
return;
}
}
);
AlertDialog alert = rate.create();
alert.show();*/
}
protected void sendEmail( Context context ){
Intent myIntent1 = new Intent(android.content.Intent.ACTION_SEND);
myIntent1.putExtra(android.content.Intent.EXTRA_EMAIL, new String[]{"apiatosin@gmail.com"});
final String my1 = Settings.Secure.getString( context.getContentResolver(), Settings.Secure.ANDROID_ID);
final String my2 = android.os.Build.DEVICE;
final String my3 = android.os.Build.MANUFACTURER;
final String my4 = android.os.Build.MODEL;
final String my5 = android.os.Build.VERSION.RELEASE;
final int my6 = android.os.Build.VERSION.SDK_INT;
final String my7 = android.os.Build.BRAND;
final String my8 = android.os.Build.VERSION.INCREMENTAL;
final String my9 = android.os.Build.PRODUCT;
myIntent1.putExtra(android.content.Intent.EXTRA_SUBJECT, "Support Request: " + my1 + " Application: " + context.getPackageName() + " Device: " + my2 + " Manufacturer: " + my3 + " Model: " + my4 + " Version: " + my5 + " SDK: " + my6 + " Brand: " + my7 + " Incremental: " + my8 + " Product: " + my9);
myIntent1.setType("text/plain");
//IN CASE EMAIL APP FAILS, THEN DEFINE THE OPTION TO LAUNCH SUPPORT WEBSITE
String url2 = "";
Intent myIntent2 = new Intent(Intent.ACTION_VIEW);
myIntent2.setData(Uri.parse(url2));
//IF USER CLICKS THE OK BUTTON, THEN DO THIS
try {
// TRY TO LAUNCH TO EMAIL APP
context.startActivity(Intent.createChooser(myIntent1, "Send email to Developer"));
// startActivity(myIntent1);
} catch (ActivityNotFoundException ex) {
// ELSE LAUNCH TO WEB BROWSER
// activity.startActivity(myIntent2);
}
}
protected boolean isPermissionGranted (){
return ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED &&
ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
}
protected void sendBackendlessFaultToAnalytics (Tracker globalTracker, String action, BackendlessFault fault){
globalTracker.send(new HitBuilders.EventBuilder()
.setCategory("BackendlessFault")
.setAction( action )
.setLabel( fault.getMessage() )
.build());
}
}
|
|
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.scheduler;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.hawkular.agent.monitor.diagnostics.Diagnostics;
import org.hawkular.agent.monitor.inventory.dmr.DMRAvailInstance;
import org.hawkular.agent.monitor.inventory.dmr.DMRMetricInstance;
import org.hawkular.agent.monitor.log.MsgLogger;
import org.hawkular.agent.monitor.scheduler.config.AvailDMRPropertyReference;
import org.hawkular.agent.monitor.scheduler.config.DMREndpoint;
import org.hawkular.agent.monitor.scheduler.config.DMRPropertyReference;
import org.hawkular.agent.monitor.scheduler.config.LocalDMREndpoint;
import org.hawkular.agent.monitor.scheduler.config.SchedulerConfiguration;
import org.hawkular.agent.monitor.scheduler.polling.IntervalBasedScheduler;
import org.hawkular.agent.monitor.scheduler.polling.Scheduler;
import org.hawkular.agent.monitor.scheduler.polling.Task;
import org.hawkular.agent.monitor.scheduler.polling.TaskGroup;
import org.hawkular.agent.monitor.scheduler.polling.dmr.AvailDMRTask;
import org.hawkular.agent.monitor.scheduler.polling.dmr.AvailDMRTaskGroupRunnable;
import org.hawkular.agent.monitor.scheduler.polling.dmr.DMRTask;
import org.hawkular.agent.monitor.scheduler.polling.dmr.MetricDMRTask;
import org.hawkular.agent.monitor.scheduler.polling.dmr.MetricDMRTaskGroupRunnable;
import org.hawkular.agent.monitor.service.ServerIdentifiers;
import org.hawkular.agent.monitor.storage.AvailBufferedStorageDispatcher;
import org.hawkular.agent.monitor.storage.MetricBufferedStorageDispatcher;
import org.hawkular.agent.monitor.storage.StorageAdapter;
/**
* The core service that schedules tasks and stores the data resulting from those tasks to its storage adapter.
*/
public class SchedulerService {
private final SchedulerConfiguration schedulerConfig;
private final ServerIdentifiers selfId;
private final ModelControllerClientFactory localDMRClientFactory;
private final Diagnostics diagnostics;
private final Scheduler metricScheduler;
private final Scheduler availScheduler;
private final MetricBufferedStorageDispatcher metricCompletionHandler;
private final AvailBufferedStorageDispatcher availCompletionHandler;
private boolean started = false;
public SchedulerService(
SchedulerConfiguration configuration,
ServerIdentifiers selfId,
Diagnostics diagnostics,
StorageAdapter storageAdapter,
ModelControllerClientFactory localDMRClientFactory) {
this.schedulerConfig = configuration;
// for those tasks that require a DMR client to our own WildFly server, this factory can provide those clients
this.localDMRClientFactory = localDMRClientFactory;
// this helps identify where we are running
this.selfId = selfId;
// metrics for our own internals
this.diagnostics = diagnostics;
// create the schedulers - we use two: one for metric collections and one for avail checks
this.metricCompletionHandler = new MetricBufferedStorageDispatcher(configuration, storageAdapter,
diagnostics);
this.metricScheduler = new IntervalBasedScheduler(this, "Hawkular-Monitor-Scheduler-Metrics",
configuration.getMetricSchedulerThreads());
this.availCompletionHandler = new AvailBufferedStorageDispatcher(configuration, storageAdapter,
diagnostics);
this.availScheduler = new IntervalBasedScheduler(this, "Hawkular-Monitor-Scheduler-Avail",
configuration.getAvailSchedulerThreads());
}
public ServerIdentifiers getSelfIdentifiers() {
return this.selfId;
}
public Diagnostics getDiagnostics() {
return this.diagnostics;
}
public void start() {
if (started) {
return; // already started
}
MsgLogger.LOG.infoStartingScheduler();
// turn metric DMR refs into Tasks and schedule them now
List<Task> metricTasks = createMetricDMRTasks(schedulerConfig.getDMRMetricsToBeCollected());
this.metricCompletionHandler.start();
this.metricScheduler.schedule(metricTasks);
// turn avail DMR refs into Tasks and schedule them now
List<Task> availTasks = createAvailDMRTasks(schedulerConfig.getDMRAvailsToBeChecked());
this.availCompletionHandler.start();
this.availScheduler.schedule(availTasks);
started = true;
}
public void stop() {
if (!started) {
return; // already stopped
}
MsgLogger.LOG.infoStoppingScheduler();
// stop completion handlers
this.metricCompletionHandler.shutdown();
this.availCompletionHandler.shutdown();
// stop the schedulers
this.metricScheduler.shutdown();
this.availScheduler.shutdown();
started = false;
}
public Runnable getTaskGroupRunnable(TaskGroup group) {
switch (group.getType()) {
case METRIC: {
// we are guaranteed the first task is the same kind as all the rest
Task firstTask = group.getTask(0);
if (DMRTask.class.isInstance(firstTask)) {
// we are guaranteed that all tasks in a group refer to the same endpoint
DMREndpoint endpoint = ((DMRTask) firstTask).getEndpoint();
ModelControllerClientFactory factory;
if (endpoint instanceof LocalDMREndpoint) {
factory = this.localDMRClientFactory;
} else {
factory = new ModelControllerClientFactoryImpl(endpoint);
}
return new MetricDMRTaskGroupRunnable(group, metricCompletionHandler, getDiagnostics(), factory);
} else {
throw new UnsupportedOperationException("Unsupported metric group: " + group);
}
}
case AVAIL: {
// we are guaranteed the first task is the same kind as all the rest
Task firstTask = group.getTask(0);
if (DMRTask.class.isInstance(firstTask)) {
// we are guaranteed that all tasks in a group refer to the same endpoint
DMREndpoint endpoint = ((DMRTask) firstTask).getEndpoint();
ModelControllerClientFactory factory;
if (endpoint instanceof LocalDMREndpoint) {
factory = this.localDMRClientFactory;
} else {
factory = new ModelControllerClientFactoryImpl(endpoint);
}
return new AvailDMRTaskGroupRunnable(group, availCompletionHandler, getDiagnostics(), factory);
} else {
throw new UnsupportedOperationException("Unsupported avail group: " + group);
}
}
default: {
throw new IllegalArgumentException("Bad group [" + group + "]. Please report this bug.");
}
}
}
private List<Task> createMetricDMRTasks(Map<DMREndpoint, List<DMRMetricInstance>> map) {
List<Task> tasks = new ArrayList<>();
for (Map.Entry<DMREndpoint, List<DMRMetricInstance>> entry : map.entrySet()) {
DMREndpoint dmrEndpoint = entry.getKey();
for (DMRMetricInstance instance : entry.getValue()) {
// parse sub references (complex attribute support)
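// e.g. an attribute of the form "attr#sub" is split into attribute "attr" and sub-reference "sub"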
DMRPropertyReference propRef = instance.getProperty();
String attribute = propRef.getAttribute();
String subref = null;
if (attribute != null) {
int i = attribute.indexOf("#");
if (i > 0) {
subref = attribute.substring(i + 1, attribute.length());
attribute = attribute.substring(0, i);
}
}
tasks.add(new MetricDMRTask(propRef.getInterval(), dmrEndpoint, propRef.getAddress(), attribute,
subref, instance));
}
}
return tasks;
}
private List<Task> createAvailDMRTasks(Map<DMREndpoint, List<DMRAvailInstance>> map) {
List<Task> tasks = new ArrayList<>();
for (Map.Entry<DMREndpoint, List<DMRAvailInstance>> entry : map.entrySet()) {
DMREndpoint dmrEndpoint = entry.getKey();
for (DMRAvailInstance instance : entry.getValue()) {
// parse sub references (complex attribute support)
AvailDMRPropertyReference propRef = instance.getProperty();
String attribute = propRef.getAttribute();
String subref = null;
if (attribute != null) {
int i = attribute.indexOf("#");
if (i > 0) {
subref = attribute.substring(i + 1, attribute.length());
attribute = attribute.substring(0, i);
}
}
tasks.add(new AvailDMRTask(propRef.getInterval(), dmrEndpoint, propRef.getAddress(), attribute,
subref, instance, propRef.getUpRegex()));
}
}
return tasks;
}
}
|
|
/*
* Copyright (c) 2005, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* @test
* @bug 4770745 6234507
* @summary test a variety of zip file entries
* @author Martin Buchholz
*/
import java.util.*;
import java.util.zip.*;
import java.util.jar.*;
import java.io.*;
public class Assortment {
static int passed = 0, failed = 0;
static void fail(String msg) {
failed++;
new Exception(msg).printStackTrace();
}
static void unexpected(Throwable t) {
failed++;
t.printStackTrace();
}
static void check(boolean condition, String msg) {
if (! condition)
fail(msg);
}
static void check(boolean condition) {
check(condition, "Something's wrong");
}
private static class Entry {
private String name;
private int method;
private byte[] data;
private byte[] extra;
private String comment;
Entry(String name,
int method,
byte[] data,
byte[] extra,
String comment) {
this.name = name;
this.method = method;
this.data = data;
this.extra = extra;
this.comment = comment;
}
void write(ZipOutputStream s) throws Exception {
ZipEntry e = new ZipEntry(name);
CRC32 crc32 = new CRC32();
e.setMethod(method);
if (method == ZipEntry.STORED) {
e.setSize(data == null ? 0 : data.length);
crc32.reset();
if (data != null) crc32.update(data);
e.setCrc(crc32.getValue());
} else {
e.setSize(0);
e.setCrc(0);
}
if (comment != null) e.setComment(comment);
if (extra != null) e.setExtra(extra);
s.putNextEntry(e);
if (data != null) s.write(data);
}
byte[] getData(ZipFile f, ZipEntry e) throws Exception {
byte[] fdata = new byte[(int)e.getSize()];
InputStream is = f.getInputStream(e);
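// a single read is assumed to fill the buffer; entry data in this test is at most a few bytes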
is.read(fdata);
return fdata;
}
void verify(ZipFile f) throws Exception {
ZipEntry e = f.getEntry(name);
byte[] data = (this.data == null) ? new byte[]{} : this.data;
byte[] extra = (this.extra != null && this.extra.length == 0) ?
null : this.extra;
check(name.equals(e.getName()));
check(method == e.getMethod());
check((((comment == null) || comment.equals(""))
&& (e.getComment() == null))
|| comment.equals(e.getComment()));
check(Arrays.equals(extra, e.getExtra()));
check(Arrays.equals(data, getData(f, e)));
check(e.getSize() == data.length);
check((method == ZipEntry.DEFLATED) ||
(e.getCompressedSize() == data.length));
}
void verify(JarInputStream jis) throws Exception {
// JarInputStream "automatically" reads the manifest
if (name.equals("meta-iNf/ManIfEst.Mf"))
return;
ZipEntry e = jis.getNextEntry();
byte[] data = (this.data == null) ? new byte[]{} : this.data;
byte[] otherData = new byte[data.length];
jis.read(otherData);
check(Arrays.equals(data, otherData));
byte[] extra = (this.extra != null && this.extra.length == 0) ?
null : this.extra;
check(Arrays.equals(extra, e.getExtra()));
check(name.equals(e.getName()));
check(method == e.getMethod());
check(e.getSize() == -1 || e.getSize() == data.length);
check((method == ZipEntry.DEFLATED) ||
(e.getCompressedSize() == data.length));
}
}
private static int uniquifier = 86;
private static String uniquify(String name) {
return name + (uniquifier++);
}
private static byte[] toBytes(String s) throws Exception {
return s.getBytes("UTF-8");
}
private static byte[] toExtra(byte[] bytes) throws Exception {
if (bytes == null) return null;
// Construct a fake extra field with valid header length
byte[] v = new byte[bytes.length + 4];
v[0] = (byte) 0x47;
v[1] = (byte) 0xff;
v[2] = (byte) bytes.length;
v[3] = (byte) (bytes.length >> 8);
System.arraycopy(bytes, 0, v, 4, bytes.length);
return v;
}
private static Random random = new Random();
private static String makeName(int length) {
StringBuilder sb = new StringBuilder(length);
for (int i = 0; i < length; i++)
sb.append((char)(random.nextInt(10000)+1));
return sb.toString();
}
public static void main(String[] args) throws Exception {
File zipName = new File("x.zip");
int[] methods = {ZipEntry.STORED, ZipEntry.DEFLATED};
String[] names = {makeName(1), makeName(160), makeName(9000)};
byte[][] datas = {null, new byte[]{}, new byte[]{'d'}};
byte[][] extras = {null, new byte[]{}, new byte[]{'e'}};
String[] comments = {null, "", "c"};
List<Entry> entries = new ArrayList<Entry>();
// Highly unusual manifest
entries.add(new Entry("meta-iNf/ManIfEst.Mf",
ZipEntry.STORED,
toBytes("maNiFest-VeRsIon: 1.0\n"),
toExtra(toBytes("Can manifests have extra??")),
"Can manifests have comments??"));
// The emptiest possible entry
entries.add(new Entry("", ZipEntry.STORED, null, null, ""));
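// Add one entry for every combination of name, compression method, data, extra field and comment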
for (String name : names)
for (int method : methods)
for (byte[] data : datas) // datae??
for (byte[] extra : extras)
for (String comment : comments)
entries.add(new Entry(uniquify(name), method, data,
toExtra(extra), comment));
//----------------------------------------------------------------
// Write zip file using ZipOutputStream
//----------------------------------------------------------------
ZipOutputStream zos = new ZipOutputStream(
new FileOutputStream(zipName));
for (Entry e : entries)
e.write(zos);
zos.close();
//----------------------------------------------------------------
// Verify zip file contents using JarFile class
//----------------------------------------------------------------
JarFile f = new JarFile(zipName);
check(f.getManifest() != null);
for (Entry e : entries)
e.verify(f);
f.close();
//----------------------------------------------------------------
// Verify zip file contents using JarInputStream class
//----------------------------------------------------------------
JarInputStream jis = new JarInputStream(
new FileInputStream(zipName));
// JarInputStream "automatically" reads the manifest
check(jis.getManifest() != null);
for (Entry e : entries)
e.verify(jis);
jis.close();
// String cmd = "unzip -t " + zipName.getPath() + " >/dev/tty";
// new ProcessBuilder(new String[]{"/bin/sh", "-c", cmd}).start().waitFor();
zipName.deleteOnExit();
System.out.printf("passed = %d, failed = %d%n", passed, failed);
if (failed > 0) throw new Exception("Some tests failed");
}
}
|
|
package com.lab.inmotion.learny.Activities;
import android.app.ProgressDialog;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.media.MediaPlayer;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import com.lab.inmotion.learny.R;
import java.io.IOException;
import java.util.UUID;
public class ArduinoActivity extends AppCompatActivity {
Button btnOn, btnOff, btnDis;
String address = null;
private ProgressDialog progress;
BluetoothAdapter myBluetooth = null;
BluetoothSocket btSocket = null;
private boolean isBtConnected = false;
//Standard Serial Port Profile (SPP) UUID used for Bluetooth serial connections
static final UUID myUUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
//private MediaPlayer sp;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
Intent newint = getIntent();
address = newint.getStringExtra(DeviceActivity.EXTRA_ADDRESS); //receive the address of the bluetooth device
setContentView(R.layout.activity_arduino);
//sp = MediaPlayer.create(this, R.raw.sp);
new ConnectBT().execute(); //Call the class to connect
}
/*
public void playMusic(View view){
if(!sp.isPlaying()){
sp.start();
}
else{
sp.reset();
}
}
public void stopMusic(View view){
if(sp.isPlaying()){
sp.pause();
}
}*/
public void btnAhead(View view){
Toast.makeText(this, "Ahead", Toast.LENGTH_LONG).show();
ahead();
}
public void btnBack(View view){
Toast.makeText(this, "Back", Toast.LENGTH_LONG).show();
back();
}
public void btnRight(View view){
Toast.makeText(this, "Right", Toast.LENGTH_LONG).show();
right();
}
public void btnLeft(View view){
Toast.makeText(this, "Left", Toast.LENGTH_LONG).show();
left();
}
private void Disconnect()
{
if (btSocket!=null) //If a socket exists, close it
{
try
{
btSocket.close(); //close connection
}
catch (IOException e)
{ msg("Error");}
}
finish(); //return to the first layout
}
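// Single-character movement commands written to the Bluetooth serial stream.
// The Arduino sketch on the other end is assumed to interpret 'a' = ahead,
// 'e' = back, 'b' = left and 'd' = right.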
private void left()
{
if (btSocket!=null)
{
try
{
btSocket.getOutputStream().write('b');
}
catch (IOException e)
{
msg("Error");
}
}
}
private void right()
{
if (btSocket!=null)
{
try
{
btSocket.getOutputStream().write('d');
}
catch (IOException e)
{
msg("Error");
}
}
}
private void ahead()
{
if (btSocket!=null)
{
try
{
btSocket.getOutputStream().write('a');
}
catch (IOException e)
{
msg("Error");
}
}
}
private void back()
{
if (btSocket!=null)
{
try
{
btSocket.getOutputStream().write('e');
}
catch (IOException e)
{
msg("Error");
}
}
}
// fast way to call Toast
private void msg(String s)
{
Toast.makeText(getApplicationContext(), s, Toast.LENGTH_LONG).show();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
private class ConnectBT extends AsyncTask<Void, Void, Void> // connects to the device in the background
{
private boolean ConnectSuccess = true; //if it's here, it's almost connected
@Override
protected void onPreExecute()
{
progress = ProgressDialog.show(ArduinoActivity.this, "Connecting...", "Please wait!!!"); //show a progress dialog
}
@Override
protected Void doInBackground(Void... devices) //while the progress dialog is shown, the connection is done in background
{
try
{
if (btSocket == null || !isBtConnected)
{
myBluetooth = BluetoothAdapter.getDefaultAdapter();//get the mobile bluetooth device
BluetoothDevice dispositivo = myBluetooth.getRemoteDevice(address);//connect using the address received from DeviceActivity and check that it is available
btSocket = dispositivo.createInsecureRfcommSocketToServiceRecord(myUUID);//create a RFCOMM (SPP) connection
BluetoothAdapter.getDefaultAdapter().cancelDiscovery();
btSocket.connect();//start connection
}
}
catch (IOException e)
{
ConnectSuccess = false;//if the try failed, you can check the exception here
}
return null;
}
@Override
protected void onPostExecute(Void result) //after the doInBackground, it checks if everything went fine
{
super.onPostExecute(result);
if (!ConnectSuccess)
{
msg("Connection failed. Is it an SPP Bluetooth device? Try again.");
finish();
}
else
{
msg("Connected.");
isBtConnected = true;
}
progress.dismiss();
}
}
}
|
|
/*
* Copyright (c) 2009-2013, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.docsubmission.configuration.jmx;
import gov.hhs.fha.nhinc.configuration.IConfiguration.serviceEnum;
import gov.hhs.fha.nhinc.docsubmission._11.entity.deferred.response.EntityDocSubmissionDeferredResponseSecured;
import gov.hhs.fha.nhinc.docsubmission._11.entity.deferred.response.EntityDocSubmissionDeferredResponseUnsecured;
import gov.hhs.fha.nhinc.docsubmission._11.nhin.deferred.response.NhinXDRResponse;
import gov.hhs.fha.nhinc.docsubmission.inbound.deferred.response.InboundDocSubmissionDeferredResponse;
import gov.hhs.fha.nhinc.docsubmission.outbound.deferred.response.OutboundDocSubmissionDeferredResponse;
import javax.servlet.ServletContext;
/**
* The Class DocumentSubmissionDefResponse11WebServices.
*
* @author msw
*/
public class DocumentSubmissionDefResponse11WebServices extends AbstractDSDeferredRespWebServicesMXBean {
/** The Constant NHIN_DS_BEAN_NAME. */
private static final String NHIN_DS_BEAN_NAME = "nhinXDRDeferredResponse";
/** The Constant ENTITY_UNSECURED_DS_BEAN_NAME. */
private static final String ENTITY_UNSECURED_DS_BEAN_NAME = "entityXDRDeferredResponseUnsecured";
/** The Constant ENTITY_SECURED_DS_BEAN_NAME. */
private static final String ENTITY_SECURED_DS_BEAN_NAME = "entityXDRDeferredResponseSecured";
private final serviceEnum serviceName = serviceEnum.DocumentSubmissionDeferredResponse;
/**
* Instantiates a new document submission deferred response 1.1 web services bean.
*
* @param sc the servlet context
*/
public DocumentSubmissionDefResponse11WebServices(ServletContext sc) {
super(sc);
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isInboundPassthru()
*/
@Override
public boolean isInboundPassthru() {
boolean isPassthru = false;
NhinXDRResponse nhinDS = retrieveBean(NhinXDRResponse.class, getNhinBeanName());
InboundDocSubmissionDeferredResponse inboundDS = nhinDS.getInboundDocSubmission();
if (compareClassName(inboundDS, DEFAULT_INBOUND_PASSTHRU_IMPL_CLASS_NAME)) {
isPassthru = true;
}
return isPassthru;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isOutboundPassthru()
*/
@Override
public boolean isOutboundPassthru() {
boolean isPassthru = false;
EntityDocSubmissionDeferredResponseUnsecured entityDS = retrieveBean(
EntityDocSubmissionDeferredResponseUnsecured.class, getEntityUnsecuredBeanName());
OutboundDocSubmissionDeferredResponse outboundDS = entityDS.getOutboundDocSubmission();
if (compareClassName(outboundDS, DEFAULT_OUTBOUND_PASSTHRU_IMPL_CLASS_NAME)) {
isPassthru = true;
}
return isPassthru;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isInboundStandard()
*/
@Override
public boolean isInboundStandard() {
boolean isStandard = false;
NhinXDRResponse nhinDS = retrieveBean(NhinXDRResponse.class, getNhinBeanName());
InboundDocSubmissionDeferredResponse inboundDS = nhinDS.getInboundDocSubmission();
if (compareClassName(inboundDS, DEFAULT_INBOUND_STANDARD_IMPL_CLASS_NAME)) {
isStandard = true;
}
return isStandard;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isOutboundStandard()
*/
@Override
public boolean isOutboundStandard() {
boolean isStandard = false;
EntityDocSubmissionDeferredResponseUnsecured entityDS = retrieveBean(
EntityDocSubmissionDeferredResponseUnsecured.class, getEntityUnsecuredBeanName());
OutboundDocSubmissionDeferredResponse outboundDS = entityDS.getOutboundDocSubmission();
if (compareClassName(outboundDS, DEFAULT_OUTBOUND_STANDARD_IMPL_CLASS_NAME)) {
isStandard = true;
}
return isStandard;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getNhinBeanName()
*/
@Override
protected String getNhinBeanName() {
return NHIN_DS_BEAN_NAME;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getEntityUnsecuredBeanName()
*/
@Override
protected String getEntityUnsecuredBeanName() {
return ENTITY_UNSECURED_DS_BEAN_NAME;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getEntitySecuredBeanName()
*/
@Override
protected String getEntitySecuredBeanName() {
return ENTITY_SECURED_DS_BEAN_NAME;
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureInboundImpl(java.lang.String)
*/
@Override
public void configureInboundStdImpl() throws InstantiationException, IllegalAccessException,
ClassNotFoundException {
NhinXDRResponse nhinDS = retrieveBean(NhinXDRResponse.class, getNhinBeanName());
InboundDocSubmissionDeferredResponse inboundDS = retrieveBean(InboundDocSubmissionDeferredResponse.class,
getStandardInboundBeanName());
nhinDS.setInboundDocSubmissionResponse(inboundDS);
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureInboundImpl(java.lang.String)
*/
@Override
public void configureInboundPtImpl() throws InstantiationException, IllegalAccessException,
ClassNotFoundException {
NhinXDRResponse nhinDS = retrieveBean(NhinXDRResponse.class, getNhinBeanName());
InboundDocSubmissionDeferredResponse inboundDS = retrieveBean(InboundDocSubmissionDeferredResponse.class,
getPassthroughInboundBeanName());
nhinDS.setInboundDocSubmissionResponse(inboundDS);
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureOutboundImpl(java.lang.String)
*/
@Override
public void configureOutboundStdImpl() throws InstantiationException, IllegalAccessException,
ClassNotFoundException {
OutboundDocSubmissionDeferredResponse outboundDS = retrieveBean(
OutboundDocSubmissionDeferredResponse.class, getStandardOutboundBeanName());
EntityDocSubmissionDeferredResponseUnsecured entityDSUnsecured = retrieveBean(
EntityDocSubmissionDeferredResponseUnsecured.class, getEntityUnsecuredBeanName());
EntityDocSubmissionDeferredResponseSecured entityDSSecured = retrieveBean(
EntityDocSubmissionDeferredResponseSecured.class, getEntitySecuredBeanName());
entityDSSecured.setOutboundDocSubmissionResponse(outboundDS);
entityDSUnsecured.setOutboundDocSubmissionResponse(outboundDS);
}
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureOutboundImpl(java.lang.String)
*/
@Override
public void configureOutboundPtImpl() throws InstantiationException, IllegalAccessException,
ClassNotFoundException {
OutboundDocSubmissionDeferredResponse outboundDS = retrieveBean(
OutboundDocSubmissionDeferredResponse.class, getPassthroughOutboundBeanName());
EntityDocSubmissionDeferredResponseUnsecured entityDSUnsecured = retrieveBean(
EntityDocSubmissionDeferredResponseUnsecured.class, getEntityUnsecuredBeanName());
EntityDocSubmissionDeferredResponseSecured entityDSSecured = retrieveBean(
EntityDocSubmissionDeferredResponseSecured.class, getEntitySecuredBeanName());
entityDSSecured.setOutboundDocSubmissionResponse(outboundDS);
entityDSUnsecured.setOutboundDocSubmissionResponse(outboundDS);
}
public serviceEnum getServiceName() {
return this.serviceName;
}
}
|
|
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.launcher;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.CodeSource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/** Main class for a JAR file to run code from "WEB-INF/lib". */
public final class GerritLauncher {
private static final String PKG = "com.google.gerrit.pgm";
public static final String NOT_ARCHIVED = "NOT_ARCHIVED";
private static ClassLoader daemonClassLoader;
public static void main(String[] argv) throws Exception {
System.exit(mainImpl(argv));
}
/**
* Invokes a program.
*
* <p>Creates a new classloader to load and run the program class. To reuse a classloader across
* calls (e.g. from tests), use {@link #invokeProgram(ClassLoader, String[])}.
*
* @param argv arguments, as would be passed to {@code gerrit.war}. The first argument is the
* program name.
* @return program return code.
* @throws Exception if any error occurs.
*/
public static int mainImpl(String[] argv) throws Exception {
if (argv.length == 0 || "-h".equals(argv[0]) || "--help".equals(argv[0])) {
File me;
try {
me = getDistributionArchive();
} catch (FileNotFoundException e) {
me = null;
}
String jar = me != null ? me.getName() : "gerrit.war";
System.err.println("Gerrit Code Review " + getVersion(me));
System.err.println("usage: java -jar " + jar + " command [ARG ...]");
System.err.println();
System.err.println("The most commonly used commands are:");
System.err.println(" init Initialize a Gerrit installation");
System.err.println(" reindex Rebuild the secondary index");
System.err.println(" daemon Run the Gerrit network daemons");
System.err.println(" gsql Run the interactive query console");
System.err.println(" version Display the build version number");
System.err.println(" passwd Set or change password in secure.config");
System.err.println();
System.err.println(" ls List files available for cat");
System.err.println(" cat FILE Display a file from the archive");
System.err.println();
return 1;
}
// Special cases, a few global options actually are programs.
//
if ("-v".equals(argv[0]) || "--version".equals(argv[0])) {
argv[0] = "version";
} else if ("-p".equals(argv[0]) || "--cat".equals(argv[0])) {
argv[0] = "cat";
} else if ("-l".equals(argv[0]) || "--ls".equals(argv[0])) {
argv[0] = "ls";
}
// Run the application class
//
final ClassLoader cl = libClassLoader(isProlog(programClassName(argv[0])));
Thread.currentThread().setContextClassLoader(cl);
return invokeProgram(cl, argv);
}
public static void daemonStart(String[] argv) throws Exception {
if (daemonClassLoader != null) {
throw new IllegalStateException("daemonStart can be called only once per JVM instance");
}
final ClassLoader cl = libClassLoader(false);
Thread.currentThread().setContextClassLoader(cl);
daemonClassLoader = cl;
String[] daemonArgv = new String[argv.length + 1];
daemonArgv[0] = "daemon";
for (int i = 0; i < argv.length; i++) {
daemonArgv[i + 1] = argv[i];
}
int res = invokeProgram(cl, daemonArgv);
if (res != 0) {
throw new Exception("Unexpected return value: " + res);
}
}
public static void daemonStop(String[] argv) throws Exception {
if (daemonClassLoader == null) {
throw new IllegalStateException("daemonStop can be called only after a call to daemonStart");
}
String[] daemonArgv = new String[argv.length + 2];
daemonArgv[0] = "daemon";
daemonArgv[1] = "--stop-only";
for (int i = 0; i < argv.length; i++) {
daemonArgv[i + 2] = argv[i];
}
int res = invokeProgram(daemonClassLoader, daemonArgv);
if (res != 0) {
throw new Exception("Unexpected return value: " + res);
}
}
private static boolean isProlog(String cn) {
return "PrologShell".equals(cn) || "Rulec".equals(cn);
}
private static String getVersion(File me) {
if (me == null) {
return "";
}
try (JarFile jar = new JarFile(me)) {
Manifest mf = jar.getManifest();
Attributes att = mf.getMainAttributes();
String val = att.getValue(Attributes.Name.IMPLEMENTATION_VERSION);
return val != null ? val : "";
} catch (IOException e) {
return "";
}
}
/**
* Invokes a program in the provided {@code ClassLoader}.
*
* @param loader classloader to load program class from.
* @param origArgv arguments, as would be passed to {@code gerrit.war}. The first argument is the
* program name.
* @return program return code.
* @throws Exception if any error occurs.
*/
public static int invokeProgram(ClassLoader loader, String[] origArgv) throws Exception {
String name = origArgv[0];
final String[] argv = new String[origArgv.length - 1];
System.arraycopy(origArgv, 1, argv, 0, argv.length);
Class<?> clazz;
try {
try {
String cn = programClassName(name);
clazz = Class.forName(PKG + "." + cn, true, loader);
} catch (ClassNotFoundException cnfe) {
if (name.equals(name.toLowerCase())) {
clazz = Class.forName(PKG + "." + name, true, loader);
} else {
throw cnfe;
}
}
} catch (ClassNotFoundException cnfe) {
System.err.println("fatal: unknown command " + name);
System.err.println(" (no " + PKG + "." + name + ")");
return 1;
}
final Method main;
try {
main = clazz.getMethod("main", argv.getClass());
} catch (SecurityException | NoSuchMethodException e) {
System.err.println("fatal: unknown command " + name);
return 1;
}
final Object res;
try {
if ((main.getModifiers() & Modifier.STATIC) == Modifier.STATIC) {
res = main.invoke(null, new Object[] {argv});
} else {
res =
main.invoke(clazz.getConstructor(new Class<?>[] {}).newInstance(), new Object[] {argv});
}
} catch (InvocationTargetException ite) {
if (ite.getCause() instanceof Exception) {
throw (Exception) ite.getCause();
} else if (ite.getCause() instanceof Error) {
throw (Error) ite.getCause();
} else {
throw ite;
}
}
if (res instanceof Number) {
return ((Number) res).intValue();
}
return 0;
}
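// Maps a lower-case, dash-separated command name to its CamelCase program class name (e.g. "init" -> "Init").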
private static String programClassName(String cn) {
if (cn.equals(cn.toLowerCase())) {
StringBuilder buf = new StringBuilder();
buf.append(Character.toUpperCase(cn.charAt(0)));
for (int i = 1; i < cn.length(); i++) {
if (cn.charAt(i) == '-' && i + 1 < cn.length()) {
i++;
buf.append(Character.toUpperCase(cn.charAt(i)));
} else {
buf.append(cn.charAt(i));
}
}
return buf.toString();
}
return cn;
}
private static ClassLoader libClassLoader(boolean prologCompiler) throws IOException {
final File path;
try {
path = getDistributionArchive();
} catch (FileNotFoundException e) {
if (NOT_ARCHIVED.equals(e.getMessage())) {
return useDevClasspath();
}
throw e;
}
final SortedMap<String, URL> jars = new TreeMap<>();
try (ZipFile zf = new ZipFile(path)) {
final Enumeration<? extends ZipEntry> e = zf.entries();
while (e.hasMoreElements()) {
final ZipEntry ze = e.nextElement();
if (ze.isDirectory()) {
continue;
}
String name = ze.getName();
if (name.startsWith("WEB-INF/lib/")) {
extractJar(zf, ze, jars);
} else if (name.startsWith("WEB-INF/pgm-lib/")) {
// Some Prolog tools are restricted.
if (prologCompiler || !name.startsWith("WEB-INF/pgm-lib/prolog-")) {
extractJar(zf, ze, jars);
}
}
}
} catch (IOException e) {
throw new IOException("Cannot obtain libraries from " + path, e);
}
if (jars.isEmpty()) {
return GerritLauncher.class.getClassLoader();
}
// The extension API needs to be its own ClassLoader, along
// with a few of its dependencies. Try to construct this first.
List<URL> extapi = new ArrayList<>();
move(jars, "gerrit-extension-api-", extapi);
move(jars, "guice-", extapi);
move(jars, "javax.inject-1.jar", extapi);
move(jars, "aopalliance-1.0.jar", extapi);
move(jars, "guice-servlet-", extapi);
move(jars, "tomcat-servlet-api-", extapi);
ClassLoader parent = ClassLoader.getSystemClassLoader();
if (!extapi.isEmpty()) {
parent = URLClassLoader.newInstance(extapi.toArray(new URL[extapi.size()]), parent);
}
return URLClassLoader.newInstance(jars.values().toArray(new URL[jars.size()]), parent);
}
private static void extractJar(ZipFile zf, ZipEntry ze, SortedMap<String, URL> jars)
throws IOException {
File tmp = createTempFile(safeName(ze), ".jar");
try (OutputStream out = Files.newOutputStream(tmp.toPath());
InputStream in = zf.getInputStream(ze)) {
byte[] buf = new byte[4096];
int n;
while ((n = in.read(buf, 0, buf.length)) > 0) {
out.write(buf, 0, n);
}
}
String name = ze.getName();
jars.put(name.substring(name.lastIndexOf('/'), name.length()), tmp.toURI().toURL());
}
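// Moves the first jar whose name starts with the given prefix out of the jar map and into the extension API classpath list.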
private static void move(SortedMap<String, URL> jars, String prefix, List<URL> extapi) {
SortedMap<String, URL> matches = jars.tailMap(prefix);
if (!matches.isEmpty()) {
String first = matches.firstKey();
if (first.startsWith(prefix)) {
extapi.add(jars.remove(first));
}
}
}
private static String safeName(ZipEntry ze) {
// Try to derive the name of the temporary file so it
// doesn't completely suck. Best if we can make it
// match the name it was in the archive.
//
String name = ze.getName();
if (name.contains("/")) {
name = name.substring(name.lastIndexOf('/') + 1);
}
if (name.contains(".")) {
name = name.substring(0, name.lastIndexOf('.'));
}
if (name.isEmpty()) {
name = "code";
}
return name;
}
private static volatile File myArchive;
private static volatile File myHome;
private static final Map<Path, FileSystem> zipFileSystems = new HashMap<>();
/**
* Locate the JAR/WAR file we were launched from.
*
* @return local path of the Gerrit WAR file.
* @throws FileNotFoundException if the code cannot guess the location.
*/
public static File getDistributionArchive() throws FileNotFoundException, IOException {
File result = myArchive;
if (result == null) {
synchronized (GerritLauncher.class) {
result = myArchive;
if (result != null) {
return result;
}
result = locateMyArchive();
myArchive = result;
}
}
return result;
}
public static synchronized FileSystem getZipFileSystem(Path zip) throws IOException {
// FileSystems canonicalizes the path, so we should too.
zip = zip.toRealPath();
FileSystem zipFs = zipFileSystems.get(zip);
if (zipFs == null) {
zipFs = newZipFileSystem(zip);
zipFileSystems.put(zip, zipFs);
}
return zipFs;
}
public static FileSystem newZipFileSystem(Path zip) throws IOException {
return FileSystems.newFileSystem(
URI.create("jar:" + zip.toUri()), Collections.<String, String>emptyMap());
}
private static File locateMyArchive() throws FileNotFoundException {
final ClassLoader myCL = GerritLauncher.class.getClassLoader();
final String myName = GerritLauncher.class.getName().replace('.', '/') + ".class";
final URL myClazz = myCL.getResource(myName);
if (myClazz == null) {
throw new FileNotFoundException("Cannot find JAR: no " + myName);
}
// ZipFile may have the path of our JAR hiding within itself.
//
try {
JarFile jar = ((JarURLConnection) myClazz.openConnection()).getJarFile();
File path = new File(jar.getName());
if (path.isFile()) {
return path;
}
} catch (Exception e) {
// Nope, that didn't work. Try a different method.
//
}
// Maybe this is a local class file, running under a debugger?
//
if ("file".equals(myClazz.getProtocol())) {
final File path = new File(myClazz.getPath());
if (path.isFile() && path.getParentFile().isDirectory()) {
throw new FileNotFoundException(NOT_ARCHIVED);
}
}
// The CodeSource might be able to give us the source as a stream.
// If so, copy it to a local file so we have random access to it.
//
final CodeSource src = GerritLauncher.class.getProtectionDomain().getCodeSource();
if (src != null) {
try (InputStream in = src.getLocation().openStream()) {
final File tmp = createTempFile("gerrit_", ".zip");
try (OutputStream out = Files.newOutputStream(tmp.toPath())) {
final byte[] buf = new byte[4096];
int n;
while ((n = in.read(buf, 0, buf.length)) > 0) {
out.write(buf, 0, n);
}
}
return tmp;
} catch (IOException e) {
// Nope, that didn't work.
//
}
}
throw new FileNotFoundException("Cannot find local copy of JAR");
}
private static boolean temporaryDirectoryFound;
private static File temporaryDirectory;
/**
* Creates a temporary file within the application's unpack location.
*
* <p>The launcher unpacks the nested JAR files into a temporary directory, allowing the classes
* to be loaded from local disk with standard Java APIs. This method constructs a new temporary
* file in the same directory.
*
* <p>The method first tries to create {@code prefix + suffix} within the directory under the
* assumption that a given {@code prefix + suffix} combination is made at most once per JVM
* execution. If this fails (e.g. the named file already exists) a mangled unique name is used and
* returned instead, with the unique string appearing between the prefix and suffix.
*
* <p>Files created by this method will be automatically deleted by the JVM when it terminates. If
* the returned file is converted into a directory by the caller, the caller must arrange for the
* contents to be deleted before the directory is.
*
* <p>If supported by the underlying operating system, the temporary directory which contains
* these temporary files is accessible only by the user running the JVM.
*
* @param prefix prefix of the file name.
* @param suffix suffix of the file name.
* @return the path of the temporary file. The returned object exists in the filesystem as a file;
* caller may need to delete and recreate as a directory if a directory was preferred.
* @throws IOException the file could not be created.
*/
public static synchronized File createTempFile(String prefix, String suffix) throws IOException {
if (!temporaryDirectoryFound) {
final File d = File.createTempFile("gerrit_", "_app", tmproot());
if (d.delete() && d.mkdir()) {
// Try to lock the directory down to be accessible by us.
// We first have to remove all permissions, then add back
// only the owner permissions.
//
d.setWritable(false, false /* all */);
d.setReadable(false, false /* all */);
d.setExecutable(false, false /* all */);
d.setWritable(true, true /* owner only */);
d.setReadable(true, true /* owner only */);
d.setExecutable(true, true /* owner only */);
d.deleteOnExit();
temporaryDirectory = d;
}
temporaryDirectoryFound = true;
}
if (temporaryDirectory != null) {
// If we have a private directory and this name has not yet
// been used within the private directory, create it as-is.
//
final File tmp = new File(temporaryDirectory, prefix + suffix);
if (tmp.createNewFile()) {
tmp.deleteOnExit();
return tmp;
}
}
if (!prefix.endsWith("_")) {
prefix += "_";
}
final File tmp = File.createTempFile(prefix, suffix, temporaryDirectory);
tmp.deleteOnExit();
return tmp;
}
/**
* Provide path to a working directory
*
* @return local path of the working directory or null if cannot be determined
*/
public static File getHomeDirectory() {
if (myHome == null) {
myHome = locateHomeDirectory();
}
return myHome;
}
private static File tmproot() {
File tmp;
String gerritTemp = System.getenv("GERRIT_TMP");
if (gerritTemp != null && gerritTemp.length() > 0) {
tmp = new File(gerritTemp);
} else {
tmp = new File(getHomeDirectory(), "tmp");
}
if (!tmp.exists() && !tmp.mkdirs()) {
System.err.println("warning: cannot create " + tmp.getAbsolutePath());
System.err.println("warning: using system temporary directory instead");
return null;
}
// Try to clean up any stale empty directories. Assume any empty
// directory that is older than 7 days is one of these dead ones
// that we can clean up.
//
final File[] tmpEntries = tmp.listFiles();
if (tmpEntries != null) {
final long now = System.currentTimeMillis();
final long expired = now - MILLISECONDS.convert(7, DAYS);
for (File tmpEntry : tmpEntries) {
if (tmpEntry.isDirectory() && tmpEntry.lastModified() < expired) {
final String[] all = tmpEntry.list();
if (all == null || all.length == 0) {
tmpEntry.delete();
}
}
}
}
try {
return tmp.getCanonicalFile();
} catch (IOException e) {
return tmp;
}
}
private static File locateHomeDirectory() {
// Try to find the user's home directory. If we can't find it
// return null so the JVM's default temporary directory is used
// instead. This is probably /tmp or /var/tmp.
//
String userHome = System.getProperty("user.home");
if (userHome == null || "".equals(userHome)) {
userHome = System.getenv("HOME");
if (userHome == null || "".equals(userHome)) {
System.err.println("warning: cannot determine home directory");
System.err.println("warning: using system temporary directory instead");
return null;
}
}
// Ensure the home directory exists. If it doesn't, try to make it.
//
final File home = new File(userHome);
if (!home.exists()) {
if (home.mkdirs()) {
System.err.println("warning: created " + home.getAbsolutePath());
} else {
System.err.println("warning: " + home.getAbsolutePath() + " not found");
System.err.println("warning: using system temporary directory instead");
return null;
}
}
// Use $HOME/.gerritcodereview/tmp for our temporary file area.
//
final File gerrithome = new File(home, ".gerritcodereview");
if (!gerrithome.exists() && !gerrithome.mkdirs()) {
System.err.println("warning: cannot create " + gerrithome.getAbsolutePath());
System.err.println("warning: using system temporary directory instead");
return null;
}
try {
return gerrithome.getCanonicalFile();
} catch (IOException e) {
return gerrithome;
}
}
/**
* Check whether the process is running in Eclipse.
*
* <p>Unlike {@link #getDeveloperEclipseOut()}, this method checks the actual runtime stack, not
* the classpath.
*
* @return true if any thread has a stack frame in {@code org.eclipse.jdt}.
*/
public static boolean isRunningInEclipse() {
return Thread.getAllStackTraces().values().stream()
.flatMap(Arrays::stream)
.anyMatch(e -> e.getClassName().startsWith("org.eclipse.jdt."));
}
/**
* Locate the path of the {@code eclipse-out} directory in a source tree.
*
* <p>Unlike {@link #isRunningInEclipse()}, this method only inspects files relative to the
* classpath, not the runtime stack.
*
* @return local path of the {@code eclipse-out} directory in a source tree.
* @throws FileNotFoundException if the directory cannot be found.
*/
public static Path getDeveloperEclipseOut() throws FileNotFoundException {
return resolveInSourceRoot("eclipse-out");
}
public static boolean isJdk9OrLater() {
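// class file major version 53 corresponds to Java 9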
return Double.parseDouble(System.getProperty("java.class.version")) >= 53.0;
}
public static String getJdkVersionPostJdk8() {
// 9.0.4 => 9
return System.getProperty("java.version").substring(0, 1);
}
public static Properties loadBuildProperties(Path propPath) throws IOException {
Properties properties = new Properties();
try (InputStream in = Files.newInputStream(propPath)) {
properties.load(in);
} catch (NoSuchFileException e) {
// Ignore; will be run from PATH, with a descriptive error if it fails.
}
return properties;
}
static final String SOURCE_ROOT_RESOURCE = "/com/google/gerrit/launcher/workspace-root.txt";
/**
* Locate a path in the source tree.
*
* @return local path of the {@code name} directory in a source tree.
* @throws FileNotFoundException if the directory cannot be found.
*/
public static Path resolveInSourceRoot(String name) throws FileNotFoundException {
// Find ourselves in the classpath, as a loose class file or jar.
Class<GerritLauncher> self = GerritLauncher.class;
Path dir;
String sourceRoot = System.getProperty("sourceRoot");
if (sourceRoot != null) {
dir = Paths.get(sourceRoot);
if (!Files.exists(dir)) {
throw new FileNotFoundException("source root not found: " + dir);
}
} else {
URL u = self.getResource(self.getSimpleName() + ".class");
if (u == null) {
throw new FileNotFoundException("Cannot find class " + self.getName());
} else if ("jar".equals(u.getProtocol())) {
String p = u.getPath();
try {
u = new URL(p.substring(0, p.indexOf('!')));
} catch (MalformedURLException e) {
FileNotFoundException fnfe = new FileNotFoundException("Not a valid jar file: " + u);
fnfe.initCause(e);
throw fnfe;
}
}
if (!"file".equals(u.getProtocol())) {
throw new FileNotFoundException("Cannot extract path from " + u);
}
// Pop up to the top-level source folder by looking for WORKSPACE.
dir = Paths.get(u.getPath());
while (!Files.isRegularFile(dir.resolve("WORKSPACE"))) {
Path parent = dir.getParent();
if (parent == null) {
throw new FileNotFoundException("Cannot find source root from " + u);
}
dir = parent;
}
}
Path ret = dir.resolve(name);
if (!Files.exists(ret)) {
throw new FileNotFoundException(name + " not found in source root " + dir);
}
return ret;
}
private static ClassLoader useDevClasspath() throws IOException {
Path out = getDeveloperEclipseOut();
List<URL> dirs = new ArrayList<>();
dirs.add(out.resolve("classes").toUri().toURL());
ClassLoader cl = GerritLauncher.class.getClassLoader();
if (isJdk9OrLater()) {
Path rootPath = resolveInSourceRoot(".").normalize();
Properties properties = loadBuildProperties(rootPath.resolve(".bazel_path"));
Path outputBase = Paths.get(properties.getProperty("output_base"));
Path runtimeClasspath =
rootPath.resolve("bazel-bin/tools/eclipse/main_classpath_collect.runtime_classpath");
for (String f : Files.readAllLines(runtimeClasspath, UTF_8)) {
URL url;
if (f.startsWith("external")) {
url = outputBase.resolve(f).toUri().toURL();
} else {
url = rootPath.resolve(f).toUri().toURL();
}
if (includeJar(url)) {
dirs.add(url);
}
}
} else {
for (URL u : ((URLClassLoader) cl).getURLs()) {
if (includeJar(u)) {
dirs.add(u);
}
}
}
return URLClassLoader.newInstance(
dirs.toArray(new URL[dirs.size()]), ClassLoader.getSystemClassLoader().getParent());
}
private static boolean includeJar(URL u) {
String path = u.getPath();
return path.endsWith(".jar")
&& !path.endsWith("-src.jar")
&& !path.contains("/com/google/gerrit");
}
private GerritLauncher() {}
}
|
|
// Copyright (c) 2013 Turbulenz Limited
// See LICENSE for full license text.
package com.turbulenz.turbulenz;
import java.util.ArrayList;
import org.json.JSONException;
import org.json.JSONObject;
import android.util.Log;
import android.app.Activity;
import android.app.PendingIntent;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.text.TextUtils;
import android.content.Context;
import android.content.ComponentName;
import android.content.ServiceConnection;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import com.android.vending.billing.IInAppBillingService;
public class googlepayment extends payment.BillingAgent
{
// Logging
static private void _log(String msg)
{
Log.i("tzbilling(google)", msg);
}
static private void _print(String msg)
{
Log.i("tzbilling(google)", msg);
}
static private void _error(String msg)
{
Log.e("tzbilling(google)", msg);
}
// ------------------------------------------------------------------
//
// Billing response codes
public static final int BILLING_RESPONSE_RESULT_OK = 0;
public static final int BILLING_RESPONSE_RESULT_USER_CANCELED = 1;
public static final int BILLING_RESPONSE_RESULT_BILLING_UNAVAILABLE = 3;
public static final int BILLING_RESPONSE_RESULT_ITEM_UNAVAILABLE = 4;
public static final int BILLING_RESPONSE_RESULT_DEVELOPER_ERROR = 5;
public static final int BILLING_RESPONSE_RESULT_ERROR = 6;
public static final int BILLING_RESPONSE_RESULT_ITEM_ALREADY_OWNED = 7;
public static final int BILLING_RESPONSE_RESULT_ITEM_NOT_OWNED = 8;
// Keys for the responses from InAppBillingService
public static final String RESPONSE_CODE = "RESPONSE_CODE";
public static final String RESPONSE_GET_SKU_DETAILS_LIST = "DETAILS_LIST";
public static final String RESPONSE_BUY_INTENT = "BUY_INTENT";
public static final String RESPONSE_INAPP_PURCHASE_DATA =
"INAPP_PURCHASE_DATA";
public static final String RESPONSE_INAPP_SIGNATURE =
"INAPP_DATA_SIGNATURE";
public static final String RESPONSE_INAPP_ITEM_LIST =
"INAPP_PURCHASE_ITEM_LIST";
public static final String RESPONSE_INAPP_PURCHASE_DATA_LIST =
"INAPP_PURCHASE_DATA_LIST";
public static final String RESPONSE_INAPP_SIGNATURE_LIST =
"INAPP_DATA_SIGNATURE_LIST";
public static final String INAPP_CONTINUATION_TOKEN =
"INAPP_CONTINUATION_TOKEN";
public static final String ITEM_TYPE_INAPP = "inapp";
//
// ------------------------------------------------------------------
Activity mActivity = null;
int mPurchaseRequestCode;
ServiceConnection mServiceConnection = null;
IInAppBillingService mService = null;
boolean mReady = false;
// If not zero, indicates that a purchase is already in progress
long mPurchaseContext = 0;
public googlepayment(Activity activity, int purchaseRequestCode)
{
mActivity = activity;
mPurchaseRequestCode = purchaseRequestCode;
// Just listens for connection / disconnection
mServiceConnection = new ServiceConnection() {
@Override
public void onServiceDisconnected(ComponentName name)
{
_log("service disconnected :(");
mService = null;
mReady = false;
reportReady(false);
}
@Override
public void onServiceConnected(ComponentName name, IBinder service)
{
_log("service connected :)");
mService = IInAppBillingService.Stub.asInterface(service);
String packageName = mActivity.getPackageName();
_log("checking for billing.3 in " + packageName + "...");
try {
int response =
mService.isBillingSupported(3, packageName,
ITEM_TYPE_INAPP);
if (BILLING_RESPONSE_RESULT_OK == response) {
mReady = true;
} else {
_log("billing v3 not supported for this package");
}
} catch (RemoteException e) {
_error("remoteexception:");
e.printStackTrace();
}
reportReady(mReady);
}
};
_log("binding service ...");
Intent i = new Intent("com.android.vending.billing.InAppBillingService.BIND");
i.setPackage("com.android.vending");
boolean bound = activity.bindService(
i,
mServiceConnection,
Context.BIND_AUTO_CREATE);
_log("back from bindService: bound: " + Boolean.toString(bound));
}
//
public void shutdown()
{
_log("shutting down ...");
mReady = false;
if (null != mServiceConnection) {
_log("unbinding service");
mActivity.unbindService(mServiceConnection);
mServiceConnection = null;
mService = null;
_log("service unbound");
}
mActivity = null;
_log("done shutting down.");
}
// Workaround to bug where sometimes response codes come as Long
// instead of Integer
static int getResponseCodeFromBundle(Bundle b)
{
Object o = b.get(RESPONSE_CODE);
if (o == null) {
_log("response code is null, assuming OK"); // known issue
return BILLING_RESPONSE_RESULT_OK;
}
else if (o instanceof Integer) {
return ((Integer)o).intValue();
}
else if (o instanceof Long) {
return (int)((Long)o).longValue();
}
else {
_error("!! Unexpected type for bundle response code." +
o.getClass().getName());
throw new RuntimeException("Unexpected type for bundle response code: "
+ o.getClass().getName());
}
}
// Workaround to bug where sometimes response codes come as Long
// instead of Integer
static int getResponseCodeFromIntent(Intent i)
{
Object o = i.getExtras().get(RESPONSE_CODE);
if (o == null) {
_log("Intent with no response code, assuming OK (known issue)");
return BILLING_RESPONSE_RESULT_OK;
}
else if (o instanceof Integer) {
return ((Integer)o).intValue();
}
else if (o instanceof Long) {
return (int)((Long)o).longValue();
}
else {
_log("Unexpected type for intent response code.");
_log(o.getClass().getName());
throw new RuntimeException("Unexpected intent response code type: "
+ o.getClass().getName());
}
}
// ------------------------------------------------------------------
// doPurchase
// ------------------------------------------------------------------
//
protected boolean verifyPurchase(String data, String sig)
{
// A VERY BIG TODO:
// _error("verifyPurchase: !! NO CLIENT SIDE PURCHASE VERIFICATION !!");
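// Signature verification is left unimplemented here; a production build should verify "sig" against "data" with the app's licensing public key before trusting the purchase.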
return true;
}
// Return value indicates whether or not we handled the Intent,
// not whether the purchase succeeded.
public boolean handleActivityResult(int requestCode, int resultCode,
Intent data)
{
_log("handleActivityResult: requestCode: " + requestCode +
" resultCode: " + resultCode);
if (0 == mPurchaseContext) {
_error("handleActivityResult: no purchase context registered");
return true;
}
if (Activity.RESULT_CANCELED == resultCode) {
_log("handleActivityResult: cancelled");
sendPurchaseFailure(mPurchaseContext, null);
mPurchaseContext = 0;
return true;
}
if (Activity.RESULT_OK != resultCode) {
_log("onActivityResult: unknown result code");
sendPurchaseFailure(mPurchaseContext, "Unknown GooglePlay failure");
mPurchaseContext = 0;
return true;
}
_log("handleActivityResult: resultCode was OK");
int purchaseResponse = getResponseCodeFromIntent(data);
if (BILLING_RESPONSE_RESULT_OK != purchaseResponse) {
_log("onActivityResult: bad purchaseResponse: " + purchaseResponse);
sendPurchaseFailure(mPurchaseContext, "Purchase did not complete");
mPurchaseContext = 0;
return true;
}
String purchaseData = data.getStringExtra(RESPONSE_INAPP_PURCHASE_DATA);
String purchaseSig = data.getStringExtra(RESPONSE_INAPP_SIGNATURE);
String purchaseGoogleToken;
String purchaseDevPayload;
_log("onActivityResult: purchaseResponse: OK" +
", purchaseData: " + purchaseData +
", purchaseSig: " + purchaseSig);
if (null == purchaseData || null == purchaseSig) {
_log("onActivityResult: bad purchase data");
sendPurchaseFailure(mPurchaseContext, "bad purchase data");
mPurchaseContext = 0;
return true;
}
if (!verifyPurchase(purchaseData, purchaseSig)) {
_log("onActivityResult: invalid signature");
sendPurchaseFailure(mPurchaseContext, "invalid signature");
mPurchaseContext = 0;
return true;
}
// Extract the sku name from the purchase data
String sku;
String googleToken;
String devPayload;
try {
JSONObject o = new JSONObject(purchaseData);
sku = o.optString("productId");
googleToken = o.optString("token", o.optString("purchaseToken"));
devPayload = o.optString("developerPayload");
} catch(JSONException e) {
sendPurchaseFailure(mPurchaseContext,
"no sku data in GooglePlay response");
mPurchaseContext = 0;
return true;
}
if (TextUtils.isEmpty(sku)) {
sendPurchaseFailure(mPurchaseContext, "sku name was empty");
mPurchaseContext = 0;
return true;
}
_log("onActivityResult: purchase succeeded");
sendPurchaseResult(mPurchaseContext, sku, purchaseData, googleToken,
devPayload, purchaseSig);
mPurchaseContext = 0;
return true;
}
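// A minimal usage sketch (an assumption, not part of this helper): the owning Activity
// would typically forward its onActivityResult callback here and use the boolean return
// value to decide whether to fall through to the default handling. "mBillingHelper" is a
// hypothetical field holding an instance of this class.
//
//   @Override
//   protected void onActivityResult(int requestCode, int resultCode, Intent data) {
//       if (!mBillingHelper.handleActivityResult(requestCode, resultCode, data)) {
//           super.onActivityResult(requestCode, resultCode, data);
//       }
//   }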
//
void uiThreadDoPurchase(String sku, String extraData)
{
_log("uiThreadDoPurchase: sku: " + sku);
try {
Bundle buyIntentBundle =
mService.getBuyIntent(3, mActivity.getPackageName(),
sku, ITEM_TYPE_INAPP, extraData);
int response = getResponseCodeFromBundle(buyIntentBundle);
if (response != BILLING_RESPONSE_RESULT_OK) {
_log("uiThreadDoPurchase: Failed to create intent bundle, " +
"response: " + response);
sendPurchaseFailure(mPurchaseContext,
"failed to create Android buy Intent");
return;
}
PendingIntent pendingIntent =
buyIntentBundle.getParcelable(RESPONSE_BUY_INTENT);
_log("uiThreadDoPurchase: launching buy intent for sku: " + sku +
", with request code: " + mPurchaseRequestCode);
mActivity.startIntentSenderForResult
(pendingIntent.getIntentSender(),
mPurchaseRequestCode, new Intent(),
Integer.valueOf(0), // flagsMask
Integer.valueOf(0), // flagsValues
Integer.valueOf(0)); // extraFlags
}
catch (SendIntentException e) {
_error("uiThreadDoPurchase: SendIntentException");
e.printStackTrace();
sendPurchaseFailure(mPurchaseContext, "failed to send intent");
}
catch (RemoteException e) {
_error("uiThreadDoPurchase: RemoteException");
e.printStackTrace();
sendPurchaseFailure(mPurchaseContext, "RemoteException: " + e);
}
}
//
public boolean doPurchase(final String sku, final String devPayload,
long context)
{
_print("doPurchase: " + sku);
if (!mReady) {
_error("doPurchase: not ready. leaving.");
return false;
}
if (0 != mPurchaseContext) {
_error("doPurchase: !! purchase in progress (internal err)");
return false;
}
mPurchaseContext = context;
mActivity.runOnUiThread(new Runnable() {
@Override public void run() {
uiThreadDoPurchase(sku, devPayload);
}
});
return true;
}
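// Hedged usage sketch (assumption): calling code kicks off a purchase with an opaque
// context handle and the sku it wants to buy; success or failure is reported later via
// sendPurchaseResult/sendPurchaseFailure once handleActivityResult runs. The identifiers
// "billing", "contextHandle" and the sku string below are illustrative only.
//
//   boolean started = billing.doPurchase("example.sku.coins_100", "my-dev-payload", contextHandle);
//   if (!started) {
//       // helper not ready, or another purchase is already in flight
//   }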
// ------------------------------------------------------------------
// doQueryPurchases
// ------------------------------------------------------------------
void threadQueryPurchases(final long context)
{
String continueToken = null;
do {
Bundle ownedItems;
try {
ownedItems = mService.getPurchases
(3, mActivity.getPackageName(), ITEM_TYPE_INAPP,
continueToken);
} catch (RemoteException e) {
_error("threadQueryPurchases: remote exception: " + e);
e.printStackTrace();
sendPurchaseInfoError(context, "failed to communicate "
+ "with Google Play");
return;
}
int response = getResponseCodeFromBundle(ownedItems);
if (BILLING_RESPONSE_RESULT_OK != response) {
_error("doQueryPurchases: !! error retrieving purchased SKUs");
// TODO: Should we grab something from saved data here?
sendPurchaseInfoError(context, "error getting purchase data");
return;
}
if (!ownedItems.containsKey(RESPONSE_INAPP_ITEM_LIST) ||
!ownedItems.containsKey(RESPONSE_INAPP_PURCHASE_DATA_LIST) ||
!ownedItems.containsKey(RESPONSE_INAPP_SIGNATURE_LIST)) {
_error("doQueryPurchases: !! missign fields in response");
sendPurchaseInfoError(context, "response missing some fields");
return;
}
ArrayList<String> ownedSkus =
ownedItems.getStringArrayList(RESPONSE_INAPP_ITEM_LIST);
ArrayList<String> purchaseData =
ownedItems.getStringArrayList(RESPONSE_INAPP_PURCHASE_DATA_LIST);
ArrayList<String> signatureData =
ownedItems.getStringArrayList(RESPONSE_INAPP_SIGNATURE_LIST);
final int numSKUs = purchaseData.size();
_print("doQueryPurchases: " + numSKUs + " SKUs:");
for (int itemIdx = 0 ; itemIdx < numSKUs ; ++itemIdx) {
final String sku = ownedSkus.get(itemIdx);
final String data = purchaseData.get(itemIdx);
final String sig = signatureData.get(itemIdx);
try {
JSONObject o = new JSONObject(data);
//o.optString("productId");
final String googleToken =
o.optString("token", o.optString("purchaseToken"));
final String devPayload = o.optString("developerPayload");
_print(" - " + sku);
_log(" - (data:" + data + ", sig: " + sig + ")");
sendPurchaseInfo(context, sku, data, googleToken,
devPayload, sig);
} catch(JSONException e) {
_error("threadQueryPurchases: bad JSON: " + data);
sendPurchaseInfoError(context, "error in purchase data");
return;
}
}
continueToken = ownedItems.getString(INAPP_CONTINUATION_TOKEN);
_log("doQueryPurchases: got continue token: " + continueToken);
} while(!TextUtils.isEmpty(continueToken));
sendPurchaseInfoTerminator(context);
}
// Call back to native code with the details of each purchase,
public boolean doQueryPurchases(final long context)
{
if (!mReady) {
_error("doQueryPurchases: not ready. leaving.");
return false;
}
_log("doQueryPurchases: ");
(new Thread(new Runnable() {
public void run() {
threadQueryPurchases(context);
}
})).start();
_log("doQueryPurchases: launched thread");
return true;
}
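// Hedged usage sketch (assumption): restoring previously bought items only needs a context
// handle; each owned sku is then reported asynchronously through sendPurchaseInfo and the
// sequence is closed with sendPurchaseInfoTerminator. "billing" and "contextHandle" are
// illustrative names.
//
//   if (!billing.doQueryPurchases(contextHandle)) {
//       // helper not initialized yet; retry after the billing service connects
//   }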
// ------------------------------------------------------------------
// doQueryProduct
// ------------------------------------------------------------------
void threadQueryProduct(final String sku, final long context)
{
ArrayList<String> skuList = new ArrayList<String>();
skuList.add(sku);
Bundle productQueryBundle = new Bundle();
productQueryBundle.putStringArrayList("ITEM_ID_LIST", skuList);
Bundle skuDetails;
try {
skuDetails = mService.getSkuDetails
(3, mActivity.getPackageName(), ITEM_TYPE_INAPP,
productQueryBundle);
} catch (RemoteException e) {
_error("threadQueryProduct: remote exception: " + e);
e.printStackTrace();
sendProductInfoError(context, sku);
return;
}
int response = getResponseCodeFromBundle(skuDetails);
if (BILLING_RESPONSE_RESULT_OK != response) {
_log("threadQueryProduct: bad response from getSkuDetails: " +
response);
sendProductInfoError(context, sku);
return;
}
if (!skuDetails.containsKey(RESPONSE_GET_SKU_DETAILS_LIST)) {
_log("threadQueryProduct: bundle doens't contain list");
sendProductInfoError(context, sku);
return;
}
ArrayList<String> responseList =
skuDetails.getStringArrayList(RESPONSE_GET_SKU_DETAILS_LIST);
if (1 != responseList.size()) {
_log("threadQueryProduct: repsonse list has unexpected length: " +
responseList.size());
sendProductInfoError(context, sku);
return;
}
String responseString = responseList.get(0);
try {
JSONObject o = new JSONObject(responseString);
final String _sku = o.getString("productId");
final String title = o.getString("title");
final String description = o.getString("description");
// TODO: something with price
final String price = o.getString("price");
// TODO: check _sku == sku
sendProductInfo(context, sku, title, description, price);
} catch(JSONException e) {
_error("threadQueryProduct: failed parsing JSON");
sendProductInfoError(context, sku);
}
}
public boolean doQueryProduct(final String sku, final long context)
{
if (!mReady) {
_log("doQueryProduct: no ready");
return false;
}
_log("doQueryProduct: " + sku);
(new Thread(new Runnable() {
public void run() {
threadQueryProduct(sku, context);
}
})).start();
_log("doQueryProduct: launched thread");
return true;
}
// ------------------------------------------------------------------
// doConsume
// ------------------------------------------------------------------
// TODO: Make this async?
// Consume a sku
public boolean doConsume(final String token)
{
if (!mReady) {
_error("doConsume: !! not ready. leaving.");
return false;
}
if (null == token || token.equals("")) {
_error("doConsume: !! null or empty token");
return false;
}
_print("doConsume: token: " + token);
try {
int response =
mService.consumePurchase(3, mActivity.getPackageName(), token);
if (BILLING_RESPONSE_RESULT_OK == response) {
_log("doConsume: successfully consumed");
return true;
} else {
_error("doConsume: !! failed to consume. response: " + response);
}
} catch (RemoteException e) {
_error("doConsume: !! exception " + e.toString());
}
return false;
}
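// Hedged usage sketch (assumption): consumable items are cleared by passing the Google-issued
// purchase token (the "token"/"purchaseToken" field surfaced via sendPurchaseInfo or
// sendPurchaseResult) back into doConsume. "billing" and "googleToken" are illustrative names.
//
//   if (billing.doConsume(googleToken)) {
//       // safe to grant the consumable to the player
//   }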
}
|
|
package com.orbital.lead.controller.RecyclerViewAdapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageView;
import android.widget.TableRow;
import android.widget.TextView;
import android.widget.ViewAnimator;
import com.gc.materialdesign.views.CheckBox;
import com.orbital.lead.Parser.Parser;
import com.orbital.lead.R;
import com.orbital.lead.logic.CustomLogging;
import com.orbital.lead.logic.Logic;
import com.orbital.lead.model.EnumDialogEditJournalType;
import com.orbital.lead.model.Tag;
import com.orbital.lead.model.TagList;
/**
* Created by joseph on 16/6/2015.
*/
public class RecyclerTagListAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder>{
//private static final int VIEW_TYPE_HEADER = 0;
//private static final int VIEW_TYPE_ITEM = 1;
private final String TAG = this.getClass().getSimpleName();
private OnItemClickListener mItemClickListener;
private Parser mParser;
private CustomLogging mLogging;
private Logic mLogic;
private Context mContext;
private TagList mTagList;
private Animation inAnim;
private Animation outAnim;
public class HeaderViewHolder extends RecyclerView.ViewHolder {
public HeaderViewHolder(View v) {
super(v);
}
}
public class ListContentHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
private com.gc.materialdesign.views.CheckBox mCheckBox;
private TableRow mTableRowTag;
private TextView mTextTag;
private ImageView mImageOptions;
//private ProgressBar mLoadingSpinner;
private ViewAnimator mAnimator;
private int pos;
private boolean isChecked;
@Override
public void onClick(View v) {
if(mItemClickListener != null){
mItemClickListener.onItemClick(v, getAdapterPosition());
}
}
public ListContentHolder(View v){
super(v);
this.initTableRowTag(v);
this.initCheckBox(v);
this.initTextTag(v);
this.initImageOption(v);
}
public String getTextTag() {
return this.mTextTag.getText().toString();
}
public void setTextTag(String val){
this.mTextTag.setText(val);
}
public void setTagChecked(final boolean checked){
//this.mCheckBox.setChecked(checked);
this.mCheckBox.post(new Runnable() {
@Override
public void run() {
mCheckBox.setChecked(checked);
isChecked = checked;
}
});
}
public void setPosition(int pos) {
this.pos = pos;
}
private void initTableRowTag(View v){
this.mTableRowTag = (TableRow) v.findViewById(R.id.tableRowTag);
}
private void initCheckBox(View v){
this.mCheckBox = (com.gc.materialdesign.views.CheckBox) v.findViewById(R.id.checkBox);
this.mCheckBox.setOncheckListener(new CheckBox.OnCheckListener() {
@Override
public void onCheck(CheckBox checkBox, boolean b) {
isChecked = b;
mLogging.debug(TAG, "mCheckBox onCheck Set " + pos + " to " + b);
setCurrentTagCheckedStatus(pos, b);
}
});
}
private void initTextTag(View v){
this.mTextTag = (TextView) v.findViewById(R.id.text_tag);
this.mTextTag.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
setTagChecked(!isChecked);
mLogging.debug(TAG, "mTextTag onClick Set " + pos + " to " + !isChecked);
setCurrentTagCheckedStatus(pos, !isChecked);
}
});
}
private void initImageOption(View v){
this.mImageOptions = (ImageView) v.findViewById(R.id.image_more_options);
this.mImageOptions.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mLogic.showJournalPopUpMenu(mContext, v, EnumDialogEditJournalType.EDIT_TAG, getTagList(), null, pos);
}
});
}
private void initViewAnimator(View v){
this.mAnimator = (ViewAnimator) v.findViewById(R.id.animator);
this.mAnimator.setInAnimation(inAnim);
this.mAnimator.setOutAnimation(outAnim);
}
}
//MainActivity activity, View headerView,
public RecyclerTagListAdapter(TagList tagList){
this.initLogging();
this.initLogic();
this.initParser();
//this.setHeaderView(headerView);
this.setTagList(tagList);
//.initCompliedTagList();
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
this.mContext = parent.getContext();
this.initAnimation();
//if(viewType == VIEW_TYPE_HEADER){
// return new HeaderViewHolder(mHeaderView);
//}else{
View v = LayoutInflater.from(this.mContext).inflate(R.layout.recycler_list_row_tag_layout, parent, false);
// set the view's size, margins, paddings and layout parameters
ListContentHolder vh = new ListContentHolder(v);
return vh;
//}
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
if(holder instanceof ListContentHolder){
//position = position - 1;
if(this.getTagList() != null){
Tag mTag = this.getTagList().getList().get(position);
((ListContentHolder) holder).setTextTag(mTag.getName());
((ListContentHolder) holder).setTagChecked(mTag.getIsChecked());
((ListContentHolder) holder).setPosition(position);
}
}
}
@Override
public int getItemCount() {
/*
int count = 0;
if(mHeaderView != null){
count++;
}
return count;
*/
if(this.mTagList != null){
return this.mTagList.size();
}
return 0;
}
@Override
public int getItemViewType(int position) {
//return (position == 0) ? VIEW_TYPE_HEADER : VIEW_TYPE_ITEM;
return 0;
}
public interface OnItemClickListener {
public void onItemClick(View view, int position);
}
public void setOnItemClickListener(final OnItemClickListener listener){
this.mItemClickListener = listener;
}
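// Hedged usage sketch (assumption, not part of the adapter): a hosting Activity or Fragment
// would typically wire this adapter to a RecyclerView and register the row-click callback as
// below; "mRecyclerView" and "mTagList" are hypothetical fields of the host, and
// LinearLayoutManager comes from android.support.v7.widget.
//
//   RecyclerTagListAdapter adapter = new RecyclerTagListAdapter(mTagList);
//   adapter.setOnItemClickListener(new RecyclerTagListAdapter.OnItemClickListener() {
//       @Override public void onItemClick(View view, int position) {
//           // react to the tapped tag row
//       }
//   });
//   mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
//   mRecyclerView.setAdapter(adapter);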
private void initParser(){
this.mParser = Parser.getInstance();
}
private void initLogging(){
this.mLogging = CustomLogging.getInstance();
}
private void initLogic() {
this.mLogic = Logic.getInstance();
}
/*
private void setMainActivity(MainActivity activity){
this.activity = activity;
}
*/
/*
private void setHeaderView(View v){
this.mHeaderView = v;
}
*/
/*
private void initCompliedTagList() {
this.mTagList = new TagList();
// all currently used tags compile together with the unused tags
//mLogging.debug(TAG, "================== copy all Tag List into hashmap ==================");
for(Tag tag : this.getUsedTagList().getList()){
this.getRecentTagMap().setValue(tag.getName(), tag.getIsChecked());
//mLogging.debug(TAG, tag.getName() + " => " + tag.getIsChecked());
}
// display all tags based on the compiled tag map
this.mTagList.setList(this.getRecentTagMap().getTagList().getList());
}
*/
private void setTagList(TagList list){
this.mTagList = list;
}
private void setCurrentTagCheckedStatus(int position, boolean val) {
this.getTagList().getList().get(position).setIsChecked(val);
}
private Parser getParser(){
return this.mParser;
}
private Context getContext(){
return this.mContext;
}
public TagList getTagList() {
return this.mTagList;
}
private void initAnimation(){
this.inAnim = AnimationUtils.loadAnimation(getContext(), android.R.anim.fade_in);
this.outAnim = AnimationUtils.loadAnimation(getContext(),android.R.anim.fade_out);
}
}
|
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CreativeTemplatePage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202105;
/**
* Captures a page of {@link CreativeTemplate} objects.
*/
public class CreativeTemplatePage implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202105.CreativeTemplate>{
/* The size of the total result set to which this page belongs. */
private java.lang.Integer totalResultSetSize;
/* The absolute index in the total result set on which this page
* begins. */
private java.lang.Integer startIndex;
/* The collection of creative templates contained within this
* page. */
private com.google.api.ads.admanager.axis.v202105.CreativeTemplate[] results;
public CreativeTemplatePage() {
}
public CreativeTemplatePage(
java.lang.Integer totalResultSetSize,
java.lang.Integer startIndex,
com.google.api.ads.admanager.axis.v202105.CreativeTemplate[] results) {
this.totalResultSetSize = totalResultSetSize;
this.startIndex = startIndex;
this.results = results;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
// Only include length of results to avoid overly verbose output
.add("results.length", getResults() == null ? 0 : getResults().length)
.add("startIndex", getStartIndex())
.add("totalResultSetSize", getTotalResultSetSize())
.toString();
}
/**
* Gets the totalResultSetSize value for this CreativeTemplatePage.
*
* @return totalResultSetSize * The size of the total result set to which this page belongs.
*/
public java.lang.Integer getTotalResultSetSize() {
return totalResultSetSize;
}
/**
* Sets the totalResultSetSize value for this CreativeTemplatePage.
*
* @param totalResultSetSize * The size of the total result set to which this page belongs.
*/
public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
this.totalResultSetSize = totalResultSetSize;
}
/**
* Gets the startIndex value for this CreativeTemplatePage.
*
* @return startIndex * The absolute index in the total result set on which this page
* begins.
*/
public java.lang.Integer getStartIndex() {
return startIndex;
}
/**
* Sets the startIndex value for this CreativeTemplatePage.
*
* @param startIndex * The absolute index in the total result set on which this page
* begins.
*/
public void setStartIndex(java.lang.Integer startIndex) {
this.startIndex = startIndex;
}
/**
* Gets the results value for this CreativeTemplatePage.
*
* @return results * The collection of creative templates contained within this
* page.
*/
public com.google.api.ads.admanager.axis.v202105.CreativeTemplate[] getResults() {
return results;
}
/**
* Sets the results value for this CreativeTemplatePage.
*
* @param results * The collection of creative templates contained within this
* page.
*/
public void setResults(com.google.api.ads.admanager.axis.v202105.CreativeTemplate[] results) {
this.results = results;
}
public com.google.api.ads.admanager.axis.v202105.CreativeTemplate getResults(int i) {
return this.results[i];
}
public void setResults(int i, com.google.api.ads.admanager.axis.v202105.CreativeTemplate _value) {
this.results[i] = _value;
}
/**
* Returns an iterator over this page's {@code results} that:
* <ul>
* <li>Will not be {@code null}.</li>
* <li>Will not support {@link java.util.Iterator#remove()}.</li>
* </ul>
*
* @return a non-null iterator.
*/
@Override
public java.util.Iterator<com.google.api.ads.admanager.axis.v202105.CreativeTemplate> iterator() {
if (results == null) {
return java.util.Collections.<com.google.api.ads.admanager.axis.v202105.CreativeTemplate>emptyIterator();
}
return java.util.Arrays.<com.google.api.ads.admanager.axis.v202105.CreativeTemplate>asList(results).iterator();
}
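// Hedged usage sketch (not generated code): because the page implements Iterable, callers can
// walk the results directly without null-checking getResults(); "page" is an illustrative
// variable holding a CreativeTemplatePage returned by the service.
//
//   for (com.google.api.ads.admanager.axis.v202105.CreativeTemplate template : page) {
//       System.out.println(template);
//   }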
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof CreativeTemplatePage)) return false;
CreativeTemplatePage other = (CreativeTemplatePage) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
(this.totalResultSetSize!=null &&
this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
((this.startIndex==null && other.getStartIndex()==null) ||
(this.startIndex!=null &&
this.startIndex.equals(other.getStartIndex()))) &&
((this.results==null && other.getResults()==null) ||
(this.results!=null &&
java.util.Arrays.equals(this.results, other.getResults())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getTotalResultSetSize() != null) {
_hashCode += getTotalResultSetSize().hashCode();
}
if (getStartIndex() != null) {
_hashCode += getStartIndex().hashCode();
}
if (getResults() != null) {
for (int i=0;
i<java.lang.reflect.Array.getLength(getResults());
i++) {
java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
if (obj != null &&
!obj.getClass().isArray()) {
_hashCode += obj.hashCode();
}
}
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(CreativeTemplatePage.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "CreativeTemplatePage"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("totalResultSetSize");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "totalResultSetSize"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("startIndex");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "startIndex"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("results");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "results"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "CreativeTemplate"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
elemField.setMaxOccursUnbounded(true);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.config;
import org.apache.openejb.OpenEJBException;
import org.apache.openejb.assembler.classic.CallbackInfo;
import org.apache.openejb.assembler.classic.CmrFieldInfo;
import org.apache.openejb.assembler.classic.EjbJarInfo;
import org.apache.openejb.assembler.classic.EnterpriseBeanInfo;
import org.apache.openejb.assembler.classic.EntityBeanInfo;
import org.apache.openejb.assembler.classic.InitMethodInfo;
import org.apache.openejb.assembler.classic.InterceptorBindingInfo;
import org.apache.openejb.assembler.classic.InterceptorInfo;
import org.apache.openejb.assembler.classic.MessageDrivenBeanInfo;
import org.apache.openejb.assembler.classic.MethodInfo;
import org.apache.openejb.assembler.classic.MethodPermissionInfo;
import org.apache.openejb.assembler.classic.MethodTransactionInfo;
import org.apache.openejb.assembler.classic.NamedMethodInfo;
import org.apache.openejb.assembler.classic.QueryInfo;
import org.apache.openejb.assembler.classic.RemoveMethodInfo;
import org.apache.openejb.assembler.classic.SecurityRoleInfo;
import org.apache.openejb.assembler.classic.SecurityRoleReferenceInfo;
import org.apache.openejb.assembler.classic.StatefulBeanInfo;
import org.apache.openejb.assembler.classic.StatelessBeanInfo;
import org.apache.openejb.assembler.classic.ApplicationExceptionInfo;
import org.apache.openejb.jee.ActivationConfig;
import org.apache.openejb.jee.ActivationConfigProperty;
import org.apache.openejb.jee.CallbackMethod;
import org.apache.openejb.jee.CmpField;
import org.apache.openejb.jee.CmpVersion;
import org.apache.openejb.jee.ContainerTransaction;
import org.apache.openejb.jee.EjbRelation;
import org.apache.openejb.jee.EjbRelationshipRole;
import org.apache.openejb.jee.EnterpriseBean;
import org.apache.openejb.jee.EntityBean;
import org.apache.openejb.jee.Icon;
import org.apache.openejb.jee.InitMethod;
import org.apache.openejb.jee.Interceptor;
import org.apache.openejb.jee.InterceptorBinding;
import org.apache.openejb.jee.JndiConsumer;
import org.apache.openejb.jee.MessageDrivenBean;
import org.apache.openejb.jee.Method;
import org.apache.openejb.jee.MethodParams;
import org.apache.openejb.jee.MethodPermission;
import org.apache.openejb.jee.Multiplicity;
import org.apache.openejb.jee.NamedMethod;
import org.apache.openejb.jee.PersistenceType;
import org.apache.openejb.jee.Query;
import org.apache.openejb.jee.QueryMethod;
import org.apache.openejb.jee.RemoteBean;
import org.apache.openejb.jee.RemoveMethod;
import org.apache.openejb.jee.SecurityRole;
import org.apache.openejb.jee.SecurityRoleRef;
import org.apache.openejb.jee.SessionBean;
import org.apache.openejb.jee.SessionType;
import org.apache.openejb.jee.TransactionType;
import org.apache.openejb.jee.ExcludeList;
import org.apache.openejb.jee.ResultTypeMapping;
import org.apache.openejb.jee.ApplicationException;
import org.apache.openejb.jee.oejb3.EjbDeployment;
import org.apache.openejb.jee.oejb3.ResourceLink;
import org.apache.openejb.loader.SystemInstance;
import org.apache.openejb.util.LogCategory;
import org.apache.openejb.util.Logger;
import org.apache.openejb.util.Messages;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Collection;
/**
* @version $Revision$ $Date$
*/
public class EjbJarInfoBuilder {
public static Messages messages = new Messages("org.apache.openejb.util.resources");
public static Logger logger = Logger.getInstance(LogCategory.OPENEJB, "org.apache.openejb.util.resources");
private final List<String> deploymentIds = new ArrayList<String>();
private final List<String> securityRoles = new ArrayList<String>();
public EjbJarInfo buildInfo(EjbModule jar) throws OpenEJBException {
deploymentIds.clear();
securityRoles.clear();
int beansDeployed = jar.getOpenejbJar().getEjbDeploymentCount();
int beansInEjbJar = jar.getEjbJar().getEnterpriseBeans().length;
if (beansInEjbJar != beansDeployed) {
Map<String, EjbDeployment> deployed = jar.getOpenejbJar().getDeploymentsByEjbName();
for (EnterpriseBean bean : jar.getEjbJar().getEnterpriseBeans()) {
if (!deployed.containsKey(bean.getEjbName())){
ConfigUtils.logger.warning("conf.0018", bean.getEjbName(), jar.getJarLocation());
}
}
String message = messages.format("conf.0008", jar.getJarLocation(), "" + beansInEjbJar, "" + beansDeployed);
logger.warning(message);
throw new OpenEJBException(message);
}
Map<String, EjbDeployment> ejbds = jar.getOpenejbJar().getDeploymentsByEjbName();
Map<String, EnterpriseBeanInfo> infos = new HashMap<String, EnterpriseBeanInfo>();
Map<String, EnterpriseBean> items = new HashMap<String, EnterpriseBean>();
EjbJarInfo ejbJar = new EjbJarInfo();
ejbJar.jarPath = jar.getJarLocation();
ejbJar.moduleId = jar.getModuleId();
if (ejbJar.moduleId == null) {
ejbJar.moduleId = new File(ejbJar.jarPath).getName().replaceFirst(".jar$","");
}
ejbJar.properties.putAll(jar.getOpenejbJar().getProperties());
for (EnterpriseBean bean : jar.getEjbJar().getEnterpriseBeans()) {
EnterpriseBeanInfo beanInfo;
if (bean instanceof org.apache.openejb.jee.SessionBean) {
beanInfo = initSessionBean((SessionBean) bean, ejbds);
} else if (bean instanceof org.apache.openejb.jee.EntityBean) {
beanInfo = initEntityBean((EntityBean) bean, ejbds);
} else if (bean instanceof org.apache.openejb.jee.MessageDrivenBean) {
beanInfo = initMessageBean((MessageDrivenBean) bean, ejbds);
} else {
throw new OpenEJBException("Unknown bean type: "+bean.getClass().getName());
}
ejbJar.enterpriseBeans.add(beanInfo);
if (deploymentIds.contains(beanInfo.ejbDeploymentId)) {
String message = messages.format("conf.0100", beanInfo.ejbDeploymentId, jar.getJarLocation(), beanInfo.ejbName);
logger.warning(message);
throw new OpenEJBException(message);
}
deploymentIds.add(beanInfo.ejbDeploymentId);
beanInfo.codebase = jar.getJarLocation();
infos.put(beanInfo.ejbName, beanInfo);
items.put(beanInfo.ejbName, bean);
if (bean.getSecurityIdentity() != null) {
beanInfo.runAs = bean.getSecurityIdentity().getRunAs();
}
}
if (jar.getEjbJar().getAssemblyDescriptor() != null) {
initInterceptors(jar, ejbJar, infos);
initSecurityRoles(jar, ejbJar);
initMethodPermissions(jar, ejbds, ejbJar);
initExcludesList(jar, ejbds, ejbJar);
initMethodTransactions(jar, ejbds, ejbJar);
initApplicationExceptions(jar, ejbJar);
for (EnterpriseBeanInfo bean : ejbJar.enterpriseBeans) {
resolveRoleLinks(jar, bean, items.get(bean.ejbName));
}
}
if (jar.getEjbJar().getRelationships() != null) {
initRelationships(jar, infos);
}
if (!"tomcat-webapp".equals(SystemInstance.get().getProperty("openejb.loader"))) {
// try {
// File jarFile = new File(jar.getJarURI());
//
// SystemInstance.get().getClassPath().addJarToPath(jarFile.toURL());
// } catch (Exception e) {
// e.printStackTrace();
// }
}
return ejbJar;
}
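// Hedged usage sketch (assumption): given an already-loaded EjbModule, deployment code would
// obtain the assembler-facing info tree with a single call; buildInfo throws OpenEJBException
// when the openejb-jar.xml deployments and the ejb-jar.xml beans do not line up.
//
//   EjbJarInfo info = new EjbJarInfoBuilder().buildInfo(ejbModule);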
private void initRelationships(EjbModule jar, Map<String, EnterpriseBeanInfo> infos) throws OpenEJBException {
for (EjbRelation ejbRelation : jar.getEjbJar().getRelationships().getEjbRelation()) {
Iterator<EjbRelationshipRole> iterator = ejbRelation.getEjbRelationshipRole().iterator();
EjbRelationshipRole left = iterator.next();
EjbRelationshipRole right = iterator.next();
// left role info
CmrFieldInfo leftCmrFieldInfo = initRelationshipRole(left, right, infos);
CmrFieldInfo rightCmrFieldInfo = initRelationshipRole(right, left, infos);
leftCmrFieldInfo.mappedBy = rightCmrFieldInfo;
rightCmrFieldInfo.mappedBy = leftCmrFieldInfo;
}
}
private CmrFieldInfo initRelationshipRole(EjbRelationshipRole role, EjbRelationshipRole relatedRole, Map<String, EnterpriseBeanInfo> infos) throws OpenEJBException {
CmrFieldInfo cmrFieldInfo = new CmrFieldInfo();
// find the entityBeanInfo info for this role
String ejbName = role.getRelationshipRoleSource().getEjbName();
EnterpriseBeanInfo enterpriseBeanInfo = infos.get(ejbName);
if (enterpriseBeanInfo == null) {
throw new OpenEJBException("Relation role source ejb not found " + ejbName);
}
if (!(enterpriseBeanInfo instanceof EntityBeanInfo)) {
throw new OpenEJBException("Relation role source ejb is not an entity bean " + ejbName);
}
EntityBeanInfo entityBeanInfo = (EntityBeanInfo) enterpriseBeanInfo;
cmrFieldInfo.roleSource = entityBeanInfo;
// RoleName: this may be null
cmrFieldInfo.roleName = role.getEjbRelationshipRoleName();
cmrFieldInfo.synthetic = role.getCmrField() == null;
// CmrFieldName: is null for uni-directional relationships
if (role.getCmrField() != null) {
cmrFieldInfo.fieldName = role.getCmrField().getCmrFieldName();
// CollectionType: java.util.Collection or java.util.Set
if (role.getCmrField().getCmrFieldType() != null) {
cmrFieldInfo.fieldType = role.getCmrField().getCmrFieldType().toString();
}
} else {
String relatedEjbName = relatedRole.getRelationshipRoleSource().getEjbName();
EnterpriseBeanInfo relatedEjb = infos.get(relatedEjbName);
if (relatedEjb == null) {
throw new OpenEJBException("Relation role source ejb not found " + relatedEjbName);
}
if (!(relatedEjb instanceof EntityBeanInfo)) {
throw new OpenEJBException("Relation role source ejb is not an entity bean " + relatedEjbName);
}
EntityBeanInfo relatedEntity = (EntityBeanInfo) relatedEjb;
relatedRole.getRelationshipRoleSource();
cmrFieldInfo.fieldName = relatedEntity.abstractSchemaName + "_" + relatedRole.getCmrField().getCmrFieldName();
if (relatedRole.getMultiplicity() == Multiplicity.MANY) {
cmrFieldInfo.fieldType = Collection.class.getName();
}
}
// CascadeDelete
cmrFieldInfo.cascadeDelete = role.getCascadeDelete();
// Multiplicity: one or many
cmrFieldInfo.many = role.getMultiplicity() == Multiplicity.MANY;
// add the field to the entityBean
entityBeanInfo.cmrFields.add(cmrFieldInfo);
return cmrFieldInfo;
}
private void initInterceptors(EjbModule jar, EjbJarInfo ejbJar, Map<String, EnterpriseBeanInfo> beanInfos) throws OpenEJBException {
if (jar.getEjbJar().getInterceptors().length == 0) return;
if (jar.getEjbJar().getAssemblyDescriptor() == null) return;
if (jar.getEjbJar().getAssemblyDescriptor().getInterceptorBinding() == null) return;
for (Interceptor s : jar.getEjbJar().getInterceptors()) {
InterceptorInfo info = new InterceptorInfo();
info.clazz = s.getInterceptorClass();
copyCallbacks(s.getAroundInvoke(), info.aroundInvoke);
copyCallbacks(s.getPostConstruct(), info.postConstruct);
copyCallbacks(s.getPreDestroy(), info.preDestroy);
copyCallbacks(s.getPostActivate(), info.postActivate);
copyCallbacks(s.getPrePassivate(), info.prePassivate);
ejbJar.interceptors.add(info);
}
for (InterceptorBinding binding : jar.getEjbJar().getAssemblyDescriptor().getInterceptorBinding()) {
InterceptorBindingInfo info = new InterceptorBindingInfo();
info.ejbName = binding.getEjbName();
info.excludeClassInterceptors = binding.getExcludeClassInterceptors();
info.excludeDefaultInterceptors = binding.getExcludeDefaultInterceptors();
info.interceptors.addAll(binding.getInterceptorClass());
if (binding.getInterceptorOrder() != null) {
info.interceptorOrder.addAll(binding.getInterceptorOrder().getInterceptorClass());
}
info.method = toInfo(binding.getMethod());
ejbJar.interceptorBindings.add(info);
}
}
private void initMethodTransactions(EjbModule jar, Map ejbds, EjbJarInfo ejbJarInfo) {
List<ContainerTransaction> containerTransactions = jar.getEjbJar().getAssemblyDescriptor().getContainerTransaction();
for (ContainerTransaction cTx : containerTransactions) {
MethodTransactionInfo info = new MethodTransactionInfo();
info.description = cTx.getDescription();
info.transAttribute = cTx.getTransAttribute().toString();
info.methods.addAll(getMethodInfos(cTx.getMethod(), ejbds));
ejbJarInfo.methodTransactions.add(info);
}
}
private void initApplicationExceptions(EjbModule jar, EjbJarInfo ejbJarInfo) {
for (ApplicationException applicationException : jar.getEjbJar().getAssemblyDescriptor().getApplicationException()) {
ApplicationExceptionInfo info = new ApplicationExceptionInfo();
info.exceptionClass = applicationException.getExceptionClass();
info.rollback = applicationException.getRollback();
ejbJarInfo.applicationException.add(info);
}
}
private void initSecurityRoles(EjbModule jar, EjbJarInfo ejbJarInfo) {
List<SecurityRole> roles = jar.getEjbJar().getAssemblyDescriptor().getSecurityRole();
for (SecurityRole sr : roles) {
SecurityRoleInfo info = new SecurityRoleInfo();
info.description = sr.getDescription();
info.roleName = sr.getRoleName();
if (securityRoles.contains(sr.getRoleName())) {
ConfigUtils.logger.warning("conf.0102", jar.getJarLocation(), sr.getRoleName());
} else {
securityRoles.add(sr.getRoleName());
}
ejbJarInfo.securityRoles.add(info);
}
}
private void initMethodPermissions(EjbModule jar, Map ejbds, EjbJarInfo ejbJarInfo) {
List<MethodPermission> methodPermissions = jar.getEjbJar().getAssemblyDescriptor().getMethodPermission();
for (MethodPermission mp : methodPermissions) {
MethodPermissionInfo info = new MethodPermissionInfo();
info.description = mp.getDescription();
info.roleNames.addAll(mp.getRoleName());
info.methods.addAll(getMethodInfos(mp.getMethod(), ejbds));
info.unchecked = mp.getUnchecked();
ejbJarInfo.methodPermissions.add(info);
}
}
private void initExcludesList(EjbModule jar, Map ejbds, EjbJarInfo ejbJarInfo) {
ExcludeList excludeList = jar.getEjbJar().getAssemblyDescriptor().getExcludeList();
for (Method excludedMethod : excludeList.getMethod()) {
ejbJarInfo.excludeList.add(getMethodInfo(excludedMethod, ejbds));
}
}
private void resolveRoleLinks(EjbModule jar, EnterpriseBeanInfo bean, JndiConsumer item) {
if (!(item instanceof RemoteBean)) {
return;
}
RemoteBean rb = (RemoteBean) item;
List<SecurityRoleRef> refs = rb.getSecurityRoleRef();
for (SecurityRoleRef ref : refs) {
SecurityRoleReferenceInfo info = new SecurityRoleReferenceInfo();
info.description = ref.getDescription();
info.roleLink = ref.getRoleLink();
info.roleName = ref.getRoleName();
if (info.roleLink == null) {
info.roleLink = info.roleName;
}
bean.securityRoleReferences.add(info);
}
}
private List<MethodInfo> getMethodInfos(List<Method> ms, Map ejbds) {
if (ms == null) return Collections.emptyList();
List<MethodInfo> mi = new ArrayList<MethodInfo>(ms.size());
for (Method method : ms) {
MethodInfo methodInfo = getMethodInfo(method, ejbds);
mi.add(methodInfo);
}
return mi;
}
private MethodInfo getMethodInfo(Method method, Map ejbds) {
MethodInfo methodInfo = new MethodInfo();
EjbDeployment d = (EjbDeployment) ejbds.get(method.getEjbName());
methodInfo.description = method.getDescription();
methodInfo.ejbDeploymentId = d.getDeploymentId();
methodInfo.ejbName = method.getEjbName();
methodInfo.methodIntf = (method.getMethodIntf() == null) ? null : method.getMethodIntf().toString();
methodInfo.methodName = method.getMethodName();
MethodParams mp = method.getMethodParams();
if (mp != null) {
methodInfo.methodParams = mp.getMethodParam();
}
return methodInfo;
}
private EnterpriseBeanInfo initSessionBean(SessionBean s, Map m) throws OpenEJBException {
EnterpriseBeanInfo bean = null;
if (s.getSessionType() == SessionType.STATEFUL) {
bean = new StatefulBeanInfo();
StatefulBeanInfo stateful = ((StatefulBeanInfo) bean);
copyCallbacks(s.getPostActivate(), stateful.postActivate);
copyCallbacks(s.getPrePassivate(), stateful.prePassivate);
for (InitMethod initMethod : s.getInitMethod()) {
InitMethodInfo init = new InitMethodInfo();
init.beanMethod = toInfo(initMethod.getBeanMethod());
init.createMethod = toInfo(initMethod.getCreateMethod());
stateful.initMethods.add(init);
}
for (RemoveMethod removeMethod : s.getRemoveMethod()) {
RemoveMethodInfo remove = new RemoveMethodInfo();
remove.beanMethod = toInfo(removeMethod.getBeanMethod());
remove.retainIfException = removeMethod.getRetainIfException();
stateful.removeMethods.add(remove);
}
} else {
bean = new StatelessBeanInfo();
}
bean.timeoutMethod = toInfo(s.getTimeoutMethod());
copyCallbacks(s.getAroundInvoke(), bean.aroundInvoke);
copyCallbacks(s.getPostConstruct(), bean.postConstruct);
copyCallbacks(s.getPreDestroy(), bean.preDestroy);
EjbDeployment d = (EjbDeployment) m.get(s.getEjbName());
if (d == null) {
throw new OpenEJBException("No deployment information in openejb-jar.xml for bean "
+ s.getEjbName()
+ ". Please redeploy the jar");
}
bean.ejbDeploymentId = d.getDeploymentId();
bean.containerId = d.getContainerId();
Icon icon = s.getIcon();
bean.largeIcon = (icon == null) ? null : icon.getLargeIcon();
bean.smallIcon = (icon == null) ? null : icon.getSmallIcon();
bean.description = s.getDescription();
bean.displayName = s.getDisplayName();
bean.ejbClass = s.getEjbClass();
bean.ejbName = s.getEjbName();
bean.home = s.getHome();
bean.remote = s.getRemote();
bean.localHome = s.getLocalHome();
bean.local = s.getLocal();
bean.businessLocal.addAll(s.getBusinessLocal());
bean.businessRemote.addAll(s.getBusinessRemote());
TransactionType txType = s.getTransactionType();
bean.transactionType = (txType != null)?txType.toString(): TransactionType.CONTAINER.toString();
bean.serviceEndpoint = s.getServiceEndpoint();
return bean;
}
private EnterpriseBeanInfo initMessageBean(MessageDrivenBean mdb, Map m) throws OpenEJBException {
MessageDrivenBeanInfo bean = new MessageDrivenBeanInfo();
bean.timeoutMethod = toInfo(mdb.getTimeoutMethod());
copyCallbacks(mdb.getAroundInvoke(), bean.aroundInvoke);
copyCallbacks(mdb.getPostConstruct(), bean.postConstruct);
copyCallbacks(mdb.getPreDestroy(), bean.preDestroy);
EjbDeployment d = (EjbDeployment) m.get(mdb.getEjbName());
if (d == null) {
throw new OpenEJBException("No deployment information in openejb-jar.xml for bean "
+ mdb.getEjbName()
+ ". Please redeploy the jar");
}
bean.ejbDeploymentId = d.getDeploymentId();
bean.containerId = d.getContainerId();
Icon icon = mdb.getIcon();
bean.largeIcon = (icon == null) ? null : icon.getLargeIcon();
bean.smallIcon = (icon == null) ? null : icon.getSmallIcon();
bean.description = mdb.getDescription();
bean.displayName = mdb.getDisplayName();
bean.ejbClass = mdb.getEjbClass();
bean.ejbName = mdb.getEjbName();
TransactionType txType = mdb.getTransactionType();
bean.transactionType = (txType != null)?txType.toString(): TransactionType.CONTAINER.toString();
if (mdb.getMessagingType() != null) {
bean.mdbInterface = mdb.getMessagingType();
} else {
bean.mdbInterface = "javax.jms.MessageListener";
}
ResourceLink resourceLink = d.getResourceLink("openejb/destination");
if (resourceLink != null) {
bean.destinationId = resourceLink.getResId();
}
if (mdb.getMessageDestinationType() != null) {
bean.activationProperties.put("destinationType", mdb.getMessageDestinationType());
}
ActivationConfig activationConfig = mdb.getActivationConfig();
if (activationConfig != null) {
for (ActivationConfigProperty property : activationConfig.getActivationConfigProperty()) {
String name = property.getActivationConfigPropertyName();
String value = property.getActivationConfigPropertyValue();
bean.activationProperties.put(name, value);
}
}
return bean;
}
private NamedMethodInfo toInfo(NamedMethod method) {
if (method == null) return null;
NamedMethodInfo info = new NamedMethodInfo();
info.methodName = method.getMethodName();
if (method.getMethodParams() != null) {
info.methodParams = method.getMethodParams().getMethodParam();
}
return info;
}
private void copyCallbacks(List<? extends CallbackMethod> from, List<CallbackInfo> to) {
for (CallbackMethod callback : from) {
CallbackInfo info = new CallbackInfo();
info.className = callback.getClassName();
info.method = callback.getMethodName();
to.add(info);
}
}
private EnterpriseBeanInfo initEntityBean(EntityBean e, Map m) throws OpenEJBException {
EntityBeanInfo bean = new EntityBeanInfo();
EjbDeployment d = (EjbDeployment) m.get(e.getEjbName());
if (d == null) {
throw new OpenEJBException("No deployment information in openejb-jar.xml for bean "
+ e.getEjbName()
+ ". Please redeploy the jar");
}
bean.ejbDeploymentId = d.getDeploymentId();
bean.containerId = d.getContainerId();
Icon icon = e.getIcon();
bean.largeIcon = (icon == null) ? null : icon.getLargeIcon();
bean.smallIcon = (icon == null) ? null : icon.getSmallIcon();
bean.description = e.getDescription();
bean.displayName = e.getDisplayName();
bean.ejbClass = e.getEjbClass();
bean.abstractSchemaName = e.getAbstractSchemaName();
bean.ejbName = e.getEjbName();
bean.home = e.getHome();
bean.remote = e.getRemote();
bean.localHome = e.getLocalHome();
bean.local = e.getLocal();
bean.transactionType = "Container";
bean.primKeyClass = e.getPrimKeyClass();
bean.primKeyField = e.getPrimkeyField();
bean.persistenceType = e.getPersistenceType().toString();
bean.reentrant = e.getReentrant() + "";
CmpVersion cmpVersion = e.getCmpVersion();
if (e.getPersistenceType() == PersistenceType.CONTAINER) {
if (cmpVersion != null && cmpVersion == CmpVersion.CMP1){
bean.cmpVersion = 1;
} else {
bean.cmpVersion = 2;
}
}
List<CmpField> cmpFields = e.getCmpField();
for (CmpField cmpField : cmpFields) {
bean.cmpFieldNames.add(cmpField.getFieldName());
}
if (bean.persistenceType.equalsIgnoreCase("Container")) {
for (Query q : e.getQuery()) {
QueryInfo query = new QueryInfo();
query.queryStatement = q.getEjbQl().trim();
MethodInfo method = new MethodInfo();
QueryMethod qm = q.getQueryMethod();
method.methodName = qm.getMethodName();
if (qm.getMethodParams() != null) {
method.methodParams = qm.getMethodParams().getMethodParam();
}
query.method = method;
ResultTypeMapping resultType = q.getResultTypeMapping();
if (ResultTypeMapping.REMOTE.equals(resultType)) {
query.remoteResultType = true;
}
bean.queries.add(query);
}
for (org.apache.openejb.jee.oejb3.Query q : d.getQuery()) {
QueryInfo query = new QueryInfo();
query.description = q.getDescription();
query.queryStatement = q.getObjectQl().trim();
MethodInfo method = new MethodInfo();
org.apache.openejb.jee.oejb3.QueryMethod qm = q.getQueryMethod();
method.methodName = qm.getMethodName();
if (qm.getMethodParams() != null) {
method.methodParams = qm.getMethodParams().getMethodParam();
}
query.method = method;
bean.queries.add(query);
}
}
return bean;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.server.op;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import org.apache.tinkerpop.gremlin.driver.Tokens;
import org.apache.tinkerpop.gremlin.driver.message.RequestMessage;
import org.apache.tinkerpop.gremlin.driver.message.ResponseMessage;
import org.apache.tinkerpop.gremlin.driver.message.ResponseStatusCode;
import org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutor;
import org.apache.tinkerpop.gremlin.groovy.jsr223.customizer.TimedInterruptTimeoutException;
import org.apache.tinkerpop.gremlin.process.traversal.Operator;
import org.apache.tinkerpop.gremlin.process.traversal.Order;
import org.apache.tinkerpop.gremlin.process.traversal.Pop;
import org.apache.tinkerpop.gremlin.process.traversal.Scope;
import org.apache.tinkerpop.gremlin.server.OpProcessor;
import org.apache.tinkerpop.gremlin.server.handler.GremlinResponseFrameEncoder;
import org.apache.tinkerpop.gremlin.structure.Column;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.server.Context;
import org.apache.tinkerpop.gremlin.server.GremlinServer;
import org.apache.tinkerpop.gremlin.server.Settings;
import org.apache.tinkerpop.gremlin.server.util.MetricManager;
import org.apache.tinkerpop.gremlin.util.function.ThrowingConsumer;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import io.netty.channel.ChannelHandlerContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Bindings;
import javax.script.SimpleBindings;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import static com.codahale.metrics.MetricRegistry.name;
/**
* A base {@link org.apache.tinkerpop.gremlin.server.OpProcessor} implementation that helps with operations that deal
* with script evaluation functions.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public abstract class AbstractEvalOpProcessor extends AbstractOpProcessor {
private static final Logger logger = LoggerFactory.getLogger(AbstractEvalOpProcessor.class);
public static final Timer evalOpTimer = MetricManager.INSTANCE.getTimer(name(GremlinServer.class, "op", "eval"));
/**
* Captures the "error" count as a reportable metric for Gremlin Server.
*
* @deprecated As of release 3.1.1-incubating, not replaced. Direct usage is discouraged with sub-classes as
* error counts are captured more globally for error messages written down the pipeline to
* {@link GremlinResponseFrameEncoder}.
*/
@Deprecated
static final Meter errorMeter = MetricManager.INSTANCE.getMeter(name(GremlinServer.class, "errors"));
/**
* Regex for validating binding variable names.
*
* @deprecated As of release 3.1.2-incubating, not replaced. This {@code Pattern} is not used internally.
* Deprecated rather than just removing as it's possible that someone else might be using it when developing
* custom {@link OpProcessor} implementations.
*/
@Deprecated
protected static final Pattern validBindingName = Pattern.compile("[a-zA-Z$_][a-zA-Z0-9$_]*");
/**
* This may or may not be the full set of invalid binding keys. It is dependent on the static imports made to
* Gremlin Server. This should get rid of the worst offenders though and provide a good message back to the
* calling client.
* <p/>
* Use of {@code toUpperCase()} on the accessor values of {@link T} solves an issue where the {@code ScriptEngine}
* ignores private scope on {@link T} and imports static fields.
*/
protected static final Set<String> INVALID_BINDINGS_KEYS = new HashSet<>();
static {
INVALID_BINDINGS_KEYS.addAll(Arrays.asList(
T.id.name(), T.key.name(),
T.label.name(), T.value.name(),
T.id.getAccessor(), T.key.getAccessor(),
T.label.getAccessor(), T.value.getAccessor(),
T.id.getAccessor().toUpperCase(), T.key.getAccessor().toUpperCase(),
T.label.getAccessor().toUpperCase(), T.value.getAccessor().toUpperCase()));
for (Column enumItem : Column.values()) {
INVALID_BINDINGS_KEYS.add(enumItem.name());
}
for (Order enumItem : Order.values()) {
INVALID_BINDINGS_KEYS.add(enumItem.name());
}
for (Operator enumItem : Operator.values()) {
INVALID_BINDINGS_KEYS.add(enumItem.name());
}
for (Scope enumItem : Scope.values()) {
INVALID_BINDINGS_KEYS.add(enumItem.name());
}
for (Pop enumItem : Pop.values()) {
INVALID_BINDINGS_KEYS.add(enumItem.name());
}
}
protected AbstractEvalOpProcessor(final boolean manageTransactions) {
super(manageTransactions);
}
/**
* Provides an operation for evaluating a Gremlin script.
*/
public abstract ThrowingConsumer<Context> getEvalOp();
/**
* A sub-class may have additional "ops" that it will service. Calls to {@link #select(Context)} that are not
* handled will be passed to this method to see if the sub-class can service the requested op code.
*/
public abstract Optional<ThrowingConsumer<Context>> selectOther(final RequestMessage requestMessage) throws OpProcessorException;
@Override
public ThrowingConsumer<Context> select(final Context ctx) throws OpProcessorException {
final RequestMessage message = ctx.getRequestMessage();
logger.debug("Selecting processor for RequestMessage {}", message);
final ThrowingConsumer<Context> op;
switch (message.getOp()) {
case Tokens.OPS_EVAL:
op = validateEvalMessage(message).orElse(getEvalOp());
break;
case Tokens.OPS_INVALID:
final String msgInvalid = String.format("Message could not be parsed. Check the format of the request. [%s]", message);
throw new OpProcessorException(msgInvalid, ResponseMessage.build(message).code(ResponseStatusCode.REQUEST_ERROR_MALFORMED_REQUEST).statusMessage(msgInvalid).create());
default:
op = selectOther(message).orElseThrow(() -> {
final String msgDefault = String.format("Message with op code [%s] is not recognized.", message.getOp());
return new OpProcessorException(msgDefault, ResponseMessage.build(message).code(ResponseStatusCode.REQUEST_ERROR_MALFORMED_REQUEST).statusMessage(msgDefault).create());
});
}
return op;
}
protected Optional<ThrowingConsumer<Context>> validateEvalMessage(final RequestMessage message) throws OpProcessorException {
if (!message.optionalArgs(Tokens.ARGS_GREMLIN).isPresent()) {
final String msg = String.format("A message with an [%s] op code requires a [%s] argument.", Tokens.OPS_EVAL, Tokens.ARGS_GREMLIN);
throw new OpProcessorException(msg, ResponseMessage.build(message).code(ResponseStatusCode.REQUEST_ERROR_INVALID_REQUEST_ARGUMENTS).statusMessage(msg).create());
}
if (message.optionalArgs(Tokens.ARGS_BINDINGS).isPresent()) {
final Map bindings = (Map) message.getArgs().get(Tokens.ARGS_BINDINGS);
if (bindings.keySet().stream().anyMatch(k -> null == k || !(k instanceof String))) {
final String msg = String.format("The [%s] message is using one or more invalid binding keys - they must be of type String and cannot be null", Tokens.OPS_EVAL);
throw new OpProcessorException(msg, ResponseMessage.build(message).code(ResponseStatusCode.REQUEST_ERROR_INVALID_REQUEST_ARGUMENTS).statusMessage(msg).create());
}
final Set<String> badBindings = IteratorUtils.set(IteratorUtils.<String>filter(bindings.keySet().iterator(), INVALID_BINDINGS_KEYS::contains));
if (!badBindings.isEmpty()) {
final String msg = String.format("The [%s] message supplies one or more invalid parameters key of [%s] - these are reserved names.", Tokens.OPS_EVAL, badBindings);
throw new OpProcessorException(msg, ResponseMessage.build(message).code(ResponseStatusCode.REQUEST_ERROR_INVALID_REQUEST_ARGUMENTS).statusMessage(msg).create());
}
}
return Optional.empty();
}
/**
* A generalized implementation of the "eval" operation. It handles script evaluation and iteration of results
* so as to write {@link ResponseMessage} objects down the Netty pipeline. It also handles script timeouts,
* iteration timeouts, metrics and building bindings. Note that result iteration is delegated to the
* {@link #handleIterator} method, so those extending this class could override that method for better control
* over result iteration.
*
* @param context The current Gremlin Server {@link Context}
* @param gremlinExecutorSupplier A function that returns the {@link GremlinExecutor} to use in executing the
* script evaluation.
* @param bindingsSupplier A function that returns the {@link Bindings} to provide to the
* {@link GremlinExecutor#eval} method.
*/
protected void evalOpInternal(final Context context, final Supplier<GremlinExecutor> gremlinExecutorSupplier,
final BindingSupplier bindingsSupplier) throws OpProcessorException {
final Timer.Context timerContext = evalOpTimer.time();
final ChannelHandlerContext ctx = context.getChannelHandlerContext();
final RequestMessage msg = context.getRequestMessage();
final GremlinExecutor gremlinExecutor = gremlinExecutorSupplier.get();
final Settings settings = context.getSettings();
final Map<String, Object> args = msg.getArgs();
final String script = (String) args.get(Tokens.ARGS_GREMLIN);
final String language = args.containsKey(Tokens.ARGS_LANGUAGE) ? (String) args.get(Tokens.ARGS_LANGUAGE) : null;
final Bindings bindings = new SimpleBindings();
// sessionless requests are always transaction managed, but in-session requests are configurable.
final boolean managedTransactionsForRequest = manageTransactions ?
true : (Boolean) args.getOrDefault(Tokens.ARGS_MANAGE_TRANSACTION, false);
// timeout override
final long seto = args.containsKey(Tokens.ARGS_SCRIPT_EVAL_TIMEOUT) ?
Long.parseLong(args.get(Tokens.ARGS_SCRIPT_EVAL_TIMEOUT).toString()) : settings.scriptEvaluationTimeout;
final GremlinExecutor.LifeCycle lifeCycle = GremlinExecutor.LifeCycle.build()
.scriptEvaluationTimeoutOverride(seto)
.afterFailure((b,t) -> {
if (managedTransactionsForRequest) attemptRollback(msg, context.getGraphManager(), settings.strictTransactionManagement);
})
.beforeEval(b -> {
try {
b.putAll(bindingsSupplier.get());
} catch (OpProcessorException ope) {
// this should bubble up in the GremlinExecutor properly as the RuntimeException will be
// unwrapped and the root cause thrown
throw new RuntimeException(ope);
}
})
.withResult(o -> {
final Iterator itty = IteratorUtils.asIterator(o);
logger.debug("Preparing to iterate results from - {} - in thread [{}]", msg, Thread.currentThread().getName());
try {
handleIterator(context, itty);
} catch (TimeoutException ex) {
final String errorMessage = String.format("Response iteration exceeded the configured threshold for request [%s] - %s", msg, ex.getMessage());
logger.warn(errorMessage);
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR_TIMEOUT).statusMessage(errorMessage).create());
if (managedTransactionsForRequest) attemptRollback(msg, context.getGraphManager(), settings.strictTransactionManagement);
} catch (InterruptedException ex) {
logger.warn(String.format("Interruption during result iteration on request [%s].", msg), ex);
final String exceptionMsg = ex.getMessage();
final String err = "Interruption of result iteration" + (null == exceptionMsg || exceptionMsg.isEmpty() ? "" : " - " + exceptionMsg);
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR).statusMessage(err).create());
if (managedTransactionsForRequest) attemptRollback(msg, context.getGraphManager(), settings.strictTransactionManagement);
} catch (Exception ex) {
logger.warn(String.format("Exception processing a script on request [%s].", msg), ex);
final String err = ex.getMessage();
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR)
.statusMessage(null == err || err.isEmpty() ? ex.getClass().getSimpleName() : err).create());
if (managedTransactionsForRequest) attemptRollback(msg, context.getGraphManager(), settings.strictTransactionManagement);
}
}).create();
final CompletableFuture<Object> evalFuture = gremlinExecutor.eval(script, language, bindings, lifeCycle);
evalFuture.handle((v, t) -> {
timerContext.stop();
if (t != null) {
if (t instanceof OpProcessorException) {
ctx.writeAndFlush(((OpProcessorException) t).getResponseMessage());
} else if (t instanceof TimedInterruptTimeoutException) {
// occurs when the TimedInterruptCustomizerProvider is in play
final String errorMessage = String.format("A timeout occurred within the script during evaluation of [%s] - consider increasing the limit given to TimedInterruptCustomizerProvider", msg);
logger.warn(errorMessage);
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR_TIMEOUT).statusMessage("Timeout during script evaluation triggered by TimedInterruptCustomizerProvider").create());
} else if (t instanceof TimeoutException) {
final String errorMessage = String.format("Response evaluation exceeded the configured threshold for request [%s] - %s", msg, t.getMessage());
logger.warn(errorMessage, t);
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR_TIMEOUT).statusMessage(t.getMessage()).create());
} else {
logger.warn(String.format("Exception processing a script on request [%s].", msg), t);
ctx.writeAndFlush(ResponseMessage.build(msg).code(ResponseStatusCode.SERVER_ERROR_SCRIPT_EVALUATION).statusMessage(t.getMessage()).create());
}
}
return null;
});
}
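    // Illustrative sketch only (an assumption, not part of the original class): a concrete
    // op processor would typically wire its "eval" operation to evalOpInternal() along these
    // lines, building the Bindings for the request from the message arguments.
    @SuppressWarnings("unchecked")
    protected void evalOpExample(final Context context) throws OpProcessorException {
        final RequestMessage msg = context.getRequestMessage();
        evalOpInternal(context, context::getGremlinExecutor, () -> {
            final Bindings bindings = new SimpleBindings();
            final Map<String, Object> supplied = (Map<String, Object>) msg.getArgs().get(Tokens.ARGS_BINDINGS);
            if (supplied != null) bindings.putAll(supplied);
            return bindings;
        });
    }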
@FunctionalInterface
public interface BindingSupplier {
public Bindings get() throws OpProcessorException;
}
}
|
|
/*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.vm;
import gov.nasa.jpf.JPFException;
import gov.nasa.jpf.annotation.MJI;
import gov.nasa.jpf.vm.ArrayFields;
import gov.nasa.jpf.vm.ElementInfo;
import gov.nasa.jpf.vm.FieldInfo;
import gov.nasa.jpf.vm.MJIEnv;
import gov.nasa.jpf.vm.NativePeer;
import gov.nasa.jpf.vm.SystemState;
import gov.nasa.jpf.vm.ThreadInfo;
/**
* we don't want this class! This is a hodgepodge of stuff that shouldn't be in Java, but
* is handy for some hacks. The reason we have it here - very rudimentary - is that
 * java.util.concurrent makes use of the atomic compare-and-swap operations it provides.
 * The choice was to duplicate a lot of relatively difficult code in the "right" class
 * (java.util.concurrent.locks.AbstractQueuedSynchronizer) or a small amount of straightforward
 * code in the "wrong" class (sun.misc.Unsafe). Knowing a bit about the "library chase" game,
 * we opt for the latter.
*
* <2do> this might change with better modeling of high level java.util.concurrent constructs
*/
public class JPF_sun_misc_Unsafe extends NativePeer {
@MJI
public int getUnsafe____Lsun_misc_Unsafe_2 (MJIEnv env, int clsRef) {
int objRef = env.getStaticReferenceField("sun.misc.Unsafe", "theUnsafe");
return objRef;
}
@MJI
public long objectFieldOffset__Ljava_lang_reflect_Field_2__J (MJIEnv env, int unsafeRef, int fieldRef) {
return fieldOffset__Ljava_lang_reflect_Field_2__I(env, unsafeRef, fieldRef);
}
/**
* we don't really return an offset here, since that would be useless. What we really want is
* to identify the corresponding FieldInfo, and that's much easier done with the Field
* registration id
*/
@MJI
public int fieldOffset__Ljava_lang_reflect_Field_2__I (MJIEnv env, int unsafeRef, int fieldRef) {
//FieldInfo fi = JPF_java_lang_reflect_Field.getFieldInfo(env, fieldRef);
//return fi.getStorageOffset();
return env.getIntField(fieldRef, "regIdx");
}
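  // Illustrative round trip (a sketch of the intended usage, not real JVM offsets): at the
  // model level,
  //   Field f = Foo.class.getDeclaredField("bar");
  //   long off = unsafe.objectFieldOffset(f);  // here: the Field's registration id (regIdx)
  //   unsafe.getInt(foo, off);                 // resolved back to a FieldInfo via getRegisteredFieldInfo(off)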
@MJI
public boolean compareAndSwapObject__Ljava_lang_Object_2JLjava_lang_Object_2Ljava_lang_Object_2__Z (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset,
int expectRef, int updateRef) {
int actual = getObject__Ljava_lang_Object_2J__Ljava_lang_Object_2(env, unsafeRef, objRef, fieldOffset);
if (actual == expectRef) {
putObject__Ljava_lang_Object_2JLjava_lang_Object_2__V(env, unsafeRef, objRef, fieldOffset, updateRef);
return true;
}
return false;
}
@MJI
public boolean compareAndSwapInt__Ljava_lang_Object_2JII__Z (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, int expect, int update) {
int actual = getInt__Ljava_lang_Object_2J__I(env, unsafeRef, objRef, fieldOffset);
if (actual == expect) {
putInt__Ljava_lang_Object_2JI__V(env, unsafeRef, objRef, fieldOffset, update);
return true;
}
return false;
}
@MJI
public boolean compareAndSwapLong__Ljava_lang_Object_2JJJ__Z (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, long expect, long update) {
long actual = getLong__Ljava_lang_Object_2J__J(env, unsafeRef, objRef, fieldOffset);
if (actual == expect) {
putLong__Ljava_lang_Object_2JJ__V(env, unsafeRef, objRef, fieldOffset, update);
return true;
}
return false;
}
// this is a specialized, native wait() for the current thread that does not require a lock, and that can
// be turned off by a preceding unpark() call (which is not accumulative)
// park can be interrupted, but it doesn't throw an InterruptedException, and it doesn't clear the status
// it can only be called from the current (parking) thread
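  // illustrative model-level behavior of these semantics (a sketch, assuming the usual
  // java.util.concurrent.locks.LockSupport mapping onto park/unpark):
  //   Thread me = Thread.currentThread();
  //   LockSupport.unpark(me);   // pre-arms the permit ("blockPark" becomes false)
  //   LockSupport.park();       // returns immediately and re-arms "blockPark"
  //   LockSupport.park();       // now blocks until another unpark() or an interrupt
  // note that permits do not accumulate: several unpark() calls before a park() still let
  // only that one park() return immediately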
@MJI
public void park__ZJ__V (MJIEnv env, int unsafeRef, boolean isAbsoluteTime, long timeout) {
ThreadInfo ti = env.getThreadInfo();
int objRef = ti.getThreadObjectRef();
int permitRef = env.getReferenceField( objRef, "permit");
ElementInfo ei = env.getModifiableElementInfo(permitRef);
if (ti.isInterrupted(false)) {
// there is no lock, so we go directly back to running and therefore
// have to remove ourself from the contender list
ei.setMonitorWithoutLocked(ti);
// note that park() does not throw an InterruptedException
return;
}
if (!ti.isFirstStepInsn()){
if (ei.getBooleanField("blockPark")) { // we have to wait, but don't need a lock
// running -> waiting | timeout_waiting
ei.wait(ti, timeout, false);
} else {
ei.setBooleanField("blockPark", true); // re-arm for next park
return;
}
}
// scheduling point
if (ti.getScheduler().setsParkCG( ti, isAbsoluteTime, timeout)) {
env.repeatInvocation();
return;
}
switch (ti.getState()) {
case WAITING:
case TIMEOUT_WAITING:
throw new JPFException("blocking park() without transition break");
case NOTIFIED:
case TIMEDOUT:
case INTERRUPTED:
ti.resetLockRef();
ti.setRunning();
break;
default:
// nothing
}
}
@MJI
public void unpark__Ljava_lang_Object_2__V (MJIEnv env, int unsafeRef, int objRef) {
ThreadInfo tiCurrent = env.getThreadInfo();
ThreadInfo tiParked = env.getThreadInfoForObjRef(objRef);
if (tiParked.isTerminated()){
return; // nothing to do
}
if (!tiCurrent.isFirstStepInsn()){
SystemState ss = env.getSystemState();
int permitRef = env.getReferenceField( objRef, "permit");
ElementInfo eiPermit = env.getModifiableElementInfo(permitRef);
if (tiParked.getLockObject() == eiPermit){
// note that 'permit' is only used in park/unpark, so there never is more than
// one waiter, which immediately becomes runnable again because it doesn't hold a lock
// (park is a lockfree wait). unpark() therefore has to be a right mover
// and we have to register a ThreadCG here
eiPermit.notifies(ss, tiCurrent, false);
} else {
eiPermit.setBooleanField("blockPark", false);
return;
}
}
if (tiCurrent.getScheduler().setsUnparkCG(tiCurrent, tiParked)){
env.repeatInvocation();
return;
}
}
@MJI
public void ensureClassInitialized__Ljava_lang_Class_2__V (MJIEnv env, int unsafeRef, int clsObjRef) {
    // <2do> not sure if we have to do anything here - if we have a class object, the class should already
    // be initialized
}
@MJI
public int getObject__Ljava_lang_Object_2J__Ljava_lang_Object_2 (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getReferenceField(fi);
} else {
return ei.getReferenceElement((int)fieldOffset);
}
}
@MJI
public int getObjectVolatile__Ljava_lang_Object_2J__Ljava_lang_Object_2 (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset) {
return getObject__Ljava_lang_Object_2J__Ljava_lang_Object_2( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putObject__Ljava_lang_Object_2JLjava_lang_Object_2__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, int valRef) {
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setReferenceField(fi, valRef);
} else {
ei.setReferenceElement((int)fieldOffset, valRef);
}
}
@MJI
public void putObjectVolatile__Ljava_lang_Object_2JLjava_lang_Object_2__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, int valRef) {
putObject__Ljava_lang_Object_2JLjava_lang_Object_2__V( env, unsafeRef, objRef, fieldOffset, valRef);
}
@MJI
public void putOrderedObject__Ljava_lang_Object_2JLjava_lang_Object_2__V(
MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset,
int valRef) {
putObject__Ljava_lang_Object_2JLjava_lang_Object_2__V(env, unsafeRef, objRef, fieldOffset, valRef);
}
@MJI
public boolean getBoolean__Ljava_lang_Object_2J__Z(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getBooleanField(fi);
} else {
return ei.getBooleanElement((int)fieldOffset);
}
}
@MJI
public boolean getBooleanVolatile__Ljava_lang_Object_2J__Z(MJIEnv env, int unsafeRef,int objRef,long fieldOffset) {
return getBoolean__Ljava_lang_Object_2J__Z( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putBoolean__Ljava_lang_Object_2JZ__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, boolean val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setBooleanField(fi, val);
} else {
ei.setBooleanElement((int)fieldOffset, val);
}
}
@MJI
public void putBooleanVolatile__Ljava_lang_Object_2JZ__V (MJIEnv env, int unsafeRef, int objRef, long fieldOffset, boolean val){
putBoolean__Ljava_lang_Object_2JZ__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public byte getByte__Ljava_lang_Object_2J__B(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getByteField(fi);
} else {
return ei.getByteElement((int)fieldOffset);
}
}
@MJI
public byte getByteVolatile__Ljava_lang_Object_2J__B(MJIEnv env,int unsafeRef,int objRef,long fieldOffset) {
return getByte__Ljava_lang_Object_2J__B(env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putByte__Ljava_lang_Object_2JB__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, byte val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setByteField(fi, val);
} else {
ei.setByteElement((int)fieldOffset, val);
}
}
@MJI
public void putByteVolatile__Ljava_lang_Object_2JB__V (MJIEnv env, int unsafeRef,int objRef, long fieldOffset, byte val){
putByte__Ljava_lang_Object_2JB__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public char getChar__Ljava_lang_Object_2J__C(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getCharField(fi);
} else {
return ei.getCharElement((int)fieldOffset);
}
}
@MJI
public char getCharVolatile__Ljava_lang_Object_2J__C(MJIEnv env,int unsafeRef,int objRef,long fieldOffset) {
return getChar__Ljava_lang_Object_2J__C( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putChar__Ljava_lang_Object_2JC__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, char val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setCharField(fi, val);
} else {
ei.setCharElement((int)fieldOffset, val);
}
}
@MJI
public void putCharVolatile__Ljava_lang_Object_2JC__V (MJIEnv env, int unsafeRef,int objRef, long fieldOffset, char val){
putChar__Ljava_lang_Object_2JC__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public short getShort__Ljava_lang_Object_2J__S(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getShortField(fi);
} else {
return ei.getShortElement((int)fieldOffset);
}
}
@MJI
public short getShortVolatile__Ljava_lang_Object_2J__S(MJIEnv env,int unsafeRef,int objRef,long fieldOffset) {
return getShort__Ljava_lang_Object_2J__S( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putShort__Ljava_lang_Object_2JS__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, short val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setShortField(fi, val);
} else {
ei.setShortElement((int)fieldOffset, val);
}
}
@MJI
public void putShortVolatile__Ljava_lang_Object_2JS__V (MJIEnv env, int unsafeRef,int objRef, long fieldOffset, short val){
putShort__Ljava_lang_Object_2JS__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public int getInt__Ljava_lang_Object_2J__I(MJIEnv env, int unsafeRef,
int objRef, long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getIntField(fi);
} else {
return ei.getIntElement((int)fieldOffset);
}
}
@MJI
public int getIntVolatile__Ljava_lang_Object_2J__I(MJIEnv env, int unsafeRef, int objRef, long fieldOffset) {
return getInt__Ljava_lang_Object_2J__I( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putInt__Ljava_lang_Object_2JI__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, int val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setIntField(fi, val);
} else {
ei.setIntElement((int)fieldOffset, val);
}
}
@MJI
public void putIntVolatile__Ljava_lang_Object_2JI__V (MJIEnv env, int unsafeRef, int objRef, long fieldOffset, int val){
putInt__Ljava_lang_Object_2JI__V(env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public void putOrderedInt__Ljava_lang_Object_2JI__V(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset,
int val) {
// volatile?
putInt__Ljava_lang_Object_2JI__V(env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public float getFloat__Ljava_lang_Object_2J__F(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getFloatField(fi);
} else {
return ei.getFloatElement((int)fieldOffset);
}
}
@MJI
public float getFloatVolatile__Ljava_lang_Object_2J__F(MJIEnv env,int unsafeRef,int objRef,long fieldOffset) {
return getFloat__Ljava_lang_Object_2J__F( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putFloat__Ljava_lang_Object_2JF__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, float val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setFloatField(fi, val);
} else {
ei.setFloatElement((int)fieldOffset, val);
}
}
@MJI
public void putFloatVolatile__Ljava_lang_Object_2JF__V (MJIEnv env, int unsafeRef,int objRef, long fieldOffset, float val){
putFloat__Ljava_lang_Object_2JF__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public long getLong__Ljava_lang_Object_2J__J(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getLongField(fi);
} else {
return ei.getLongElement((int)fieldOffset);
}
}
@MJI
public long getLongVolatile__Ljava_lang_Object_2J__J(MJIEnv env, int unsafeRef, int objRef, long fieldOffset) {
return getLong__Ljava_lang_Object_2J__J( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putLong__Ljava_lang_Object_2JJ__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, long val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setLongField(fi, val);
} else {
ei.setLongElement((int)fieldOffset, val);
}
}
@MJI
public void putLongVolatile__Ljava_lang_Object_2JJ__V (MJIEnv env, int unsafeRef, int objRef, long fieldOffset, long val){
putLong__Ljava_lang_Object_2JJ__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public void putOrderedLong__Ljava_lang_Object_2JJ__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, long val) {
putLong__Ljava_lang_Object_2JJ__V(env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public double getDouble__Ljava_lang_Object_2J__D(MJIEnv env,
int unsafeRef,
int objRef,
long fieldOffset) {
ElementInfo ei = env.getElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
return ei.getDoubleField(fi);
} else {
return ei.getDoubleElement((int)fieldOffset);
}
}
@MJI
public double getDoubleVolatile__Ljava_lang_Object_2J__D(MJIEnv env,int unsafeRef,int objRef,long fieldOffset) {
return getDouble__Ljava_lang_Object_2J__D( env, unsafeRef, objRef, fieldOffset);
}
@MJI
public void putDouble__Ljava_lang_Object_2JD__V (MJIEnv env, int unsafeRef,
int objRef, long fieldOffset, double val){
ElementInfo ei = env.getModifiableElementInfo(objRef);
if (!ei.isArray()) {
FieldInfo fi = getRegisteredFieldInfo(fieldOffset);
ei.setDoubleField(fi, val);
} else {
ei.setDoubleElement((int)fieldOffset, val);
}
}
@MJI
public void putDoubleVolatile__Ljava_lang_Object_2JD__V (MJIEnv env, int unsafeRef, int objRef, long fieldOffset, double val){
putDouble__Ljava_lang_Object_2JD__V( env, unsafeRef, objRef, fieldOffset, val);
}
@MJI
public int arrayBaseOffset__Ljava_lang_Class_2__I (MJIEnv env, int unsafeRef, int clazz) {
return 0;
}
@MJI
public int arrayIndexScale__Ljava_lang_Class_2__I (MJIEnv env, int unsafeRef, int clazz) {
return 1;
}
private static FieldInfo getRegisteredFieldInfo(long fieldOffset) {
return JPF_java_lang_reflect_Field.getRegisteredFieldInfo((int)fieldOffset);
}
//--- the explicit memory buffer allocation/free + access methods - evil pointer arithmetic
  /*
   * We avoid maintaining our own address table by relying on two assumptions: the byte[]
   * object stored in the ArrayFields will not be recycled, and hashCode() returns its
   * address, so the start/endAdr pairs we derive from it have to be non-overlapping.
   * Of course this falls apart if hashCode() does something different, which is the case
   * for any address that exceeds 32 bits.
   */
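  // Worked example of the mapping (illustrative values only): allocateMemory(16) pins a new
  // byte[16]; if its identity hashCode is 0x5a20, the block covers startAdr=0x5a20 .. endAdr=0x5a2f,
  // and getByte(0x5a24) reads element (0x5a24 - 0x5a20) = 4 of that backing array.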
static class Alloc {
int objRef;
int startAdr;
int endAdr;
Alloc next;
Alloc (MJIEnv env, int baRef, long length){
this.objRef = baRef;
ElementInfo ei = env.getElementInfo(baRef);
ArrayFields afi = (ArrayFields) ei.getFields();
byte[] mem = afi.asByteArray();
startAdr = mem.hashCode();
endAdr = startAdr + (int)length -1;
}
@Override
public String toString(){
return String.format("Alloc[objRef=%x,startAdr=%x,endAdr=%x]", objRef, startAdr, endAdr);
}
}
Alloc firstAlloc;
// for debugging purposes only
private void dumpAllocs(){
System.out.println("Unsafe allocated memory blocks:{");
for (Alloc a = firstAlloc; a != null; a = a.next){
System.out.print(" ");
System.out.println(a);
}
System.out.println('}');
}
private void sortInAlloc(Alloc newAlloc){
int startAdr = newAlloc.startAdr;
if (firstAlloc == null){
firstAlloc = newAlloc;
} else {
Alloc prev = null;
for (Alloc a = firstAlloc; a != null; prev = a, a = a.next){
        if (startAdr < a.startAdr){
          newAlloc.next = a;
          if (prev == null){
            firstAlloc = newAlloc;
          } else {
            prev.next = newAlloc;
          }
          return; // inserted at its sorted position
        }
      }
      // startAdr is larger than all existing blocks - append at the end of the list
      prev.next = newAlloc;
}
}
private Alloc getAlloc (int address){
for (Alloc a = firstAlloc; a != null; a = a.next){
if (address >= a.startAdr && address <= a.endAdr){
return a;
}
}
return null;
}
private Alloc removeAlloc (int startAddress){
Alloc prev = null;
for (Alloc a = firstAlloc; a != null; prev = a, a = a.next) {
if (a.startAdr == startAddress){
if (prev == null){
firstAlloc = a.next;
} else {
prev.next = a.next;
}
return a;
}
}
return null;
}
@MJI
public long allocateMemory__J__J (MJIEnv env, int unsafeRef, long nBytes) {
if (nBytes < 0 || nBytes > Integer.MAX_VALUE) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory block size: " + nBytes);
return 0;
}
// <2do> we should probably also throw OutOfMemoryErrors on configured thresholds
int baRef = env.newByteArray((int) nBytes);
// the corresponding objects have to be freed explicitly
env.registerPinDown(baRef);
Alloc alloc = new Alloc(env, baRef, nBytes);
sortInAlloc(alloc);
return alloc.startAdr;
}
@MJI
public void freeMemory__J__V (MJIEnv env, int unsafeRef, long startAddress) {
int addr = (int)startAddress;
if (startAddress != MJIEnv.NULL){
Alloc a = removeAlloc(addr);
if (a == null){
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
} else {
env.releasePinDown(a.objRef);
}
}
}
@MJI
public byte getByte__J__B (MJIEnv env, int unsafeRef, long address) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return 0;
}
ElementInfo ei = env.getElementInfo(a.objRef);
return ei.getByteElement(addr - a.startAdr);
}
@MJI
public void putByte__JB__V (MJIEnv env, int unsafeRef, long address, byte val) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return;
}
ElementInfo ei = env.getModifiableElementInfo(a.objRef);
ei.setByteElement(addr - a.startAdr, val);
}
@MJI
public char getChar__J__C (MJIEnv env, int unsafeRef, long address) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return 0;
}
ElementInfo ei = env.getElementInfo(a.objRef);
    byte[] ba = ei.asByteArray();
    int offset = addr - a.startAdr; // index into the backing byte[], not the raw address
    byte b0 = ba[offset];
    byte b1 = ba[offset+1];
    char val;
    if (env.isBigEndianPlatform()){
      val = (char) (((b0 & 0xff) << 8) | (b1 & 0xff));
    } else {
      val = (char) (((b1 & 0xff) << 8) | (b0 & 0xff));
    }
return val;
}
@MJI
public void putChar__JC__V (MJIEnv env, int unsafeRef, long address, char val) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return;
}
byte b1 = (byte)(0xff & val);
byte b0 = (byte)(0xff & (val >>> 8));
    ElementInfo ei = env.getModifiableElementInfo(a.objRef);
    int offset = addr - a.startAdr; // index into the backing byte[], not the raw address
    if (env.isBigEndianPlatform()){
      ei.setByteElement(offset, b0);
      ei.setByteElement(offset+1, b1);
    } else {
      ei.setByteElement(offset, b1);
      ei.setByteElement(offset+1, b0);
    }
}
@MJI
public int getInt__J__I (MJIEnv env, int unsafeRef, long address) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return 0;
}
ElementInfo ei = env.getElementInfo(a.objRef);
    byte[] ba = ei.asByteArray();
    int offset = addr - a.startAdr; // index into the backing byte[], not the raw address
    byte b0 = ba[offset];
    byte b1 = ba[offset+1];
    byte b2 = ba[offset+2];
    byte b3 = ba[offset+3];
    int val;
    if (env.isBigEndianPlatform()){
      val = b0 & 0xff;
      val = (val << 8) | (b1 & 0xff);
      val = (val << 8) | (b2 & 0xff);
      val = (val << 8) | (b3 & 0xff);
    } else {
      val = b3 & 0xff;
      val = (val << 8) | (b2 & 0xff);
      val = (val << 8) | (b1 & 0xff);
      val = (val << 8) | (b0 & 0xff);
    }
return val;
}
@MJI
public void putInt__JI__V (MJIEnv env, int unsafeRef, long address, int val) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return;
}
byte b3 = (byte)(0xff & val);
byte b2 = (byte)(0xff & (val >>> 8));
byte b1 = (byte)(0xff & (val >>> 16));
byte b0 = (byte)(0xff & (val >>> 24));
    ElementInfo ei = env.getModifiableElementInfo(a.objRef);
    int offset = addr - a.startAdr; // index into the backing byte[], not the raw address
    if (env.isBigEndianPlatform()){
      ei.setByteElement(offset, b0);
      ei.setByteElement(offset+1, b1);
      ei.setByteElement(offset+2, b2);
      ei.setByteElement(offset+3, b3);
    } else {
      ei.setByteElement(offset, b3);
      ei.setByteElement(offset+1, b2);
      ei.setByteElement(offset+2, b1);
      ei.setByteElement(offset+3, b0);
    }
}
@MJI
public long getLong__J__J (MJIEnv env, int unsafeRef, long address) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return 0;
}
ElementInfo ei = env.getElementInfo(a.objRef);
byte[] ba = ei.asByteArray();
int offset = addr - a.startAdr;
byte b0 = ba[offset];
byte b1 = ba[offset+1];
byte b2 = ba[offset+2];
byte b3 = ba[offset+3];
byte b4 = ba[offset+4];
byte b5 = ba[offset+5];
byte b6 = ba[offset+6];
byte b7 = ba[offset+7];
    long val; // accumulate in a long - an int would truncate the upper four bytes
    if (env.isBigEndianPlatform()){
      val = b0 & 0xff;
      val = (val << 8) | (b1 & 0xff);
      val = (val << 8) | (b2 & 0xff);
      val = (val << 8) | (b3 & 0xff);
      val = (val << 8) | (b4 & 0xff);
      val = (val << 8) | (b5 & 0xff);
      val = (val << 8) | (b6 & 0xff);
      val = (val << 8) | (b7 & 0xff);
    } else {
      val = b7 & 0xff;
      val = (val << 8) | (b6 & 0xff);
      val = (val << 8) | (b5 & 0xff);
      val = (val << 8) | (b4 & 0xff);
      val = (val << 8) | (b3 & 0xff);
      val = (val << 8) | (b2 & 0xff);
      val = (val << 8) | (b1 & 0xff);
      val = (val << 8) | (b0 & 0xff);
    }
return val;
}
@MJI
public void putLong__JJ__V (MJIEnv env, int unsafeRef, long address, long val) {
int addr = (int)address;
Alloc a = getAlloc(addr);
if (a == null) {
env.throwException("java.lang.IllegalArgumentException", "invalid memory address: " + Integer.toHexString(addr));
return;
}
byte b7 = (byte)(0xff & val);
byte b6 = (byte)(0xff & (val >>> 8));
byte b5 = (byte)(0xff & (val >>> 16));
byte b4 = (byte)(0xff & (val >>> 24));
byte b3 = (byte)(0xff & (val >>> 32));
byte b2 = (byte)(0xff & (val >>> 40));
byte b1 = (byte)(0xff & (val >>> 48));
byte b0 = (byte)(0xff & (val >>> 56));
ElementInfo ei = env.getModifiableElementInfo(a.objRef);
int offset = addr - a.startAdr;
if (env.isBigEndianPlatform()){
ei.setByteElement(offset, b0);
ei.setByteElement(offset+1, b1);
ei.setByteElement(offset+2, b2);
ei.setByteElement(offset+3, b3);
ei.setByteElement(offset+4, b4);
ei.setByteElement(offset+5, b5);
ei.setByteElement(offset+6, b6);
ei.setByteElement(offset+7, b7);
} else {
ei.setByteElement(offset, b7);
ei.setByteElement(offset+1, b6);
ei.setByteElement(offset+2, b5);
ei.setByteElement(offset+3, b4);
ei.setByteElement(offset+4, b3);
ei.setByteElement(offset+5, b2);
ei.setByteElement(offset+6, b1);
ei.setByteElement(offset+7, b0);
}
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl;
import java.util.List;
import java.util.Set;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.impl.db.SuspensionState;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.CommandExecutor;
import org.flowable.common.engine.impl.query.AbstractQuery;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.repository.ProcessDefinitionQuery;
/**
* @author Tom Baeyens
* @author Joram Barrez
* @author Daniel Meyer
* @author Saeid Mirzaei
*/
public class ProcessDefinitionQueryImpl extends AbstractQuery<ProcessDefinitionQuery, ProcessDefinition> implements ProcessDefinitionQuery {
private static final long serialVersionUID = 1L;
protected String id;
protected Set<String> ids;
protected String category;
protected String categoryLike;
protected String categoryNotEquals;
protected String name;
protected String nameLike;
protected String deploymentId;
protected Set<String> deploymentIds;
protected String key;
protected String keyLike;
protected String resourceName;
protected String resourceNameLike;
protected Integer version;
protected Integer versionGt;
protected Integer versionGte;
protected Integer versionLt;
protected Integer versionLte;
protected boolean latest;
protected SuspensionState suspensionState;
protected String authorizationUserId;
protected String procDefId;
protected String tenantId;
protected String tenantIdLike;
protected boolean withoutTenantId;
protected String engineVersion;
protected String eventSubscriptionName;
protected String eventSubscriptionType;
public ProcessDefinitionQueryImpl() {
}
public ProcessDefinitionQueryImpl(CommandContext commandContext) {
super(commandContext);
}
public ProcessDefinitionQueryImpl(CommandExecutor commandExecutor) {
super(commandExecutor);
}
@Override
public ProcessDefinitionQueryImpl processDefinitionId(String processDefinitionId) {
this.id = processDefinitionId;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionIds(Set<String> processDefinitionIds) {
this.ids = processDefinitionIds;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionCategory(String category) {
if (category == null) {
throw new FlowableIllegalArgumentException("category is null");
}
this.category = category;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionCategoryLike(String categoryLike) {
if (categoryLike == null) {
throw new FlowableIllegalArgumentException("categoryLike is null");
}
this.categoryLike = categoryLike;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionCategoryNotEquals(String categoryNotEquals) {
if (categoryNotEquals == null) {
throw new FlowableIllegalArgumentException("categoryNotEquals is null");
}
this.categoryNotEquals = categoryNotEquals;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionName(String name) {
if (name == null) {
throw new FlowableIllegalArgumentException("name is null");
}
this.name = name;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionNameLike(String nameLike) {
if (nameLike == null) {
throw new FlowableIllegalArgumentException("nameLike is null");
}
this.nameLike = nameLike;
return this;
}
@Override
public ProcessDefinitionQueryImpl deploymentId(String deploymentId) {
if (deploymentId == null) {
throw new FlowableIllegalArgumentException("id is null");
}
this.deploymentId = deploymentId;
return this;
}
@Override
public ProcessDefinitionQueryImpl deploymentIds(Set<String> deploymentIds) {
if (deploymentIds == null) {
throw new FlowableIllegalArgumentException("ids are null");
}
this.deploymentIds = deploymentIds;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionKey(String key) {
if (key == null) {
throw new FlowableIllegalArgumentException("key is null");
}
this.key = key;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionKeyLike(String keyLike) {
if (keyLike == null) {
throw new FlowableIllegalArgumentException("keyLike is null");
}
this.keyLike = keyLike;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionResourceName(String resourceName) {
if (resourceName == null) {
throw new FlowableIllegalArgumentException("resourceName is null");
}
this.resourceName = resourceName;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionResourceNameLike(String resourceNameLike) {
if (resourceNameLike == null) {
throw new FlowableIllegalArgumentException("resourceNameLike is null");
}
this.resourceNameLike = resourceNameLike;
return this;
}
@Override
public ProcessDefinitionQueryImpl processDefinitionVersion(Integer version) {
checkVersion(version);
this.version = version;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionVersionGreaterThan(Integer processDefinitionVersion) {
checkVersion(processDefinitionVersion);
this.versionGt = processDefinitionVersion;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionVersionGreaterThanOrEquals(Integer processDefinitionVersion) {
checkVersion(processDefinitionVersion);
this.versionGte = processDefinitionVersion;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionVersionLowerThan(Integer processDefinitionVersion) {
checkVersion(processDefinitionVersion);
this.versionLt = processDefinitionVersion;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionVersionLowerThanOrEquals(Integer processDefinitionVersion) {
checkVersion(processDefinitionVersion);
this.versionLte = processDefinitionVersion;
return this;
}
protected void checkVersion(Integer version) {
if (version == null) {
throw new FlowableIllegalArgumentException("version is null");
} else if (version <= 0) {
throw new FlowableIllegalArgumentException("version must be positive");
}
}
@Override
public ProcessDefinitionQueryImpl latestVersion() {
this.latest = true;
return this;
}
@Override
public ProcessDefinitionQuery active() {
this.suspensionState = SuspensionState.ACTIVE;
return this;
}
@Override
public ProcessDefinitionQuery suspended() {
this.suspensionState = SuspensionState.SUSPENDED;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionTenantId(String tenantId) {
if (tenantId == null) {
throw new FlowableIllegalArgumentException("processDefinition tenantId is null");
}
this.tenantId = tenantId;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionTenantIdLike(String tenantIdLike) {
if (tenantIdLike == null) {
throw new FlowableIllegalArgumentException("process definition tenantId is null");
}
this.tenantIdLike = tenantIdLike;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionWithoutTenantId() {
this.withoutTenantId = true;
return this;
}
@Override
public ProcessDefinitionQuery processDefinitionEngineVersion(String engineVersion) {
this.engineVersion = engineVersion;
return this;
}
public ProcessDefinitionQuery messageEventSubscription(String messageName) {
return eventSubscription("message", messageName);
}
@Override
public ProcessDefinitionQuery messageEventSubscriptionName(String messageName) {
return eventSubscription("message", messageName);
}
public ProcessDefinitionQuery processDefinitionStarter(String procDefId) {
this.procDefId = procDefId;
return this;
}
public ProcessDefinitionQuery eventSubscription(String eventType, String eventName) {
if (eventName == null) {
throw new FlowableIllegalArgumentException("event name is null");
}
if (eventType == null) {
throw new FlowableException("event type is null");
}
this.eventSubscriptionType = eventType;
this.eventSubscriptionName = eventName;
return this;
}
public List<String> getAuthorizationGroups() {
if (authorizationUserId == null) {
return null;
}
return CommandContextUtil.getProcessEngineConfiguration().getCandidateManager().getGroupsForCandidateUser(authorizationUserId);
}
@Override
public ProcessDefinitionQueryImpl startableByUser(String userId) {
if (userId == null) {
throw new FlowableIllegalArgumentException("userId is null");
}
this.authorizationUserId = userId;
return this;
}
// sorting ////////////////////////////////////////////
@Override
public ProcessDefinitionQuery orderByDeploymentId() {
return orderBy(ProcessDefinitionQueryProperty.DEPLOYMENT_ID);
}
@Override
public ProcessDefinitionQuery orderByProcessDefinitionKey() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_KEY);
}
@Override
public ProcessDefinitionQuery orderByProcessDefinitionCategory() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_CATEGORY);
}
@Override
public ProcessDefinitionQuery orderByProcessDefinitionId() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_ID);
}
@Override
public ProcessDefinitionQuery orderByProcessDefinitionVersion() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_VERSION);
}
@Override
public ProcessDefinitionQuery orderByProcessDefinitionName() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_NAME);
}
@Override
public ProcessDefinitionQuery orderByTenantId() {
return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_TENANT_ID);
}
// results ////////////////////////////////////////////
@Override
public long executeCount(CommandContext commandContext) {
checkQueryOk();
return CommandContextUtil.getProcessDefinitionEntityManager(commandContext).findProcessDefinitionCountByQueryCriteria(this);
}
@Override
public List<ProcessDefinition> executeList(CommandContext commandContext) {
checkQueryOk();
return CommandContextUtil.getProcessDefinitionEntityManager(commandContext).findProcessDefinitionsByQueryCriteria(this);
}
@Override
public void checkQueryOk() {
super.checkQueryOk();
}
// getters ////////////////////////////////////////////
public String getDeploymentId() {
return deploymentId;
}
public Set<String> getDeploymentIds() {
return deploymentIds;
}
public String getId() {
return id;
}
public Set<String> getIds() {
return ids;
}
public String getName() {
return name;
}
public String getNameLike() {
return nameLike;
}
public String getKey() {
return key;
}
public String getKeyLike() {
return keyLike;
}
public Integer getVersion() {
return version;
}
public Integer getVersionGt() {
return versionGt;
}
public Integer getVersionGte() {
return versionGte;
}
public Integer getVersionLt() {
return versionLt;
}
public Integer getVersionLte() {
return versionLte;
}
public boolean isLatest() {
return latest;
}
public String getCategory() {
return category;
}
public String getCategoryLike() {
return categoryLike;
}
public String getResourceName() {
return resourceName;
}
public String getResourceNameLike() {
return resourceNameLike;
}
public SuspensionState getSuspensionState() {
return suspensionState;
}
public void setSuspensionState(SuspensionState suspensionState) {
this.suspensionState = suspensionState;
}
public String getCategoryNotEquals() {
return categoryNotEquals;
}
public String getTenantId() {
return tenantId;
}
public String getTenantIdLike() {
return tenantIdLike;
}
public boolean isWithoutTenantId() {
return withoutTenantId;
}
public String getEngineVersion() {
return engineVersion;
}
public String getAuthorizationUserId() {
return authorizationUserId;
}
public String getProcDefId() {
return procDefId;
}
public String getEventSubscriptionName() {
return eventSubscriptionName;
}
public String getEventSubscriptionType() {
return eventSubscriptionType;
}
}
|
|
//John Paul Mardelli
//Last updated November 19th, 2013
package com.northstar.minimap;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Fragment;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.Point;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapView;
import com.google.android.gms.maps.MapsInitializer;
import com.google.android.gms.maps.Projection;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.GroundOverlay;
import com.google.android.gms.maps.model.GroundOverlayOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.PolygonOptions;
import com.google.android.gms.maps.model.PolylineOptions;
import com.northstar.minimap.beacon.BeaconListener;
import com.northstar.minimap.beacon.IBeacon;
import com.northstar.minimap.beacon.StickNFindBluetoothBeacon;
import com.northstar.minimap.itinerary.ItineraryPoint;
import com.northstar.minimap.map.Barrier;
import com.northstar.minimap.map.BluetoothLELocationSource;
import com.northstar.minimap.map.BoundaryLocationSource;
import com.northstar.minimap.map.Map;
import com.northstar.minimap.map.Table;
import com.northstar.minimap.map.UserPositionListener;
/**
 * Code adapted from: http://www.matt-reid.co.uk/blog_post.php?id=93
*/
public class CustomMapFragment extends Fragment implements BeaconListener, UserPositionListener {
public static final double DRAW_PIXEL_RATIO = 2.333;
public static final double FT_IN_PIXELS = 10;
public static final long POST_USER_LOCATION_PERIOD = 30000;
private boolean postNewUserLocation = true;
private boolean proximityZonesEnabled = true;
private boolean showBeaconRangeCircles = true;
public static final int COLOR_BEACON = Color.rgb(0, 153, 255);
public static final int COLOR_BEACON_IN_PROXIMITY_ZONE = Color.rgb(255, 165, 0);
public static final int COLOR_BEACON_RANGE = Color.rgb(0, 153, 255);
public static final int COLOR_BEACON_RANGE_IN_PROXIMITY_ZONE = Color.rgb(255, 214, 153);
public static final int COLOR_TABLE = Color.parseColor("#008000");
public static final int COLOR_BARRIER = Color.parseColor("#CC0000");
private IBeacon proximityZoneBeacon;
// Google and Android objects
private MapView mapView;
private GoogleMap googleMap;
private Projection proj;
private BoundaryLocationSource locSource;
private MapActivity parentAct;
private Map map;
private List<LatLngBounds> boundBoxes = new ArrayList<LatLngBounds>();
private LatLngBounds mapBounds;
private Marker currentItineraryPoint;
private Position currentUserPosition;
private Handler postUserLocationHandler;
private List<Marker> userPosMarkers = new ArrayList<Marker>();
private String userPosOption = "All";
private CallbackListener updateUser;
private CallbackListener getUsersPos;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
locSource = new BluetoothLELocationSource();
// inflate and return the layout
View v = inflater.inflate(R.layout.fragment_map, container, false);
mapView = (MapView) v.findViewById(R.id.mapView);
mapView.onCreate(savedInstanceState);
mapView.onResume();//needed to get the map to display immediately
MapsInitializer.initialize(this.getActivity());
googleMap = mapView.getMap();
//Initial zoom for projection
googleMap.moveCamera(CameraUpdateFactory.zoomTo((float) 15.0));
//Use MAP_TYPE_NONE to get rid of Google's base map tiles so only our custom content is drawn
googleMap.setMapType(GoogleMap.MAP_TYPE_NONE);
//Set up location stuff
googleMap.setMyLocationEnabled(true);
googleMap.setLocationSource(locSource);
googleMap.setOnMapLongClickListener(onMapLongClickListener);
//customTP = new CustomTileProvider(filename, height, width);
//overlayTOps = new TileOverlayOptions()
//overlayTOps.tileProvider(customTP);
//googleMap.addTileOverlay(overlayTOps);
//Make reference to parent for listener
parentAct = (MapActivity)this.getActivity();
//Set up Global Layout listener to get projection at right time
OnGlobalLayoutListener ready = new OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
//Remove listener
mapView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
//Grab and set the projection
setProjection();
//Inform activity we are ready to process map.
parentAct.processMap();
}
};
mapView.getViewTreeObserver().addOnGlobalLayoutListener(ready);
        //Create callback listeners - assign to the fields (not local variables that would
        //shadow them) so onDestroy() and the position update handlers can reach them
        updateUser = new UpdateUserCallback(this);
        getUsersPos = new UserPosCallback(this);
return v;
}
@Override
public void onResume() {
super.onResume();
mapView.onResume();
locSource.onResume();
SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(parentAct);
showBeaconRangeCircles = sharedPref.getBoolean(SettingsActivity.KEY_PREF_BEACON_RANGE, true);
proximityZonesEnabled = sharedPref.getBoolean(SettingsActivity.KEY_PREF_PROXIMITY_ZONE, true);
if (map != null) {
for (IBeacon beacon : map.getBeacons()) {
drawBeacon(beacon);
}
}
}
@Override
public void onPause() {
super.onPause();
mapView.onPause();
locSource.onPause();
}
@Override
public void onDestroy() {
super.onDestroy();
mapView.onDestroy();
if (updateUser != null) {
updateUser.parentDestroyed();
}
if (getUsersPos != null) {
getUsersPos.parentDestroyed();
}
}
@Override
public void onLowMemory() {
super.onLowMemory();
mapView.onLowMemory();
}
@Override
public void onBeaconDistanceChanged(IBeacon beacon, double distance) {
drawBeacon(beacon);
}
@Override
public void onBeaconInProximityZoneChanged(IBeacon beacon, boolean isInProximityZone) {
for (IBeacon b: map.getBeacons()) {
drawBeacon(b);
}
if (isInProximityZone) {
proximityZoneBeacon = beacon;
} else {
proximityZoneBeacon = null;
}
}
@Override
public void onUserPositionChanged(Position userPosition, double positionError) {
int accuracy = (int) Math.round(positionError * DRAW_PIXEL_RATIO * FT_IN_PIXELS);
Position userMapPosition = MapActivity.toMapPosition(userPosition);
LatLng userLatLng = proj.fromScreenLocation(userMapPosition.toPoint());
Location userLocation = new Location("");
userLocation.setLatitude(userLatLng.latitude);
userLocation.setLongitude(userLatLng.longitude);
userLocation.setAccuracy(accuracy);
locSource.setLocation(userLocation);
currentUserPosition = userPosition;
//Update the server with new user position
if (this.getActivity() != null) {
if (postNewUserLocation) {
final CustomMapFragment customMapFragment = this;
postNewUserLocation = false;
postUserLocationHandler = new Handler();
postUserLocationHandler.postDelayed(new Runnable() {
@Override
public void run() {
Globals state = (Globals) customMapFragment.getActivity().getApplicationContext();
try {
JSONObject userJson = new JSONObject(state.data.userJson);
userJson.put("X", customMapFragment.getCurrentUserPosition().getX());
userJson.put("Y", customMapFragment.getCurrentUserPosition().getY());
state.comm.updateUser(updateUser, userJson.toString());
state.data.userJson = userJson.toString();
postNewUserLocation = true;
} catch (JSONException e) {
e.printStackTrace();
}
}
}, POST_USER_LOCATION_PERIOD);
}
}
}
public void getUsersPos(){
Globals state = (Globals) this.getActivity().getApplicationContext();
state.comm.getUsersJson(getUsersPos);
}
public Position getCurrentUserPosition() {
return currentUserPosition;
}
public void updateUsersPos(){
removeUsersPos();
if(userPosOption.equals("All")){
AllUsersPos();
}
else if(userPosOption.equals("Team")){
TeamUsersPos();
}
}
public void removeUsersPos(){
for(int i = 0; i < userPosMarkers.size(); i++){
Marker mark = userPosMarkers.get(i);
mark.remove();
}
userPosMarkers.clear();
}
public void AllUsersPos(){
Globals state = (Globals) this.getActivity().getApplicationContext();
try {
JSONArray usersPos = new JSONArray(state.data.usersJson);
for (int i = 0; i < usersPos.length(); i++) {
JSONObject userPos = usersPos.getJSONObject(i);
if(!userPos.getString("Id").equals(state.data.userID)){
MarkerOptions userPosMarkOpts = new MarkerOptions();
LatLng markerLoc = proj.fromScreenLocation(new Point(userPos.getInt("X"),
userPos.getInt("Y")));
userPosMarkOpts = userPosMarkOpts.position(markerLoc);
userPosMarkOpts = userPosMarkOpts.title(userPos.getString("Name"));
Marker userPosMark = googleMap.addMarker(userPosMarkOpts);
userPosMarkers.add(userPosMark);
}
}
} catch (JSONException e) {
                // malformed users JSON - log and skip this update
e.printStackTrace();
}
}
public void TeamUsersPos(){
Globals state = (Globals) this.getActivity().getApplicationContext();
try {
JSONArray usersPos = new JSONArray(state.data.usersJson);
for (int i = 0; i < usersPos.length(); i++) {
JSONObject userPos = usersPos.getJSONObject(i);
if(!userPos.getString("Id").equals(state.data.userID) &&
userPos.getString("TeamId").equals(state.data.teamID)){
MarkerOptions userPosMarkOpts = new MarkerOptions();
LatLng markerLoc = proj.fromScreenLocation(new Point(userPos.getInt("X"),
userPos.getInt("Y")));
userPosMarkOpts = userPosMarkOpts.position(markerLoc);
userPosMarkOpts = userPosMarkOpts.title(userPos.getString("Name"));
Marker userPosMark = googleMap.addMarker(userPosMarkOpts);
userPosMarkers.add(userPosMark);
}
}
} catch (JSONException e) {
                // malformed users JSON - log and skip this update
e.printStackTrace();
}
}
private void setProjection() {
proj = googleMap.getProjection();
}
public void setMap(Map map){
googleMap.clear();
this.map = map;
locSource.setMap(this.map);
processTables();
processBarriers();
drawBeaconMarkers();
//Temporary location set
Location location = new Location("TempProviderWhatever");
LatLng coordinate = proj.fromScreenLocation(new Point(0, 0));
location.setLatitude(coordinate.latitude);
location.setLongitude(coordinate.longitude);
location.setAccuracy(100);
locSource.setLocation(location);
parentAct.setBeaconListener(this);
parentAct.setUserPositionListener(this);
}
private void drawBeacon(IBeacon beacon) {
double drawDistance = beacon.computeDistance() * DRAW_PIXEL_RATIO * FT_IN_PIXELS;
int beaconColor = (beacon.isInProximityZone() && proximityZonesEnabled) ?
COLOR_BEACON_IN_PROXIMITY_ZONE : COLOR_BEACON;
Point p = (MapActivity.toMapPosition(beacon.getPosition())).toPoint();
LatLng markerLoc = proj.fromScreenLocation(p);
if (beacon.getMarkerCircle() != null) {
beacon.getMarkerCircle().remove();
}
beacon.setMarkerCircle(googleMap.addCircle(new CircleOptions()
.center(markerLoc)
.radius(10)
.strokeColor(Color.BLACK)
.strokeWidth(3)
.fillColor(beaconColor)));
if (drawDistance > 0) {
if (beacon.getRangeCircle() != null) {
beacon.getRangeCircle().remove();
}
if (showBeaconRangeCircles) {
int rangeColor = (beacon.isInProximityZone() && proximityZonesEnabled) ?
COLOR_BEACON_RANGE_IN_PROXIMITY_ZONE : COLOR_BEACON_RANGE;
beacon.setRangeCircle(googleMap.addCircle(new CircleOptions()
.center(markerLoc)
.radius(drawDistance)
.strokeColor(rangeColor)
.strokeWidth(5)));
}
}
}
private void drawBeaconMarkers() {
for (IBeacon beacon: map.getBeacons()) {
drawBeacon(beacon);
}
}
private void processTables(){
List<Table> tables = map.getTables();
for(int i = 0; i < tables.size(); i++){
Table table = tables.get(i);
storeBoundBoxes(table);
drawTables(table);
}
}
private void processBarriers(){
List<Barrier> barriers = map.getBarriers();
for(int i = 0; i < barriers.size(); i++){
Barrier barrier = barriers.get(i);
storeBoundBoxes(barrier);
drawBarriers(barrier);
}
}
public void setCurrentItineraryPoint(ItineraryPoint point) {
if (currentItineraryPoint != null) {
currentItineraryPoint.remove();
}
MarkerOptions currentItinMarkerOptions = new MarkerOptions();
// Set the position and title of the Marker.
LatLng markerLoc = proj.fromScreenLocation(point.getPos().toPoint());
currentItinMarkerOptions = currentItinMarkerOptions.position(markerLoc);
currentItinMarkerOptions = currentItinMarkerOptions.title(point.getName());
currentItineraryPoint = googleMap.addMarker(currentItinMarkerOptions);
}
private void storeBoundBoxes(Barrier barrier){
LatLng neCorner = proj.fromScreenLocation(new Point((int)barrier.getPosition().getX() + barrier.getWidth(),
(int)barrier.getPosition().getY()));
LatLng swCorner = proj.fromScreenLocation(new Point((int)barrier.getPosition().getX(),
(int)barrier.getPosition().getY() + barrier.getHeight()));
LatLngBounds bound = new LatLngBounds(swCorner, neCorner);
boundBoxes.add(bound);
}
private void drawTables(Table table){
double heightInc = table.getHeight()/table.getHeightSubdivisions();
double widthInc = table.getWidth()/table.getWidthSubdivisions();
for(int wSub = 0; wSub < table.getWidthSubdivisions(); wSub++){
for(int hSub = 0; hSub < table.getHeightSubdivisions(); hSub++){
PolygonOptions tableSquare = new PolygonOptions();
double leftX = (table.getPosition().getX() + widthInc * wSub);
double rightX = (table.getPosition().getX() + widthInc * (wSub + 1));
double topY = (table.getPosition().getY() + heightInc * hSub);
double bottomY = (table.getPosition().getY() + heightInc * (hSub + 1));
leftX *= FT_IN_PIXELS;
rightX *= FT_IN_PIXELS;
topY *= FT_IN_PIXELS;
bottomY *= FT_IN_PIXELS;
LatLng topLeft = proj.fromScreenLocation(new Point((int)leftX, (int)topY));
LatLng bottomLeft = proj.fromScreenLocation(new Point((int)leftX, (int)bottomY));
LatLng bottomRight = proj.fromScreenLocation(new Point((int)rightX, (int)bottomY));
LatLng topRight = proj.fromScreenLocation(new Point((int)rightX, (int)topY));
tableSquare = tableSquare.add(topLeft, bottomLeft, bottomRight, topRight, topLeft)
.strokeColor(COLOR_TABLE)
.strokeWidth(4);
googleMap.addPolygon(tableSquare);
}
}
}
private void drawBarriers(Barrier barrier){
PolygonOptions barrierSquare = new PolygonOptions();
PolylineOptions barrierDiag1 = new PolylineOptions();
PolylineOptions barrierDiag2 = new PolylineOptions();
double leftX = (barrier.getPosition().getX());
double rightX = (barrier.getPosition().getX() + barrier.getWidth());
double topY = (barrier.getPosition().getY());
double bottomY = (barrier.getPosition().getY() + barrier.getHeight());
leftX *= FT_IN_PIXELS;
rightX *= FT_IN_PIXELS;
topY *= FT_IN_PIXELS;
bottomY *= FT_IN_PIXELS;
LatLng topLeft = proj.fromScreenLocation(new Point((int)leftX, (int)topY));
LatLng bottomLeft = proj.fromScreenLocation(new Point((int)leftX, (int)bottomY));
LatLng bottomRight = proj.fromScreenLocation(new Point((int)rightX, (int)bottomY));
LatLng topRight = proj.fromScreenLocation(new Point((int)rightX, (int)topY));
//Make the barrier square
barrierSquare = barrierSquare.add(topLeft, bottomLeft, bottomRight, topRight, topLeft)
.strokeColor(COLOR_BARRIER)
.strokeWidth(4);
googleMap.addPolygon(barrierSquare);
//Make the barrier diagonals
barrierDiag1 = barrierDiag1.add(topLeft, bottomRight)
.color(COLOR_BARRIER)
.width(4);
googleMap.addPolyline(barrierDiag1);
barrierDiag2 = barrierDiag2.add(bottomLeft, topRight)
.color(COLOR_BARRIER)
.width(4);
googleMap.addPolyline(barrierDiag2);
}
private GoogleMap.OnMapLongClickListener onMapLongClickListener =
new GoogleMap.OnMapLongClickListener() {
public void onMapLongClick(LatLng point) {
Position mapPosition = new Position(proj.toScreenLocation(point));
Position measuredPosition = MapActivity.toMeasuredPosition(mapPosition);
parentAct.calibrate(measuredPosition);
}
};
}
|
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.model.targetgraph;
import static com.facebook.buck.core.cell.TestCellBuilder.createCellRoots;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.cell.TestCellPathResolver;
import com.facebook.buck.core.description.arg.CommonDescriptionArg;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.targetgraph.impl.TargetNodeFactory;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleParams;
import com.facebook.buck.core.rules.impl.FakeBuildRule;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.parser.exceptions.NoSuchBuildTargetException;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.visibility.VisibilityPatternParser;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.Hashing;
import org.immutables.value.Value;
import org.junit.Test;
public class TargetNodeVisibilityTest {
private static final ProjectFilesystem filesystem = new FakeProjectFilesystem();
private static final BuildTarget orcaTarget =
BuildTargetFactory.newInstance(filesystem.getRootPath(), "//src/com/facebook/orca", "orca");
private static final BuildTarget publicTarget =
BuildTargetFactory.newInstance(
filesystem.getRootPath(), "//src/com/facebook/for", "everyone");
private static final BuildTarget nonPublicTarget1 =
BuildTargetFactory.newInstance(
filesystem.getRootPath(), "//src/com/facebook/something1", "nonPublic");
private static final BuildTarget nonPublicTarget2 =
BuildTargetFactory.newInstance(
filesystem.getRootPath(), "//src/com/facebook/something2", "nonPublic");
private static final ImmutableList<String> DEFAULT = ImmutableList.of();
private static final ImmutableList<String> PUBLIC = ImmutableList.of("PUBLIC");
private static final ImmutableList<String> ORCA =
ImmutableList.of(orcaTarget.getFullyQualifiedName());
private static final ImmutableList<String> SOME_OTHER = ImmutableList.of("//some/other:target");
@Test
public void testVisibilityPublic() throws NoSuchBuildTargetException {
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC);
TargetNode<?> orcaRule = createTargetNode(orcaTarget, DEFAULT);
assertTrue(publicTargetNode.isVisibleTo(orcaRule));
assertFalse(orcaRule.isVisibleTo(publicTargetNode));
}
@Test
public void testVisibilityNonPublic() throws NoSuchBuildTargetException {
TargetNode<?> nonPublicTargetNode1 = createTargetNode(nonPublicTarget1, ORCA);
TargetNode<?> nonPublicTargetNode2 = createTargetNode(nonPublicTarget2, ORCA);
TargetNode<?> orcaRule = createTargetNode(orcaTarget, DEFAULT);
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC);
assertTrue(
shouldBeVisibleMessage(nonPublicTargetNode1, orcaTarget),
nonPublicTargetNode1.isVisibleTo(orcaRule));
assertTrue(
shouldBeVisibleMessage(nonPublicTargetNode2, orcaTarget),
nonPublicTargetNode2.isVisibleTo(orcaRule));
assertFalse(orcaRule.isVisibleTo(nonPublicTargetNode1));
assertFalse(orcaRule.isVisibleTo(nonPublicTargetNode2));
assertTrue(publicTargetNode.isVisibleTo(nonPublicTargetNode1));
assertFalse(nonPublicTargetNode1.isVisibleTo(publicTargetNode));
}
@Test
public void testVisibilityNonPublicFailure() throws NoSuchBuildTargetException {
TargetNode<?> nonPublicTargetNode1 = createTargetNode(nonPublicTarget1, ORCA);
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC);
try {
nonPublicTargetNode1.isVisibleToOrThrow(publicTargetNode);
fail("checkVisibility() should throw an exception");
} catch (RuntimeException e) {
assertEquals(
String.format(
"%s depends on %s, which is not visible. More info at:\nhttps://buckbuild.com/concept/visibility.html",
publicTarget, nonPublicTargetNode1.getBuildTarget()),
e.getMessage());
}
}
@Test
public void testVisibilityMix() throws NoSuchBuildTargetException {
TargetNode<?> nonPublicTargetNode1 = createTargetNode(nonPublicTarget1, ORCA);
TargetNode<?> nonPublicTargetNode2 = createTargetNode(nonPublicTarget2, ORCA);
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC);
TargetNode<?> orcaRule = createTargetNode(orcaTarget, DEFAULT);
assertTrue(
shouldBeVisibleMessage(nonPublicTargetNode1, orcaTarget),
nonPublicTargetNode1.isVisibleTo(orcaRule));
assertTrue(
shouldBeVisibleMessage(nonPublicTargetNode2, orcaTarget),
nonPublicTargetNode2.isVisibleTo(orcaRule));
assertTrue(publicTargetNode.isVisibleTo(orcaRule));
assertFalse(orcaRule.isVisibleTo(nonPublicTargetNode1));
assertFalse(orcaRule.isVisibleTo(nonPublicTargetNode2));
assertFalse(orcaRule.isVisibleTo(publicTargetNode));
}
@Test
public void testVisibilityMixFailure() throws NoSuchBuildTargetException {
TargetNode<?> nonPublicTargetNode1 = createTargetNode(nonPublicTarget1, ORCA);
TargetNode<?> nonPublicTargetNode2 = createTargetNode(nonPublicTarget2, SOME_OTHER);
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC);
TargetNode<?> orcaRule = createTargetNode(orcaTarget, DEFAULT);
publicTargetNode.isVisibleToOrThrow(orcaRule);
nonPublicTargetNode1.isVisibleToOrThrow(orcaRule);
try {
nonPublicTargetNode2.isVisibleToOrThrow(orcaRule);
fail("checkVisibility() should throw an exception");
} catch (RuntimeException e) {
assertEquals(
String.format(
"%s depends on %s, which is not visible. More info at:\nhttps://buckbuild.com/concept/visibility.html",
orcaTarget, nonPublicTargetNode2.getBuildTarget()),
e.getMessage());
}
}
@Test
public void testVisibilityForDirectory() throws NoSuchBuildTargetException {
BuildTarget libTarget =
BuildTargetFactory.newInstance(filesystem.getRootPath(), "//lib", "lib");
TargetNode<?> targetInSpecifiedDirectory =
createTargetNode(
BuildTargetFactory.newInstance(filesystem.getRootPath(), "//src/com/facebook", "test"),
DEFAULT);
TargetNode<?> targetUnderSpecifiedDirectory =
createTargetNode(
BuildTargetFactory.newInstance(
filesystem.getRootPath(), "//src/com/facebook/buck", "test"),
DEFAULT);
TargetNode<?> targetInOtherDirectory =
createTargetNode(
BuildTargetFactory.newInstance(filesystem.getRootPath(), "//src/com/instagram", "test"),
DEFAULT);
TargetNode<?> targetInParentDirectory =
createTargetNode(
BuildTargetFactory.newInstance(filesystem.getRootPath(), "//", "test"), DEFAULT);
    // Build rule that's visible to targets in or under directory src/com/facebook
TargetNode<?> directoryTargetNode =
createTargetNode(libTarget, ImmutableList.of("//src/com/facebook/..."));
assertTrue(directoryTargetNode.isVisibleTo(targetInSpecifiedDirectory));
assertTrue(directoryTargetNode.isVisibleTo(targetUnderSpecifiedDirectory));
assertFalse(directoryTargetNode.isVisibleTo(targetInOtherDirectory));
assertFalse(directoryTargetNode.isVisibleTo(targetInParentDirectory));
    // Build rule that's visible to all targets, equivalent to PUBLIC.
    TargetNode<?> publicTargetNode = createTargetNode(libTarget, ImmutableList.of("//..."));
    assertTrue(publicTargetNode.isVisibleTo(targetInSpecifiedDirectory));
    assertTrue(publicTargetNode.isVisibleTo(targetUnderSpecifiedDirectory));
    assertTrue(publicTargetNode.isVisibleTo(targetInOtherDirectory));
    assertTrue(publicTargetNode.isVisibleTo(targetInParentDirectory));
}
@Test
public void testOnlyWithinViewIsVisible() throws NoSuchBuildTargetException {
TargetNode<?> publicTargetNode = createTargetNode(publicTarget, PUBLIC, ORCA);
TargetNode<?> publicOrcaRule = createTargetNode(orcaTarget, PUBLIC, SOME_OTHER);
assertTrue(publicOrcaRule.isVisibleTo(publicTargetNode));
assertFalse(publicTargetNode.isVisibleTo(publicOrcaRule));
}
private String shouldBeVisibleMessage(TargetNode<?> rule, BuildTarget target) {
return String.format(
"%1$s should be visible to %2$s because the visibility list of %1$s contains %2$s",
rule.getBuildTarget(), target);
}
public static class FakeRuleDescription
implements DescriptionWithTargetGraph<FakeRuleDescriptionArg> {
@Override
public Class<FakeRuleDescriptionArg> getConstructorArgType() {
return FakeRuleDescriptionArg.class;
}
@Override
public BuildRule createBuildRule(
BuildRuleCreationContextWithTargetGraph context,
BuildTarget buildTarget,
BuildRuleParams params,
FakeRuleDescriptionArg args) {
return new FakeBuildRule(buildTarget, context.getProjectFilesystem(), params);
}
@BuckStyleImmutable
@Value.Immutable
interface AbstractFakeRuleDescriptionArg extends CommonDescriptionArg {}
}
private static TargetNode<?> createTargetNode(
BuildTarget buildTarget, ImmutableList<String> visibilities)
throws NoSuchBuildTargetException {
return createTargetNode(buildTarget, visibilities, DEFAULT);
}
private static TargetNode<?> createTargetNode(
BuildTarget buildTarget, ImmutableList<String> visibilities, ImmutableList<String> withinView)
throws NoSuchBuildTargetException {
VisibilityPatternParser parser = new VisibilityPatternParser();
CellPathResolver cellNames = TestCellPathResolver.get(filesystem);
FakeRuleDescription description = new FakeRuleDescription();
FakeRuleDescriptionArg arg =
FakeRuleDescriptionArg.builder().setName(buildTarget.getShortName()).build();
return new TargetNodeFactory(new DefaultTypeCoercerFactory())
.createFromObject(
Hashing.sha1().hashString(buildTarget.getFullyQualifiedName(), UTF_8),
description,
arg,
filesystem,
buildTarget,
ImmutableSet.of(),
visibilities
.stream()
.map(s -> parser.parse(cellNames, s))
.collect(ImmutableSet.toImmutableSet()),
withinView
.stream()
.map(s -> parser.parse(cellNames, s))
.collect(ImmutableSet.toImmutableSet()),
createCellRoots(filesystem));
}
}
|
|
package org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.set;
import java.io.StringReader;
import java.util.Collection;
import java.util.concurrent.BlockingQueue;
import org.apache.log4j.Logger;
import org.buddycloud.channelserver.Configuration;
import org.buddycloud.channelserver.channel.ChannelManager;
import org.buddycloud.channelserver.db.ClosableIteratorImpl;
import org.buddycloud.channelserver.db.exception.NodeStoreException;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.PubSubElementProcessorAbstract;
import org.buddycloud.channelserver.pubsub.model.GlobalItemID;
import org.buddycloud.channelserver.pubsub.model.NodeItem;
import org.buddycloud.channelserver.pubsub.model.NodeSubscription;
import org.buddycloud.channelserver.pubsub.model.impl.GlobalItemIDImpl;
import org.buddycloud.channelserver.pubsub.subscription.Subscriptions;
import org.buddycloud.channelserver.utils.XMLConstants;
import org.dom4j.Element;
import org.dom4j.Namespace;
import org.dom4j.dom.DOMElement;
import org.dom4j.io.SAXReader;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketError;
import org.xmpp.resultsetmanagement.ResultSet;
public class ItemDelete extends PubSubElementProcessorAbstract {
private static final Logger LOGGER = Logger.getLogger(ItemDelete.class);
private GlobalItemID itemId;
private NodeItem nodeItem;
private Element parsedPayload;
public ItemDelete(BlockingQueue<Packet> outQueue, ChannelManager channelManager) {
this.setOutQueue(outQueue);
this.setChannelManager(channelManager);
acceptedElementName = "retract";
}
@Override
public void process(Element elm, JID actor, IQ reqIQ, Element rsm) throws InterruptedException, NodeStoreException {
element = elm;
request = reqIQ;
response = IQ.createResultIQ(request);
node = element.attributeValue(XMLConstants.NODE_ATTR);
this.actor = actor;
if (null == this.actor) {
this.actor = request.getFrom();
}
if (!nodePresent()) {
outQueue.put(response);
return;
}
if (!Configuration.getInstance().isLocalNode(node)) {
makeRemoteRequest();
return;
}
try {
if (!checkNodeExists() || !itemIdProvided() || !itemExists() || !validPayload() || !canDelete()) {
outQueue.put(response);
return;
}
deleteItem();
outQueue.put(response);
deleteReplies();
sendNotifications(node, itemId);
return;
} catch (NodeStoreException e) {
LOGGER.error(e);
setErrorCondition(PacketError.Type.wait, PacketError.Condition.internal_server_error);
} catch (NullPointerException e) {
LOGGER.error(e);
setErrorCondition(PacketError.Type.modify, PacketError.Condition.bad_request);
} catch (IllegalArgumentException e) {
LOGGER.error(e);
setErrorCondition(PacketError.Type.modify, PacketError.Condition.bad_request);
}
outQueue.put(response);
}
    private void deleteReplies() throws NodeStoreException {
        // Only top-level items cascade-delete their replies; an item that is
        // itself a reply (non-null in-reply-to) has nothing to cascade here.
        if (null != nodeItem.getInReplyTo()) {
            return;
        }
ClosableIteratorImpl<NodeItem> replies = channelManager.getNodeItemReplies(node, itemId.getItemID(), null, -1);
NodeItem reply = null;
while (replies.hasNext()) {
reply = replies.next();
channelManager.deleteNodeItemById(reply.getNodeId(), reply.getId());
sendNotifications(node, new GlobalItemIDImpl(new JID(this.getServerDomain()), reply.getNodeId(), reply.getId()));
}
}
private void sendNotifications(String node, GlobalItemID itemId) throws NodeStoreException {
try {
String notify =
request.getElement().element(XMLConstants.PUBSUB_ELEM).element(XMLConstants.RETRACT_ELEM).attributeValue(XMLConstants.NOTIFY_ATTR);
if ((notify != null) && (Boolean.FALSE.toString().equals(notify) || notify.equals("0"))) {
return;
}
ResultSet<NodeSubscription> subscriptions = channelManager.getNodeSubscriptionListeners(node);
Message notification = getNotificationMessage(node, itemId);
for (NodeSubscription subscription : subscriptions) {
LOGGER.debug("Subscription [node: " + subscription.getNodeId() + ", listener: " + subscription.getListener() + ", subscription: "
+ subscription.getSubscription() + "]");
if (subscription.getSubscription().equals(Subscriptions.subscribed)) {
notification.setTo(subscription.getListener());
outQueue.put(notification.createCopy());
}
}
Collection<JID> admins = getAdminUsers();
for (JID admin : admins) {
notification.setTo(admin);
outQueue.put(notification.createCopy());
}
} catch (NullPointerException e) {
LOGGER.error(e);
return;
} catch (InterruptedException e) {
LOGGER.error(e);
return;
}
}
private Message getNotificationMessage(String node, GlobalItemID itemId) {
Message notification = new Message();
notification.setType(Message.Type.headline);
notification.getElement().addAttribute("remote-server-discover", "false");
Element event = notification.addChildElement("event", JabberPubsub.NS_PUBSUB_EVENT);
Element items = event.addElement("items");
items.addAttribute("node", node);
Element retract = items.addElement("retract");
retract.addAttribute("id", itemId.getItemID());
return notification;
}
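    // For reference, a sketch of the headline message built above, assuming
    // NS_PUBSUB_EVENT is the standard pubsub#event namespace (the node and item
    // id values are illustrative):
    //
    //   <message type="headline" remote-server-discover="false">
    //     <event xmlns="http://jabber.org/protocol/pubsub#event">
    //       <items node="/user/someone@example.com/posts">
    //         <retract id="some-item-id"/>
    //       </items>
    //     </event>
    //   </message>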
private void deleteItem() throws NodeStoreException {
channelManager.deleteNodeItemById(node, itemId.getItemID());
}
private boolean canDelete() throws NodeStoreException {
if (!userOwnsItem() && !userManagesNode()) {
setErrorCondition(PacketError.Type.auth, PacketError.Condition.forbidden);
return false;
}
return true;
}
private boolean userOwnsItem() {
try {
return parsedPayload.element("author").elementText("name").equals(actor.toBareJID());
} catch (NullPointerException e) {
return false;
}
}
private boolean userManagesNode() throws NodeStoreException {
return channelManager.getNodeMembership(node, actor).getAffiliation().canAuthorize();
}
private boolean validPayload() {
try {
SAXReader xmlReader = new SAXReader();
xmlReader.setMergeAdjacentText(true);
xmlReader.setStringInternEnabled(true);
xmlReader.setStripWhitespaceText(true);
parsedPayload = xmlReader.read(new StringReader(nodeItem.getPayload())).getRootElement();
return true;
} catch (Exception e) {
LOGGER.error(e);
setErrorCondition(PacketError.Type.wait, PacketError.Condition.internal_server_error);
return false;
}
}
private boolean itemExists() throws NodeStoreException {
nodeItem = channelManager.getNodeItem(node, itemId.getItemID());
if (nodeItem != null) {
return true;
}
setErrorCondition(PacketError.Type.cancel, PacketError.Condition.item_not_found);
return false;
}
private boolean itemIdProvided() {
String id =
request.getElement().element(XMLConstants.PUBSUB_ELEM).element(XMLConstants.RETRACT_ELEM).element(XMLConstants.ITEM_ELEM)
.attributeValue(XMLConstants.ID_ATTR);
if ((id != null) && !id.isEmpty()) {
if (GlobalItemIDImpl.isGlobalId(id)) {
itemId = GlobalItemIDImpl.fromBuddycloudString(id);
} else {
itemId = new GlobalItemIDImpl(new JID(this.getServerDomain()), node, id);
}
return true;
}
response.setType(IQ.Type.error);
        Element itemRequired = new DOMElement("item-required", new Namespace("", JabberPubsub.NS_PUBSUB_ERROR));
        Element badRequest = new DOMElement(PacketError.Condition.bad_request.toXMPP(), new Namespace("", JabberPubsub.NS_XMPP_STANZAS));
        Element error = new DOMElement("error");
        error.addAttribute("type", PacketError.Type.modify.toXMPP());
        error.add(badRequest);
        error.add(itemRequired);
response.setChildElement(error);
return false;
}
}
|
|
/**
* Copyright 2010 - 2013 CosmoCode GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.cosmocode.commons.reflect;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ComputationException;
import com.google.common.collect.MapMaker;
import com.google.common.collect.Ordering;
import de.cosmocode.commons.Strings;
import de.cosmocode.commons.validation.Rule;
import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.reflect.Modifier;
import java.util.Comparator;
import java.util.concurrent.ConcurrentMap;
/**
* Static utility class for {@link Class}es, {@link Classpath}s and {@link Packages}.
*
* @since 1.8
* @author Willi Schoenborn
*/
public final class Reflection {
private static final ConcurrentMap<String, Class<?>> CACHE = new MapMaker().
softValues().makeComputingMap(Reflection.forName());
private static final Ordering<Class<?>> ORDER_BY_NAME = Ordering.natural().onResultOf(Reflection.getName());
private static final Ordering<Class<?>> ORDER_BY_HIERARCHY = Reflection.orderByHierarchy(Reflection.orderByName());
private Reflection() {
}
/**
* Returns the Class object associated with the class or interface with the given string name.
* This method does, in contrast to {@link Class#forName(String)}, cache the results.
*
* @since 1.6
* @param name the class name
* @return the loaded class
* @throws ClassNotFoundException if the class does not exist
*/
public static Class<?> forName(String name) throws ClassNotFoundException {
try {
return CACHE.get(name);
} catch (ComputationException e) {
throw new ClassNotFoundException(e.getMessage(), e);
}
}
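    // A minimal usage sketch of the cached lookup above: repeated calls for the
    // same name should hit the soft-value cache instead of going back to the
    // class loader.
    //
    //   Class<?> list = Reflection.forName("java.util.List");   // loads and caches
    //   Class<?> again = Reflection.forName("java.util.List");  // served from CACHE
    //   Reflection.forName("no.such.Clazz");                    // throws ClassNotFoundException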
/**
* Returns a function which loads {@link Class}es by their name using {@link Class#forName(String)}.
*
* <p>
* The returned function will wrap {@link ClassNotFoundException}s inside {@link IllegalArgumentException}s.
* </p>
*
* @since 1.9
* @return a function loading classes
*/
public static Function<String, Class<?>> forName() {
return ForName.INSTANCE;
}
/**
* Returns a function which transforms {@link Class}es into Strings by using
* {@link Class#getName()}.
*
* @since 1.9
* @return a function using getName() to produce Strings
*/
public static Function<Class<?>, String> getName() {
return GetName.INSTANCE;
}
/**
* Returns a function which transforms {@link Class}es into Strings by using
* {@link Class#getSimpleName()}.
*
* @since 1.9
* @return a function using getSimpleName() to produce Strings
*/
public static Function<Class<?>, String> getSimpleName() {
return GetSimpleName.INSTANCE;
}
/**
* Returns a function which transforms {@link Class}es into {@link Class}es by using
* {@link Class#getSuperclass()}.
*
* @since 1.10
* @return a function using getSuperclass() to produce Classes
*/
public static Function<Class<?>, Class<?>> getSuperClass() {
return GetSuperClass.INSTANCE;
}
/**
* Returns a function which transforms {@link Class}es into an array of {@link Class}es by using
* {@link Class#getInterfaces()}.
*
* @since 1.11
* @return a function using getInterfaces() to produce an array of Classes
*/
public static Function<Class<?>, Class<?>[]> getInterfaces() {
return GetInterfaces.INSTANCE;
}
/**
* Returns a function which returns an iterable over all interfaces the given
* input implements either explicitly or implicitly.
*
* <p>
* Note: The iterable provided by the returned function may return
* the same interface twice.
* </p>
*
* @since 1.11
     * @return a function producing an iterable over all interfaces of the given input
*/
public static Function<Class<?>, Iterable<Class<?>>> getAllInterfaces() {
return GetAllInterfaces.INSTANCE;
}
/**
* Returns an iterable over all interfaces the given
* input implements either explicitly or implicitly.
*
* <p>
* Note: The returned iterable may return the same interface twice.
* </p>
*
* @since 1.11
* @param type the type being inspected
* @return an iterable over all interfaces of the given type
* @throws NullPointerException if type is null
*/
public static Iterable<Class<?>> getAllInterfaces(Class<?> type) {
Preconditions.checkNotNull(type, "Type");
return getAllInterfaces().apply(type);
}
/**
     * Returns a function which returns an iterable over all super types the given
* input extends or implements either explicitly or implicitly in level
* order. Super classes are returned before interfaces.
*
* <p>
* Note: The iterable provided by the returned function may return
* the same type multiple times.
* </p>
*
* @since 1.12
     * @return a function producing an iterable over all super types of the given input
*/
public static Function<Class<?>, Iterable<Class<?>>> getAllSuperTypes() {
return GetAllSuperTypes.INSTANCE;
}
/**
* Returns an iterable over all super types the given
* input implements either explicitly or implicitly in level
* order. Super classes are returned before interfaces.
*
* <p>
* Note: The returned iterable may return the same type twice.
* </p>
*
* @since 1.12
* @param type the type being inspected
* @return an iterable over all interfaces of the given type
* @throws NullPointerException if type is null
*/
public static Iterable<Class<?>> getAllSuperTypes(Class<?> type) {
Preconditions.checkNotNull(type, "Type");
return getAllSuperTypes().apply(type);
}
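    // Illustrative sketch of the level-order traversal described above (the exact
    // sequence depends on the GetAllSuperTypes implementation): for
    // java.util.ArrayList the iterable starts with its super class
    // java.util.AbstractList before any of its interfaces such as java.util.List,
    // and, as noted, a type may appear more than once.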
/**
* Returns a function which casts class literals into subclass literals
* using {@link Class#asSubclass(Class)}.
*
* @since 1.8
* @param <T> the generic class type
* @param type the super class type
* @return a function casting class literals to sub class literals
* @throws NullPointerException if type is null
*/
public static <T> Function<Class<?>, Class<? extends T>> asSubclass(Class<T> type) {
return new AsSubClass<T>(type);
}
/**
* Returns a predicate which delegates to {@link Class#isEnum()}.
*
* @since 1.8
* @return a predicate which matches enum types
*/
public static Rule<Class<?>> isEnum() {
return IsEnum.INSTANCE;
}
/**
* Returns a predicate which delegates to {@link Class#isAnnotation()}.
*
* @since 1.8
* @return a predicate which matches annotation types
*/
public static Rule<Class<?>> isAnnotation() {
return IsAnnotation.INSTANCE;
}
/**
* Returns a predicate which delegates to {@link Class#isArray()}.
*
* @since 1.8
* @return a predicate which matches array types
*/
public static Rule<Class<?>> isArray() {
return IsArray.INSTANCE;
}
/**
* Returns a predicate which delegates to {@link Modifier#isAbstract(int)}.
*
* @since 1.8
* @return a predicate which matches abstract classes
*/
public static Rule<Class<?>> isAbstract() {
return IsAbstract.INSTANCE;
}
/**
* Returns a predicate which delegates to {@link Class#isInterface()}.
*
* @since 1.8
* @return a predicate which matches interfaces
*/
public static Rule<Class<?>> isInterface() {
return IsInterface.INSTANCE;
}
/**
     * Returns a predicate which returns true if the given input
     * is neither an interface, an abstract class, an enum, nor an array.
     *
     * @since 1.17
     * @return a predicate which matches concrete classes
*/
public static Rule<Class<?>> isConcreteClass() {
return IsConcreteClass.INSTANCE;
}
/**
* Returns a predicate which matches super types of the specified
* type.
*
* @since 1.8
* @param type the sub type
* @return a predicate which matches super types of the given type
* @throws NullPointerException if type is null
*/
public static Rule<Class<?>> isSupertypeOf(Class<?> type) {
return new IsSuperTypeOf(type);
}
/**
* Returns a predicate which matches sub types of the specified
* type.
*
* @since 1.8
* @param type the super type
* @return a predicate which matches sub types of the given type
* @throws NullPointerException if type is null
*/
public static Rule<Class<?>> isSubtypeOf(Class<?> type) {
return new IsSubTypeOf(type);
}
/**
* Returns a predicate which delegates to {@link Class#isAnnotationPresent(Class)}.
*
* @since 1.8
* @param annotation the annotation
* @return a predicate matching classes annotated with the specified annotation
* @throws NullPointerException if annotation is null
*/
public static Rule<Class<?>> isAnnotationPresent(Class<? extends Annotation> annotation) {
return new IsAnnotationPresent(annotation);
}
/**
     * Returns an {@link Ordering} which uses string comparison of {@link Class#getName()}.
*
* @since 1.9
* @return an ordering for sorting by name
*/
public static Ordering<Class<?>> orderByName() {
return Reflection.ORDER_BY_NAME;
}
/**
     * Returns an {@link Ordering} which uses the relation between classes to compare them.
     * Two classes that are equal according to {@link Object#equals(Object)} are considered
     * equal by the returned comparator. Sub types are considered less than super classes;
     * {@link Double}, {@link Long} and {@link Integer}, for example, are considered less than
     * {@link Number}. Two classes that are not related by inheritance are compared using
     * {@link Reflection#orderByName()}.
*
* @since 1.9
* @return an ordering which sorts classes by hierarchy
*/
public static Ordering<Class<?>> orderByHierarchy() {
return Reflection.ORDER_BY_HIERARCHY;
}
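    // Usage sketch for the hierarchy ordering above, based on the documented
    // contract (sub types sort before super types, unrelated types fall back to
    // the name ordering):
    //
    //   List<Class<?>> sorted = Reflection.orderByHierarchy()
    //       .sortedCopy(Arrays.asList(Number.class, Integer.class, Double.class));
    //   // Integer and Double come before Number; Double precedes Integer by name.
    //   // (Arrays is java.util.Arrays; this is a sketch, not part of this class.)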
/**
     * Returns an {@link Ordering} which uses the relation between classes to compare them.
     * Two classes that are equal according to {@link Object#equals(Object)} are considered
     * equal by the returned comparator. Sub types are considered less than super classes;
     * {@link Double}, {@link Long} and {@link Integer}, for example, are considered less than
     * {@link Number}. Two classes that are not related by inheritance are compared using
     * the given comparator.
*
* @since 1.9
* @param comparator the comparator which is used in case of a tie
* @return an ordering which sorts classes by hierarchy
* @throws NullPointerException if comparator is null
*/
public static Ordering<Class<?>> orderByHierarchy(Comparator<Class<?>> comparator) {
return new HierarchyOrdering(comparator);
}
/**
* Creates a new {@link Classpath} using the classpath property
* of this virtual machine.
*
* @since 1.8
* @return a {@link Classpath} backed by the classpath of this virtual machine
*/
public static Classpath defaultClasspath() {
final String classpath = System.getProperty("java.class.path");
return Reflection.classpathOf(Strings.defaultIfBlank(classpath, ""));
}
/**
* Creates a {@link Classpath} using the specified classpath value.
*
* @since 1.8
* @param classpath the backing classpath value ({@link File#pathSeparator} separated)
* @return a {@link Classpath} backed by the specified classpath
* @throws NullPointerException if classpath is null
*/
public static Classpath classpathOf(String classpath) {
Preconditions.checkNotNull(classpath, "Classpath");
return new DefaultClasspath(classpath);
}
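    // A small usage sketch tying the two factory methods together (the classpath
    // entries are made up for illustration):
    //
    //   Classpath cp = Reflection.defaultClasspath();   // backed by java.class.path
    //   Classpath custom = Reflection.classpathOf("lib/a.jar" + File.pathSeparator + "classes");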
}
|
|
package com.saerasoft.caesium;
import android.app.Activity;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.Objects;
/**
* Created by lymphatus on 08/10/15.
*/
public class ImageCompressAsyncTask extends AsyncTask<Object, Integer, Long> {
private Context mContext;
private NotificationManager notificationManager;
private NotificationCompat.Builder nBuilder;
private Intent killerIntent;
    public static final int COMPRESS_NOTIFICATION_ID = 0;
static {
System.loadLibrary("caesium");
System.loadLibrary("jpeg");
}
public ImageCompressAsyncTask(Context context) {
this.mContext = context;
}
protected Long doInBackground(Object... objs) {
//Parse passed objects
CHeaderCollection mCollection = (CHeaderCollection) objs[0];
SQLiteDatabase db = (SQLiteDatabase) objs[1];
int max_count = (int) objs[2];
//Initialize a global counter
int n = 0;
//Initialize the compressed size counter
long size = 0;
//Initialize the in files size
long inFilesSize = 0;
//Utility variable to keep track of headers index
int currentHeaderIndex = 0;
//Get the starting time; we will use as performance meter and for hitting images
Long startTimestamp = System.currentTimeMillis();
//Get quality and exif from preferences
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mContext);
int quality = Integer.valueOf(prefs.getString(SettingsActivity.KEY_COMPRESSION_LEVEL, "65"));
int exif = (prefs.getBoolean(SettingsActivity.KEY_COMPRESSION_EXIF, true)) ? 1 : 0;
//Scan each header
for (int i = 0; i < mCollection.getHeaders().size(); i++) {
CHeader header = mCollection.getHeaders().get(i);
if (header.isSelected()) {
int header_count = header.getCount();
//And each image
for (int j = 0; j < header_count; j++) {
CImage image = header.getImages().get(j);
long inSize = image.getSize();
                    //Running total of the input file sizes processed so far
//TODO BUG This does not give you 0 at the end
inFilesSize += inSize;
//Check for possible memory leaks
//TODO Not necessary anymore
if (fitsInMemory(image.getPath())) {
//Keep trace of the input file size
//Start the actual compression process
Log.d("CompressTask", "PROCESSING: " + image.getPath());
Log.d("CompressTask", "In size: " + image.getSize());
//If it's a JPEG, go for the turbo lib
try {
switch (image.getMimeType()) {
case "image/jpeg":
//TODO We get crashes using the lib with standard compression
//Use the Android lib instead for now
if (quality == 0) {
CompressRoutine(image.getPath(), exif, quality);
} else {
Bitmap bitmap = BitmapFactory.decodeFile(image.getPath());
FileOutputStream fos = new FileOutputStream(image.getPath(), false);
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, fos);
bitmap.recycle();
}
//CompressRoutine(image.getPath(), exif, quality);
break;
case "image/png":
//PNG section
Bitmap bitmap = BitmapFactory.decodeFile(image.getPath());
FileOutputStream fos = new FileOutputStream(image.getPath(), false);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
bitmap.recycle();
break;
default:
//TODO Webp?
Log.e("CompressTask", "Cannot compress this kind of image.");
break;
}
} catch (FileNotFoundException e) {
Log.e("CompressTask", "File not found.");
} catch (NullPointerException e) {
Log.e("CompressTask", "Null pointer");
} catch (OutOfMemoryError e) {
Log.e("CompressTask", "OutOfMemory");
}
//Get the out file for its stats
File outFile = new File(image.getPath());
if (outFile.length() != 0) {
size += outFile.length();
} else {
size += inSize;
}
Log.d("CompressTask", "Out size: " + new File(image.getPath()).length());
/*
*
* TODO Insert the image into the database, because we are compressing it
* At this stage, we already have a filtered list, but we need to check if we
* compressed an edited image, so, instead of adding, we need to update the entry
*
*/
//DatabaseHelper.insertImageIntoDatabase(db, image);
//DatabaseHelper.hitImageRow(db, image.getPath(), startTimestamp);
DatabaseHelper.databaseRoutine(db, image, true);
publishProgress(n++, max_count, i);
// Escape early if cancel() is called
if (isCancelled()) {
return size;
}
}
}
}
}
return size;
}
@Override
protected void onPreExecute() {
//Build a killer service to destroy the notification if the app is swiped out
killerIntent = new Intent(mContext, NotificationKillerService.class);
mContext.startService(killerIntent);
//Click intent
PendingIntent contentIntent = PendingIntent.getActivity(mContext, 0,
new Intent(mContext, MainActivity.class), PendingIntent.FLAG_UPDATE_CURRENT);
//Build up the notification
notificationManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
nBuilder = new NotificationCompat.Builder(mContext);
nBuilder.setContentTitle(mContext.getString(R.string.notification_compress_title))
.setContentText(mContext.getString(R.string.notification_compress_description))
.setSmallIcon(R.mipmap.ic_launcher)
.setOngoing(true)
.setAutoCancel(true)
.setContentIntent(contentIntent);
//Change the button icon
MainActivityFragment.onPreCompress();
}
@Override
protected void onProgressUpdate(Integer... progress) {
// Sets the progress indicator to a max value, the
// current completion percentage, and "determinate"
// state
nBuilder.setProgress(progress[1], progress[0], false);
// Displays the progress bar for the first time.
        notificationManager.notify(COMPRESS_NOTIFICATION_ID, nBuilder.build());
MainActivityFragment.onCompressProgress(
progress[0], //Current progress
progress[1], //Max value
progress[2]); //If the header is still on progress
}
@Override
protected void onPostExecute(Long result) {
//Kill the service
mContext.stopService(killerIntent);
// When the loop is finished, updates the notification
nBuilder.setContentText(mContext.getString(R.string.notification_compress_description_finished))
// Removes the progress bar
.setProgress(0, 0, false);
notificationManager.notify(COMPRESS_NOTIFICATION_ID, nBuilder.build());
//Update UI after compression
MainActivityFragment.onPostCompress(result);
//Kill the notification
notificationManager.cancel(COMPRESS_NOTIFICATION_ID);
}
@Override
protected void onCancelled(Long size) {
//Kill the notification
notificationManager.cancel(COMPRESS_NOTIFICATION_ID);
MainActivityFragment.onPostCompress(size);
}
//JNI Methods
public boolean fitsInMemory(String path) {
//Setup the options for image reading
BitmapFactory.Options options = new BitmapFactory.Options();
//Do not decode the entire image, just what we need
options.inJustDecodeBounds = true;
        //Try to decode the file
BitmapFactory.decodeFile(path, options);
//Set all the remaining attributes
//TODO Build a method to know if the image is too big
int width = options.outWidth;
int height = options.outHeight;
return true;
}
public native void CompressRoutine(String in, int exif, int quality);
}
|
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.ToolExtensionPoints;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInspection.GlobalInspectionTool;
import com.intellij.codeInspection.InspectionEP;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspection;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspectionBase;
import com.intellij.codeInspection.deadCode.UnusedDeclarationPresentation;
import com.intellij.codeInspection.ex.*;
import com.intellij.codeInspection.reference.EntryPoint;
import com.intellij.codeInspection.reference.RefElement;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.roots.ContentEntry;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.testFramework.fixtures.DefaultLightProjectDescriptor;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import com.intellij.testFramework.fixtures.impl.GlobalInspectionContextForTests;
import com.intellij.util.containers.ContainerUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author max
*/
@SuppressWarnings("HardCodedStringLiteral")
public abstract class InspectionTestCase extends LightCodeInsightFixtureTestCase {
private static final boolean MIGRATE_TEST = false;
private static final DefaultLightProjectDescriptor ourDescriptor = new DefaultLightProjectDescriptor() {
@Override
public void configureModule(@NotNull Module module, @NotNull ModifiableRootModel model, @NotNull ContentEntry contentEntry) {
super.configureModule(module, model, contentEntry);
contentEntry.addSourceFolder(contentEntry.getUrl() + "/ext_src", false);
contentEntry.addSourceFolder(contentEntry.getUrl() + "/test_src", true);
}
};
private EntryPoint myUnusedCodeExtension;
private VirtualFile ext_src;
private LightTestMigration myMigration;
public static GlobalInspectionToolWrapper getUnusedDeclarationWrapper() {
InspectionEP ep = new InspectionEP();
ep.presentation = UnusedDeclarationPresentation.class.getName();
ep.implementationClass = UnusedDeclarationInspection.class.getName();
ep.shortName = UnusedDeclarationInspectionBase.SHORT_NAME;
ep.displayName = UnusedDeclarationInspectionBase.DISPLAY_NAME;
return new GlobalInspectionToolWrapper(ep);
}
public InspectionManagerEx getManager() {
return (InspectionManagerEx)InspectionManager.getInstance(getProject());
}
public void doTest(@NonNls @NotNull String folderName, @NotNull LocalInspectionTool tool) {
doTest(folderName, new LocalInspectionToolWrapper(tool));
}
public void doTest(@NonNls @NotNull String folderName, @NotNull GlobalInspectionTool tool) {
doTest(folderName, new GlobalInspectionToolWrapper(tool));
}
public void doTest(@NonNls @NotNull String folderName, @NotNull GlobalInspectionTool tool, boolean checkRange) {
doTest(folderName, new GlobalInspectionToolWrapper(tool), checkRange);
}
public void doTest(@NonNls @NotNull String folderName, @NotNull GlobalInspectionTool tool, boolean checkRange, boolean runDeadCodeFirst) {
doTest(folderName, new GlobalInspectionToolWrapper(tool), checkRange, runDeadCodeFirst);
}
public void doTest(@NonNls @NotNull String folderName, @NotNull InspectionToolWrapper tool) {
doTest(folderName, tool, false);
}
public void doTest(@NonNls @NotNull String folderName,
@NotNull InspectionToolWrapper tool,
boolean checkRange) {
doTest(folderName, tool, checkRange, false);
}
public void doTest(@NonNls @NotNull String folderName,
@NotNull InspectionToolWrapper toolWrapper,
boolean checkRange,
boolean runDeadCodeFirst,
@NotNull InspectionToolWrapper... additional) {
final String testDir = getTestDataPath() + "/" + folderName;
final List<InspectionToolWrapper<?, ?>> tools = getTools(runDeadCodeFirst, toolWrapper, additional);
GlobalInspectionContextImpl context = runTool(folderName, toolWrapper, tools);
InspectionTestUtil.compareToolResults(context, checkRange, testDir, ContainerUtil.append(Collections.singletonList(toolWrapper), additional));
if (MIGRATE_TEST) {
myMigration = new LightTestMigration(getTestName(false), getClass(), testDir, tools);
}
}
protected GlobalInspectionContextImpl runTool(@NotNull final String testName,
@NotNull InspectionToolWrapper toolWrapper,
List<? extends InspectionToolWrapper<?, ?>> tools) {
VirtualFile projectDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(getTestDataPath(), testName));
assertNotNull(projectDir);
VirtualFile srcDir;
if (projectDir.findChild("src") != null) {
srcDir = myFixture.copyDirectoryToProject(testName + "/src", "");
}
else {
      srcDir = myFixture.copyDirectoryToProject(testName, "");
}
if (projectDir.findChild("ext_src") != null) {
ext_src = myFixture.copyDirectoryToProject(testName + "/ext_src", "ext_src");
}
if (projectDir.findChild("test_src") != null) {
myFixture.copyDirectoryToProject(testName + "/test_src", "test_src");
}
AnalysisScope scope = createAnalysisScope(srcDir);
GlobalInspectionContextForTests globalContext = InspectionsKt.createGlobalContextForTool(scope, getProject(), tools);
InspectionTestUtil.runTool(toolWrapper, scope, globalContext);
return globalContext;
}
@NotNull
private static List<InspectionToolWrapper<?, ?>> getTools(boolean runDeadCodeFirst,
@NotNull InspectionToolWrapper toolWrapper,
@NotNull InspectionToolWrapper[] additional) {
List<InspectionToolWrapper<?, ?>> toolWrappers = new ArrayList<>();
if (runDeadCodeFirst) {
toolWrappers.add(getUnusedDeclarationWrapper());
}
toolWrappers.add(toolWrapper);
ContainerUtil.addAll(toolWrappers, additional);
return toolWrappers;
}
@NotNull
protected AnalysisScope createAnalysisScope(VirtualFile sourceDir) {
PsiManager psiManager = PsiManager.getInstance(getProject());
return new AnalysisScope(psiManager.findDirectory(sourceDir));
}
@NotNull
@Override
protected LightProjectDescriptor getProjectDescriptor() {
return ourDescriptor;
}
@Override
protected void setUp() throws Exception {
super.setUp();
myUnusedCodeExtension = new EntryPoint() {
@NotNull
@Override
public String getDisplayName() {
return "duh";
}
@Override
public boolean isEntryPoint(@NotNull RefElement refElement, @NotNull PsiElement psiElement) {
return isEntryPoint(psiElement);
}
@Override
public boolean isEntryPoint(@NotNull PsiElement psiElement) {
return ext_src != null && VfsUtilCore.isAncestor(ext_src, PsiUtilCore.getVirtualFile(psiElement), false);
}
@Override
public boolean isSelected() {
return true;
}
@Override
public void setSelected(boolean selected) {
}
@Override
public void readExternal(Element element) {
}
@Override
public void writeExternal(Element element) {
}
};
Extensions.getRootArea().<EntryPoint>getExtensionPoint(ToolExtensionPoints.DEAD_CODE_TOOL)
.registerExtension(myUnusedCodeExtension, getTestRootDisposable());
}
@Override
protected void tearDown() throws Exception {
try {
myUnusedCodeExtension = null;
ext_src = null;
}
catch (Throwable e) {
addSuppressedException(e);
}
finally {
super.tearDown();
}
if (myMigration != null) {
myMigration.tryMigrate();
}
}
@Override
@NonNls
protected String getTestDataPath() {
return PathManagerEx.getTestDataPath() + "/inspection/";
}
}
|
|
package com.devsaki.redsaki.adapter;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.res.ColorStateList;
import android.net.Uri;
import android.os.AsyncTask;
import android.support.customtabs.CustomTabsIntent;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.RecyclerView;
import android.text.Html;
import android.text.format.DateUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.URLUtil;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.devsaki.redsaki.R;
import com.devsaki.redsaki.data.PostColumns;
import com.devsaki.redsaki.data.RedSakiProvider;
import com.devsaki.redsaki.data.SubredditColumns;
import com.devsaki.redsaki.dto.PostDTO;
import com.devsaki.redsaki.util.CommonMethods;
import com.devsaki.redsaki.util.HttpCallUtil;
import org.json.JSONException;
import java.io.IOException;
import java.util.List;
/**
* Created by DevSaki on 16/10/2016.
*/
public class PostAdapter extends RecyclerView.Adapter<PostAdapter.ViewHolder> {
private List<PostDTO> items;
private Context context;
public PostAdapter(List<PostDTO> items, Context context) {
this.items = items;
this.context = context;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View v = LayoutInflater.from(parent.getContext()).inflate(R.layout.card_item_post, parent, false);
return new ViewHolder(v);
}
@Override
public void onBindViewHolder(final ViewHolder holder, int position) {
final PostDTO item = items.get(position);
String title = "<strong><font color=\"#006aba\">@title</font></strong> <small>(@domain)</small>".replace("@title", item.getTitle()).replace("@domain", item.getDomain());
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
holder.tvTitle.setText(Html.fromHtml(title, Html.FROM_HTML_MODE_LEGACY));
} else {
holder.tvTitle.setText(Html.fromHtml(title));
}
if (item.getThumbnail() != null && URLUtil.isValidUrl(item.getThumbnail())) {
Glide.with(context).load(item.getThumbnail()).into(holder.ivImage);
holder.ivImage.setVisibility(View.VISIBLE);
} else
holder.ivImage.setVisibility(View.GONE);
        CharSequence time = DateUtils.getRelativeTimeSpanString(item.getCreated() * 1000L, System.currentTimeMillis(), DateUtils.MINUTE_IN_MILLIS);
String data = "<small><font color=\"#808080\">submitted <b>@time</b> by <b>@author</b> to <b>@subreddit</b></font></small>"
.replace("@author", item.getAuthor()).replace("@time", time).replace("@subreddit", item.getSubreddit());
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
holder.tvData.setText(Html.fromHtml(data, Html.FROM_HTML_MODE_LEGACY));
} else {
holder.tvData.setText(Html.fromHtml(data));
}
int color = ContextCompat.getColor(context, R.color.colorAccent);
if (item.isSaved()) {
holder.btnSave.setColorFilter(color);
}
if (item.isUp()) {
holder.btnUp.setColorFilter(color);
}
if (item.isDown()) {
holder.btnDown.setColorFilter(color);
}
holder.llContent.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
CustomTabsIntent customTabsIntent = builder.build();
customTabsIntent.launchUrl((Activity) context, Uri.parse(item.getUrl()));
}
});
holder.btnShare.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent sendIntent = new Intent();
sendIntent.setAction(Intent.ACTION_SEND);
sendIntent.putExtra(Intent.EXTRA_SUBJECT, item.getTitle());
sendIntent.putExtra(Intent.EXTRA_TEXT, item.getUrl());
sendIntent.setType("text/plain");
context.startActivity(Intent.createChooser(sendIntent, "Share link!"));
}
});
holder.btnComments.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
CustomTabsIntent customTabsIntent = builder.build();
customTabsIntent.launchUrl((Activity) context, Uri.parse("https://www.reddit.com" + item.getPermanlink()));
}
});
holder.btnSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (CommonMethods.checkLogin(context)) {
new PostAsyncTask((Activity) context, item, PostAsyncTask.Action.SAVE, holder.btnSave, null).execute();
}
}
});
holder.btnUp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (CommonMethods.checkLogin(context)) {
new PostAsyncTask((Activity) context, item, PostAsyncTask.Action.UP, holder.btnUp, holder.btnDown).execute();
}
}
});
holder.btnDown.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (CommonMethods.checkLogin(context)) {
new PostAsyncTask((Activity) context, item, PostAsyncTask.Action.DOWN, holder.btnDown, holder.btnUp).execute();
}
}
});
holder.itemView.setTag(item);
}
@Override
public int getItemCount() {
return items.size();
}
public static class ViewHolder extends RecyclerView.ViewHolder {
public ImageView ivImage;
public TextView tvTitle;
public TextView tvData;
public ImageButton btnUp;
public ImageButton btnDown;
public ImageButton btnShare;
public ImageButton btnComments;
public ImageButton btnSave;
public LinearLayout llContent;
public ViewHolder(View itemView) {
super(itemView);
ivImage = (ImageView) itemView.findViewById(R.id.ivImage);
tvTitle = (TextView) itemView.findViewById(R.id.tvTitle);
tvData = (TextView) itemView.findViewById(R.id.tvData);
btnUp = (ImageButton) itemView.findViewById(R.id.btnUp);
btnDown = (ImageButton) itemView.findViewById(R.id.btnDown);
btnShare = (ImageButton) itemView.findViewById(R.id.btnShare);
btnComments = (ImageButton) itemView.findViewById(R.id.btnComments);
btnSave = (ImageButton) itemView.findViewById(R.id.btnSave);
llContent = (LinearLayout) itemView.findViewById(R.id.llContent);
}
}
public static class PostAsyncTask extends AsyncTask<Void, Void, Boolean> {
private Activity activity;
private PostDTO item;
private Action action;
private ImageButton imageButton;
private ImageButton imageButton2;
enum Action {
SAVE, UP, DOWN
}
public PostAsyncTask(Activity activity, PostDTO item, Action action, ImageButton imageButton, ImageButton imageButton2) {
this.activity = activity;
this.item = item;
this.action = action;
this.imageButton = imageButton;
this.imageButton2 = imageButton2;
}
@Override
protected Boolean doInBackground(Void... params) {
try {
if (action == Action.SAVE) {
HttpCallUtil.savedPost(activity, item.getId(), !item.isSaved());
item.setSaved(!item.isSaved());
return item.isSaved();
} else if (action == Action.UP) {
HttpCallUtil.votePost(activity, item.getId(), item.isUp() ? 0 : 1);
item.setUp(!item.isUp());
item.setDown(false);
return item.isUp();
} else if (action == Action.DOWN) {
HttpCallUtil.votePost(activity, item.getId(), item.isDown() ? 0 : -1);
item.setUp(false);
item.setDown(!item.isDown());
return item.isDown();
}
} catch (IOException | JSONException e) {
Log.e("PostAdapter", "btnSave", e);
}
return null;
}
@Override
protected void onPostExecute(Boolean aBoolean) {
            if (aBoolean != null) {
                int color = ContextCompat.getColor(activity, R.color.colorAccent);
                if (aBoolean) {
                    imageButton.setColorFilter(color);
                    if (action == Action.DOWN || action == Action.UP) {
                        imageButton2.setColorFilter(null);
                    }
                } else {
                    imageButton.setColorFilter(null);
                }
                ContentValues contentValues = new ContentValues();
                contentValues.put(PostColumns.UP, item.isUp() ? 1 : 0);
                contentValues.put(PostColumns.DOWN, item.isDown() ? 1 : 0);
                contentValues.put(PostColumns.SAVED, item.isSaved() ? 1 : 0);
activity.getContentResolver().update(RedSakiProvider.Posts.CONTENT_URI, contentValues, "ID=?", new String[]{item.getId()});
}
}
}
}
|
|
package com.platypii.baseline.lasers.rangefinder;
import com.platypii.baseline.util.Analytics;
import com.platypii.baseline.util.Exceptions;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothProfile;
import android.bluetooth.le.BluetoothLeScanner;
import android.bluetooth.le.ScanCallback;
import android.bluetooth.le.ScanFilter;
import android.bluetooth.le.ScanRecord;
import android.bluetooth.le.ScanResult;
import android.bluetooth.le.ScanSettings;
import android.content.Context;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.util.Collections;
import java.util.List;
import static com.platypii.baseline.bluetooth.BluetoothState.BT_CONNECTED;
import static com.platypii.baseline.bluetooth.BluetoothState.BT_CONNECTING;
import static com.platypii.baseline.bluetooth.BluetoothState.BT_STARTING;
import static com.platypii.baseline.bluetooth.BluetoothState.BT_STOPPING;
/**
* Thread that reads from bluetooth laser rangefinder.
* Laser measurements are emitted as EventBus messages.
*/
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
class RangefinderRunnable implements Runnable {
private static final String TAG = "RangefinderRunnable";
@NonNull
private final RangefinderService service;
@NonNull
private final Context context;
@NonNull
private final BluetoothAdapter bluetoothAdapter;
@Nullable
private BluetoothGatt bluetoothGatt;
@Nullable
private BluetoothLeScanner bluetoothScanner;
@Nullable
private ScanCallback scanCallback;
@Nullable
private RangefinderProtocol protocol;
RangefinderRunnable(@NonNull RangefinderService service, @NonNull Context context, @NonNull BluetoothAdapter bluetoothAdapter) {
this.service = service;
this.context = context;
this.bluetoothAdapter = bluetoothAdapter;
}
/**
* Scan for bluetooth LE devices that look like a rangefinder
*/
@Override
public void run() {
Log.i(TAG, "Rangefinder bluetooth thread starting");
if (!bluetoothAdapter.isEnabled()) {
Log.e(TAG, "Bluetooth is not enabled");
return;
}
// Scan for rangefinders
Log.i(TAG, "Scanning for rangefinder");
service.setState(BT_STARTING);
scan();
// TODO: this whole run() is fast, probably shouldn't even be a Runnable
Log.i(TAG, "RangefinderRunnable finished");
}
private void scan() {
bluetoothScanner = bluetoothAdapter.getBluetoothLeScanner();
if (bluetoothScanner == null) {
Log.e(TAG, "Failed to get bluetooth LE scanner");
return;
}
final ScanFilter scanFilter = new ScanFilter.Builder().build();
final List<ScanFilter> scanFilters = Collections.singletonList(scanFilter);
final ScanSettings scanSettings = new ScanSettings.Builder().build();
scanCallback = new ScanCallback() {
@Override
public void onScanResult(int callbackType, @NonNull ScanResult result) {
super.onScanResult(callbackType, result);
if (service.getState() == BT_STARTING) {
final BluetoothDevice device = result.getDevice();
final ScanRecord record = result.getScanRecord();
if (ATNProtocol.isATN(device)) {
Log.i(TAG, "ATN rangefinder found, connecting to: " + device.getName());
connect(device);
protocol = new ATNProtocol(bluetoothGatt);
} else if (UineyeProtocol.isUineye(device, record)) {
Log.i(TAG, "Uineye rangefinder found, connecting to: " + device.getName());
connect(device);
protocol = new UineyeProtocol(bluetoothGatt);
} else if (SigSauerProtocol.isSigSauer(device, record)) {
Log.i(TAG, "SigSauer rangefinder found, connecting to: " + device.getName());
connect(device);
protocol = new SigSauerProtocol(bluetoothGatt);
}
}
}
};
bluetoothScanner.startScan(scanFilters, scanSettings, scanCallback);
}
private void connect(@NonNull BluetoothDevice device) {
stopScan();
service.setState(BT_CONNECTING);
// Connect to device
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { // minsdk 23
bluetoothGatt = device.connectGatt(context, true, gattCallback, BluetoothDevice.TRANSPORT_LE);
} else {
bluetoothGatt = device.connectGatt(context, true, gattCallback);
}
// Log event
final Bundle bundle = new Bundle();
bundle.putString("device_name", device.getName());
Analytics.logEvent(context, "rangefinder_found", bundle);
}
private void stopScan() {
if (service.getState() != BT_STARTING) {
Exceptions.report(new IllegalStateException("Scanner shouldn't exist in state " + service.getState()));
}
// Stop scanning
if (bluetoothScanner != null) {
bluetoothScanner.stopScan(scanCallback);
}
}
@NonNull
private final BluetoothGattCallback gattCallback = new BluetoothGattCallback() {
@Override
public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
if (status == BluetoothGatt.GATT_SUCCESS) {
if (newState == BluetoothProfile.STATE_CONNECTED) {
Log.i(TAG, "Rangefinder connected");
// TODO: If we have connected to a device before, skip discover services and connect directly.
                        // Use the gatt instance handed to the callback; the field may not be assigned yet.
                        gatt.discoverServices();
service.setState(BT_CONNECTED);
} else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
gatt.close();
disconnected();
} else {
// Connecting or disconnecting state
Log.i(TAG, "Rangefinder state " + newState);
}
} else {
gatt.close();
if (newState == BluetoothProfile.STATE_DISCONNECTED) {
Log.i(TAG, "Rangefinder remote disconnect");
} else {
Log.e(TAG, "Bluetooth connection state error " + status + " " + newState);
}
}
}
@Override
public void onServicesDiscovered(BluetoothGatt gatt, int status) {
if (status == BluetoothGatt.GATT_SUCCESS) {
Log.i(TAG, "Rangefinder bluetooth services discovered");
protocol.onServicesDiscovered();
} else {
Log.i(TAG, "Rangefinder service discovery failed");
}
}
@Override
public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic ch) {
if (ch.getUuid().equals(protocol.getCharacteristic())) {
protocol.processBytes(ch.getValue());
} else {
Log.i(TAG, "Rangefinder onCharacteristicChanged " + ch);
}
}
};
private void disconnected() {
Log.i(TAG, "Rangefinder disconnected");
service.setState(BT_CONNECTING);
// TODO: Scan? Or wait for auto-connect?
}
void stop() {
// Close bluetooth GATT connection
if (bluetoothGatt != null) {
bluetoothGatt.close();
bluetoothGatt = null;
}
// Stop scanning
if (service.getState() == BT_STARTING) {
stopScan();
}
service.setState(BT_STOPPING);
}
}
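// Editor's note: a standalone sketch (not part of the original file) of the first-match
// protocol dispatch performed in onScanResult() above. The detector names and match rules
// below are invented for illustration only; the real checks live in ATNProtocol.isATN(),
// UineyeProtocol.isUineye() and SigSauerProtocol.isSigSauer().
class ProtocolDispatchSketch {
    interface Detector {
        boolean matches(String advertisedName);
        String protocolName();
    }
    public static void main(String[] args) {
        java.util.List<Detector> detectors = java.util.Arrays.asList(
                new Detector() {
                    public boolean matches(String name) { return name.startsWith("ATN"); }    // hypothetical rule
                    public String protocolName() { return "ATNProtocol"; }
                },
                new Detector() {
                    public boolean matches(String name) { return name.startsWith("Uineye"); } // hypothetical rule
                    public String protocolName() { return "UineyeProtocol"; }
                });
        String advertised = "ATN-Example-Device"; // hypothetical advertised device name
        for (Detector d : detectors) {
            if (d.matches(advertised)) {
                // First matching vendor wins, mirroring the if/else-if chain in onScanResult().
                System.out.println("Would connect using " + d.protocolName());
                break;
            }
        }
    }
}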
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.commons;
import java.util.Iterator;
import java.util.NoSuchElementException;
import javax.annotation.Nonnull;
/**
* Utility methods to parse a path.
* <p/>
* Each method validates the input, except if the system property
* {packageName}.SKIP_VALIDATION is set, in which case only minimal validation
* takes place within these methods, so when the parameter is an illegal path,
* the result is undefined.
*/
public class PathUtils {
private PathUtils() {
// utility class
}
/**
* Whether the path is the root path ("/").
*
* @param path the path
* @return whether this is the root
*/
public static boolean denotesRoot(String path) {
assert isValid(path);
return denotesRootPath(path);
}
private static boolean denotesRootPath(String path) {
return "/".equals(path);
}
/**
* @param element The path segment to check for being the current element
* @return {@code true} if the specified element equals "."; {@code false} otherwise.
*/
public static boolean denotesCurrent(String element) {
return ".".equals(element);
}
/**
* @param element The path segment to check for being the parent element
* @return {@code true} if the specified element equals ".."; {@code false} otherwise.
*/
public static boolean denotesParent(String element) {
return "..".equals(element);
}
/**
* Whether the path is absolute (starts with a slash) or not.
*
* @param path the path
* @return true if it starts with a slash
*/
public static boolean isAbsolute(String path) {
assert isValid(path);
return isAbsolutePath(path);
}
private static boolean isAbsolutePath(String path) {
return !path.isEmpty() && path.charAt(0) == '/';
}
/**
* Get the parent of a path. The parent of the root path ("/") is the root
* path.
*
* @param path the path
* @return the parent path
*/
@Nonnull
public static String getParentPath(String path) {
return getAncestorPath(path, 1);
}
/**
* Get the nth ancestor of a path. The 1st ancestor is the parent path,
* 2nd ancestor the grandparent path, and so on...
* <p/>
* If nth <= 0, the path argument is returned as is.
*
* @param path the path
* @param nth  indicates the ancestor level for which the path should be calculated
* @return the ancestor path
*/
@Nonnull
public static String getAncestorPath(String path, int nth) {
assert isValid(path);
if (path.isEmpty() || denotesRootPath(path)
|| nth <= 0) {
return path;
}
int end = path.length() - 1;
int pos = -1;
while (nth-- > 0) {
pos = path.lastIndexOf('/', end);
if (pos > 0) {
end = pos - 1;
} else if (pos == 0) {
return "/";
} else {
return "";
}
}
return path.substring(0, pos);
}
/**
* Get the last element of the (absolute or relative) path. The name of the
* root node ("/") and the name of the empty path ("") is the empty string ("").
*
* @param path the complete path
* @return the last element
*/
@Nonnull
public static String getName(String path) {
assert isValid(path);
if (path.isEmpty() || denotesRootPath(path)) {
return "";
}
int end = path.length() - 1;
int pos = path.lastIndexOf('/', end);
if (pos != -1) {
return path.substring(pos + 1, end + 1);
}
return path;
}
/**
* Calculate the number of elements in the path. The root path has zero
* elements.
*
* @param path the path
* @return the number of elements
*/
public static int getDepth(String path) {
assert isValid(path);
int count = 1, i = 0;
if (isAbsolutePath(path)) {
if (denotesRootPath(path)) {
return 0;
}
i++;
}
while (true) {
i = path.indexOf('/', i) + 1;
if (i == 0) {
return count;
}
count++;
}
}
/**
* Returns an {@code Iterable} for the path elements. The root path ("/") and the
* empty path ("") have zero elements.
*
* @param path the path
* @return an Iterable for the path elements
*/
@Nonnull
public static Iterable<String> elements(final String path) {
assert isValid(path);
final Iterator<String> it = new Iterator<String>() {
int pos = PathUtils.isAbsolute(path) ? 1 : 0;
String next;
@Override
public boolean hasNext() {
if (next == null) {
if (pos >= path.length()) {
return false;
}
int i = path.indexOf('/', pos);
if (i < 0) {
next = path.substring(pos);
pos = path.length();
} else {
next = path.substring(pos, i);
pos = i + 1;
}
}
return true;
}
@Override
public String next() {
if (hasNext()) {
String next = this.next;
this.next = null;
return next;
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
};
return new Iterable<String>() {
@Override
public Iterator<String> iterator() {
return it;
}
};
}
/**
* Concatenate path elements.
*
* @param parentPath the parent path
* @param relativePaths the relative path elements to add
* @return the concatenated path
*/
@Nonnull
public static String concat(String parentPath, String... relativePaths) {
assert isValid(parentPath);
int parentLen = parentPath.length();
int size = relativePaths.length;
StringBuilder buff = new StringBuilder(parentLen + size * 5);
buff.append(parentPath);
boolean needSlash = parentLen > 0 && !denotesRootPath(parentPath);
for (String s : relativePaths) {
assert isValid(s);
if (isAbsolutePath(s)) {
throw new IllegalArgumentException("Cannot append absolute path " + s);
}
if (!s.isEmpty()) {
if (needSlash) {
buff.append('/');
}
buff.append(s);
needSlash = true;
}
}
return buff.toString();
}
/**
* Concatenate path elements.
*
* @param parentPath the parent path
* @param subPath the subPath path to add
* @return the concatenated path
*/
@Nonnull
public static String concat(String parentPath, String subPath) {
assert isValid(parentPath);
assert isValid(subPath);
// special cases
if (parentPath.isEmpty()) {
return subPath;
} else if (subPath.isEmpty()) {
return parentPath;
} else if (isAbsolutePath(subPath)) {
throw new IllegalArgumentException("Cannot append absolute path " + subPath);
}
StringBuilder buff = new StringBuilder(parentPath);
if (!denotesRootPath(parentPath)) {
buff.append('/');
}
buff.append(subPath);
return buff.toString();
}
/**
* Check if a path is a (direct or indirect) ancestor of another path.
*
* @param ancestor the ancestor path
* @param path the potential offspring path
* @return true if the path is an offspring of the ancestor
*/
public static boolean isAncestor(String ancestor, String path) {
assert isValid(ancestor);
assert isValid(path);
if (ancestor.isEmpty() || path.isEmpty()) {
return false;
}
if (denotesRoot(ancestor)) {
if (denotesRoot(path)) {
return false;
}
} else {
ancestor += "/";
}
return path.startsWith(ancestor);
}
/**
* Relativize a path wrt. a parent path such that
* {@code relativize(parentPath, concat(parentPath, path)) == path}
* holds.
*
* @param parentPath the parent path
* @param path path to relativize
* @return relativized path
*/
@Nonnull
public static String relativize(String parentPath, String path) {
assert isValid(parentPath);
assert isValid(path);
if (parentPath.equals(path)) {
return "";
}
String prefix = denotesRootPath(parentPath)
? parentPath
: parentPath + '/';
if (path.startsWith(prefix)) {
return path.substring(prefix.length());
}
throw new IllegalArgumentException("Cannot relativize " + path + " wrt. " + parentPath);
}
/**
* Get the index of the next slash.
*
* @param path the path
* @param index the starting index
* @return the index of the next slash (possibly the starting index), or -1
* if not found
*/
public static int getNextSlash(String path, int index) {
assert isValid(path);
return path.indexOf('/', index);
}
/**
* Check if the path is valid, and throw an IllegalArgumentException if not.
* A valid path is absolute (starts with a '/') or relative (doesn't start
* with '/'), and contains zero or more elements. A path may not end with
* '/', except for the root path. Each element must be at least one
* character long.
*
* @param path the path
*/
public static void validate(String path) {
if (path.isEmpty() || denotesRootPath(path)) {
return;
} else if (path.charAt(path.length() - 1) == '/') {
throw new IllegalArgumentException("Path may not end with '/': " + path);
}
char last = 0;
for (int index = 0, len = path.length(); index < len; index++) {
char c = path.charAt(index);
if (c == '/') {
if (last == '/') {
throw new IllegalArgumentException("Path may not contains '//': " + path);
}
}
last = c;
}
}
/**
* Check if the path is valid. A valid path is absolute (starts with a '/')
* or relative (doesn't start with '/'), and contains zero or more elements.
* A path may not end with '/', except for the root path. Each element must
* be at least one character long.
*
* @param path the path
* @return {@code true} iff the path is valid.
*/
public static boolean isValid(String path) {
if (path.isEmpty() || denotesRootPath(path)) {
return true;
} else if (path.charAt(path.length() - 1) == '/') {
return false;
}
char last = 0;
for (int index = 0, len = path.length(); index < len; index++) {
char c = path.charAt(index);
if (c == '/') {
if (last == '/') {
return false;
}
}
last = c;
}
return true;
}
}
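// Editor's note: the class above documents its behaviour only in javadoc, so the
// package-private sketch below (not part of the original file) shows typical calls and
// the values they produce under the rules described above. It is an illustration, not
// shipped code; the expected results are given in the trailing comments.
class PathUtilsUsageSketch {
    public static void main(String[] args) {
        System.out.println(PathUtils.getParentPath("/a/b/c"));      // "/a/b"
        System.out.println(PathUtils.getAncestorPath("/a/b/c", 2)); // "/a"
        System.out.println(PathUtils.getName("/a/b/c"));            // "c"
        System.out.println(PathUtils.getDepth("/a/b/c"));           // 3
        System.out.println(PathUtils.concat("/a", "b", "c"));       // "/a/b/c"
        System.out.println(PathUtils.relativize("/a", "/a/b/c"));   // "b/c"
        System.out.println(PathUtils.isAncestor("/a", "/a/b"));     // true
        for (String element : PathUtils.elements("/a/b/c")) {
            System.out.println(element);                            // "a", then "b", then "c"
        }
        System.out.println(PathUtils.isValid("a//b"));              // false: empty element
    }
}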
|
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.contextualsearch;
import android.os.Handler;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.compositor.bottombar.OverlayPanel;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchBlacklist.BlacklistReason;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.content_public.browser.GestureStateListener;
import org.chromium.ui.touch_selection.SelectionEventType;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Controls selection gesture interaction for Contextual Search.
*/
public class ContextualSearchSelectionController {
/**
* The type of selection made by the user.
*/
public enum SelectionType {
UNDETERMINED,
TAP,
LONG_PRESS
}
// The number of milliseconds to wait for a selection change after a tap before considering
// the tap invalid. This can't be too small or the subsequent taps may not have established
// a new selection in time. This is because selectWordAroundCaret doesn't always select.
// TODO(donnd): Fix in Blink, crbug.com/435778.
private static final int INVALID_IF_NO_SELECTION_CHANGE_AFTER_TAP_MS = 50;
// The default navigation-detection-delay in milliseconds.
private static final int TAP_NAVIGATION_DETECTION_DELAY = 16;
private static final String CONTAINS_WORD_PATTERN = "(\\w|\\p{L}|\\p{N})+";
// A URL is:
// 0-1: schema://
// 1+: any word char, _ or -
// 1+: . followed by 1+ of any word char, _ or -
// 0-1: 0+ of any word char or .,@?^=%&:/~#- followed by any word char or @?^-%&/~+#-
// TODO(twellington): expand accepted schemas? Require a schema?
private static final Pattern URL_PATTERN = Pattern.compile("((http|https|file)://)?"
+ "([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?");
// Max selection length must be limited or the entire request URL can go past the 2K limit.
private static final int MAX_SELECTION_LENGTH = 100;
private static final int MILLISECONDS_TO_NANOSECONDS = 1000000;
private final ChromeActivity mActivity;
private final ContextualSearchSelectionHandler mHandler;
private final Runnable mHandleInvalidTapRunnable;
private final Handler mRunnableHandler;
private final float mPxToDp;
private final Pattern mContainsWordPattern;
private String mSelectedText;
private SelectionType mSelectionType;
private boolean mWasTapGestureDetected;
// Reflects whether the last tap was valid and whether we still have a tap-based selection.
private boolean mWasLastTapValid;
private boolean mIsWaitingForInvalidTapDetection;
private boolean mIsSelectionEstablished;
private boolean mShouldHandleSelectionModification;
private boolean mDidExpandSelection;
private float mX;
private float mY;
// The time of the most recent scroll activity, or 0 if none.
private long mLastScrollTimeNs;
// Tracks whether a Context Menu has just been shown and the UX has been dismissed.
// The selection may be unreliable until the next reset. See crbug.com/628436.
private boolean mIsContextMenuShown;
private class ContextualSearchGestureStateListener extends GestureStateListener {
@Override
public void onScrollStarted(int scrollOffsetY, int scrollExtentY) {
mHandler.handleScroll();
}
@Override
public void onScrollEnded(int scrollOffsetY, int scrollExtentY) {
mLastScrollTimeNs = System.nanoTime();
}
@Override
public void onScrollUpdateGestureConsumed() {
// The onScrollEnded notification is unreliable, so mark time during scroll updates too.
// See crbug.com/600863.
mLastScrollTimeNs = System.nanoTime();
}
// TODO(donnd): Remove this once we get notification of the selection changing
// after a tap-select gets a subsequent tap nearby. Currently there's no
// notification in this case.
// See crbug.com/444114.
@Override
public void onSingleTap(boolean consumed, int x, int y) {
// We may be notified that a tap has happened even when the system consumed the event.
// This is being used to support tapping on an existing selection to show the selection
// handles. We should process this tap unless we have already shown the selection
// handles (have a long-press selection) and the tap was consumed.
if (!(consumed && mSelectionType == SelectionType.LONG_PRESS)) {
scheduleInvalidTapNotification();
}
}
}
/**
* Constructs a new Selection controller for the given activity. Callbacks will be issued
* through the given selection handler.
* @param activity The {@link ChromeActivity} to control.
* @param handler The handler for callbacks.
*/
public ContextualSearchSelectionController(ChromeActivity activity,
ContextualSearchSelectionHandler handler) {
mActivity = activity;
mHandler = handler;
mPxToDp = 1.f / mActivity.getResources().getDisplayMetrics().density;
mRunnableHandler = new Handler();
mHandleInvalidTapRunnable = new Runnable() {
@Override
public void run() {
onInvalidTapDetectionTimeout();
}
};
mContainsWordPattern = Pattern.compile(CONTAINS_WORD_PATTERN);
}
/**
* Notifies that the base page has started loading a page.
*/
void onBasePageLoadStarted() {
resetAllStates();
}
/**
* Notifies that a Context Menu has been shown.
* Future controller events may be unreliable until the next reset.
*/
void onContextMenuShown() {
// Hide the UX.
mHandler.handleSelectionDismissal();
mIsContextMenuShown = true;
}
/**
* Notifies that the Contextual Search has ended.
* @param reason The reason for ending the Contextual Search.
*/
void onSearchEnded(OverlayPanel.StateChangeReason reason) {
// If the user explicitly closes the panel after establishing a selection with long press,
// it should not reappear until a new selection is made. This prevents the panel from
// reappearing when a long press selection is modified after the user has taken action to
// get rid of the panel. See crbug.com/489461.
if (shouldPreventHandlingCurrentSelectionModification(reason)) {
preventHandlingCurrentSelectionModification();
}
// Long press selections should remain visible after ending a Contextual Search.
if (mSelectionType == SelectionType.TAP) {
clearSelection();
}
}
/**
* Returns a new {@code GestureStateListener} that will listen for events in the Base Page.
* The listener routes scroll and single-tap gesture notifications to this controller.
*/
public ContextualSearchGestureStateListener getGestureStateListener() {
return new ContextualSearchGestureStateListener();
}
/**
* @return the {@link ChromeActivity}.
*/
ChromeActivity getActivity() {
// TODO(donnd): don't expose the activity.
return mActivity;
}
/**
* @return the type of the selection.
*/
SelectionType getSelectionType() {
return mSelectionType;
}
/**
* @return the selected text.
*/
String getSelectedText() {
return mSelectedText;
}
/**
* @return Whether the last Tap was valid (and the tap-selection still in place).
*/
boolean getWasLastTapValid() {
return mWasLastTapValid;
}
/**
* @return The last X coordinate;
*/
float getLastX() {
return mX;
}
/**
* @return The last Y coordinate;
*/
float getLastY() {
return mY;
}
/**
* @return The Pixel to Device independent Pixel ratio.
*/
float getPxToDp() {
return mPxToDp;
}
/**
* @return The time of the most recent scroll, or 0 if none.
*/
long getLastScrollTime() {
return mLastScrollTimeNs;
}
/**
* Clears the selection.
*/
void clearSelection() {
ContentViewCore baseContentView = getBaseContentView();
if (baseContentView != null) {
baseContentView.clearSelection();
}
resetAllStates();
}
/**
* Handles a change in the current Selection.
* @param selection The selection portion of the context.
*/
void handleSelectionChanged(String selection) {
if (mDidExpandSelection) {
mSelectedText = selection;
mDidExpandSelection = false;
return;
}
if (selection == null || selection.isEmpty()) {
scheduleInvalidTapNotification();
// When the user taps on the page it will place the caret in that position, which
// will trigger a onSelectionChanged event with an empty string.
if (mSelectionType == SelectionType.TAP) {
// Since we mostly ignore a selection that's empty, we only need to partially reset.
resetSelectionStates();
return;
}
}
if (!selection.isEmpty()) {
unscheduleInvalidTapNotification();
}
mSelectedText = selection;
if (mWasTapGestureDetected) {
mSelectionType = SelectionType.TAP;
handleSelection(selection, mSelectionType);
mWasTapGestureDetected = false;
} else {
boolean isValidSelection = validateSelectionSuppression(selection);
mHandler.handleSelectionModification(selection, isValidSelection, mX, mY);
}
}
/**
* Handles a notification that a selection event took place.
* @param eventType The type of event that took place.
* @param posXPix The x coordinate of the selection start handle.
* @param posYPix The y coordinate of the selection start handle.
*/
void handleSelectionEvent(int eventType, float posXPix, float posYPix) {
boolean shouldHandleSelection = false;
switch (eventType) {
case SelectionEventType.SELECTION_HANDLES_SHOWN:
if (!mIsContextMenuShown) {
mWasTapGestureDetected = false;
mSelectionType = SelectionType.LONG_PRESS;
shouldHandleSelection = true;
// Since we're showing pins, we don't care if the previous tap was invalid
// anymore.
unscheduleInvalidTapNotification();
}
break;
case SelectionEventType.SELECTION_HANDLES_CLEARED:
mHandler.handleSelectionDismissal();
resetAllStates();
break;
case SelectionEventType.SELECTION_HANDLE_DRAG_STOPPED:
shouldHandleSelection = mShouldHandleSelectionModification;
break;
case SelectionEventType.SELECTION_ESTABLISHED:
mIsSelectionEstablished = true;
break;
case SelectionEventType.SELECTION_DISSOLVED:
mIsSelectionEstablished = false;
break;
default:
}
if (shouldHandleSelection) {
ContentViewCore baseContentView = getBaseContentView();
if (baseContentView != null) {
String selection = baseContentView.getSelectedText();
if (selection != null) {
mX = posXPix;
mY = posYPix;
mSelectedText = selection;
handleSelection(selection, SelectionType.LONG_PRESS);
}
}
}
}
/**
* Re-enables selection modification handling and invokes
* ContextualSearchSelectionHandler.handleSelection().
* @param selection The text that was selected.
* @param type The type of selection made by the user.
*/
private void handleSelection(String selection, SelectionType type) {
mShouldHandleSelectionModification = true;
boolean isValidSelection = validateSelectionSuppression(selection);
mHandler.handleSelection(selection, isValidSelection, type, mX, mY);
}
/**
* Resets all internal state of this class, including the tap state.
*/
private void resetAllStates() {
resetSelectionStates();
mWasLastTapValid = false;
mLastScrollTimeNs = 0;
mIsContextMenuShown = false;
}
/**
* Resets all of the internal state of this class that handles the selection.
*/
private void resetSelectionStates() {
mSelectionType = SelectionType.UNDETERMINED;
mSelectedText = null;
mWasTapGestureDetected = false;
}
/**
* Handles an unhandled tap gesture.
*/
void handleShowUnhandledTapUIIfNeeded(int x, int y) {
mWasTapGestureDetected = false;
// TODO(donnd): shouldn't we check == TAP here instead of LONG_PRESS?
// TODO(donnd): refactor to avoid needing a new handler API method as suggested by Pedro.
if (mSelectionType != SelectionType.LONG_PRESS) {
mWasTapGestureDetected = true;
TapSuppressionHeuristics tapHeuristics = new TapSuppressionHeuristics(this, x, y);
// TODO(donnd): Move to be called when the panel closes to work with states that change.
tapHeuristics.logConditionState();
// Tell the manager what it needs in order to log metrics on whether the tap would have
// been suppressed if each of the heuristics were satisfied.
mHandler.handleMetricsForWouldSuppressTap(tapHeuristics);
mX = x;
mY = y;
if (tapHeuristics.shouldSuppressTap()) {
mWasLastTapValid = false;
mHandler.handleSuppressedTap();
} else {
mWasLastTapValid = true;
// TODO(donnd): Find a better way to determine that a navigation will be triggered
// by the tap, or merge with other time-consuming actions like gathering surrounding
// text or detecting page mutations.
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mHandler.handleValidTap();
}
}, TAP_NAVIGATION_DETECTION_DELAY);
}
} else {
mWasLastTapValid = false;
mHandler.handleInvalidTap();
}
}
/**
* @return The Base Page's {@link ContentViewCore}, or {@code null} if there is no current tab.
*/
ContentViewCore getBaseContentView() {
Tab currentTab = mActivity.getActivityTab();
return currentTab != null ? currentTab.getContentViewCore() : null;
}
/**
* Expands the current selection by the specified amounts.
* @param selectionStartAdjust The start offset adjustment of the selection to use to highlight
* the search term.
* @param selectionEndAdjust The end offset adjustment of the selection to use to highlight
* the search term.
*/
void adjustSelection(int selectionStartAdjust, int selectionEndAdjust) {
// TODO(donnd): add code to verify that the selection is still valid before changing it.
// crbug.com/508354
if (selectionStartAdjust == 0 && selectionEndAdjust == 0) return;
ContentViewCore basePageContentView = getBaseContentView();
if (basePageContentView != null && basePageContentView.getWebContents() != null) {
mDidExpandSelection = true;
basePageContentView.getWebContents().adjustSelectionByCharacterOffset(
selectionStartAdjust, selectionEndAdjust);
}
}
// ============================================================================================
// Invalid Tap Notification
// ============================================================================================
/**
* Schedules a notification to check if the tap was invalid.
* When we call selectWordAroundCaret it selects nothing in cases where the tap was invalid.
* We have no way to know other than scheduling a notification to check later.
* This allows us to hide the bar when there's no selection.
*/
private void scheduleInvalidTapNotification() {
// TODO(donnd): Fix selectWordAroundCaret so we can tell if it selects, instead
// of using a timer here! See crbug.com/435778.
mRunnableHandler.postDelayed(mHandleInvalidTapRunnable,
INVALID_IF_NO_SELECTION_CHANGE_AFTER_TAP_MS);
}
/**
* Un-schedules all pending notifications to check if a tap was invalid.
*/
private void unscheduleInvalidTapNotification() {
mRunnableHandler.removeCallbacks(mHandleInvalidTapRunnable);
mIsWaitingForInvalidTapDetection = true;
}
/**
* Notifies the handler that the tap was invalid once the detection timeout has elapsed.
*/
private void onInvalidTapDetectionTimeout() {
mHandler.handleInvalidTap();
mIsWaitingForInvalidTapDetection = false;
}
// ============================================================================================
// Selection Modification
// ============================================================================================
/**
* Checks whether handling of the current selection modification should be prevented. This
* is needed to allow modifying selections that are occluded by the Panel.
* See crbug.com/489461.
*
* @param reason The reason the panel is closing.
* @return Whether handling of the current selection modification should be prevented.
*/
private boolean shouldPreventHandlingCurrentSelectionModification(
OverlayPanel.StateChangeReason reason) {
return getSelectionType() == SelectionType.LONG_PRESS
&& (reason == OverlayPanel.StateChangeReason.BACK_PRESS
|| reason == OverlayPanel.StateChangeReason.BASE_PAGE_SCROLL
|| reason == OverlayPanel.StateChangeReason.SWIPE
|| reason == OverlayPanel.StateChangeReason.FLING
|| reason == OverlayPanel.StateChangeReason.CLOSE_BUTTON);
}
/**
* Temporarily prevents the controller from handling selection modification events on the
* current selection. Handling will be re-enabled when a new selection is made through either a
* tap or long press.
*/
private void preventHandlingCurrentSelectionModification() {
mShouldHandleSelectionModification = false;
}
// ============================================================================================
// Misc.
// ============================================================================================
/**
* @return whether a tap gesture has been detected, for testing.
*/
@VisibleForTesting
boolean wasAnyTapGestureDetected() {
return mIsWaitingForInvalidTapDetection;
}
/**
* @return whether the selection has been established, for testing.
*/
@VisibleForTesting
boolean isSelectionEstablished() {
return mIsSelectionEstablished;
}
/**
* Evaluates whether the given selection is valid and notifies the handler about potential
* selection suppression.
* TODO(pedrosimonetti): substitute this once the system supports suppressing selections.
* @param selection The given selection.
* @return Whether the selection is valid.
*/
private boolean validateSelectionSuppression(String selection) {
boolean isValid = isValidSelection(selection);
if (mSelectionType == SelectionType.TAP) {
BlacklistReason reason =
ContextualSearchBlacklist.findReasonToSuppressSelection(selection);
mHandler.handleSelectionSuppression(reason);
// Only really suppress if enabled by field trial. Currently we can't prevent a
// selection from being issued, so we end up clearing the selection immediately
// afterwards, which does not look great.
// TODO(pedrosimonetti): actually suppress selection once the system supports it.
if (ContextualSearchFieldTrial.isBlacklistEnabled() && reason != BlacklistReason.NONE) {
isValid = false;
}
}
return isValid;
}
/** Determines if the given selection is valid or not.
* @param selection The selection portion of the context.
* @return whether the given selection is considered a valid target for a search.
*/
private boolean isValidSelection(String selection) {
return isValidSelection(selection, getBaseContentView());
}
@VisibleForTesting
boolean isValidSelection(String selection, ContentViewCore baseContentView) {
if (selection.length() > MAX_SELECTION_LENGTH) {
return false;
}
if (!doesContainAWord(selection)) {
return false;
}
if (baseContentView != null && baseContentView.isFocusedNodeEditable()) {
return false;
}
return true;
}
/**
* Determines if the given selection contains a word or not.
* @param selection The selection to check for a word.
* @return Whether the selection contains a word anywhere within it or not.
*/
@VisibleForTesting
public boolean doesContainAWord(String selection) {
return mContainsWordPattern.matcher(selection).find();
}
/**
* @param selectionContext The String including the surrounding text and the selection.
* @param startOffset The offset to the start of the selection (inclusive).
* @param endOffset The offset to the end of the selection (non-inclusive).
* @return Whether the selection is part of URL. A valid URL is:
* 0-1: schema://
* 1+: any word char, _ or -
* 1+: . followed by 1+ of any word char, _ or -
* 0-1: 0+ of any word char or .,@?^=%&:/~#- followed by any word char or @?^-%&/~+#-
*/
public static boolean isSelectionPartOfUrl(String selectionContext, int startOffset,
int endOffset) {
Matcher matcher = URL_PATTERN.matcher(selectionContext);
// Starts are inclusive and ends are non-inclusive for both GSAContext & matcher.
while (matcher.find()) {
if (startOffset >= matcher.start() && endOffset <= matcher.end()) {
return true;
}
}
return false;
}
}
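// Editor's note: a standalone sketch (not part of the original file) that copies the two
// regular expressions defined above and shows how the offset-containment test in
// isSelectionPartOfUrl() decides whether a selection falls inside a URL. The sample string
// and offsets are illustrative only.
class SelectionPatternSketch {
    private static final java.util.regex.Pattern WORD =
            java.util.regex.Pattern.compile("(\\w|\\p{L}|\\p{N})+");
    private static final java.util.regex.Pattern URL =
            java.util.regex.Pattern.compile("((http|https|file)://)?"
                    + "([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?");
    public static void main(String[] args) {
        String context = "visit http://example.com/page for details";
        int start = context.indexOf("example");           // selection start (inclusive)
        int end = start + "example".length();              // selection end (non-inclusive)
        java.util.regex.Matcher matcher = URL.matcher(context);
        boolean partOfUrl = false;
        while (matcher.find()) {
            // Same containment rule as isSelectionPartOfUrl(): the selection must lie
            // entirely inside a single URL match.
            if (start >= matcher.start() && end <= matcher.end()) {
                partOfUrl = true;
                break;
            }
        }
        System.out.println(partOfUrl);                     // true: "example" is inside the URL
        System.out.println(WORD.matcher("...!!!").find()); // false: no word characters at all
    }
}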
|
|
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.storagegateway.AWSStorageGateway#activateGateway(ActivateGatewayRequest) ActivateGateway operation}.
* <p>
* This operation activates the gateway you previously deployed on your VMware host. For more information, see <a
* href="http://docs.amazonwebservices.com/storagegateway/latest/userguide/DownloadAndDeploy.html"> Downloading and Deploying AWS Storage Gateway VM </a>
* . In the activation process you specify information such as the region you want to use for storing snapshots, the time zone for scheduled snapshots
* and the gateway schedule window, an activation key, and a name for your gateway. The activation process also associates your gateway with your account
* (see UpdateGatewayInformation).
* </p>
* <p>
* <b>NOTE:</b> You must power on the gateway VM before you can activate your gateway.
* </p>
*
* @see com.amazonaws.services.storagegateway.AWSStorageGateway#activateGateway(ActivateGatewayRequest)
*/
public class ActivateGatewayRequest extends AmazonWebServiceRequest {
/**
* Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 50<br/>
*/
private String activationKey;
/**
* A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 255<br/>
* <b>Pattern: </b>^[ -\.0-\[\]-~]*[!-\.0-\[\]-~][ -\.0-\[\]-~]*$<br/>
*/
private String gatewayName;
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*/
private String gatewayTimezone;
/**
* One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 25<br/>
*/
private String gatewayRegion;
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*/
private String gatewayType;
/**
* Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 50<br/>
*
* @return Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
*/
public String getActivationKey() {
return activationKey;
}
/**
* Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 50<br/>
*
* @param activationKey Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
*/
public void setActivationKey(String activationKey) {
this.activationKey = activationKey;
}
/**
* Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 50<br/>
*
* @param activationKey Your gateway activation key. You can obtain the activation key by
* sending an HTTP GET request with redirects enabled to the gateway IP
* address (port 80). The redirect URL returned in the response provides
* you the activation key for your gateway in the query string parameter
* <code>activationKey</code>. It may also include other
* activation-related parameters, however, these are merely defaults --
* the arguments you pass to the <code>ActivateGateway</code> API call
* determine the actual configuration of your gateway.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public ActivateGatewayRequest withActivationKey(String activationKey) {
this.activationKey = activationKey;
return this;
}
/**
* A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 255<br/>
* <b>Pattern: </b>^[ -\.0-\[\]-~]*[!-\.0-\[\]-~][ -\.0-\[\]-~]*$<br/>
*
* @return A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
*/
public String getGatewayName() {
return gatewayName;
}
/**
* A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 255<br/>
* <b>Pattern: </b>^[ -\.0-\[\]-~]*[!-\.0-\[\]-~][ -\.0-\[\]-~]*$<br/>
*
* @param gatewayName A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
*/
public void setGatewayName(String gatewayName) {
this.gatewayName = gatewayName;
}
/**
* A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 255<br/>
* <b>Pattern: </b>^[ -\.0-\[\]-~]*[!-\.0-\[\]-~][ -\.0-\[\]-~]*$<br/>
*
* @param gatewayName A unique identifier for your gateway. This name becomes part of the
* gateway Amazon Resources Name (ARN) which is what you use as an input
* to other operations.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public ActivateGatewayRequest withGatewayName(String gatewayName) {
this.gatewayName = gatewayName;
return this;
}
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*
* @return One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
*
* @see GatewayTimezone
*/
public String getGatewayTimezone() {
return gatewayTimezone;
}
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*
* @param gatewayTimezone One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
*
* @see GatewayTimezone
*/
public void setGatewayTimezone(String gatewayTimezone) {
this.gatewayTimezone = gatewayTimezone;
}
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*
* @param gatewayTimezone One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see GatewayTimezone
*/
public ActivateGatewayRequest withGatewayTimezone(String gatewayTimezone) {
this.gatewayTimezone = gatewayTimezone;
return this;
}
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*
* @param gatewayTimezone One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
*
* @see GatewayTimezone
*/
public void setGatewayTimezone(GatewayTimezone gatewayTimezone) {
this.gatewayTimezone = gatewayTimezone.toString();
}
/**
* One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>GMT-12:00, GMT-11:00, GMT-10:00, GMT-9:00, GMT-8:00, GMT-7:00, GMT-6:00, GMT-5:00, GMT-4:00, GMT-3:30, GMT-3:00, GMT-2:00, GMT-1:00, GMT, GMT+1:00, GMT+2:00, GMT+3:00, GMT+3:30, GMT+4:00, GMT+4:30, GMT+5:00, GMT+5:30, GMT+5:45, GMT+6:00, GMT+7:00, GMT+8:00, GMT+9:00, GMT+9:30, GMT+10:00, GMT+11:00, GMT+12:00
*
* @param gatewayTimezone One of the values that indicates the time zone you want to set for the
* gateway. The time zone is used, for example, for scheduling snapshots
* and your gateway's maintenance schedule.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see GatewayTimezone
*/
public ActivateGatewayRequest withGatewayTimezone(GatewayTimezone gatewayTimezone) {
this.gatewayTimezone = gatewayTimezone.toString();
return this;
}
/**
* One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 25<br/>
*
* @return One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
*/
public String getGatewayRegion() {
return gatewayRegion;
}
/**
* One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 25<br/>
*
* @param gatewayRegion One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
*/
public void setGatewayRegion(String gatewayRegion) {
this.gatewayRegion = gatewayRegion;
}
/**
* One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 25<br/>
*
* @param gatewayRegion One of the values that indicates the region where you want to store
* the snapshot backups. The gateway region specified must be the same
* region as the region in your <code>Host</code> header in the request.
* For more information about available regions and endpoints for AWS
* Storage Gateway, see <a
* href="http://docs.amazonwebservices.com/general/latest/gr/rande.html#sg_region">Regions
* and Endpoints</a> in the <b>Amazon Web Services Glossary</b>.
* <p><i>Valid Values</i>: "us-east-1", "us-west-1", "us-west-2",
* "eu-west-1", "ap-northeast-1", "ap-southeast-1", "sa-east-1"
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public ActivateGatewayRequest withGatewayRegion(String gatewayRegion) {
this.gatewayRegion = gatewayRegion;
return this;
}
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*
* @return One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
*
* @see GatewayType
*/
public String getGatewayType() {
return gatewayType;
}
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*
* @param gatewayType One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
*
* @see GatewayType
*/
public void setGatewayType(String gatewayType) {
this.gatewayType = gatewayType;
}
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*
* @param gatewayType One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see GatewayType
*/
public ActivateGatewayRequest withGatewayType(String gatewayType) {
this.gatewayType = gatewayType;
return this;
}
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*
* @param gatewayType One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
*
* @see GatewayType
*/
public void setGatewayType(GatewayType gatewayType) {
this.gatewayType = gatewayType.toString();
}
/**
* One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>STORED, CACHED
*
* @param gatewayType One of the values that defines the type of gateway to activate. The
* type specified is critical to all later functions of the gateway and
* cannot be changed after activation. The default value is
* <code>STORED</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see GatewayType
*/
public ActivateGatewayRequest withGatewayType(GatewayType gatewayType) {
this.gatewayType = gatewayType.toString();
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (activationKey != null) sb.append("ActivationKey: " + activationKey + ", ");
if (gatewayName != null) sb.append("GatewayName: " + gatewayName + ", ");
if (gatewayTimezone != null) sb.append("GatewayTimezone: " + gatewayTimezone + ", ");
if (gatewayRegion != null) sb.append("GatewayRegion: " + gatewayRegion + ", ");
if (gatewayType != null) sb.append("GatewayType: " + gatewayType + ", ");
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getActivationKey() == null) ? 0 : getActivationKey().hashCode());
hashCode = prime * hashCode + ((getGatewayName() == null) ? 0 : getGatewayName().hashCode());
hashCode = prime * hashCode + ((getGatewayTimezone() == null) ? 0 : getGatewayTimezone().hashCode());
hashCode = prime * hashCode + ((getGatewayRegion() == null) ? 0 : getGatewayRegion().hashCode());
hashCode = prime * hashCode + ((getGatewayType() == null) ? 0 : getGatewayType().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof ActivateGatewayRequest == false) return false;
ActivateGatewayRequest other = (ActivateGatewayRequest)obj;
if (other.getActivationKey() == null ^ this.getActivationKey() == null) return false;
if (other.getActivationKey() != null && other.getActivationKey().equals(this.getActivationKey()) == false) return false;
if (other.getGatewayName() == null ^ this.getGatewayName() == null) return false;
if (other.getGatewayName() != null && other.getGatewayName().equals(this.getGatewayName()) == false) return false;
if (other.getGatewayTimezone() == null ^ this.getGatewayTimezone() == null) return false;
if (other.getGatewayTimezone() != null && other.getGatewayTimezone().equals(this.getGatewayTimezone()) == false) return false;
if (other.getGatewayRegion() == null ^ this.getGatewayRegion() == null) return false;
if (other.getGatewayRegion() != null && other.getGatewayRegion().equals(this.getGatewayRegion()) == false) return false;
if (other.getGatewayType() == null ^ this.getGatewayType() == null) return false;
if (other.getGatewayType() != null && other.getGatewayType().equals(this.getGatewayType()) == false) return false;
return true;
}
}
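// Editor's note: a minimal usage sketch (not part of the generated SDK file) showing the
// fluent "with*" chaining that the javadoc above repeatedly refers to. The activation key
// and gateway name are placeholder values; the timezone, region and type strings are taken
// from the allowed values documented above.
class ActivateGatewayRequestUsageSketch {
    public static void main(String[] args) {
        ActivateGatewayRequest request = new ActivateGatewayRequest()
                .withActivationKey("example-activation-key") // placeholder value
                .withGatewayName("example-gateway")          // placeholder value
                .withGatewayTimezone("GMT")                  // one of the allowed timezone values
                .withGatewayRegion("us-east-1")              // one of the documented regions
                .withGatewayType("STORED");                  // STORED or CACHED
        System.out.println(request);                         // uses the toString() defined above
    }
}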
|
|
package min3d.sampleProject1;
import min3d.core.Object3dContainer;
import min3d.core.RendererActivity;
import min3d.objectPrimitives.Box;
import min3d.objectPrimitives.Sphere;
import min3d.vos.Light;
import min3d.vos.LightType;
import min3d.vos.Number3d;
import android.util.Log;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
/**
* Light properties explorer.
* Position, direction, spotCutoff, spotExpo, type, visibility...
*
* Can also be used as a testbed for other parameter-related testing.
*
* @author Lee
*/
public class ExampleLightProperties extends RendererActivity
{
Object3dContainer _rect;
Object3dContainer _obj;
Light _light;
@Override
protected void onCreateSetContentView()
{
setContentView(R.layout.lightproperties_layout);
// Add OpenGL surface
LinearLayout ll = (LinearLayout) this.findViewById(R.id.scene1Holder);
ll.addView(_glSurfaceView);
}
@Override
public void onInitScene()
{
// Add seekbar listeners
int[] ids = { R.id.seekbarA1, R.id.seekbarA2, R.id.seekbarA3, R.id.seekbarB1, R.id.seekbarB2, R.id.seekbarB3, R.id.seekbarC1, R.id.seekbarC2 };
for (int id : ids)
{
SeekBar b;
b = (SeekBar) this.findViewById(id);
b.setProgress(50);
b.setOnSeekBarChangeListener(seekChange);
}
// Add checkbox listeners
int[] ids2 = { R.id.cbx1, R.id.cbx2 };
for (int id : ids2)
{
CheckBox ck = (CheckBox) this.findViewById(id);
ck.setOnCheckedChangeListener(checkChange);
}
// Set some defaults
CheckBox c1 = (CheckBox) this.findViewById(R.id.cbx1);
c1.setChecked(true);
CheckBox c2 = (CheckBox) this.findViewById(R.id.cbx2);
c2.setChecked(true);
SeekBar sB3 = (SeekBar) this.findViewById(R.id.seekbarB3);
sB3.setProgress(100); // pointing away from camera
SeekBar sAngle = (SeekBar) this.findViewById(R.id.seekbarC1);
sAngle.setProgress(33); // some angle
SeekBar sExp = (SeekBar) this.findViewById(R.id.seekbarC2);
sExp.setProgress(45); // exponent
}
OnSeekBarChangeListener seekChange = new OnSeekBarChangeListener()
{
public void onProgressChanged(SeekBar $s, int $progress, boolean $touch)
{
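// Map the SeekBar progress from [0,100] to [0,1], then re-center it to [-1,1].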
float n = (float)$progress/100f;
n = (n - 0.5f) * 2f;
switch($s.getId())
{
case R.id.seekbarA1:
_light.position.setX(n*3);
break;
case R.id.seekbarA2:
_light.position.setY(n*3);
break;
case R.id.seekbarA3:
_light.position.setZ(n*3); // +3 + n*6);
break;
case R.id.seekbarB1:
//_light.spotDirection.setX(n);
scene.camera().position.x = n*10;
break;
case R.id.seekbarB2:
//_light.spotDirection.setY(n);
scene.camera().position.y = n*10;
break;
case R.id.seekbarB3:
//_light.spotDirection.setZ(n);
scene.camera().position.z = n*10;
break;
case R.id.seekbarC1:
n += 1; // shift to [0,2]
n *= 45+4; // scale to roughly [0,98] degrees
if (n > 90) n = 180; // a spot cutoff must lie in [0,90] or be the special value 180 (no cutoff)
_light.spotCutoffAngle(n);
Log.v("x", "cutoff " + n);
break;
case R.id.seekbarC2:
n += 1; //[0,2]
n *= 22; // [0,44]
n -= 4; // [-4,40]
if (n < 0) n = 0;
_light.spotExponent(n);
Log.v("x", "exp " + n);
break;
}
Log.v("x", "campos" + scene.camera().position.toString());
if (_light.position.isDirty())
{
Log.v("x", "pos " + _light.position.toString());
}
if (_light.direction.isDirty())
{
Number3d n3 = _light.direction.toNumber3d();
n3.normalize();
_light.direction.setAllFrom(n3);
Log.v("x", "dir " + _light.direction.toString());
}
}
public void onStartTrackingTouch(SeekBar $s)
{
}
public void onStopTrackingTouch(SeekBar $s)
{
}
};
OnCheckedChangeListener checkChange = new OnCheckedChangeListener()
{
public void onCheckedChanged(CompoundButton $ck, boolean isChecked)
{
switch($ck.getId())
{
case R.id.cbx1:
if (isChecked)
_light.type(LightType.DIRECTIONAL);
else
_light.type(LightType.POSITIONAL);
Log.v("x", "light is now " + _light.type() + _light.type().glValue());
break;
case R.id.cbx2:
_light.isVisible(isChecked);
Log.v("x", "light visible: " + _light.isVisible());
break;
}
}
};
//
public void initScene()
{
scene.backgroundColor().setAll(0xff222222);
_light = new Light();
_light.position.setAll(0, 0, +3);
_light.diffuse.setAll(255, 255, 255, 255);
_light.ambient.setAll(0, 0, 0, 0);
_light.specular.setAll(0, 0, 0, 0);
_light.emissive.setAll(0, 0, 0, 0);
scene.lights().add(_light);
_rect = new Box(6, 6, .1f);
_rect.position().z = -3;
scene.addChild(_rect);
_obj = new Sphere(1.5f, 20,20);
scene.addChild(_obj);
}
@Override
public void updateScene()
{
//_rect.rotation().y+=0.33;
}
@Override
public void onUpdateScene()
{
}
}
|
|
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.granite.identity.application.authenticator.oidc;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.LogFactory;
import org.apache.oltu.oauth2.client.OAuthClient;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest;
import org.apache.oltu.oauth2.client.response.OAuthAuthzResponse;
import org.apache.oltu.oauth2.client.response.OAuthClientResponse;
import org.apache.oltu.oauth2.client.response.OAuthJSONAccessTokenResponse;
import org.apache.oltu.oauth2.common.exception.OAuthProblemException;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.testng.Assert;
import org.testng.IObjectFactory;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.DataProvider;
import org.testng.annotations.ObjectFactory;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.AuthenticationFailedException;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import com.granite.identity.application.authenticator.oidc.internal.OpenIDConnectAuthenticatorServiceComponent;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.core.util.IdentityCoreConstants;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.user.api.RealmConfiguration;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.tenant.TenantManager;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.powermock.api.mockito.PowerMockito.doReturn;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/***
 * Unit test class for the OpenIDConnectAuthenticator class.
*/
@PrepareForTest({LogFactory.class, OAuthClient.class, URL.class, FrameworkUtils.class, OpenIDConnectAuthenticatorServiceComponent.class,
OAuthAuthzResponse.class, OAuthClientRequest.class, OAuthClientResponse.class, IdentityUtil.class})
public class OpenIDConnectAuthenticatorTest {
@Mock
private HttpServletRequest mockServletRequest;
@Mock
private HttpServletResponse mockServletResponse;
@Mock
private OAuthClientResponse mockOAuthClientResponse;
@Mock
private OAuthClientRequest mockOAuthClientRequest;
@Mock
private OAuthJSONAccessTokenResponse mockOAuthJSONAccessTokenResponse;
@Mock
private AuthenticationContext mockAuthenticationContext;
@Mock
private HttpURLConnection mockConnection;
@Mock
private OAuthAuthzResponse mockOAuthzResponse;
@Mock
private RealmService mockRealmService;
@Mock
private UserRealm mockUserRealm;
@Mock
private UserStoreManager mockUserStoreManager;
@Mock
private TenantManager mockTenantManger;
@Mock
private RealmConfiguration mockRealmConfiguration;
@Mock
private OAuthClient mockOAuthClient;
@InjectMocks
@Spy
OpenIDConnectAuthenticator openIDConnectAuthenticator;
private static Map<String, String> authenticatorProperties;
private static String accessToken = "4952b467-86b2-31df-b63c-0bf25cec4f86s";
private static String idToken = "eyJ4NXQiOiJOVEF4Wm1NeE5ETXlaRGczTVRVMVpHTTBNekV6T0RKaFpXSTRORE5" +
"sWkRVMU9HRmtOakZpTVEiLCJraWQiOiJOVEF4Wm1NeE5ETXlaRGczTVRVMVpHTTBNekV6T0RKaFpXSTRORE5sWkRVMU9" +
"HRmtOakZpTVEiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImF1ZCI6WyJ1NUZJZkc1eHpMdkJHaWFtb0FZenpjc" +
"XBCcWdhIl0sImF6cCI6InU1RklmRzV4ekx2QkdpYW1vQVl6emNxcEJxZ2EiLCJhdXRoX3RpbWUiOjE1MDY1NzYwODAsImlzcyI6" +
"Imh0dHBzOlwvXC9sb2NhbGhvc3Q6OTQ0M1wvb2F1dGgyXC90b2tlbiIsImV4cCI6MTUwNjU3OTY4NCwibm9uY2UiOiI" +
"wZWQ4ZjFiMy1lODNmLTQ2YzAtOGQ1Mi1mMGQyZTc5MjVmOTgiLCJpYXQiOjE1MDY1NzYwODQsInNpZCI6Ijg3MDZmNWR" +
"hLTU0ZmMtNGZiMC1iNGUxLTY5MDZmYTRiMDRjMiJ9.HopPYFs4lInXvGztNEkJKh8Kdy52eCGbzYy6PiVuM_BlCcGff3SHO" +
"oZxDH7JbIkPpKBe0cnYQWBxfHuGTUWhvnu629ek6v2YLkaHlb_Lm04xLD9FNxuZUNQFw83pQtDVpoX5r1V-F0DdUc7gA1RKN3" +
"xMVYgRyfslRDveGYplxVVNQ1LU3lrZhgaTfcMEsC6rdbd1HjdzG71EPS4674HCSAUelOisNKGa2NgORpldDQsj376QD0G9Mhc8WtW" +
"oguftrCCGjBy1kKT4VqFLOqlA-8wUhOj_rZT9SUIBQRDPu0RZobvsskqYo40GEZrUoa";
private static OAuthClientResponse token;
private Map<String, String> paramValueMap;
private int TENANT_ID = 1234;
@BeforeTest
public void init() {
authenticatorProperties = new HashMap<>();
authenticatorProperties.put("callbackUrl", "http://localhost:8080/playground2/oauth2client");
authenticatorProperties.put("commonAuthQueryParams", "scope=openid&state=OIDC&loginType=basic");
authenticatorProperties.put("UserInfoUrl", "https://localhost:9443/oauth2/userinfo");
authenticatorProperties.put(OIDCAuthenticatorConstants.CLIENT_ID, "u5FIfG5xzLvBGiamoAYzzcqpBqga");
authenticatorProperties.put(OIDCAuthenticatorConstants.CLIENT_SECRET, "_kLtobqi08GytnypVW_Mmy1niAIa");
authenticatorProperties.put(OIDCAuthenticatorConstants.OAUTH2_TOKEN_URL, "https://localhost:9443/oauth2/token");
authenticatorProperties.put(OIDCAuthenticatorConstants.OAUTH2_AUTHZ_URL, "https://localhost:9443/oauth2/authorize");
authenticatorProperties.put(IdentityApplicationConstants.Authenticator.SAML2SSO.IS_USER_ID_IN_CLAIMS, "true");
token = null;
}
@DataProvider(name = "seperator")
public Object[][] getSeperator() {
return new String[][]{
{","},
{",,,"}
};
}
@DataProvider(name = "requestDataHandler")
public Object[][] getRequestStatus() {
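// Each row supplies: grantType, state, loginType, error, expectedCanHandle, expectedContext, canHandle assertion message, contextIdentifier assertion message.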
return new String[][]{
// When all parameters are non-null.
{"openid","active,OIDC", "BASIC", "Error Login.", "true", "active", "Invalid can handle response for the request.", "Invalid context identifier."},
// When grant_type and login_type are null.
{null,"active,OIDC", null, "Error Login.", "true", "active", "Invalid can handle response for the request.", "Invalid context identifier."},
// When all parameters are null.
{null, null, null, null, "false", null, "Invalid can handle response for the request.", "Invalid context identifier."}
};
}
@Test(dataProvider = "requestDataHandler")
public void testCanHandle(String grantType, String state, String loginType, String error, String expectedCanHandler, String expectedContext, String msgCanHandler, String msgContext) throws IOException {
when(mockServletRequest.getParameter(
OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)).thenReturn(grantType);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE)).thenReturn(state);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.LOGIN_TYPE)).thenReturn(loginType);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.OAUTH2_ERROR)).thenReturn(error);
assertEquals(openIDConnectAuthenticator.canHandle(mockServletRequest), Boolean.parseBoolean(expectedCanHandler), msgCanHandler);
assertEquals(openIDConnectAuthenticator.getContextIdentifier(mockServletRequest), expectedContext, msgContext);
}
@Test
public void testGetAuthorizationServerEndpoint() throws IOException {
assertNull(openIDConnectAuthenticator.getAuthorizationServerEndpoint(authenticatorProperties),
"Unable to get the authorization server endpoint.");
}
@Test
public void testGetCallbackUrl() throws IOException {
assertEquals(openIDConnectAuthenticator.getCallBackURL(authenticatorProperties),
"http://localhost:8080/playground2/oauth2client",
"Callback URL is not valid.");
}
@Test
public void testGetTokenEndpoint() throws IOException {
assertNull(openIDConnectAuthenticator.getTokenEndpoint(authenticatorProperties),
"Unable to get the token endpoint.");
}
@Test
public void testGetState() throws IOException {
assertEquals(openIDConnectAuthenticator.getState("OIDC", authenticatorProperties),
"OIDC", "Unable to get the scope.");
}
@Test
public void testGetScope() throws IOException {
assertEquals(openIDConnectAuthenticator.getScope("openid", authenticatorProperties),
"openid", "Unable to get the scope.");
}
@Test
public void testRequiredIDToken() throws IOException {
assertTrue(openIDConnectAuthenticator.requiredIDToken(authenticatorProperties),
"Does not require the ID token.");
}
@Test
public void testGetCallBackURL() throws IOException {
assertEquals(openIDConnectAuthenticator.getCallBackURL(authenticatorProperties),
"http://localhost:8080/playground2/oauth2client",
"Callback URL is not valid.");
}
@Test
public void testGetUserInfoEndpoint() throws IOException {
assertEquals(openIDConnectAuthenticator.getUserInfoEndpoint(token, authenticatorProperties),
"https://localhost:9443/oauth2/userinfo", "unable to get the user infor endpoint");
}
@Test
public void testGetSubjectAttributes() throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException, IOException {
Map<ClaimMapping, String> result;
// Test with no json response.
when(mockOAuthClientResponse.getParam(OIDCAuthenticatorConstants.ACCESS_TOKEN)).
thenReturn("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9");
result = openIDConnectAuthenticator.getSubjectAttributes(mockOAuthClientResponse, authenticatorProperties);
assertTrue(result.isEmpty(), "result is not Empty.");
// Test with a json response which is not empty.
Map<String, Object> jsonObject = new HashMap<>();
jsonObject.put("email", new String("{\"http://www.wso2.org/email\" : \"example@wso2.com\"}"));
String json = jsonObject.toString();
doReturn(json).when(openIDConnectAuthenticator).sendRequest(any(String.class),
any(String.class));
result = openIDConnectAuthenticator.getSubjectAttributes(mockOAuthClientResponse, authenticatorProperties);
assertTrue(!result.isEmpty(), "result is Empty.");
// Test with a json response which is empty.
doReturn(" ").when(openIDConnectAuthenticator).sendRequest(any(String.class),
any(String.class));
result = openIDConnectAuthenticator.getSubjectAttributes(mockOAuthClientResponse, authenticatorProperties);
assertTrue(result.isEmpty(), "result is not Empty.");
}
@DataProvider(name = "commonAuthParamProvider")
public Object[][] getCommonAuthParams() {
return new String[][]{
// If condition : queryString != null && queryString.contains("scope=")&& queryString.contains("redirect_uri=").
{"scope=openid&state=OIDC&loginType=basic&redirect_uri=https://localhost:9443/redirect", "https://localhost:9443/redirect", "The redirect URI is invalid"},
// If condition : queryString != null && queryString.contains("scope=").
{"state=OIDC&loginType=basic&redirect_uri=https://localhost:9443/redirect",
"https://localhost:9443/redirect", "The redirect URI is invalid"},
// If condition : queryString != null && queryString.contains("redirect_uri=").
{"state=OIDC&loginType=basic", "https://localhost:9443/redirect", "The redirect URI is invalid"}
};
}
@Test ( dataProvider = "commonAuthParamProvider")
public void testInitiateAuthenticationRequest(String authParam, String expectedValue,
String errorMsg) throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException, UserStoreException {
mockAuthenticationRequestContext(mockAuthenticationContext);
when(mockServletResponse.encodeRedirectURL(anyString())).thenReturn("https://localhost:9443/redirect");
when(mockAuthenticationContext.getAuthenticatorProperties()).thenReturn(authenticatorProperties);
when(mockAuthenticationContext.getContextIdentifier()).thenReturn("ContextIdentifier");
when(mockServletRequest.getParameter("domain")).thenReturn("carbon_super");
openIDConnectAuthenticator.initiateAuthenticationRequest(mockServletRequest, mockServletResponse,
mockAuthenticationContext);
authenticatorProperties.put("commonAuthQueryParams", authParam);
openIDConnectAuthenticator.initiateAuthenticationRequest(mockServletRequest, mockServletResponse,
mockAuthenticationContext);
assertEquals(mockServletResponse.encodeRedirectURL("encodeRedirectUri"), expectedValue, errorMsg);
}
@Test ( expectedExceptions = AuthenticationFailedException.class)
public void testInitiateAuthenticationRequestNullProperties() throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException, UserStoreException {
mockAuthenticationRequestContext(mockAuthenticationContext);
when(mockAuthenticationContext.getAuthenticatorProperties()).thenReturn(null);
openIDConnectAuthenticator.initiateAuthenticationRequest(mockServletRequest, mockServletResponse,
mockAuthenticationContext);
}
@Test
public void testPassProcessAuthenticationResponse() throws Exception {
setupTest();
openIDConnectAuthenticator.processAuthenticationResponse(mockServletRequest,
mockServletResponse, mockAuthenticationContext);
assertEquals(mockAuthenticationContext.getProperty(OIDCAuthenticatorConstants.ACCESS_TOKEN),
accessToken, "Invalid access token in the authentication context.");
assertEquals(mockAuthenticationContext.getProperty(OIDCAuthenticatorConstants.ID_TOKEN), idToken,
"Invalid Id token in the authentication context.");
}
@Test(expectedExceptions = AuthenticationFailedException.class)
public void testPassProcessAuthenticationResponseWithoutAccessToken() throws Exception {
setupTest();
// Empty access token and id token
setParametersForOAuthClientResponse(mockOAuthClientResponse, "", "");
openIDConnectAuthenticator.processAuthenticationResponse(mockServletRequest,
mockServletResponse, mockAuthenticationContext);
}
@Test
public void testPassProcessAuthenticationWithBlankCallBack() throws Exception {
setupTest();
authenticatorProperties.put("callbackUrl", " ");
mockStatic(IdentityUtil.class);
when(IdentityUtil.getServerURL(FrameworkConstants.COMMONAUTH, true, true)).thenReturn("http:/localhost:9443/oauth2/callback");
setParametersForOAuthClientResponse(mockOAuthClientResponse, accessToken, idToken);
openIDConnectAuthenticator.processAuthenticationResponse(mockServletRequest,
mockServletResponse, mockAuthenticationContext);
}
@Test
public void testPassProcessAuthenticationWithParamValue() throws Exception {
setupTest();
authenticatorProperties.put("callbackUrl", "http://localhost:8080/playground2/oauth2client");
Map<String, String> paramMap = new HashMap<>();
paramMap.put("redirect_uri","http:/localhost:9443/oauth2/redirect");
when(mockAuthenticationContext.getProperty("oidc:param.map")).thenReturn(paramMap);
setParametersForOAuthClientResponse(mockOAuthClientResponse, accessToken, idToken);
openIDConnectAuthenticator.processAuthenticationResponse(mockServletRequest,
mockServletResponse, mockAuthenticationContext);
}
@Test(dataProvider = "seperator")
public void testBuildClaimMappings(String separator) throws Exception {
Map<ClaimMapping, String> claims = new HashMap<>();
Map<String, Object> entries = new HashMap<>();
entries.put("scope", new Object());
for (Map.Entry<String, Object> entry : entries.entrySet()) {
openIDConnectAuthenticator.buildClaimMappings(claims, entry, separator);
assertTrue(!claims.isEmpty(), "Claims[] is empty.");
}
entries = new HashMap<>();
entries.put("scope", new String("[ \n" +
" {\"name\":\"Ram\", \"email\":\"example1@gmail.com\", \"age\":23}, \n" +
" {\"name\":\"Shyam\", \"email\":\"example2@gmail.com\", \"age\":28}, \n" +
"]"));
for (Map.Entry<String, Object> entry : entries.entrySet()) {
openIDConnectAuthenticator.buildClaimMappings(claims, entry, separator);
assertTrue(!claims.isEmpty(), "Claims[] is empty.");
}
}
@Test(dataProvider = "requestDataHandler")
public void testGetContextIdentifier(String grantType, String state, String loginType, String error, String expectedCanHandler, String expectedContext, String msgCanHandler, String msgContext) throws Exception {
when(mockServletRequest.getParameter(
OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)).thenReturn(grantType);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE)).thenReturn(state);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.LOGIN_TYPE)).thenReturn(loginType);
when(mockServletRequest.getParameter(OIDCAuthenticatorConstants.OAUTH2_ERROR)).thenReturn(error);
assertEquals(openIDConnectAuthenticator.getContextIdentifier(mockServletRequest), expectedContext,
msgContext);
}
@Test
public void testGetFriendlyName() throws Exception {
assertEquals(openIDConnectAuthenticator.getFriendlyName(), "openidconnect",
"Invalid friendly name.");
}
@Test
public void testGetName() throws Exception {
assertEquals(openIDConnectAuthenticator.getName(), "GraniteOpenIDConnectAuthenticator",
"Invalid authenticator name.");
}
@Test
public void testGetClaimDialectURI() throws Exception {
assertEquals(openIDConnectAuthenticator.getClaimDialectURI(), "http://wso2.org/oidc/claim",
"Invalid claim dialect uri.");
}
@Test
public void testGetSubjectFromUserIDClaimURI() throws FrameworkException {
// Subject is null.
assertNull(openIDConnectAuthenticator.getSubjectFromUserIDClaimURI(mockAuthenticationContext));
// Subject is not null.
mockStatic(FrameworkUtils.class);
when(FrameworkUtils.getFederatedSubjectFromClaims(mockAuthenticationContext,
openIDConnectAuthenticator
.getClaimDialectURI())).thenReturn("subject");
Assert.assertNotNull(openIDConnectAuthenticator.getSubjectFromUserIDClaimURI(mockAuthenticationContext));
}
@Test
public void testSendRequest() throws Exception {
// InputStream is null.
String result = openIDConnectAuthenticator.sendRequest(null, accessToken);
assertTrue(result.isEmpty(), "The send request should be empty.");
// InputStream is not null.
InputStream stream =
IOUtils.toInputStream("Some test data for my input stream", "UTF-8");
URL url = mock(URL.class);
whenNew(URL.class).withParameterTypes(String.class)
.withArguments(anyString()).thenReturn(url);
when(url.openConnection()).thenReturn(mockConnection);
when(mockConnection.getInputStream()).thenReturn(stream);
result = openIDConnectAuthenticator.sendRequest("https://www.google.com", accessToken);
assertTrue(!result.isEmpty(), "The send request should not be empty.");
}
@Test
public void testGetOauthResponseWithoutExceptions() throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException {
when(mockOAuthClient.accessToken(mockOAuthClientRequest)).thenReturn(mockOAuthJSONAccessTokenResponse);
Assert.assertNotNull(openIDConnectAuthenticator.getOauthResponse(mockOAuthClient, mockOAuthClientRequest));
}
@Test(expectedExceptions = AuthenticationFailedException.class)
public void testGetOauthResponseWithExceptions() throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException {
OAuthClientRequest oAuthClientRequest = mock(OAuthClientRequest.class);
OAuthClient oAuthClient = mock(OAuthClient.class);
when(oAuthClient.accessToken(oAuthClientRequest)).thenThrow(OAuthSystemException.class);
openIDConnectAuthenticator.getOauthResponse(oAuthClient, oAuthClientRequest);
}
@Test
public void testGetOauthResponseWithOAuthProblemExceptions() throws OAuthSystemException,
OAuthProblemException, AuthenticationFailedException {
OAuthClientRequest oAuthClientRequest = mock(OAuthClientRequest.class);
OAuthClient oAuthClient = mock(OAuthClient.class);
when(oAuthClient.accessToken(oAuthClientRequest)).thenThrow(OAuthProblemException.class);
assertNull(openIDConnectAuthenticator.getOauthResponse(oAuthClient, oAuthClientRequest));
}
@ObjectFactory
public IObjectFactory getObjectFactory() {
return new org.powermock.modules.testng.PowerMockObjectFactory();
}
/***
 * Method which sets up the background for the processAuthenticationResponse method.
*
* @throws OAuthProblemException an instance of OAuthProblemException
* @throws AuthenticationFailedException an instance of AuthenticationFailedException
* @throws UserStoreException an instance of UserStoreException
*/
private void setupTest() throws Exception {
mockStatic(OAuthAuthzResponse.class);
when(OAuthAuthzResponse.oauthCodeAuthzResponse(mockServletRequest)).thenReturn(mockOAuthzResponse);
when(mockServletRequest.getParameter("domain")).thenReturn("carbon.super");
mockAuthenticationRequestContext(mockAuthenticationContext);
when(mockOAuthzResponse.getCode()).thenReturn("200");
doReturn(mockOAuthClientResponse).when(openIDConnectAuthenticator).getOauthResponse(any(OAuthClient.class),
any(OAuthClientRequest.class));
when(mockAuthenticationContext.getProperty(
OIDCAuthenticatorConstants.ACCESS_TOKEN)).thenReturn(accessToken);
when(mockAuthenticationContext.getProperty(OIDCAuthenticatorConstants.ID_TOKEN)).thenReturn(idToken);
setParametersForOAuthClientResponse(mockOAuthClientResponse, accessToken, idToken);
mockStatic(OpenIDConnectAuthenticatorServiceComponent.class);
when(OpenIDConnectAuthenticatorServiceComponent.getRealmService()).thenReturn(mockRealmService);
when(mockRealmService.getTenantManager()).thenReturn(mockTenantManger);
when(mockTenantManger.getTenantId(anyString())).thenReturn(TENANT_ID);
when(mockRealmService.getTenantUserRealm(anyInt())).thenReturn(mockUserRealm);
when(mockUserRealm.getUserStoreManager()).thenReturn(mockUserStoreManager);
when(mockUserStoreManager.getRealmConfiguration()).thenReturn(mockRealmConfiguration);
when(mockRealmConfiguration.getUserStoreProperty(IdentityCoreConstants.MULTI_ATTRIBUTE_SEPARATOR))
.thenReturn(",");
// Test with a json response which is not empty.
Map<String, Object> jsonObject = new HashMap<>();
jsonObject.put("name", new String("{\"name\" : \"rasikap@wso2.com\"}"));
String json = jsonObject.toString();
doReturn(json).when(openIDConnectAuthenticator).sendRequest(any(String.class),
any(String.class));
}
private void setParametersForOAuthClientResponse(OAuthClientResponse mockOAuthClientResponse,
String accessToken, String idToken) {
when(mockOAuthClientResponse.getParam(OIDCAuthenticatorConstants.ACCESS_TOKEN)).thenReturn(accessToken);
when(mockOAuthClientResponse.getParam(OIDCAuthenticatorConstants.ID_TOKEN)).thenReturn(idToken);
}
private void mockAuthenticationRequestContext(AuthenticationContext mockAuthenticationContext) {
when(mockAuthenticationContext.getAuthenticatorProperties()).thenReturn(authenticatorProperties);
paramValueMap = new HashMap<>();
when(mockAuthenticationContext.getProperty("oidc:param.map")).thenReturn(paramValueMap);
when(mockAuthenticationContext.getContextIdentifier()).thenReturn("");
}
}
|
|
package edu.ncsu.mobile.traces;
import android.app.DatePickerDialog;
import android.app.DatePickerDialog.OnDateSetListener;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.support.v4.app.FragmentActivity;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.app.ActionBarDrawerToggle;
import android.text.InputType;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.DatePicker;
import android.view.animation.BounceInterpolator;
import android.view.animation.Interpolator;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RadioGroup;
import android.widget.RelativeLayout;
import android.widget.SearchView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.Projection;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.koushikdutta.ion.Ion;
import com.sothree.slidinguppanel.SlidingUpPanelLayout;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ExecutionException;
import static edu.ncsu.mobile.traces.R.id;
import static edu.ncsu.mobile.traces.R.layout;
public class MapsActivity extends FragmentActivity implements LocationListener,GoogleMap.OnMapLongClickListener, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, View.OnClickListener {
private GoogleApiClient googleAPI;
private boolean firstRun;
private SearchView search;
private RelativeLayout rel_layout;
private static final String LOG_APPTAG = "Traces App";
protected GoogleMap mMap; // Might be null if Google Play services APK is not available.
protected HashMap<Marker, CustomMarker> mMarkersHashMap;
private ArrayList<CustomMarker> customMarkersArray = new ArrayList<>();
private AddressAPIQuery addressQuery = new AddressAPIQuery(null, null, null, null);
private CoordinateAPIQuery coordinateAPIQuery = new CoordinateAPIQuery(null, null, null, null, null);
// Sliding (up) panel references
private EditText mAddressEditText;
private EditText mRadiusEditText;
private EditText mFromDateEditText;
private EditText mUntilDateEditText;
private SlidingUpPanelLayout mSlidingPanelLayout;
// DatePicker references (Advanced Search Drawer)
private DatePickerDialog mFromDatePickerDialog;
private DatePickerDialog mUntilDatePickerDialog;
private SimpleDateFormat mDateFormatter;
// Drawer layout
private DrawerLayout mDrawerLayout;
private ListView mDrawerList;
private ActionBarDrawerToggle mDrawerToggle;
// private String[] mDrawerStrings;
private CharSequence mDrawerTitle;
private CharSequence mTitle;
private ArrayList<Intel> mTweetIntel;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
firstRun = true;
googleAPI = new GoogleApiClient.Builder(this)
.addApi(LocationServices.API)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
setContentView(layout.activity_maps);
setUpMapIfNeeded();
mMap.setOnMapLongClickListener(this);
search = new SearchView(MapsActivity.this);
rel_layout = (RelativeLayout) findViewById(id.rl);
// Reference to sliding panel
mSlidingPanelLayout = (SlidingUpPanelLayout) findViewById(R.id.sliding_layout);
mDateFormatter = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
// Advanced Search fields
mAddressEditText = (EditText) findViewById(R.id.addressText);
mRadiusEditText = (EditText) findViewById(id.radiusText);
mFromDateEditText = (EditText) findViewById(id.fromDateText);
mFromDateEditText.setInputType(InputType.TYPE_NULL);
mUntilDateEditText = (EditText) findViewById(id.untilDateText);
mUntilDateEditText.setInputType(InputType.TYPE_NULL);
this.setDateTimeField();
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
RadioGroup.LayoutParams.WRAP_CONTENT, RadioGroup.LayoutParams.WRAP_CONTENT);
params.setMargins(40, 30, 0, 0);
search.setQueryHint("Enter Location");
search.setBackgroundColor(Color.WHITE);
search.getBackground().setAlpha(205);
search.setLayoutParams(params);
rel_layout.addView(search);
//***setOnQueryTextListener***
search.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
if (query.trim().isEmpty()) {
Toast.makeText(getBaseContext(), "Enter Location",
Toast.LENGTH_SHORT).show();
} else {
retrieveTweetLocationsAndPlot();
search.setQuery("", false);
search.setIconified(true);
}
return false;
}
@Override
public boolean onQueryTextChange(String newText) {
//Do Nothing
return false;
}
});
// set listener on keyboard search button for the address field
mAddressEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (actionId == EditorInfo.IME_ACTION_SEARCH) {
hideSoftKeyboard();
mSlidingPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
sendSearchValues(findViewById(v.getId()));
return true;
}
return false;
}
});
// set listener on keyboard radius button for the radius field
mRadiusEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (actionId == EditorInfo.IME_ACTION_SEARCH) {
hideSoftKeyboard();
mSlidingPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
sendSearchValues(findViewById(v.getId()));
return true;
}
return false;
}
});
}
@Override
public void onLocationChanged(Location location) {
}
@Override
protected void onResume() {
super.onResume();
setUpMapIfNeeded();
}
private void setUpMapIfNeeded() {
// Do a null check to confirm that we have not already instantiated the map.
if (mMap == null) {
// Try to obtain the map from the SupportMapFragment.
mMap = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(id.map))
.getMap();
}
// Check if we were successful in obtaining the map.
if (mMap != null) {
mMap.setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(com.google.android.gms.maps.model.Marker marker) {
marker.showInfoWindow();
return true;
}
});
} else {
Toast.makeText(getApplicationContext(), "Unable to create Maps", Toast.LENGTH_SHORT).show();
}
}
private void centerMapToCurrentLocation() {
// only get location once per app run
if (!firstRun)
return;
Location myLocation = LocationServices.FusedLocationApi.getLastLocation(googleAPI);
if (myLocation != null) {
double latitude = myLocation.getLatitude();
double longitude = myLocation.getLongitude();
LatLng latLng = new LatLng(latitude, longitude);
mMap.addMarker(new MarkerOptions().position(latLng));
} else {
errorToast("Cannot determine current location.\n Lets pretend we work for Google!");
myLocation = new Location("");
myLocation.setLatitude(37.4219928);
myLocation.setLongitude(-122.0840694);
}
plotTweetsByDefaultLocation(myLocation);
}
private void plotTweetsByDefaultLocation(Location myLocation) {
coordinateAPIQuery.lat = myLocation.getLatitude() + "";
coordinateAPIQuery.lng = myLocation.getLongitude() + "";
coordinateAPIQuery.rad = null;
coordinateAPIQuery.since = null;
coordinateAPIQuery.until = null;
plotTweetsOnMap(new CoordinateGet(), coordinateAPIQuery);
}
private void retrieveTweetLocationsAndPlot() {
// only street is required, the rest can be set to null or empty string
addressQuery.s = search.getQuery().toString().trim();
addressQuery.rad = null;
addressQuery.since = null;
addressQuery.until = null;
plotTweetsOnMap(new AddressGet(), addressQuery);
}
private void zoomToNewLocation(LatLng loc) {
mMap.animateCamera(CameraUpdateFactory.newLatLngZoom(loc, 13.9f));
}
// If original is true, returns the full-size image URL, which can be very large
private String betterImageURL(String url, boolean original) {
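// Twitter profile image URLs carry a size suffix ("_normal"); swap it for "_bigger", or drop it entirely when the full-size original is requested (7 == "_normal".length()).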
int s = url.lastIndexOf("_normal");
return (url.substring(0, s) + (original ? "" : "_bigger")) + url.substring(s + 7);
}
private void plotTweetsOnMap(BaseGet queryGet, BaseAPIQuery queryData) {
try {
TracesAPIWrapper resultWrapper;
TracesAPI result;
try {
resultWrapper = queryGet.execute(queryData).get();
} catch (InterruptedException | ExecutionException t) {
errorToast("Thread Killed: " + t.getMessage());
return;
}
if (resultWrapper.error != null) {
errorToast(resultWrapper.error);
return;
} else {
result = resultWrapper.api;
}
mMap.clear();
mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
mTweetIntel = new ArrayList<Intel>();//String[result.getIntel().size()];
final String listTitle = result.getSearchLocation().getAddress();
// Initialize the HashMap for Markers and MyMarker object
mMarkersHashMap = new HashMap<>();
customMarkersArray = new ArrayList<>();
for (Intel tweet : result.getIntel()) {
User user = tweet.getUser();
final String userName = user.getName();
final String profileImageUrl = betterImageURL(user.getProfileImageUrlHttps(), false);
final String tweetText = tweet.getText();
final long retweetCount = tweet.getRetweetCount();
final String dataUrl = user.getUrl();
final long favCount = tweet.getFavoriteCount();
//final String profileLocation = user.getProfileLocation();
final edu.ncsu.mobile.traces.Location loc = tweet.getLocation();
mTweetIntel.add(tweet);
final LatLng userPos = new LatLng(
loc.getLat(),
loc.getLng()
);
/* Create markers for the tweet data.
Must run this on the UI thread since it's a UI operation.
*/
runOnUiThread(new Runnable() {
public void run() {
try {
customMarkersArray.add(new CustomMarker(userName, tweetText, profileImageUrl, userPos, retweetCount, favCount, dataUrl));
} catch (Exception e) {
Log.e(LOG_APPTAG, "Error adding bitmap marker.", e);
}
}
});
}
mTitle = mDrawerTitle = getTitle();
//mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
mDrawerList = (ListView) findViewById(R.id.left_drawer);
mDrawerList.setOnItemClickListener(new DrawerItemClickListener());
//this.getActionBar().setDisplayHomeAsUpEnabled(true);
//this.getActionBar().setHomeButtonEnabled(true);
final CustomAdapter adapter = new CustomAdapter(getApplicationContext(), customMarkersArray);
mDrawerList.setAdapter(adapter);
mDrawerList.bringToFront();
mDrawerList.setOnScrollListener(new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
// TODO Auto-generated method stub
if (scrollState == SCROLL_STATE_IDLE) {
mDrawerList.bringToFront();
// mDrawerLayout.requestLayout();
}
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem,
int visibleItemCount, int totalItemCount) {
// TODO Auto-generated method stub
}
});
plotMarkers(customMarkersArray);
edu.ncsu.mobile.traces.Location search_loc = result.getSearchLocation().getLocation();
zoomToNewLocation(new LatLng(search_loc.getLat(), search_loc.getLng()));
} catch (Exception e) {
errorToast("Unknown error: " + e.getClass().getSimpleName());
}
}
private void plotMarkers(ArrayList<CustomMarker> customMarkersArray) {
if(customMarkersArray.size() > 0)
{
for (CustomMarker myMarker : customMarkersArray)
{
Bitmap bmImg = null;
try {
bmImg = Ion.with(getApplicationContext())
.load(myMarker.getmProfileImgHttpUrl()).asBitmap().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
int ColorIndicatorValue;
// The indicator color varies with the retweet count (our custom-defined formula).
if (myMarker.getRetweetCount() > 5) {
ColorIndicatorValue = Color.RED;
} else if (myMarker.getRetweetCount() >= 1 && myMarker.getRetweetCount() <= 5) {
ColorIndicatorValue = Color.YELLOW;
} else {
ColorIndicatorValue = Color.GREEN;
}
Bitmap mapMarkerImg = getCircleCroppedBitmap(bmImg, ColorIndicatorValue);
// Create user marker with custom icon and other options
MarkerOptions markerOption = new MarkerOptions()
.icon(BitmapDescriptorFactory.fromBitmap(mapMarkerImg))
.title(myMarker.getmUserName())
.snippet(myMarker.getmTweetText())
.position(myMarker.getmLocation());
Marker currentMarker = mMap.addMarker(markerOption);
mMarkersHashMap.put(currentMarker, myMarker);
mMap.setInfoWindowAdapter(new MarkerInfoWindowAdapter());
}
}
}
public boolean bounceMarker(final Marker marker){
//Make the marker bounce
final Handler handler = new Handler();
final long startTime = SystemClock.uptimeMillis();
final long duration = 2000;
Projection proj = mMap.getProjection();
final LatLng markerLatLng = marker.getPosition();
Point startPoint = proj.toScreenLocation(markerLatLng);
startPoint.offset(0, -100);
final LatLng startLatLng = proj.fromScreenLocation(startPoint);
final Interpolator interpolator = new BounceInterpolator();
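// Repost roughly every 16 ms, interpolating the marker from the raised start position back to its true position with a bounce curve over the 2-second duration.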
handler.post(new Runnable() {
@Override
public void run() {
long elapsed = SystemClock.uptimeMillis() - startTime;
float t = interpolator.getInterpolation((float) elapsed / duration);
double lng = t * markerLatLng.longitude + (1 - t) * startLatLng.longitude;
double lat = t * markerLatLng.latitude + (1 - t) * startLatLng.latitude;
marker.setPosition(new LatLng(lat, lng));
if (t < 1.0) {
// Post again 16ms later.
handler.postDelayed(this, 16);
}
}
});
//return false; //have not consumed the event
return true; //have consumed the event
}
public Bitmap getCircleCroppedBitmap(Bitmap bitmap,int colorIndicator) {
Bitmap output = Bitmap.createBitmap(bitmap.getWidth(),
bitmap.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(output);
final int color = 0xff424242;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
// canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
canvas.drawCircle(bitmap.getWidth() / 2, bitmap.getHeight() / 2,
bitmap.getWidth() / 2, paint);
paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
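// SRC_IN keeps only the bitmap pixels that overlap the circle drawn above, producing the circular crop.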
canvas.drawBitmap(bitmap, rect, rect, paint);
/* Re-scale the profile image to decent size & return */
// return Bitmap.createScaledBitmap(output, 120, 120, false);
//The source profile image is only 73x73, so avoid aggressive upscaling, which degrades quality.
Bitmap borderedOutput = getColorBorderedBitmapVersion(output, colorIndicator);
return Bitmap.createScaledBitmap(borderedOutput, 100, 100, false);
}
private Bitmap getColorBorderedBitmapVersion(Bitmap bitmap,int color) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
int radius = Math.min(h / 2, w / 2);
Bitmap output = Bitmap.createBitmap(w + 8, h + 8, Bitmap.Config.ARGB_8888);
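// The output is 8px larger than the source so a 4px margin remains on every side for the colored border ring drawn below.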
Paint p = new Paint();
p.setAntiAlias(true);
Canvas c = new Canvas(output);
c.drawARGB(0, 0, 0, 0);
p.setStyle(Paint.Style.FILL);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
c.drawBitmap(bitmap, 4, 4, p);
p.setXfermode(null);
p.setStyle(Paint.Style.STROKE);
p.setColor(color);
p.setStrokeWidth(3);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
return output;
}
/* Shows a toast for the given error message and logs it. */
private void errorToast(String error) {
Log.d(LOG_APPTAG, error);
Toast.makeText(getBaseContext(), error,
Toast.LENGTH_LONG).show();
}
@Override
public void onMapLongClick(final LatLng latLng) {
//Probably should not put a marker when no results are found
mMap.addMarker(new MarkerOptions()
.position(latLng)
.title("You long-pressed here")
.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_RED))).setAlpha(0.5f);
// **It would be nice to give the marker an explosive or fade-out animation when the tweets load on the map**
runOnUiThread(new Runnable() {
@Override
public void run() {
coordinateAPIQuery.lat = latLng.latitude + "";
coordinateAPIQuery.lng = latLng.longitude + "";
coordinateAPIQuery.rad = null;
coordinateAPIQuery.since = null;
coordinateAPIQuery.until = null;
plotTweetsOnMap(new CoordinateGet(), coordinateAPIQuery);
}
});
}
/**
* Retrieves values from the text fields in the advanced search box,
* hides the drawer, and submits the search results to be displayed
*
* @param view the view
*/
public void sendSearchValues(View view) {
this.verifyDates();
addressQuery.s = mAddressEditText.getText().toString();
addressQuery.rad = mRadiusEditText.getText().toString();
addressQuery.since = mFromDateEditText.getText().toString();
addressQuery.until = mUntilDateEditText.getText().toString();
mSlidingPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
plotTweetsOnMap(new AddressGet(), addressQuery);
}
@Override
public void onConnected(Bundle bundle) {
centerMapToCurrentLocation();
firstRun = false;
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
@Override
public void onStart() {
super.onStart();
googleAPI.connect();
}
@Override
public void onStop() {
super.onStop();
googleAPI.disconnect();
}
private void setDateTimeField() {
mFromDateEditText.setOnClickListener(this);
mUntilDateEditText.setOnClickListener(this);
Calendar newCalendar = Calendar.getInstance();
mFromDatePickerDialog = new DatePickerDialog(this, new OnDateSetListener() {
public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
Calendar newDate = Calendar.getInstance();
newDate.set(year, monthOfYear, dayOfMonth);
mFromDateEditText.setText(mDateFormatter.format(newDate.getTime()));
}
},newCalendar.get(Calendar.YEAR), newCalendar.get(Calendar.MONTH), newCalendar.get(Calendar.DAY_OF_MONTH));
mUntilDatePickerDialog = new DatePickerDialog(this, new OnDateSetListener() {
public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
Calendar newDate = Calendar.getInstance();
newDate.set(year, monthOfYear, dayOfMonth);
mUntilDateEditText.setText(mDateFormatter.format(newDate.getTime()));
}
},newCalendar.get(Calendar.YEAR), newCalendar.get(Calendar.MONTH), newCalendar.get(Calendar.DAY_OF_MONTH));
}
@Override
public void onClick(View view) {
if(view == mFromDateEditText) {
mFromDatePickerDialog.show();
} else if(view == mUntilDateEditText) {
mUntilDatePickerDialog.show();
}
}
public class MarkerInfoWindowAdapter implements GoogleMap.InfoWindowAdapter
{
public MarkerInfoWindowAdapter()
{
}
@Override
public View getInfoWindow(Marker marker)
{
return null;
}
@Override
public View getInfoContents(Marker marker)
{
bounceMarker(marker);
// Auto-center the marker's position on the map screen when its info window is shown
int zoom = (int)mMap.getCameraPosition().zoom;
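// Shift the camera target north of the marker by a zoom-dependent amount (90 / 2^zoom degrees) so the marker sits below center and its info window has room on screen.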
CameraUpdate cu = CameraUpdateFactory.newLatLngZoom(new LatLng(marker.getPosition().latitude + (double)90/Math.pow(2, zoom), marker.getPosition().longitude), zoom);
mMap.animateCamera(cu);
View v = getLayoutInflater().inflate(layout.info_window, null);
CustomMarker myMarker = mMarkersHashMap.get(marker);
ImageView markerIcon = (ImageView) v.findViewById(id.popUpImageView);
TextView markerTweet = (TextView)v.findViewById(id.popUpTweetContent);
TextView markerTitle = (TextView)v.findViewById(id.popUpTitle);
// TextView markerFavoriteCount = (TextView)v.findViewById(id.textFavorite);
Bitmap bmImg = null;
try {
bmImg = Ion.with(getApplicationContext())
.load(myMarker.getmProfileImgHttpUrl()).asBitmap().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
int colorValue = Color.LTGRAY;
Bitmap mapMarkerImg = getCircleCroppedBitmap(bmImg,colorValue);
markerIcon.setImageBitmap(mapMarkerImg);
markerTitle.setText(myMarker.getmUserName());
markerTweet.setText(myMarker.getmTweetText());
// markerFavoriteCount.setText(""+myMarker.getFavoriteCount());
return v;
}
}
/* The click listener for the ListView in the navigation drawer */
private class DrawerItemClickListener implements ListView.OnItemClickListener {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
selectItem(position);
}
}
private void selectItem(int position) {
// update the main content by replacing fragments
/*
Fragment fragment = new PlanetFragment();
Bundle args = new Bundle();
args.putInt(PlanetFragment.ARG_PLANET_NUMBER, position);
fragment.setArguments(args);
FragmentManager fragmentManager = getFragmentManager();
fragmentManager.beginTransaction().replace(R.id.content_frame, fragment).commit();
// update selected item and title, then close the drawer
mDrawerList.setItemChecked(position, true);
setTitle(mPlanetTitles[position]);
mDrawerLayout.closeDrawer(mDrawerList);
*/
}
@Override
public void setTitle(CharSequence title) {
mTitle = title;
//getActionBar().setTitle(mTitle);
}
@Override
public void onBackPressed() {
if (mSlidingPanelLayout.getPanelState() == SlidingUpPanelLayout.PanelState.EXPANDED) {
mSlidingPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
} else {
super.onBackPressed();
}
}
private void hideSoftKeyboard() {
if(getCurrentFocus()!=null) {
InputMethodManager inputMethodManager = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE);
inputMethodManager.hideSoftInputFromWindow(getCurrentFocus().getWindowToken(), 0);
}
}
private void verifyDates() {
String fromDate = mFromDateEditText.getText().toString().trim();
String untilDate = mUntilDateEditText.getText().toString().trim();
boolean fromDateFilled = (fromDate != null) && (!fromDate.isEmpty());
boolean untilDateFilled = (untilDate != null) && (!untilDate.isEmpty());
boolean bothDatesFilled = fromDateFilled && untilDateFilled;
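// If both dates are identical, push the until-date forward one day; presumably the backend treats the until-date as exclusive, so an identical pair would otherwise yield an empty range.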
if (bothDatesFilled && fromDate.equals(untilDate)) {
String[] dateArray = untilDate.split("-");
int year = Integer.valueOf(dateArray[0]);
int month = Integer.valueOf(dateArray[1]) - 1; //months are 0-based
int day = Integer.valueOf(dateArray[2]);
System.out.println("Year: " + year + " Month: " + month + " Day: " + day);
Calendar date = Calendar.getInstance();
date.set(year, month, day);
System.out.println("Before: " + date.toString());
date.add(Calendar.DAY_OF_MONTH, 1);
System.out.println("After: " + date.toString());
System.out.println("Formatted: " + mDateFormatter.format(date.getTime()));
mUntilDateEditText.setText(mDateFormatter.format(date.getTime()));
}
}
}
|
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.phreak;
import org.drools.core.common.EventFactHandle;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.LeftTupleSets;
import org.drools.core.common.LeftTupleSetsImpl;
import org.drools.core.common.Memory;
import org.drools.core.common.MemoryFactory;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.common.RightTupleSets;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.reteoo.AbstractTerminalNode;
import org.drools.core.reteoo.AccumulateNode;
import org.drools.core.reteoo.AccumulateNode.AccumulateContext;
import org.drools.core.reteoo.AccumulateNode.AccumulateMemory;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.BetaNode;
import org.drools.core.reteoo.FromNode.FromMemory;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.LeftInputAdapterNode.RightTupleSinkAdapter;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.LeftTupleMemory;
import org.drools.core.reteoo.LeftTupleSink;
import org.drools.core.reteoo.LeftTupleSinkNode;
import org.drools.core.reteoo.LeftTupleSource;
import org.drools.core.reteoo.NodeTypeEnums;
import org.drools.core.reteoo.ObjectSource;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.ObjectTypeNode.ObjectTypeNodeMemory;
import org.drools.core.reteoo.PathMemory;
import org.drools.core.reteoo.RightInputAdapterNode;
import org.drools.core.reteoo.RightInputAdapterNode.RiaNodeMemory;
import org.drools.core.reteoo.RightTuple;
import org.drools.core.reteoo.RightTupleMemory;
import org.drools.core.reteoo.SegmentMemory;
import org.drools.core.reteoo.TerminalNode;
import org.drools.core.reteoo.WindowNode;
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.FastIterator;
import org.drools.core.util.LinkedList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class AddRemoveRule {
private static final Logger log = LoggerFactory.getLogger(AddRemoveRule.class);
public static void addRule(TerminalNode tn, InternalWorkingMemory[] wms, InternalKnowledgeBase kBase) {
if ( log.isTraceEnabled() ) {
log.trace("Adding Rule {}", tn.getRule().getName() );
}
LeftTupleSource splitStartLeftTupleSource = getNetworkSplitPoint(tn);
kBase.invalidateSegmentPrototype(splitStartLeftTupleSource);
for (InternalWorkingMemory wm : wms) {
if (splitStartLeftTupleSource.getAssociations().size() > 1) {
List<PathMemory> pathMems = new ArrayList<PathMemory>();
collectRtnPathMemories(splitStartLeftTupleSource, wm, pathMems, tn); // get all PathMemories, except current
PathMemory newPmem = (PathMemory) wm.getNodeMemory((MemoryFactory) tn);
int s = getSegmentPos(splitStartLeftTupleSource, null);
LeftTupleSink[] sinks = splitStartLeftTupleSource.getSinkPropagator().getSinks();
if (sinks.length == 2 || (sinks.length == 3 && NodeTypeEnums.isBetaNode(sinks[2])) && ((BetaNode) sinks[2]).isRightInputIsRiaNode()) {
List<SegmentMemory[]> previousSmems = reInitPathMemories(wm, pathMems, null);
// can only be two if the adding node caused the split to be created
int p = 0;
SegmentMemory splitSmem = null;
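// For each pre-existing PathMemory: segments before the split point, the segment at the split point, and segments after it each need a different correction.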
for (PathMemory pmem : pathMems) {
SegmentMemory[] smems = previousSmems.get(p);
for (int i = 0; i < smems.length; i++) {
SegmentMemory sm = smems[i];
if (sm == null) {
continue; // SegmentMemory is not yet initialized
}
if (i < s) {
correctSegmentBeforeSplitOnAdd(wm, newPmem, p, pmem, sm);
} else if (i == s) {
splitSmem = correctSegmentOnSplitOnAdd(splitStartLeftTupleSource, wm, newPmem, p, splitSmem, pmem, sm);
} else if (i > s) {
correctSegmentAfterSplitOnAdd(wm, pmem, i, sm);
}
}
p++;
}
} else {
SegmentMemory sm = pathMems.get(0).getSegmentMemories()[s];
if (sm == null) {
continue; // Segments are initialised lazily, so the SM may not exist yet, and thus no processing is needed
}
initNewSegment(splitStartLeftTupleSource, wm, sm);
correctSegmentBeforeSplitOnAdd(wm, newPmem, 0, pathMems.get(0), sm);
}
}
if (NodeTypeEnums.LeftInputAdapterNode == splitStartLeftTupleSource.getType() && splitStartLeftTupleSource.getAssociations().size() == 1) {
// rule added with no sharing
insertLiaFacts( splitStartLeftTupleSource, wm );
}
insertFacts( splitStartLeftTupleSource.getSinkPropagator().getLastLeftTupleSink(), wm);
}
}
public static void removeRule(TerminalNode tn, InternalWorkingMemory[] wms, InternalKnowledgeBase kBase) {
if ( log.isTraceEnabled() ) {
log.trace("Removing Rule {}", tn.getRule().getName() );
}
LeftTupleSource splitStartNode = getNetworkSplitPoint(tn);
kBase.invalidateSegmentPrototype(splitStartNode);
for ( InternalWorkingMemory wm : wms ) {
PathMemory removedPmem = (PathMemory) wm.getNodeMemory( tn );
int s = getSegmentPos(splitStartNode, null);
// if a segment is going to be merged it is necessary to flush all its staged left tuples before doing any change to the network
flushSegmentIfMerge(wm, tn, splitStartNode, s);
// must be done before segments are mutated
flushStagedTuples(splitStartNode, removedPmem, wm, true);
wm.flushPropagations();
//
if (NodeTypeEnums.LeftInputAdapterNode == splitStartNode.getType() && splitStartNode.getAssociations().size() == 1) {
// rule being removed has no sharing
deleteLiaFacts(splitStartNode, wm);
}
LeftTupleSink sink;
if ( splitStartNode.getAssociations().size() == 1 ) {
// there is no sharing, so get the node after the root of the only SegmentMemory
SegmentMemory sm = removedPmem.getSegmentMemories()[s];
if ( sm == null ) {
continue; // this rule has not been initialized yet
}
sink = ((LeftInputAdapterNode)sm.getRootNode()).getSinkPropagator().getFirstLeftTupleSink();
} else {
// Sharing exists, get the root of the SegmentMemory after the split
SegmentMemory sm = removedPmem.getSegmentMemories()[s+1];
if ( sm == null ) {
continue; // this rule has not been initialized yet
}
sink = (LeftTupleSink) removedPmem.getSegmentMemories()[s+1].getRootNode();
}
deleteFacts( sink, wm);
if ( splitStartNode.getAssociations().size() > 1 ) {
List<PathMemory> pathMems = new ArrayList<PathMemory>();
collectRtnPathMemories(splitStartNode, wm, pathMems, tn); // get all PathMemories, except current
List<SegmentMemory[]> previousSmems = reInitPathMemories(wm, pathMems, tn.getRule() );
if ( splitStartNode.getSinkPropagator().size() == 2 ) {
// can only be two if the removing node causes the split to be removed
int p = 0;
for ( PathMemory pmem : pathMems) {
SegmentMemory[] smems = previousSmems.get(p);
for (int i = 0; i < smems.length; i++ ) {
SegmentMemory sm = smems[i];
if ( sm == null ) {
continue; // SegmentMemory is not yet initialized
}
if ( i < s ) {
correctSegmentBeforeSplitOnRemove(wm, removedPmem, pmem, sm, p);
} else if ( i == s ) {
if (smems[i+1] != null) {
correctSegmentOnSplitOnRemove(wm, sm, smems[i+1], pmem, removedPmem, p);
i++; // increase to skip merged segment
}
} else if (i > s) {
correctSegmentAfterSplitOnRemove(wm, pmem, i, sm);
}
}
p++;
}
} else {
int p = 0;
for ( PathMemory pmem : pathMems) {
SegmentMemory[] smems = previousSmems.get(p++);
for (int i = 0; i < pmem.getSegmentMemories().length; i++) {
if ( smems[i] == null) {
continue;
}
smems[i].getPathMemories().remove(removedPmem);
pmem.getSegmentMemories()[i] = smems[i];
}
}
}
}
if ( removedPmem.getRuleAgendaItem() != null && removedPmem.getRuleAgendaItem().isQueued() ) {
removedPmem.getRuleAgendaItem().dequeue();
}
}
}
private static void flushSegmentIfMerge(InternalWorkingMemory wm, TerminalNode tn, LeftTupleSource splitStartNode, int segmentPos) {
if ( splitStartNode.getAssociations().size() == 2 ) {
// only handle for the first PathMemory, all others are shared and duplicate until this point
PathMemory pmem = getFirstRtnPathMemory(splitStartNode, wm, tn);
SegmentMemory[] smems = pmem.getSegmentMemories();
SegmentMemory sm1 = smems[segmentPos];
SegmentMemory sm2 = smems[segmentPos+1];
if ( sm1 != null && sm2 != null ) {
if (sm1.getRootNode() == sm1.getTipNode() && NodeTypeEnums.LeftInputAdapterNode == sm1.getTipNode().getType()) {
sm1.setStagedTuples(sm2.getStagedLeftTuples());
} else if ( !sm2.getStagedLeftTuples().isEmpty() ) {
flushStagedTuples(splitStartNode, pmem, wm, false);
}
}
}
}
private static void flushStagedTuples(LeftTupleSource splitStartNode, PathMemory pmem, InternalWorkingMemory wm, boolean removeTuples) {
int smemIndex = getSegmentPos(splitStartNode, null); // index before the segments are merged
SegmentMemory[] smems = pmem.getSegmentMemories();
SegmentMemory sm;
LeftTupleSink sink;
Memory mem;
long bit = 1;
if ( smems.length == 1 ) {
// there is no sharing
sm = smems[0];
if ( sm == null ) {
return; // segment has not yet been initialized
}
sink = ((LeftInputAdapterNode)sm.getRootNode()).getSinkPropagator().getFirstLeftTupleSink();
mem = sm.getNodeMemories().get(1);
bit = 2; // adjust bit to point to next node
} else {
sm = smems[smemIndex+1]; // segment after the split being removed.
if ( sm == null ) {
return; // segment has not yet been initialized
}
sink = (LeftTupleSink) sm.getRootNode();
mem = sm.getNodeMemories().get(0);
}
// stages the LeftTuples for deletion in the target SegmentMemory, looking up the intermediate nodes if necessary.
if (removeTuples) {
processLeftTuples(splitStartNode, sink, sm, wm, false);
}
// The graph must be fully updated before SegmentMemory and PathMemories are mutated
if ( !sm.getStagedLeftTuples().isEmpty() && pmem.isRuleLinked() ) {
new RuleNetworkEvaluator().outerEval( ( LeftInputAdapterNode ) smems[0].getRootNode(),
pmem, sink, bit, mem, smems, smemIndex,
sm.getStagedLeftTuples().takeAll(), wm,
new LinkedList<StackEntry>(),
true, pmem.getRuleAgendaItem().getRuleExecutor() );
}
}
public static void forceFlushLeftTuple(PathMemory pmem, SegmentMemory sm, InternalWorkingMemory wm, LeftTuple leftTuple) {
SegmentMemory[] smems = pmem.getSegmentMemories();
if (smems[0] == null) {
return; // segment has not yet been initialized
}
int smemIndex = sm.getPos();
LeftTupleSink sink;
Memory mem;
long bit = 1;
if ( sm.getRootNode() instanceof LeftInputAdapterNode ) {
sink = ((LeftInputAdapterNode)sm.getRootNode()).getSinkPropagator().getFirstLeftTupleSink();
mem = sm.getNodeMemories().get(1);
bit = 2; // adjust bit to point to next node
} else {
sink = (LeftTupleSink) sm.getRootNode();
mem = sm.getNodeMemories().get(0);
}
LeftTupleSets leftTupleSets = new LeftTupleSetsImpl();
if (leftTuple != null) {
leftTupleSets.addInsert(leftTuple);
}
new RuleNetworkEvaluator().outerEval( ( LeftInputAdapterNode ) smems[0].getRootNode(),
pmem, sink, bit, mem, smems, smemIndex, leftTupleSets, wm,
new LinkedList<StackEntry>(),
true, pmem.getOrCreateRuleAgendaItem(wm).getRuleExecutor() );
}
private static List<SegmentMemory[]> reInitPathMemories(InternalWorkingMemory wm, List<PathMemory> pathMems, RuleImpl removingRule) {
List<SegmentMemory[]> previousSmems = new ArrayList<SegmentMemory[]>();
for ( PathMemory pmem : pathMems) {
// Re initialise all the PathMemories
previousSmems.add(pmem.getSegmentMemories());
LeftTupleSource lts;
LeftTupleSource startRianLts = null;
if ( NodeTypeEnums.isTerminalNode(pmem.getNetworkNode())) {
lts = ((TerminalNode)pmem.getNetworkNode()).getLeftTupleSource();
} else {
RightInputAdapterNode rian = (RightInputAdapterNode)pmem.getNetworkNode();
startRianLts = rian.getStartTupleSource();
lts = rian.getLeftTupleSource();
}
AbstractTerminalNode.initPathMemory(pmem, lts, startRianLts, wm, removingRule); // re-initialise the PathMemory
}
return previousSmems;
}
private static void correctSegmentBeforeSplitOnAdd(InternalWorkingMemory wm, PathMemory newPmem, int p, PathMemory pmem, SegmentMemory sm) {
pmem.getSegmentMemories()[sm.getPos()] = sm;
if ( p == 0 ) {
// only handle for the first PathMemory, all others are shared and duplicate until this point
newPmem.getSegmentMemories()[sm.getPos()] = sm;
sm.getPathMemories().add( newPmem );
sm.notifyRuleLinkSegment(wm);
}
}
private static void correctSegmentBeforeSplitOnRemove(InternalWorkingMemory wm, PathMemory removedPmem, PathMemory pmem, SegmentMemory sm, int p) {
pmem.getSegmentMemories()[sm.getPos()] = sm;
if ( p == 0 ) {
// only handle for the first PathMemory, all others are shared and duplicate until this point
sm.getPathMemories().remove(removedPmem);
sm.notifyRuleLinkSegment(wm);
}
}
private static SegmentMemory correctSegmentOnSplitOnAdd(LeftTupleSource splitStartLeftTupleSource, InternalWorkingMemory wm, PathMemory newPmem, int p, SegmentMemory splitSmem, PathMemory pmem, SegmentMemory sm) {
if ( p == 0 ) {
// split is inside of a segment, so the segment must be split in half and a new one created
splitSmem = splitSegment(sm, splitStartLeftTupleSource);
correctSegmentMemoryAfterSplitOnAdd(splitSmem);
pmem.getSegmentMemories()[sm.getPos()] = sm;
pmem.getSegmentMemories()[splitSmem.getPos()] = splitSmem;
newPmem.getSegmentMemories()[sm.getPos()] = sm;
newPmem.getSegmentMemories()[splitSmem.getPos()] = splitSmem;
sm.getPathMemories().add( newPmem );
splitSmem.getPathMemories().add( newPmem );
sm.notifyRuleLinkSegment(wm);
splitSmem.notifyRuleLinkSegment(wm);
initNewSegment(splitStartLeftTupleSource, wm, sm);
} else {
pmem.getSegmentMemories()[sm.getPos()] = sm;
pmem.getSegmentMemories()[splitSmem.getPos()] = splitSmem;
}
return splitSmem;
}
private static void initNewSegment(LeftTupleSource splitStartLeftTupleSource, InternalWorkingMemory wm, SegmentMemory sm) {
// Initialise the new SegmentMemory
LeftTupleSinkNode peerLts = splitStartLeftTupleSource.getSinkPropagator().getLastLeftTupleSink();
if ( NodeTypeEnums.isBetaNode(peerLts) && ((BetaNode)peerLts).isRightInputIsRiaNode() ) {
LeftTupleSink subNetworkLts = peerLts.getPreviousLeftTupleSinkNode();
Memory memory = wm.getNodeMemory((MemoryFactory) subNetworkLts);
SegmentMemory newSmem = SegmentUtilities.createChildSegment(wm, peerLts, memory);
sm.add(newSmem);
}
Memory memory = wm.getNodeMemory((MemoryFactory) peerLts);
SegmentMemory newSmem = SegmentUtilities.createChildSegment(wm, peerLts, memory);
sm.add(newSmem);
LeftTupleSource lts;
if ( NodeTypeEnums.isTerminalNode(sm.getTipNode() ) ) {
// if tip is RTN, then use parent
lts = ((TerminalNode)sm.getTipNode()).getLeftTupleSource();
} else {
lts = (LeftTupleSource) sm.getTipNode();
}
processLeftTuples(lts, peerLts, newSmem, wm, true);
}
private static void correctSegmentOnSplitOnRemove(InternalWorkingMemory wm, SegmentMemory sm1,SegmentMemory sm2, PathMemory pmem, PathMemory removedPmem, int p) {
if ( p == 0 ) {
mergeSegment(sm1, sm2);
pmem.getSegmentMemories()[sm1.getPos()] = sm1;
sm1.getPathMemories().remove(removedPmem);
sm1.remove( removedPmem.getSegmentMemories()[sm1.getPos()+1]);
sm1.notifyRuleLinkSegment(wm);
} else {
pmem.getSegmentMemories()[sm1.getPos()] = sm1;
}
}
private static void correctSegmentAfterSplitOnAdd(InternalWorkingMemory wm, PathMemory pmem, int i, SegmentMemory sm) {
if ( sm.getPos() == i ) {
// segment has not yet had its pos and bitmasks updated
correctSegmentMemoryAfterSplitOnAdd(sm);
sm.notifyRuleLinkSegment(wm);
}
pmem.getSegmentMemories()[sm.getPos()] = sm;
}
private static void correctSegmentAfterSplitOnRemove(InternalWorkingMemory wm, PathMemory pmem, int i, SegmentMemory sm) {
if ( sm.getPos() == i ) {
// segment has not yet had its pos and bitmasks updated
correctSegmentMemoryAfterSplitOnRemove(sm);
sm.notifyRuleLinkSegment(wm);
}
pmem.getSegmentMemories()[sm.getPos()] = sm;
}
public static void correctSegmentMemoryAfterSplitOnAdd(SegmentMemory sm) {
sm.setPos(sm.getPos() + 1);
sm.setSegmentPosMaskBit(sm.getSegmentPosMaskBit() << 1);
}
public static void correctSegmentMemoryAfterSplitOnRemove(SegmentMemory sm) {
sm.setPos(sm.getPos() - 1);
sm.setSegmentPosMaskBit(sm.getSegmentPosMaskBit() >> 1);
}
public static int getSegmentPos(LeftTupleSource lts, RuleImpl removingRule) {
int counter = 0;
while ( lts.getType() != NodeTypeEnums.LeftInputAdapterNode ) {
if ( !SegmentUtilities.parentInSameSegment( lts, removingRule ) ) {
counter++;
}
lts = lts.getLeftTupleSource();
}
return counter;
}
private static void insertLiaFacts(LeftTupleSource startNode, InternalWorkingMemory wm) {
// rule added with no sharing
PropagationContextFactory pctxFactory = wm.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory();
final PropagationContext pctx = pctxFactory.createPropagationContext(wm.getNextPropagationIdCounter(), PropagationContext.RULE_ADDITION, null, null, null);
LeftInputAdapterNode lian = (LeftInputAdapterNode) startNode;
RightTupleSinkAdapter liaAdapter = new RightTupleSinkAdapter(lian);
lian.getObjectSource().updateSink(liaAdapter, pctx, wm);
}
private static void insertFacts(LeftTupleSink startNode, InternalWorkingMemory wm) {
LeftTupleSink lts = startNode;
PropagationContextFactory pctxFactory = wm.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory();
while (!NodeTypeEnums.isTerminalNode(lts) && lts.getLeftTupleSource().getType() != NodeTypeEnums.RightInputAdaterNode ) {
if (NodeTypeEnums.isBetaNode(lts)) {
BetaNode bn = (BetaNode) lts;
if (!bn.isRightInputIsRiaNode() ) {
final PropagationContext pctx = pctxFactory.createPropagationContext(wm.getNextPropagationIdCounter(), PropagationContext.RULE_ADDITION, null, null, null);
bn.getRightInput().updateSink(bn,
pctx,
wm);
} else {
insertSubnetworkFacts(bn, wm);
}
} else if ( lts.getType() == NodeTypeEnums.RightInputAdaterNode ) {
// no need to delete anything, as this gets propagated during the rule evaluation
return;
}
lts = ((LeftTupleSource) lts).getSinkPropagator().getFirstLeftTupleSink();
}
}
private static void insertSubnetworkFacts(BetaNode bn, InternalWorkingMemory wm) {
RightInputAdapterNode rian = ( RightInputAdapterNode ) bn.getRightInput();
LeftTupleSource subLts = rian.getLeftTupleSource();
while ( subLts.getLeftTupleSource() != rian.getStartTupleSource() ) {
subLts = subLts.getLeftTupleSource();
}
insertFacts( ( LeftTupleSink ) subLts, wm);
}
private static void deleteLiaFacts(LeftTupleSource startNode, InternalWorkingMemory wm) {
LeftInputAdapterNode lian = ( LeftInputAdapterNode ) startNode;
ObjectSource os = lian.getObjectSource();
while (os.getType() != NodeTypeEnums.ObjectTypeNode) {
os = os.getParentObjectSource();
}
ObjectTypeNode otn = (ObjectTypeNode) os;
final ObjectTypeNodeMemory omem = (ObjectTypeNodeMemory) wm.getNodeMemory(otn);
Iterator<InternalFactHandle> it = omem.iterator();
while (it.hasNext()) {
InternalFactHandle fh = it.next();
for (LeftTuple childLt = fh.getFirstLeftTuple(); childLt != null; ) {
LeftTuple next = childLt.getLeftParentNext();
//stagedLeftTuples
if ( childLt.getSink() == lian ) {
fh.removeLeftTuple(childLt);
}
childLt = next;
}
}
}
public static void deleteFacts(LeftTupleSink startNode, InternalWorkingMemory wm) {
LeftTupleSink lts = startNode;
while (!NodeTypeEnums.isTerminalNode(lts) && lts.getLeftTupleSource().getType() != NodeTypeEnums.RightInputAdaterNode ) {
if (NodeTypeEnums.isBetaNode(lts)) {
BetaNode bn = (BetaNode) lts;
if (!bn.isRightInputIsRiaNode() ) {
BetaMemory bm;
if ( bn.getType() == NodeTypeEnums.AccumulateNode ) {
bm = ((AccumulateMemory)wm.getNodeMemory( bn )).getBetaMemory();
} else {
bm = ( BetaMemory ) wm.getNodeMemory( bn );
}
RightTupleMemory rtm = bm.getRightTupleMemory();
FastIterator it = rtm.fullFastIterator();
for (RightTuple rightTuple = BetaNode.getFirstRightTuple(rtm, it); rightTuple != null; ) {
RightTuple next = (RightTuple) it.next(rightTuple);
rtm.remove(rightTuple);
rightTuple.unlinkFromRightParent();
rightTuple = next;
}
RightTupleSets srcRightTuples = bm.getStagedRightTuples().takeAll();
unlinkRightTuples(srcRightTuples.getInsertFirst());
unlinkRightTuples(srcRightTuples.getUpdateFirst());
unlinkRightTuples(srcRightTuples.getDeleteFirst());
deleteFactsFromRightInput(bn, wm);
} else {
deleteSubnetworkFacts(bn, wm);
}
} else if ( lts.getType() == NodeTypeEnums.RightInputAdaterNode ) {
// no need to delete anything, as these would have been deleted by the left tuple processing.
return;
}
lts = ((LeftTupleSource) lts).getSinkPropagator().getFirstLeftTupleSink();
}
}
private static void deleteFactsFromRightInput(BetaNode bn, InternalWorkingMemory wm) {
ObjectSource source = bn.getRightInput();
if (source instanceof WindowNode) {
WindowNode.WindowMemory memory = (WindowNode.WindowMemory) wm.getNodeMemory( ( (WindowNode) source ));
for (EventFactHandle factHandle : memory.getFactHandles()) {
for (RightTuple rightTuple = factHandle.getFirstRightTuple(); rightTuple != null; ) {
RightTuple nextRightTuple = rightTuple.getHandleNext();
if (source.equals( rightTuple.getRightTupleSink() )) {
rightTuple.unlinkFromRightParent();
}
rightTuple = nextRightTuple;
}
}
}
}
private static void unlinkRightTuples(RightTuple rightTuple) {
for (RightTuple rt = rightTuple; rt != null; ) {
RightTuple next = rt.getStagedNext();
// this RightTuple could have been already unlinked by the former cycle
if (rt.getFactHandle() != null) {
rt.unlinkFromRightParent();
}
rt = next;
}
}
private static void deleteSubnetworkFacts(BetaNode bn, InternalWorkingMemory wm) {
RightInputAdapterNode rian = ( RightInputAdapterNode ) bn.getRightInput();
LeftTupleSource subLts = rian.getLeftTupleSource();
while ( subLts.getLeftTupleSource() != rian.getStartTupleSource() ) {
subLts = subLts.getLeftTupleSource();
}
deleteFacts((LeftTupleSink) subLts, wm);
}
/**
* Populates the SegmentMemory with staged LeftTuples. If the parent is not a Beta or From node, it iterates up to find the first node with memory; if necessary
* it traverses up to the LiaNode's ObjectTypeNode. It then iterates the LeftTuple chain, following a specific path down to where an existing LeftTuple is staged
* as a delete, or a new LeftTuple is created and staged as an insert. (An illustrative walk-through follows this method.)
*/
public static void processLeftTuples(LeftTupleSource node, LeftTupleSink peerNode, SegmentMemory smem, InternalWorkingMemory wm, boolean insert) {
// *** if you make a fix here, it most likely needs to be in PhreakActivationIterator too ***
// Must iterate up until a node with memory is found, this can be followed to find the LeftTuples to peer
// creates the propagation path to follow
List<LeftTupleSink> sinks = new ArrayList<LeftTupleSink>();
sinks.add(peerNode);
while (NodeTypeEnums.LeftInputAdapterNode != node.getType()) {
Memory memory = wm.getNodeMemory((MemoryFactory) node);
if (memory.getSegmentMemory() == null) {
// segment has never been initialized, which means the rule has never been linked.
return;
}
if (NodeTypeEnums.isBetaNode(node)) {
BetaMemory bm;
if (NodeTypeEnums.AccumulateNode == node.getType()) {
AccumulateMemory am = (AccumulateMemory) memory;
bm = am.getBetaMemory();
FastIterator it = bm.getLeftTupleMemory().fullFastIterator();
LeftTuple lt = BetaNode.getFirstLeftTuple(bm.getLeftTupleMemory(), it);
for (; lt != null; lt = (LeftTuple) it.next(lt)) {
AccumulateContext accctx = (AccumulateContext) lt.getObject();
followPeer(accctx.getResultLeftTuple(), smem, sinks, sinks.size()-1, insert);
}
} else if ( NodeTypeEnums.ExistsNode == node.getType() ) {
bm = (BetaMemory) wm.getNodeMemory((MemoryFactory) node);
FastIterator it = bm.getRightTupleMemory().fullFastIterator(); // done off the RightTupleMemory, as exists only have unblocked tuples on the left side
RightTuple rt = BetaNode.getFirstRightTuple(bm.getRightTupleMemory(), it);
for (; rt != null; rt = (RightTuple) it.next(rt)) {
for ( LeftTuple lt = rt.getBlocked(); lt != null; lt = lt.getBlockedNext() ) {
if ( lt.getFirstChild() != null ) {
followPeer(lt.getFirstChild(), smem, sinks, sinks.size()-1, insert);
}
}
}
} else {
bm = (BetaMemory) wm.getNodeMemory((MemoryFactory) node);
FastIterator it = bm.getLeftTupleMemory().fullFastIterator();
LeftTuple lt = BetaNode.getFirstLeftTuple(bm.getLeftTupleMemory(), it);
for (; lt != null; lt = (LeftTuple) it.next(lt)) {
if ( lt.getFirstChild() != null ) {
followPeerFromLeftInput(lt.getFirstChild(), smem, sinks, insert);
}
}
}
return;
} else if (NodeTypeEnums.FromNode == node.getType()) {
FromMemory fm = (FromMemory) wm.getNodeMemory((MemoryFactory) node);
LeftTupleMemory ltm = fm.getBetaMemory().getLeftTupleMemory();
FastIterator it = ltm.fullFastIterator();
for (LeftTuple lt = ltm.getFirst(null); lt != null; lt = (LeftTuple) it.next(lt)) {
if ( lt.getFirstChild() != null ) {
followPeerFromLeftInput(lt.getFirstChild(), smem, sinks, insert);
}
}
return;
}
sinks.add((LeftTupleSink) node);
node = node.getLeftTupleSource();
}
// No beta or from nodes, so must retrieve LeftTuples from the LiaNode.
// This is done by scanning all the LeftTuples referenced from the FactHandles in the ObjectTypeNode
LeftInputAdapterNode lian = (LeftInputAdapterNode) node;
Memory memory = wm.getNodeMemory((MemoryFactory) node);
if (memory.getSegmentMemory() == null) {
// segment has never been initialized, which means the rule has never been linked.
return;
}
ObjectSource os = lian.getObjectSource();
while (os.getType() != NodeTypeEnums.ObjectTypeNode) {
os = os.getParentObjectSource();
}
ObjectTypeNode otn = (ObjectTypeNode) os;
final ObjectTypeNodeMemory omem = (ObjectTypeNodeMemory) wm.getNodeMemory(otn);
LeftTupleSink firstLiaSink = lian.getSinkPropagator().getFirstLeftTupleSink();
Iterator<InternalFactHandle> it = omem.iterator();
while (it.hasNext()) {
InternalFactHandle fh = it.next();
if (fh.getFirstLeftTuple() != null ) {
for (LeftTuple childLt = fh.getFirstLeftTuple(); childLt != null; childLt = childLt.getLeftParentNext()) {
if ( childLt.getSink() == firstLiaSink ) {
followPeer(childLt, smem, sinks, sinks.size()-1, insert);
}
}
}
}
}
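/*
* Illustrative walk-through (an assumption, not part of the original source): suppose rule R2 is added and shares
* the network with rule R1 up to a JoinNode. processLeftTuples is then called with that shared node as 'node' and
* R2's first exclusive sink as 'peerNode'. The JoinNode has memory, so its LeftTupleMemory is iterated; for each
* LeftTuple that already has children, followPeerFromLeftInput/followPeer walk down the recorded sink path and,
* because 'insert' is true, insertPeerLeftTuple appends a new peer LeftTuple for 'peerNode' and stages it as an
* insert in the new SegmentMemory, ready to be flushed on the next evaluation. With 'insert' false the same
* traversal stages the corresponding peers for deletion instead.
*/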
private static void followPeerFromLeftInput(LeftTuple lt, SegmentMemory smem, List<LeftTupleSink> sinks, boolean insert) {
// *** if you make a fix here, it most likely needs to be in PhreakActivationIterator too ***
for (; lt != null; lt = lt.getLeftParentNext()) {
followPeer(lt, smem, sinks, sinks.size() -1, insert);
}
}
private static void followPeer(LeftTuple lt, SegmentMemory smem, List<LeftTupleSink> sinks, int i, boolean insert) {
// *** if you make a fix here, it most likely needs to be in PhreakActivationIterator too ***
LeftTupleSink sink = sinks.get(i);
if ( i == 0 ) {
if ( insert ) {
if ( NodeTypeEnums.isBetaNode(sink) ) {
BetaNode bn = ( BetaNode ) sink;
if ( bn.isRightInputIsRiaNode() ) {
// must also create and stage the LeftTuple for the subnetwork
SegmentMemory subSmem = smem.getPrevious(); // Subnetwork segment will be before this one
insertPeerLeftTuple(lt, (LeftTupleSink)subSmem.getRootNode(), subSmem);
}
}
insertPeerLeftTuple(lt, sink, smem);
} else {
if ( NodeTypeEnums.isBetaNode(sink) ) {
BetaNode bn = ( BetaNode ) sink;
if ( bn.isRightInputIsRiaNode() ) {
// must also create and stage the LeftTuple for the subnetwork
SegmentMemory subSmem = smem.getPrevious(); // Subnetwork segment will be before this one
deletePeerLeftTuple(lt, (LeftTupleSink)subSmem.getRootNode(), subSmem);
}
}
deletePeerLeftTuple(lt, sink, smem);
}
} else {
LeftTuple peer = lt;
while (peer.getSink() != sink) {
peer = peer.getPeer();
}
if (NodeTypeEnums.AccumulateNode == peer.getLeftTupleSink().getType()) {
AccumulateContext accctx = (AccumulateContext) peer.getObject();
followPeer(accctx.getResultLeftTuple(), smem, sinks, i-1, insert);
} else if ( peer.getFirstChild() != null ) {
for (LeftTuple childLt = peer.getFirstChild(); childLt != null; childLt = childLt.getLeftParentNext()) {
followPeer(childLt, smem, sinks, i-1, insert);
}
}
}
}
private static void deletePeerLeftTuple(LeftTuple lt, LeftTupleSink newNode, SegmentMemory smem) {
LeftTuple peer = lt;
LeftTuple previousPeer = null;
while (peer.getSink() != newNode) {
previousPeer = peer;
peer = peer.getPeer();
}
switch( peer.getStagedType() ) {
case LeftTuple.INSERT: {
// insert was never propagated, thus has no children, does not need to be staged.
smem.getStagedLeftTuples().removeInsert(peer);
break;
}
case LeftTuple.UPDATE: {
smem.getStagedLeftTuples().removeUpdate(peer);
// don't break, so that this falls through and calls addDelete
}
case LeftTuple.NONE: {
smem.getStagedLeftTuples().addDelete(peer);
}
case LeftTuple.DELETE: {
// do nothing, leave it staged for delete; this case is included for documentation purposes
}
}
if (previousPeer == null) {
// the first sink is being removed, which is the first peer. The next peer must be set as the first peer.
LeftTuple leftPrevious = peer.getLeftParentPrevious();
LeftTuple leftNext = peer.getLeftParentNext();
LeftTuple rightPrevious = peer.getRightParentPrevious();
LeftTuple rightNext = peer.getRightParentNext();
LeftTuple newPeer = peer.getPeer();
if ( newPeer != null ) {
replaceChildLeftTuple(peer, leftPrevious, leftNext, rightPrevious, rightNext, newPeer);
} else {
// no peers to support this, so remove completely.
lt.unlinkFromLeftParent();
lt.unlinkFromRightParent();
}
} else {
// mid or end LeftTuple peer is being removed
previousPeer.setPeer(peer.getPeer());
}
}
private static void replaceChildLeftTuple(LeftTuple peer, LeftTuple leftPrevious, LeftTuple leftNext, LeftTuple rightPrevious, LeftTuple rightNext, LeftTuple newPeer) {
boolean isHandle = peer.getLeftParent() == null;
InternalFactHandle fh = peer.getLastHandle();
LeftTuple leftParent = peer.getLeftParent();
RightTuple rightParent = peer.getRightParent();
newPeer.setLeftParent( peer.getLeftParent() );
newPeer.setRightParent( peer.getRightParent() );
// replace left
if ( leftPrevious == null && leftNext == null ) {
// no other tuples, simply replace
if ( isHandle ) {
fh.removeLeftTuple( peer );
fh.addFirstLeftTuple( newPeer );
} else {
peer.unlinkFromLeftParent();
leftParent.setFirstChild(newPeer);
leftParent.setLastChild(newPeer);
}
} else if ( leftNext != null ) {
// replacing first
newPeer.setLeftParentNext(leftNext);
leftNext.setLeftParentPrevious(newPeer);
if ( isHandle ) {
fh.setFirstLeftTuple(newPeer);
} else {
leftParent.setFirstChild(newPeer);
}
} else {
// replacing last
newPeer.setLeftParentPrevious(leftPrevious);
leftPrevious.setLeftParentNext(newPeer);
if ( isHandle ) {
fh.setLastLeftTuple(newPeer);
} else {
leftParent.setLastChild(newPeer);
}
}
// replace right
if ( rightParent != null ) {
// LiaNode LeftTuples have no right parents
if ( rightPrevious == null && rightNext == null ) {
// no other tuples, simply replace
peer.unlinkFromRightParent();
rightParent.setFirstChild(newPeer);
rightParent.setLastChild(newPeer);
} else if ( rightNext != null ) {
// replacing first
newPeer.setRightParentNext(rightNext);
rightNext.setRightParentPrevious(newPeer);
rightParent.setFirstChild(newPeer);
} else {
// replacing last
newPeer.setRightParentPrevious(rightPrevious);
rightPrevious.setRightParentNext(newPeer);
rightParent.setLastChild(newPeer);
}
}
}
private static void insertPeerLeftTuple(LeftTuple lt, LeftTupleSink newNode, SegmentMemory smem) {
// add to end of peer list
LeftTuple peer = lt;
while (peer.getPeer() != null) {
peer = peer.getPeer();
}
LeftTuple newPeer = newNode.createPeer(peer);
smem.getStagedLeftTuples().addInsert(newPeer);
}
private static void collectRtnPathMemories(LeftTupleSource lt,
InternalWorkingMemory wm,
List<PathMemory> pathMems,
TerminalNode excludeTerminalNode) {
for (LeftTupleSink sink : lt.getSinkPropagator().getSinks()) {
if (sink == excludeTerminalNode) {
continue;
}
if (NodeTypeEnums.isLeftTupleSource(sink)) {
collectRtnPathMemories((LeftTupleSource) sink, wm, pathMems, excludeTerminalNode);
} else if (NodeTypeEnums.isTerminalNode(sink)) {
// getting will cause an initialization of rtn, which will recursively initialise rians too.
PathMemory pmem = (PathMemory) wm.getNodeMemory((MemoryFactory) sink);
pathMems.add(pmem);
} else if (NodeTypeEnums.RightInputAdaterNode == sink.getType()) {
RiaNodeMemory riaMem = (RiaNodeMemory) wm.getNodeMemory((MemoryFactory) sink);
pathMems.add(riaMem.getRiaPathMemory());
} else {
throw new RuntimeException("Error: Unknown Node. Defensive programming test..");
}
}
}
private static PathMemory getFirstRtnPathMemory(LeftTupleSource lt,
InternalWorkingMemory wm,
TerminalNode excludeTerminalNode) {
for (LeftTupleSink sink : lt.getSinkPropagator().getSinks()) {
if (sink == excludeTerminalNode) {
continue;
}
if (NodeTypeEnums.isLeftTupleSource(sink)) {
PathMemory result = getFirstRtnPathMemory((LeftTupleSource) sink, wm, excludeTerminalNode);
if (result != null) {
return result;
}
} else if (NodeTypeEnums.isTerminalNode(sink)) {
// getting will cause an initialization of rtn, which will recursively initialise rians too.
return (PathMemory) wm.getNodeMemory((MemoryFactory) sink);
} else if (NodeTypeEnums.RightInputAdaterNode == sink.getType()) {
RiaNodeMemory riaMem = (RiaNodeMemory) wm.getNodeMemory((MemoryFactory) sink);
return riaMem.getRiaPathMemory();
} else {
throw new RuntimeException("Error: Unknown Node. Defensive programming test..");
}
}
return null;
}
public static LeftTupleSource getNetworkSplitPoint(TerminalNode tn) {
LeftTupleSource lt = tn.getLeftTupleSource();
// iterate to find split point, or to the root
while ( lt.getType() != NodeTypeEnums.LeftInputAdapterNode && lt.getAssociations().size() == 1 ) {
lt = lt.getLeftTupleSource();
}
return lt;
}
public static SegmentMemory splitSegment(SegmentMemory sm1, LeftTupleSource splitNode) {
// create new segment, starting after split
SegmentMemory sm2 = new SegmentMemory(splitNode.getSinkPropagator().getFirstLeftTupleSink()); // we know there is only one sink
if ( sm1.getFirst() != null ) {
for ( SegmentMemory sm = sm1.getFirst(); sm != null;) {
SegmentMemory next = sm.getNext();
sm1.remove(sm);
sm2.add(sm);
sm = next;
}
}
sm1.add( sm2 );
sm2.setPos(sm1.getPos()); // clone for now, it's corrected later
sm2.setSegmentPosMaskBit(sm1.getSegmentPosMaskBit()); // clone for now, it's corrected later
sm2.setLinkedNodeMask(sm1.getLinkedNodeMask()); // clone for now, it's corrected later
sm2.getPathMemories().addAll( sm1.getPathMemories() );
// re-assign tip nodes
sm2.setTipNode(sm1.getTipNode());
sm1.setTipNode( splitNode ); // splitNode is now tip of original segment
if ( sm1.getTipNode().getType() == NodeTypeEnums.LeftInputAdapterNode ) {
if ( !sm1.getStagedLeftTuples().isEmpty() ) {
// Segments with only LiaNodes cannot have staged LeftTuples, so move them down to the new Segment
sm2.getStagedLeftTuples().addAll(sm1.getStagedLeftTuples());
}
}
// find the pos of the node in the segment
int pos = nodeSegmentPosition(sm1, splitNode);
splitNodeMemories(sm1, sm2, pos);
splitBitMasks(sm1, sm2, pos);
return sm2;
}
public static void mergeSegment(SegmentMemory sm1, SegmentMemory sm2) {
sm1.remove( sm2 );
if ( sm2.getFirst() != null ) {
for ( SegmentMemory sm = sm2.getFirst(); sm != null;) {
SegmentMemory next = sm.getNext();
sm1.add(sm);
sm2.remove(sm);
sm = next;
}
}
// re-assign tip nodes
sm1.setTipNode(sm2.getTipNode());
mergeNodeMemories(sm1, sm2);
mergeBitMasks(sm1, sm2);
}
private static void splitBitMasks(SegmentMemory sm1, SegmentMemory sm2, int pos) {
int splitPos = pos + 1; // +1 as zero based
long currentAllLinkedMaskTest = sm1.getAllLinkedMaskTest();
long currentLinkedNodeMask = sm1.getLinkedNodeMask();
long mask = (1L << splitPos) - 1;
sm1.setAllLinkedMaskTest( mask & currentAllLinkedMaskTest );
sm1.setLinkedNodeMask( sm1.getLinkedNodeMask() & sm1.getAllLinkedMaskTest() );
mask = currentAllLinkedMaskTest >> splitPos;
sm2.setAllLinkedMaskTest( mask );
sm2.setLinkedNodeMask( mask & (currentLinkedNodeMask >> splitPos) );
}
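/*
* Illustrative arithmetic (an assumption, not part of the original source): for a segment of five nodes split at
* pos = 2 with allLinkedMaskTest = 0b11111, splitPos = 3 and mask = (1L << 3) - 1 = 0b00111, so sm1 keeps only the
* low three bits of its masks, while sm2 receives the remaining bits shifted right by three: a linkedNodeMask of
* 0b11010 becomes 0b00010 on sm1 and 0b00011 on sm2.
*/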
private static void mergeBitMasks(SegmentMemory sm1, SegmentMemory sm2) {
LinkedList<Memory> smNodeMemories2 = sm2.getNodeMemories();
long mask = sm2.getAllLinkedMaskTest() << smNodeMemories2.size();
sm1.setAllLinkedMaskTest( mask & sm1.getAllLinkedMaskTest() );
mask = sm2.getAllLinkedMaskTest() << smNodeMemories2.size();
sm1.setLinkedNodeMask(mask & sm1.getLinkedNodeMask());
}
private static void splitNodeMemories(SegmentMemory sm1, SegmentMemory sm2, int pos) {
LinkedList<Memory> smNodeMemories1 = sm1.getNodeMemories();
LinkedList<Memory> smNodeMemories2 = sm2.getNodeMemories();
Memory mem = smNodeMemories1.getFirst();
int nodePosMask = 1;
for ( int i = 0,length = smNodeMemories1.size(); i < length; i++) {
Memory next = mem.getNext();
if ( i > pos ) {
smNodeMemories1.remove(mem);
smNodeMemories2.add(mem);
mem.setSegmentMemory(sm2);
// correct the NodePosMaskBit
BetaMemory bm = null;
if ( mem instanceof AccumulateNode.AccumulateMemory ) {
bm = ((AccumulateNode.AccumulateMemory) mem).getBetaMemory();
} else if ( mem instanceof BetaMemory ) {
bm = ( BetaMemory ) mem;
}
if ( bm != null ) { // node may not be a beta
bm.setNodePosMaskBit(nodePosMask);
}
nodePosMask = nodePosMask << 1;
}
mem = next;
}
}
private static void mergeNodeMemories(SegmentMemory sm1, SegmentMemory sm2) {
LinkedList<Memory> smNodeMemories1 = sm1.getNodeMemories();
LinkedList<Memory> smNodeMemories2 = sm2.getNodeMemories();
int nodePosMask = 1;
for ( int i = 0,length = smNodeMemories1.size(); i < length; i++) {
nodePosMask = nodePosMask >> 1;
}
for ( Memory mem = smNodeMemories2.getFirst(); mem != null; ) {
Memory next = mem.getNext();
smNodeMemories2.remove(mem);
smNodeMemories1.add(mem);
mem.setSegmentMemory(sm1);
// correct the NodePosMaskBit
BetaMemory bm = null;
if ( mem instanceof AccumulateNode.AccumulateMemory ) {
bm = ((AccumulateNode.AccumulateMemory) mem).getBetaMemory();
} else if ( mem instanceof BetaMemory ) {
bm = ( BetaMemory ) mem;
}
if ( bm != null ) { // node may not be a beta
bm.setNodePosMaskBit(nodePosMask);
}
nodePosMask = nodePosMask >> 1;
mem = next;
}
}
private static int nodeSegmentPosition(SegmentMemory sm1, LeftTupleSource splitNode) {
LeftTupleSource lt = splitNode;
int nodePos = 0;
while ( lt != sm1.getRootNode() ) {
lt = lt.getLeftTupleSource();
nodePos++;
}
return nodePos;
}
}
|
|
package com.thinkgem.jeesite.jms;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TemporaryQueue;
import org.apache.activemq.ActiveMQConnectionFactory;
/**
* The Vendor synchronously, and in a single transaction, receives the
* order from VendorOrderQueue and sends messages to the two Suppliers via
* MonitorOrderQueue and StorageOrderQueue.
* The responses are received asynchronously; when both responses come
* back, the order confirmation message is sent back to the Retailer.
* (An illustrative Retailer-side sketch is appended after this class.)
*/
public class Vendor implements Runnable, MessageListener {
private String url;
private String user;
private String password;
private Session asyncSession;
private int numSuppliers = 2;
private Object supplierLock = new Object();
public Vendor(String url, String user, String password) {
this.url = url;
this.user = user;
this.password = password;
}
public void run() {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(user, password, url);
Session session = null;
Destination orderQueue;
Destination monitorOrderQueue;
Destination storageOrderQueue;
TemporaryQueue vendorConfirmQueue;
MessageConsumer orderConsumer = null;
MessageProducer monitorProducer = null;
MessageProducer storageProducer = null;
try {
Connection connection = connectionFactory.createConnection();
session = connection.createSession(true, Session.SESSION_TRANSACTED);
orderQueue = session.createQueue("VendorOrderQueue");
monitorOrderQueue = session.createQueue("MonitorOrderQueue");
storageOrderQueue = session.createQueue("StorageOrderQueue");
orderConsumer = session.createConsumer(orderQueue);
monitorProducer = session.createProducer(monitorOrderQueue);
storageProducer = session.createProducer(storageOrderQueue);
Connection asyncconnection = connectionFactory.createConnection();
asyncSession = asyncconnection.createSession(true, Session.SESSION_TRANSACTED);
vendorConfirmQueue = asyncSession.createTemporaryQueue();
MessageConsumer confirmConsumer = asyncSession.createConsumer(vendorConfirmQueue);
confirmConsumer.setMessageListener(this);
asyncconnection.start();
connection.start();
while (true) {
Order order = null;
try {
Message inMessage = orderConsumer.receive();
MapMessage message;
if (inMessage instanceof MapMessage) {
message = (MapMessage) inMessage;
} else {
// end of stream
Message outMessage = session.createMessage();
outMessage.setJMSReplyTo(vendorConfirmQueue);
monitorProducer.send(outMessage);
storageProducer.send(outMessage);
session.commit();
break;
}
// Randomly throw an exception in here to simulate a Database error
// and trigger a rollback of the transaction
if (new Random().nextInt(3) == 0) {
throw new JMSException("Simulated Database Error.");
}
order = new Order(message);
MapMessage orderMessage = session.createMapMessage();
orderMessage.setJMSReplyTo(vendorConfirmQueue);
orderMessage.setInt("VendorOrderNumber", order.getOrderNumber());
int quantity = message.getInt("Quantity");
System.out.println("Vendor: Retailer ordered " + quantity + " " + message.getString("Item"));
orderMessage.setInt("Quantity", quantity);
orderMessage.setString("Item", "Monitor");
monitorProducer.send(orderMessage);
System.out.println("Vendor: ordered " + quantity + " Monitor(s)");
orderMessage.setString("Item", "HardDrive");
storageProducer.send(orderMessage);
System.out.println("Vendor: ordered " + quantity + " Hard Drive(s)");
session.commit();
System.out.println("Vendor: Comitted Transaction 1");
} catch (JMSException e) {
System.out.println("Vendor: JMSException Occured: " + e.getMessage());
e.printStackTrace();
session.rollback();
System.out.println("Vendor: Rolled Back Transaction.");
}
}
synchronized (supplierLock) {
while (numSuppliers > 0) {
try {
supplierLock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
connection.close();
asyncconnection.close();
} catch (JMSException e) {
e.printStackTrace();
}
}
public void onMessage(Message message) {
if (!(message instanceof MapMessage)) {
synchronized(supplierLock) {
numSuppliers--;
supplierLock.notifyAll();
}
try {
asyncSession.commit();
return;
} catch (JMSException e) {
e.printStackTrace();
}
}
int orderNumber = -1;
try {
MapMessage componentMessage = (MapMessage) message;
orderNumber = componentMessage.getInt("VendorOrderNumber");
Order order = Order.getOrder(orderNumber);
order.processSubOrder(componentMessage);
asyncSession.commit();
if (! "Pending".equals(order.getStatus())) {
System.out.println("Vendor: Completed processing for order " + orderNumber);
MessageProducer replyProducer = asyncSession.createProducer(order.getMessage().getJMSReplyTo());
MapMessage replyMessage = asyncSession.createMapMessage();
if ("Fulfilled".equals(order.getStatus())) {
replyMessage.setBoolean("OrderAccepted", true);
System.out.println("Vendor: sent " + order.quantity + " computer(s)");
} else {
replyMessage.setBoolean("OrderAccepted", false);
System.out.println("Vendor: unable to send " + order.quantity + " computer(s)");
}
replyProducer.send(replyMessage);
asyncSession.commit();
System.out.println("Vender: committed transaction 2");
}
} catch (JMSException e) {
e.printStackTrace();
}
}
public static class Order {
private static Map<Integer, Order> pendingOrders = new HashMap<Integer, Order>();
private static int nextOrderNumber = 1;
private int orderNumber;
private int quantity;
private MapMessage monitor = null;
private MapMessage storage = null;
private MapMessage message;
private String status;
public Order(MapMessage message) {
this.orderNumber = nextOrderNumber++;
this.message = message;
try {
this.quantity = message.getInt("Quantity");
} catch (JMSException e) {
e.printStackTrace();
this.quantity = 0;
}
status = "Pending";
pendingOrders.put(orderNumber, this);
}
public Object getStatus() {
return status;
}
public int getOrderNumber() {
return orderNumber;
}
public static int getOutstandingOrders() {
return pendingOrders.size();
}
public static Order getOrder(int number) {
return pendingOrders.get(number);
}
public MapMessage getMessage() {
return message;
}
public void processSubOrder(MapMessage message) {
String itemName = null;
try {
itemName = message.getString("Item");
} catch (JMSException e) {
e.printStackTrace();
}
if ("Monitor".equals(itemName)) {
monitor = message;
} else if ("HardDrive".equals(itemName)) {
storage = message;
}
if (null != monitor && null != storage) {
// Received both messages
try {
if (quantity > monitor.getInt("Quantity")) {
status = "Cancelled";
} else if (quantity > storage.getInt("Quantity")) {
status = "Cancelled";
} else {
status = "Fulfilled";
}
} catch (JMSException e) {
e.printStackTrace();
status = "Cancelled";
}
}
}
}
public static void main(String[] args) {
String url = "tcp://localhost:61616";
String user = null;
String password = null;
if (args.length >= 1) {
url = args[0];
}
if (args.length >= 2) {
user = args[1];
}
if (args.length >= 3) {
password = args[2];
}
Vendor v = new Vendor(url, user, password);
new Thread(v, "Vendor").start();
}
}
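/*
* Illustrative sketch (an assumption, not part of the original example): a minimal Retailer-side producer that
* this Vendor expects on VendorOrderQueue. The queue name and the "Item"/"Quantity" keys are taken from the
* Vendor code above; the broker URL and the item text are assumed. A real Retailer would also consume the
* confirmation that the Vendor sends back to the JMSReplyTo destination.
*/
class RetailerOrderSketch {
public static void main(String[] args) throws JMSException {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("tcp://localhost:61616");
Connection connection = connectionFactory.createConnection();
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
Destination orderQueue = session.createQueue("VendorOrderQueue");
MessageProducer producer = session.createProducer(orderQueue);
// the Vendor replies to the destination set in JMSReplyTo, so publish a temporary queue for the confirmation
TemporaryQueue confirmQueue = session.createTemporaryQueue();
MapMessage order = session.createMapMessage();
order.setJMSReplyTo(confirmQueue);
order.setString("Item", "Computer(s)");
order.setInt("Quantity", 2);
producer.send(order);
// a plain (non-Map) message marks the end of the order stream, as handled in the Vendor run() loop above
producer.send(session.createMessage());
connection.close();
}
}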
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.complexscripts.scripts;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fop.complexscripts.util.CharAssociation;
import org.apache.fop.complexscripts.util.GlyphSequence;
// CSOFF: LineLengthCheck
/**
* <p>The <code>DevanagariScriptProcessor</code> class implements a script processor for
* performing glyph substitution and positioning operations on content associated with the Devanagari script.</p>
*
* <p>This work was originally authored by Glenn Adams (gadams@apache.org).</p>
*/
public class DevanagariScriptProcessor extends IndicScriptProcessor {
/** logging instance */
private static final Log log = LogFactory.getLog(DevanagariScriptProcessor.class);
DevanagariScriptProcessor(String script) {
super(script);
}
@Override
protected Class<? extends DevanagariSyllabizer> getSyllabizerClass() {
return DevanagariSyllabizer.class;
}
@Override
// find rightmost pre-base matra
protected int findPreBaseMatra(GlyphSequence gs) {
int ng = gs.getGlyphCount();
int lk = -1;
for (int i = ng; i > 0; i--) {
int k = i - 1;
if (containsPreBaseMatra(gs, k)) {
lk = k;
break;
}
}
return lk;
}
@Override
// find leftmost pre-base matra target, starting from source
protected int findPreBaseMatraTarget(GlyphSequence gs, int source) {
int ng = gs.getGlyphCount();
int lk = -1;
for (int i = (source < ng) ? source : ng; i > 0; i--) {
int k = i - 1;
if (containsConsonant(gs, k)) {
if (containsHalfConsonant(gs, k)) {
lk = k;
} else if (lk == -1) {
lk = k;
} else {
break;
}
}
}
return lk;
}
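/*
* Illustrative example (an assumption, not part of the original source): for the glyph pair KA (0x0915) followed by
* vowel sign I (0x093F), the CCA table below classifies 0x093F as C_M | C_PRE, so containsPreBaseMatra() reports it,
* findPreBaseMatra() returns its index, and findPreBaseMatraTarget() scans left to locate the consonant (or the start
* of its half-consonant cluster) in front of which the pre-base matra glyph is to be reordered.
*/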
private static boolean containsPreBaseMatra(GlyphSequence gs, int k) {
CharAssociation a = gs.getAssociation(k);
int[] ca = gs.getCharacterArray(false);
for (int i = a.getStart(), e = a.getEnd(); i < e; i++) {
if (isPreM(ca [ i ])) {
return true;
}
}
return false;
}
private static boolean containsConsonant(GlyphSequence gs, int k) {
CharAssociation a = gs.getAssociation(k);
int[] ca = gs.getCharacterArray(false);
for (int i = a.getStart(), e = a.getEnd(); i < e; i++) {
if (isC(ca [ i ])) {
return true;
}
}
return false;
}
private static boolean containsHalfConsonant(GlyphSequence gs, int k) {
Boolean half = (Boolean) gs.getAssociation(k) .getPredication("half");
return (half != null) ? half.booleanValue() : false;
}
@Override
protected int findReph(GlyphSequence gs) {
int ng = gs.getGlyphCount();
int li = -1;
for (int i = 0; i < ng; i++) {
if (containsReph(gs, i)) {
li = i;
break;
}
}
return li;
}
@Override
protected int findRephTarget(GlyphSequence gs, int source) {
int ng = gs.getGlyphCount();
int c1 = -1;
int c2 = -1;
// first candidate target is after first non-half consonant
for (int i = 0; i < ng; i++) {
if ((i != source) && containsConsonant(gs, i)) {
if (!containsHalfConsonant(gs, i)) {
c1 = i + 1;
break;
}
}
}
// second candidate target is after last non-prebase matra after first candidate or before first syllable or vedic mark
for (int i = (c1 >= 0) ? c1 : 0; i < ng; i++) {
if (containsMatra(gs, i) && !containsPreBaseMatra(gs, i)) {
c2 = i + 1;
} else if (containsOtherMark(gs, i)) {
c2 = i;
break;
}
}
if (c2 >= 0) {
return c2;
} else if (c1 >= 0) {
return c1;
} else {
return source;
}
}
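/*
* Illustrative example (an assumption, not part of the original source): for the sequence [reph][KA][vowel sign AA],
* the first candidate c1 is the index after KA (the first non-half consonant); because AA (0x093E) is a matra that is
* not pre-base, the second candidate c2 advances to the index after it, so the reph glyph is repositioned after the
* matra rather than immediately after the consonant.
*/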
private static boolean containsReph(GlyphSequence gs, int k) {
Boolean rphf = (Boolean) gs.getAssociation(k) .getPredication("rphf");
return (rphf != null) ? rphf.booleanValue() : false;
}
private static boolean containsMatra(GlyphSequence gs, int k) {
CharAssociation a = gs.getAssociation(k);
int[] ca = gs.getCharacterArray(false);
for (int i = a.getStart(), e = a.getEnd(); i < e; i++) {
if (isM(ca [ i ])) {
return true;
}
}
return false;
}
private static boolean containsOtherMark(GlyphSequence gs, int k) {
CharAssociation a = gs.getAssociation(k);
int[] ca = gs.getCharacterArray(false);
for (int i = a.getStart(), e = a.getEnd(); i < e; i++) {
switch (typeOf(ca [ i ])) {
case C_T: // tone (e.g., udatta, anudatta)
case C_A: // accent (e.g., acute, grave)
case C_O: // other (e.g., candrabindu, anusvara, visarga, etc)
return true;
default:
break;
}
}
return false;
}
private static class DevanagariSyllabizer extends DefaultSyllabizer {
DevanagariSyllabizer(String script, String language) {
super(script, language);
}
@Override
// | C ...
protected int findStartOfSyllable(int[] ca, int s, int e) {
if ((s < 0) || (s >= e)) {
return -1;
} else {
while (s < e) {
int c = ca [ s ];
if (isC(c)) {
break;
} else {
s++;
}
}
return s;
}
}
@Override
// D* L? | ...
protected int findEndOfSyllable(int[] ca, int s, int e) {
if ((s < 0) || (s >= e)) {
return -1;
} else {
int nd = 0;
int nl = 0;
int i;
// consume dead consonants
while ((i = isDeadConsonant(ca, s, e)) > s) {
s = i;
nd++;
}
// consume zero or one live consonant
if ((i = isLiveConsonant(ca, s, e)) > s) {
s = i;
nl++;
}
return ((nd > 0) || (nl > 0)) ? s : -1;
}
}
// D := ( C N? H )?
private int isDeadConsonant(int[] ca, int s, int e) {
if (s < 0) {
return -1;
} else {
int c;
int i = 0;
int nc = 0;
int nh = 0;
do {
// C
if ((s + i) < e) {
c = ca [ s + i ];
if (isC(c)) {
i++;
nc++;
} else {
break;
}
}
// N?
if ((s + i) < e) {
c = ca [ s + i ];
if (isN(c)) {
i++;
}
}
// H
if ((s + i) < e) {
c = ca [ s + i ];
if (isH(c)) {
i++;
nh++;
} else {
break;
}
}
} while (false);
return (nc > 0) && (nh > 0) ? s + i : -1;
}
}
// L := ( (C|V) N? X* )?; where X = ( MATRA | ACCENT MARK | TONE MARK | OTHER MARK )
private int isLiveConsonant(int[] ca, int s, int e) {
if (s < 0) {
return -1;
} else {
int c;
int i = 0;
int nc = 0;
int nv = 0;
int nx = 0;
do {
// C
if ((s + i) < e) {
c = ca [ s + i ];
if (isC(c)) {
i++;
nc++;
} else if (isV(c)) {
i++;
nv++;
} else {
break;
}
}
// N?
if ((s + i) < e) {
c = ca [ s + i ];
if (isN(c)) {
i++;
}
}
// X*
while ((s + i) < e) {
c = ca [ s + i ];
if (isX(c)) {
i++;
nx++;
} else {
break;
}
}
} while (false);
// if no X but has H, then ignore C|V
if (nx == 0) {
if ((s + i) < e) {
c = ca [ s + i ];
if (isH(c)) {
if (nc > 0) {
nc--;
} else if (nv > 0) {
nv--;
}
}
}
}
return ((nc > 0) || (nv > 0)) ? s + i : -1;
}
}
}
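/*
* Illustrative example (an assumption, not part of the original source): for KA (0x0915) + VIRAMA (0x094D) + KA + vowel
* sign AA (0x093E), isDeadConsonant() consumes the first KA plus the virama as a dead consonant D (isC and isH both hold),
* and isLiveConsonant() then consumes the second KA plus the matra as the live consonant L (AA is classified C_M and is
* therefore an X), so findEndOfSyllable() returns the offset just past the matra.
*/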
// devanagari character types
static final short C_U = 0; // unassigned
static final short C_C = 1; // consonant
static final short C_V = 2; // vowel
static final short C_M = 3; // vowel sign (matra)
static final short C_S = 4; // symbol or sign
static final short C_T = 5; // tone mark
static final short C_A = 6; // accent mark
static final short C_P = 7; // punctuation
static final short C_D = 8; // digit
static final short C_H = 9; // halant (virama)
static final short C_O = 10; // other signs
static final short C_N = 0x0100; // nukta(ized)
static final short C_R = 0x0200; // reph(ized)
static final short C_PRE = 0x0400; // pre-base
static final short C_M_TYPE = 0x00FF; // type mask
static final short C_M_FLAGS = 0x7F00; // flag mask
// devanagari block range
static final int CCA_START = 0x0900; // first code point mapped by cca
static final int CCA_END = 0x0980; // last code point + 1 mapped by cca
// devanagari character type lookups
static final short[] CCA = {
C_O, // 0x0900 // INVERTED CANDRABINDU
C_O, // 0x0901 // CANDRABINDU
C_O, // 0x0902 // ANUSVARA
C_O, // 0x0903 // VISARGA
C_V, // 0x0904 // SHORT A
C_V, // 0x0905 // A
C_V, // 0x0906 // AA
C_V, // 0x0907 // I
C_V, // 0x0908 // II
C_V, // 0x0909 // U
C_V, // 0x090A // UU
C_V, // 0x090B // VOCALIC R
C_V, // 0x090C // VOCALIC L
C_V, // 0x090D // CANDRA E
C_V, // 0x090E // SHORT E
C_V, // 0x090F // E
C_V, // 0x0910 // AI
C_V, // 0x0911 // CANDRA O
C_V, // 0x0912 // SHORT O
C_V, // 0x0913 // O
C_V, // 0x0914 // AU
C_C, // 0x0915 // KA
C_C, // 0x0916 // KHA
C_C, // 0x0917 // GA
C_C, // 0x0918 // GHA
C_C, // 0x0919 // NGA
C_C, // 0x091A // CA
C_C, // 0x091B // CHA
C_C, // 0x091C // JA
C_C, // 0x091D // JHA
C_C, // 0x091E // NYA
C_C, // 0x091F // TTA
C_C, // 0x0920 // TTHA
C_C, // 0x0921 // DDA
C_C, // 0x0922 // DDHA
C_C, // 0x0923 // NNA
C_C, // 0x0924 // TA
C_C, // 0x0925 // THA
C_C, // 0x0926 // DA
C_C, // 0x0927 // DHA
C_C, // 0x0928 // NA
C_C, // 0x0929 // NNNA
C_C, // 0x092A // PA
C_C, // 0x092B // PHA
C_C, // 0x092C // BA
C_C, // 0x092D // BHA
C_C, // 0x092E // MA
C_C, // 0x092F // YA
C_C | C_R, // 0x0930 // RA
C_C | C_R | C_N, // 0x0931 // RRA = 0930+093C
C_C, // 0x0932 // LA
C_C, // 0x0933 // LLA
C_C, // 0x0934 // LLLA
C_C, // 0x0935 // VA
C_C, // 0x0936 // SHA
C_C, // 0x0937 // SSA
C_C, // 0x0938 // SA
C_C, // 0x0939 // HA
C_M, // 0x093A // OE (KASHMIRI)
C_M, // 0x093B // OOE (KASHMIRI)
C_N, // 0x093C // NUKTA
C_S, // 0x093D // AVAGRAHA
C_M, // 0x093E // AA
C_M | C_PRE, // 0x093F // I
C_M, // 0x0940 // II
C_M, // 0x0941 // U
C_M, // 0x0942 // UU
C_M, // 0x0943 // VOCALIC R
C_M, // 0x0944 // VOCALIC RR
C_M, // 0x0945 // CANDRA E
C_M, // 0x0946 // SHORT E
C_M, // 0x0947 // E
C_M, // 0x0948 // AI
C_M, // 0x0949 // CANDRA O
C_M, // 0x094A // SHORT O
C_M, // 0x094B // O
C_M, // 0x094C // AU
C_H, // 0x094D // VIRAMA (HALANT)
C_M, // 0x094E // PRISHTHAMATRA E
C_M, // 0x094F // AW
C_S, // 0x0950 // OM
C_T, // 0x0951 // UDATTA
C_T, // 0x0952 // ANUDATTA
C_A, // 0x0953 // GRAVE
C_A, // 0x0954 // ACUTE
C_M, // 0x0955 // CANDRA LONG E
C_M, // 0x0956 // UE
C_M, // 0x0957 // UUE
C_C | C_N, // 0x0958 // QA
C_C | C_N, // 0x0959 // KHHA
C_C | C_N, // 0x095A // GHHA
C_C | C_N, // 0x095B // ZA
C_C | C_N, // 0x095C // DDDHA
C_C | C_N, // 0x095D // RHA
C_C | C_N, // 0x095E // FA
C_C | C_N, // 0x095F // YYA
C_V, // 0x0960 // VOCALIC RR
C_V, // 0x0961 // VOCALIC LL
C_M, // 0x0962 // VOCALIC RR
C_M, // 0x0963 // VOCALIC LL
C_P, // 0x0964 // DANDA
C_P, // 0x0965 // DOUBLE DANDA
C_D, // 0x0966 // ZERO
C_D, // 0x0967 // ONE
C_D, // 0x0968 // TWO
C_D, // 0x0969 // THREE
C_D, // 0x096A // FOUR
C_D, // 0x096B // FIVE
C_D, // 0x096C // SIX
C_D, // 0x096D // SEVEN
C_D, // 0x096E // EIGHT
C_D, // 0x096F // NINE
C_S, // 0x0970 // ABBREVIATION SIGN
C_S, // 0x0971 // HIGH SPACING DOT
C_V, // 0x0972 // CANDRA A (MARATHI)
C_V, // 0x0973 // OE (KASHMIRI)
C_V, // 0x0974 // OOE (KASHMIRI)
C_V, // 0x0975 // AW (KASHMIRI)
C_V, // 0x0976 // UE (KASHMIRI)
C_V, // 0x0977 // UUE (KASHMIRI)
C_U, // 0x0978 // UNASSIGNED
C_C, // 0x0979 // ZHA
C_C, // 0x097A // HEAVY YA
C_C, // 0x097B // GGAA (SINDHI)
C_C, // 0x097C // JJA (SINDHI)
C_C, // 0x097D // GLOTTAL STOP (LIMBU)
C_C, // 0x097E // DDDA (SINDHI)
C_C // 0x097F // BBA (SINDHI)
};
static int typeOf(int c) {
if ((c >= CCA_START) && (c < CCA_END)) {
return CCA [ c - CCA_START ] & C_M_TYPE;
} else {
return C_U;
}
}
static boolean isType(int c, int t) {
return typeOf(c) == t;
}
static boolean hasFlag(int c, int f) {
if ((c >= CCA_START) && (c < CCA_END)) {
return (CCA [ c - CCA_START ] & f) == f;
} else {
return false;
}
}
static boolean isC(int c) {
return isType(c, C_C);
}
static boolean isR(int c) {
return isType(c, C_C) && hasR(c);
}
static boolean isV(int c) {
return isType(c, C_V);
}
static boolean isN(int c) {
return c == 0x093C;
}
static boolean isH(int c) {
return c == 0x094D;
}
static boolean isM(int c) {
return isType(c, C_M);
}
static boolean isPreM(int c) {
return isType(c, C_M) && hasFlag(c, C_PRE);
}
static boolean isX(int c) {
switch (typeOf(c)) {
case C_M: // matra (combining vowel)
case C_A: // accent mark
case C_T: // tone mark
case C_O: // other (modifying) mark
return true;
default:
return false;
}
}
static boolean hasR(int c) {
return hasFlag(c, C_R);
}
static boolean hasN(int c) {
return hasFlag(c, C_N);
}
}
|
|
package com.keivannorouzi.stormy.UI;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.keivannorouzi.stormy.R;
import com.keivannorouzi.stormy.weather.Current;
import com.keivannorouzi.stormy.weather.Daily;
import com.keivannorouzi.stormy.weather.Forecast;
import com.keivannorouzi.stormy.weather.Hourly;
import com.squareup.okhttp.Call;
import com.squareup.okhttp.Callback;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
public class MainActivity extends ActionBarActivity implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {
public static final String TAG = MainActivity.class.getSimpleName();
public static final Context context = MainActivity.context ;
private GoogleApiClient mGoogleApiClient ;
private Forecast mForecast;
@Bind(R.id.timeLabel) TextView mTimeLabel;
@Bind(R.id.locationLabel) TextView mLocationLabel;
@Bind(R.id.tempretureLabel) TextView mTempretureLabel;
@Bind(R.id.iconImageView) ImageView mIconImageView;
@Bind(R.id.himidityValue) TextView mHimidityValue;
@Bind(R.id.precipValue) TextView mPrecipValue;
@Bind(R.id.summaryLabel) TextView mSummaryLabel ;
@Bind(R.id.RefreshImageView) ImageView mRefreshImageView;
@Bind(R.id.progressBar) ProgressBar mProgressBar;
@Bind(R.id.DegreeType) TextView mDegreeType;
@Bind(R.id.dailyButton) Button mDailyButton;
private Location mLastLocation;
private double mLatitude;
private double mLongitude;
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ButterKnife.bind(this);
// getActionBar().hide();
mProgressBar.setVisibility(View.INVISIBLE);
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(LocationServices.API)
.build();
// final double langitude = -79.3941133 ;
// final double latitude = 43.730804 ;
mRefreshImageView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
getForecast(mLatitude, mLongitude);
}
});
getForecast(mLatitude, mLongitude);
}
@Override
protected void onResume() {
super.onResume();
mGoogleApiClient.connect();
}
private void getForecast(double latitude, double longitude) {
adjustType();
String apiKey = "abaa9b12255d3e6bc8d4140fafbdccec";
String forecastUrl = "https://api.forecast.io/forecast/" + apiKey +
"/" + latitude + "," + langitude;
if(isNetworkAvailable()) {
runOnUiThread(new Runnable() {
@Override
public void run() {
toggleRefresh();
}
});
// implementing the request
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder()
.url(forecastUrl)
.build();
// Asynchronous implementation
Call call = client.newCall(request);
call.enqueue(new Callback() {
@Override
public void onFailure(Request request, IOException e) {
runOnUiThread(new Runnable() {
@Override
public void run() {
toggleRefresh();
}
});
alertUserAboutError();
}
@Override
public void onResponse(Response response) throws IOException {
try {
// Response response = call.execute(); This does not work since it would clog up the main thread; we need an asynchronous approach
runOnUiThread(new Runnable() {
@Override
public void run() {
toggleRefresh();
}
});
if (response.isSuccessful()) {
String JsonResponse = response.body().string();
mForecast = parseForecastDetails(JsonResponse);
runOnUiThread(new Runnable() {
@Override
public void run() {
updateDisplay();
}
});
} else
alertUserAboutError();
} catch (IOException e) {
Log.e(TAG, "Exception caouth: ", e);
} catch (JSONException e) {
Log.e(TAG, "JSONException", e);
}
}
});
}
else {
Toast.makeText(this, getString(R.string.Network_unavailable), Toast.LENGTH_LONG).show();
}
}
private void toggleRefresh(){
if(mProgressBar.getVisibility()==View.INVISIBLE) {
mProgressBar.setVisibility(View.VISIBLE);
mRefreshImageView.setVisibility(View.INVISIBLE);
}
else {
mProgressBar.setVisibility(View.INVISIBLE);
mRefreshImageView.setVisibility(View.VISIBLE);
}
}
private void updateDisplay() {
mDegreeType.setText("F"+(char) 0x00B0 );
mTempretureLabel.setText(mForecast.getCurrent().getTemperature() + "");
mTimeLabel.setText(mForecast.getCurrent().getFormattedTime() + "");
mHimidityValue.setText(mForecast.getCurrent().getHumidity() + "");
mPrecipValue.setText(mForecast.getCurrent().getPrecipChance() + "%");
String timeZone = mForecast.getCurrent().getTimeZone();
mLocationLabel.setText(timeZone.substring(timeZone.indexOf("/")+1));
mSummaryLabel.setText(mForecast.getCurrent().getSummary());
Drawable drawable = getResources().getDrawable(mForecast.getCurrent().getIconId());
mIconImageView.setImageDrawable(drawable);
}
private Forecast parseForecastDetails(String JsonData) throws JSONException {
Forecast mForecast = new Forecast();
mForecast.setCurrent(getCurrentForecast(JsonData));
mForecast.setHourlies(getHourlyForecast(JsonData));
mForecast.setDailies(getDailyForecast(JsonData));
return mForecast;
}
private Daily[] getDailyForecast(String jsonData) throws JSONException{
JSONObject forecast = new JSONObject(jsonData);
String timezone = forecast.getString("timezone");
JSONObject daily = forecast.getJSONObject("daily");
JSONArray data = daily.getJSONArray("data");
Daily[] dailyForecast = new Daily[data.length()];
for (int i=0 ; i < data.length() ; i++){
JSONObject jsonDay = data.getJSONObject(i);
Daily day = new Daily();
day.setIcon(jsonDay.getString("icon"));
day.setTime(jsonDay.getLong("time"));
day.setSummary(jsonDay.getString("summary"));
day.setTemperatureMax(jsonDay.getDouble("temperatureMax"));
day.setTimezone(timezone);
dailyForecast[i] = day;
}
return dailyForecast;
}
private Hourly[] getHourlyForecast(String jsonData) throws JSONException{
JSONObject forecast = new JSONObject(jsonData);
String timezone = forecast.getString("timezone");
JSONObject hourly = forecast.getJSONObject("hourly");
JSONArray data = hourly.getJSONArray("data");
Hourly[] hourlyForecast = new Hourly[data.length()];
for (int i=0 ; i < data.length() ; i++){
JSONObject jsonHour = data.getJSONObject(i);
Hourly hour = new Hourly();
hour.setTimezone(timezone);
hour.setSummary(jsonHour.getString("summary"));
hour.setTime(jsonHour.getLong("time"));
hour.setTemperature(jsonHour.getDouble("temperature"));
hour.setIcon(jsonHour.getString("icon"));
hourlyForecast[i] = hour ;
}
return hourlyForecast;
}
private Current getCurrentForecast(String jsonResponse) throws JSONException{
JSONObject forecast = new JSONObject(jsonResponse);
String timezone = forecast.getString("timezone");
JSONObject current = forecast.getJSONObject("currently");
Current currentWeather = new Current();
currentWeather.setHumidity(current.getDouble("humidity"));
currentWeather.setPrecipChance(current.getDouble("precipProbability"));
currentWeather.setTime(current.getLong("time"));
currentWeather.setIcon(current.getString("icon"));
currentWeather.setSummary(current.getString("summary"));
currentWeather.setTemperatureF(current.getDouble("temperature"));
currentWeather.setTimeZone(timezone);
Log.d(TAG, "The time is: " + currentWeather.getFormattedTime());
//String date = currentWeather.getFormattedTime().substring(0,10);
//String time = currentWeather.getFormattedTime().substring(11);
return currentWeather;
}
private boolean isNetworkAvailable() {
boolean isAvailable = false;
ConnectivityManager manager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo info = manager.getActiveNetworkInfo();
if (info != null && info.isConnected())
isAvailable = true;
return isAvailable;
}
private void alertUserAboutError() {
AlertDialogFragment dialog = new AlertDialogFragment();
dialog.show(getFragmentManager(),"Error dialog");
}
@Override
public void onConnected(Bundle bundle) {
mLastLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
if (mLastLocation != null) {
mLatitude = mLastLocation.getLatitude();
mLongitude = mLastLocation.getLongitude();
Log.d(TAG, "Location is" + " " + mLatitude + " / " + mLongitude );
}
else Log.d(TAG , "Current Location is NULL!!!!!!");
}
@Override
public void onConnectionSuspended(int i) {
Log.d(TAG , "Connection SUSPENDED!!!!!!!!!!!");
mLatitude = 37.8267;
mLongitude = -122.423;
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
Log.d(TAG, "Connection Failed!!!!!!!!!!!");
}
private void adjustType() {
mDegreeType.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mTempretureLabel.setText(mForecast.getCurrent().getTemperatureF() + "");
mDegreeType.setText("C" + (char) 0x00B0);
mDegreeType.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
getForecast(mLatitude, mLongitude);
}
});
}
});
}
@OnClick(R.id.dailyButton)
public void startDailyActivity(View view){
Intent intent = new Intent(this, DailyForecastActivity.class);
intent.putExtra(getString(R.string.DailyForecastKey), mForecast.getDailies());
intent.putExtra(getString(R.string.CURRENT_LOCATION),mForecast.getCurrent().getTimeZone());
startActivity(intent);
}
@OnClick(R.id.hourlyButton)
public void startHourlyActivity(View view){
Intent intent = new Intent (this, HourlyForecastActivity.class);
intent.putExtra(getString(R.string.HourlyForecast), mForecast.getHourlies());
startActivity(intent);
}
}
|
|
package br.com.zynger.cardpicker;
import android.app.Activity;
import android.content.Context;
import android.graphics.drawable.ClipDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import br.com.zynger.cardpicker.AnimationFactory.AnimationEndListener;
import br.com.zynger.cardpicker.AnimationFactory.AnimationStartListener;
import br.com.zynger.cardpicker.CardAdapter.OnCardClickListener;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.Animator.AnimatorListener;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.animation.ValueAnimator.AnimatorUpdateListener;
import com.nineoldandroids.view.ViewHelper;
import com.nineoldandroids.view.ViewPropertyAnimator;
public class CardPicker extends RelativeLayout {
private final long FADE_IN_DURATION = 300;
private final long FADE_OUT_DURATION = 300;
private final long SLIDE_CARD_DURATION = 300;
private final int CARD_CLIP_MAX_VALUE = 10000;
private final int CARD_CLIP_MIN_VALUE = 5700;
private final int CARD_TRANSLATION_Y_DEFAULT = 70;
private float mDensity;
private float mCardTranslationY;
private AnimationFactory mAnimationFactory;
private CardPagerContainer mCardPager;
private ImageView mCard;
private ImageView mWallet;
private ClipDrawable mWalletClipDrawable;
private boolean isAnimating = false;
private boolean isCardShown = false;
private CardAdapter mCardAdapter;
public CardPicker(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public CardPicker(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
private void init() {
LayoutInflater inflater = LayoutInflater.from(getContext());
inflater.inflate(R.layout.cardpicker, this,
true);
mAnimationFactory = new AnimationFactory();
mDensity = getContext().getResources().getDisplayMetrics().density;
mCardTranslationY = CARD_TRANSLATION_Y_DEFAULT * mDensity;
mWallet = (ImageView) findViewById(R.id.cardpicker_wallet);
mCard = (ImageView) findViewById(R.id.cardpicker_card);
mCardPager = new CardPagerContainer(getContext());
mCardPager.setVisibility(View.GONE);
((ViewGroup) ((Activity) getContext()).getWindow().getDecorView()).addView(mCardPager);
RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams) mCard
.getLayoutParams();
layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT,
RelativeLayout.TRUE);
mCard.setLayoutParams(layoutParams);
mWallet.setLayoutParams(layoutParams);
mCard.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
toggle();
}
});
setInitialState();
}
public void setCardTranslationY(float translationY) {
mCardTranslationY = mDensity * translationY;
}
public void setCardAdapter(CardAdapter cardAdapter) {
mCardAdapter = cardAdapter;
mCardPager.setAdapter(mCardAdapter);
}
public void setOverlayColor(int color) {
mCardPager.setOverlayColor(color);
}
private void setInitialState() {
Drawable blueCard = getContext().getResources().getDrawable(R.drawable.credit_card_blue);
setWalletClipDrawable(blueCard);
mWalletClipDrawable.setLevel(CARD_CLIP_MIN_VALUE);
ViewHelper.setTranslationY(mCard, mCardTranslationY);
}
private void setWalletClipDrawable(Drawable cardDrawable) {
mWalletClipDrawable = new ClipDrawable(cardDrawable, Gravity.TOP,
ClipDrawable.VERTICAL);
mCard.setImageDrawable(mWalletClipDrawable);
mWalletClipDrawable.setLevel(CARD_CLIP_MAX_VALUE);
}
public void show() {
if (isAnimating) {
return;
}
isAnimating = true;
slideUpCard(new AnimationEndListener() {
@Override
public void onAnimationEnd() {
isCardShown = true;
isAnimating = false;
mAnimationFactory.fadeInView(mCardPager, FADE_IN_DURATION,
new AnimationStartListener() {
@Override
public void onAnimationStart() {
mCardPager.setVisibility(View.VISIBLE);
}
});
}
});
}
public void hide() {
if (isAnimating) {
return;
}
setWalletClipDrawable(mCardAdapter.getCardAsDrawable(getContext(), mCardPager.getCurrentItem()));
isAnimating = true;
mAnimationFactory.fadeOutView(mCardPager, FADE_OUT_DURATION,
new AnimationEndListener() {
@Override
public void onAnimationEnd() {
mCardPager.setVisibility(View.GONE);
slideDownCard(new AnimationEndListener() {
@Override
public void onAnimationEnd() {
isCardShown = false;
isAnimating = false;
}
});
}
});
}
private void slideUpCard(final AnimationEndListener endListener) {
ViewPropertyAnimator.animate(mCard).setDuration(SLIDE_CARD_DURATION).translationYBy(-mCardTranslationY)
.setListener(new AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {
ValueAnimator valAnim = ValueAnimator.ofInt(
CARD_CLIP_MIN_VALUE, CARD_CLIP_MAX_VALUE);
valAnim.setDuration(SLIDE_CARD_DURATION);
valAnim.addUpdateListener(new AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(
ValueAnimator animation) {
Integer animatedValue = (Integer) animation
.getAnimatedValue();
mWalletClipDrawable.setLevel(animatedValue);
}
});
valAnim.start();
}
@Override
public void onAnimationRepeat(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
endListener.onAnimationEnd();
}
@Override
public void onAnimationCancel(Animator animation) {
}
});
}
private void slideDownCard(final AnimationEndListener endListener) {
ViewPropertyAnimator.animate(mCard).setDuration(SLIDE_CARD_DURATION).translationYBy(mCardTranslationY)
.setListener(new AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {
ValueAnimator valAnim = ValueAnimator.ofInt(
CARD_CLIP_MAX_VALUE, CARD_CLIP_MIN_VALUE);
valAnim.setDuration(SLIDE_CARD_DURATION);
valAnim.addUpdateListener(new AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(
ValueAnimator animation) {
Integer animatedValue = (Integer) animation
.getAnimatedValue();
mWalletClipDrawable.setLevel(animatedValue);
}
});
valAnim.start();
}
@Override
public void onAnimationRepeat(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
endListener.onAnimationEnd();
}
@Override
public void onAnimationCancel(Animator animation) {
}
});
}
public void toggle() {
if (isCardShown) {
hide();
} else {
show();
}
}
public void setOnCardClickListener(final OnCardClickListener onCardClickListener) {
mCardAdapter.setOnCardClickListener(new OnCardClickListener() {
@Override
public boolean onCardClick(View card, int position) {
if (isAnimating) return false;
boolean userListenerReturn = onCardClickListener.onCardClick(card, position);
if (userListenerReturn) {
hide();
}
return userListenerReturn;
}
@Override
public boolean onCardBackgroundClick(View card, int position) {
if (isAnimating) return false;
boolean userListenerReturn = onCardClickListener.onCardBackgroundClick(card, position);
if (userListenerReturn) {
hide();
}
return userListenerReturn;
}
});
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.datastream;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.Utils;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.api.operators.TwoInputStreamOperator;
import org.apache.flink.streaming.api.operators.co.CoBroadcastWithKeyedOperator;
import org.apache.flink.streaming.api.operators.co.CoBroadcastWithNonKeyedOperator;
import org.apache.flink.streaming.api.transformations.TwoInputTransformation;
import org.apache.flink.util.Preconditions;
import java.util.List;
import static java.util.Objects.requireNonNull;
/**
* A BroadcastConnectedStream represents the result of connecting a keyed or non-keyed stream
* with a {@link BroadcastStream} with {@link org.apache.flink.api.common.state.BroadcastState
* broadcast state(s)}. As in the case of {@link ConnectedStreams} these streams are useful for cases
* where operations on one stream directly affect the operations on the other stream, usually via
* shared state between the streams.
*
* <p>An example for the use of such connected streams would be to apply rules that change over time
* onto another, possibly keyed stream. The stream with the broadcast state has the rules, and will
* store them in the broadcast state, while the other stream will contain the elements to apply the
* rules to. By broadcasting the rules, these will be available in all parallel instances, and
* can be applied to all partitions of the other stream.
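*
* <p>A minimal sketch of this rule-broadcasting pattern (the {@code Rule}, {@code Event} and
* {@code Alert} types, the descriptor name and the function bodies are illustrative assumptions,
* not part of this class):
* <pre>{@code
* // Hypothetical state descriptor describing the broadcast rules.
* MapStateDescriptor<String, Rule> ruleDescriptor =
*     new MapStateDescriptor<>("rules", Types.STRING, TypeInformation.of(Rule.class));
*
* BroadcastStream<Rule> ruleBroadcast = ruleStream.broadcast(ruleDescriptor);
*
* DataStream<Alert> alerts = eventStream
*     .keyBy(Event::getKey)
*     .connect(ruleBroadcast)
*     .process(new KeyedBroadcastProcessFunction<String, Event, Rule, Alert>() {
*         @Override
*         public void processElement(Event event, ReadOnlyContext ctx, Collector<Alert> out) throws Exception {
*             for (Map.Entry<String, Rule> rule : ctx.getBroadcastState(ruleDescriptor).immutableEntries()) {
*                 // evaluate the event against each broadcast rule and emit alerts as needed
*             }
*         }
*
*         @Override
*         public void processBroadcastElement(Rule rule, Context ctx, Collector<Alert> out) throws Exception {
*             ctx.getBroadcastState(ruleDescriptor).put(rule.getName(), rule); // update the shared rules
*         }
*     });
* }</pre>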
*
* @param <IN1> The input type of the non-broadcast side.
* @param <IN2> The input type of the broadcast side.
*/
@PublicEvolving
public class BroadcastConnectedStream<IN1, IN2> {
private final StreamExecutionEnvironment environment;
private final DataStream<IN1> inputStream1;
private final BroadcastStream<IN2> inputStream2;
private final List<MapStateDescriptor<?, ?>> broadcastStateDescriptors;
protected BroadcastConnectedStream(
final StreamExecutionEnvironment env,
final DataStream<IN1> input1,
final BroadcastStream<IN2> input2,
final List<MapStateDescriptor<?, ?>> broadcastStateDescriptors) {
this.environment = requireNonNull(env);
this.inputStream1 = requireNonNull(input1);
this.inputStream2 = requireNonNull(input2);
this.broadcastStateDescriptors = requireNonNull(broadcastStateDescriptors);
}
public StreamExecutionEnvironment getExecutionEnvironment() {
return environment;
}
/**
* Returns the non-broadcast {@link DataStream}.
*
* @return The stream which, by convention, is not broadcast.
*/
public DataStream<IN1> getFirstInput() {
return inputStream1;
}
/**
* Returns the {@link BroadcastStream}.
*
* @return The stream which, by convention, is the broadcast one.
*/
public BroadcastStream<IN2> getSecondInput() {
return inputStream2;
}
/**
* Gets the type of the first input.
*
* @return The type of the first input
*/
public TypeInformation<IN1> getType1() {
return inputStream1.getType();
}
/**
* Gets the type of the second input.
*
* @return The type of the second input
*/
public TypeInformation<IN2> getType2() {
return inputStream2.getType();
}
/**
* Assumes as inputs a {@link BroadcastStream} and a {@link KeyedStream} and applies the given
* {@link KeyedBroadcastProcessFunction} on them, thereby creating a transformed output stream.
*
* @param function The {@link KeyedBroadcastProcessFunction} that is called for each element in the stream.
* @param <KS> The type of the keys in the keyed stream.
* @param <OUT> The type of the output elements.
* @return The transformed {@link DataStream}.
*/
@PublicEvolving
public <KS, OUT> SingleOutputStreamOperator<OUT> process(final KeyedBroadcastProcessFunction<KS, IN1, IN2, OUT> function) {
TypeInformation<OUT> outTypeInfo = TypeExtractor.getBinaryOperatorReturnType(
function,
KeyedBroadcastProcessFunction.class,
1,
2,
3,
TypeExtractor.NO_INDEX,
getType1(),
getType2(),
Utils.getCallLocationName(),
true);
return process(function, outTypeInfo);
}
/**
* Assumes as inputs a {@link BroadcastStream} and a {@link KeyedStream} and applies the given
* {@link KeyedBroadcastProcessFunction} on them, thereby creating a transformed output stream.
*
* @param function The {@link KeyedBroadcastProcessFunction} that is called for each element in the stream.
* @param outTypeInfo The type of the output elements.
* @param <KS> The type of the keys in the keyed stream.
* @param <OUT> The type of the output elements.
* @return The transformed {@link DataStream}.
*/
@PublicEvolving
public <KS, OUT> SingleOutputStreamOperator<OUT> process(
final KeyedBroadcastProcessFunction<KS, IN1, IN2, OUT> function,
final TypeInformation<OUT> outTypeInfo) {
Preconditions.checkNotNull(function);
Preconditions.checkArgument(inputStream1 instanceof KeyedStream,
"A KeyedBroadcastProcessFunction can only be used on a keyed stream.");
TwoInputStreamOperator<IN1, IN2, OUT> operator =
new CoBroadcastWithKeyedOperator<>(clean(function), broadcastStateDescriptors);
return transform("Co-Process-Broadcast-Keyed", outTypeInfo, operator);
}
/**
* Assumes as inputs a {@link BroadcastStream} and a non-keyed {@link DataStream} and applies the given
* {@link BroadcastProcessFunction} on them, thereby creating a transformed output stream.
*
* @param function The {@link BroadcastProcessFunction} that is called for each element in the stream.
* @param <OUT> The type of the output elements.
* @return The transformed {@link DataStream}.
*/
@PublicEvolving
public <OUT> SingleOutputStreamOperator<OUT> process(final BroadcastProcessFunction<IN1, IN2, OUT> function) {
TypeInformation<OUT> outTypeInfo = TypeExtractor.getBinaryOperatorReturnType(
function,
BroadcastProcessFunction.class,
0,
1,
2,
TypeExtractor.NO_INDEX,
getType1(),
getType2(),
Utils.getCallLocationName(),
true);
return process(function, outTypeInfo);
}
/**
* Assumes as inputs a {@link BroadcastStream} and a non-keyed {@link DataStream} and applies the given
* {@link BroadcastProcessFunction} on them, thereby creating a transformed output stream.
*
* @param function The {@link BroadcastProcessFunction} that is called for each element in the stream.
* @param outTypeInfo The type of the output elements.
* @param <OUT> The type of the output elements.
* @return The transformed {@link DataStream}.
*/
@PublicEvolving
public <OUT> SingleOutputStreamOperator<OUT> process(
final BroadcastProcessFunction<IN1, IN2, OUT> function,
final TypeInformation<OUT> outTypeInfo) {
Preconditions.checkNotNull(function);
Preconditions.checkArgument(!(inputStream1 instanceof KeyedStream),
"A BroadcastProcessFunction can only be used on a non-keyed stream.");
TwoInputStreamOperator<IN1, IN2, OUT> operator =
new CoBroadcastWithNonKeyedOperator<>(clean(function), broadcastStateDescriptors);
return transform("Co-Process-Broadcast", outTypeInfo, operator);
}
@Internal
private <OUT> SingleOutputStreamOperator<OUT> transform(
final String functionName,
final TypeInformation<OUT> outTypeInfo,
final TwoInputStreamOperator<IN1, IN2, OUT> operator) {
// read the output type of the input Transforms to coax out errors about MissingTypeInfo
inputStream1.getType();
inputStream2.getType();
TwoInputTransformation<IN1, IN2, OUT> transform = new TwoInputTransformation<>(
inputStream1.getTransformation(),
inputStream2.getTransformation(),
functionName,
operator,
outTypeInfo,
environment.getParallelism());
if (inputStream1 instanceof KeyedStream) {
KeyedStream<IN1, ?> keyedInput1 = (KeyedStream<IN1, ?>) inputStream1;
TypeInformation<?> keyType1 = keyedInput1.getKeyType();
transform.setStateKeySelectors(keyedInput1.getKeySelector(), null);
transform.setStateKeyType(keyType1);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
SingleOutputStreamOperator<OUT> returnStream = new SingleOutputStreamOperator(environment, transform);
getExecutionEnvironment().addOperator(transform);
return returnStream;
}
protected <F> F clean(F f) {
return getExecutionEnvironment().clean(f);
}
}
|
|
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.orm.jpa;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import com.zaxxer.hikari.HikariDataSource;
import org.hibernate.boot.model.naming.CamelCaseToUnderscoresNamingStrategy;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
import org.hibernate.boot.model.naming.PhysicalNamingStrategy;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.transaction.jta.platform.internal.NoJtaPlatform;
import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.internal.SessionFactoryImpl;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.TestAutoConfigurationPackage;
import org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.XADataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfigurationTests.JpaUsingApplicationListenerConfiguration.EventCapturingApplicationListener;
import org.springframework.boot.autoconfigure.orm.jpa.mapping.NonAnnotatedEntity;
import org.springframework.boot.autoconfigure.orm.jpa.test.City;
import org.springframework.boot.autoconfigure.transaction.jta.JtaAutoConfiguration;
import org.springframework.boot.orm.jpa.hibernate.SpringImplicitNamingStrategy;
import org.springframework.boot.orm.jpa.hibernate.SpringJtaPlatform;
import org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitialization;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ContextConsumer;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;
import static org.assertj.core.api.Assertions.entry;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link HibernateJpaAutoConfiguration}.
*
* @author Dave Syer
* @author Phillip Webb
* @author Andy Wilkinson
* @author Kazuki Shimizu
* @author Stephane Nicoll
* @author Chris Bono
*/
class HibernateJpaAutoConfigurationTests extends AbstractJpaAutoConfigurationTests {
HibernateJpaAutoConfigurationTests() {
super(HibernateJpaAutoConfiguration.class);
}
@Test
@Deprecated
void testDataScriptWithDeprecatedMissingDdl() {
contextRunner().withPropertyValues("spring.datasource.data:classpath:/city.sql",
// Missing:
"spring.datasource.schema:classpath:/ddl.sql").run((context) -> {
assertThat(context).hasFailed();
assertThat(context.getStartupFailure()).hasMessageContaining("ddl.sql");
});
}
@Test
void testDmlScriptWithMissingDdl() {
contextRunner().withPropertyValues("spring.sql.init.data-locations:classpath:/city.sql",
// Missing:
"spring.sql.init.schema-locations:classpath:/ddl.sql").run((context) -> {
assertThat(context).hasFailed();
assertThat(context.getStartupFailure()).hasMessageContaining("ddl.sql");
});
}
@Test
void testDataScript() {
// This can't succeed because the data SQL is executed immediately after the
// schema and Hibernate hasn't initialized yet at that point
contextRunner().withPropertyValues("spring.datasource.data:classpath:/city.sql").run((context) -> {
assertThat(context).hasFailed();
assertThat(context.getStartupFailure()).isInstanceOf(BeanCreationException.class);
});
}
@Test
void testDmlScript() {
// This can't succeed because the data SQL is executed immediately after the
// schema and Hibernate hasn't initialized yet at that point
contextRunner().withPropertyValues("spring.sql.init.data-locations:/city.sql").run((context) -> {
assertThat(context).hasFailed();
assertThat(context.getStartupFailure()).isInstanceOf(BeanCreationException.class);
});
}
@Test
@Deprecated
void testDataScriptRunsEarly() {
contextRunner().withUserConfiguration(TestInitializedJpaConfiguration.class)
.withClassLoader(new HideDataScriptClassLoader())
.withPropertyValues("spring.jpa.show-sql=true", "spring.jpa.hibernate.ddl-auto:create-drop",
"spring.datasource.data:classpath:/city.sql", "spring.jpa.defer-datasource-initialization=true")
.run((context) -> assertThat(context.getBean(TestInitializedJpaConfiguration.class).called).isTrue());
}
@Test
void testDmlScriptRunsEarly() {
contextRunner().withUserConfiguration(TestInitializedJpaConfiguration.class)
.withClassLoader(new HideDataScriptClassLoader())
.withPropertyValues("spring.jpa.show-sql=true", "spring.jpa.hibernate.ddl-auto:create-drop",
"spring.sql.init.data-locations:/city.sql", "spring.jpa.defer-datasource-initialization=true")
.run((context) -> assertThat(context.getBean(TestInitializedJpaConfiguration.class).called).isTrue());
}
@Test
void testFlywaySwitchOffDdlAuto() {
contextRunner().withPropertyValues("spring.sql.init.mode:never", "spring.flyway.locations:classpath:db/city")
.withConfiguration(AutoConfigurations.of(FlywayAutoConfiguration.class))
.run((context) -> assertThat(context).hasNotFailed());
}
@Test
void testFlywayPlusValidation() {
contextRunner()
.withPropertyValues("spring.sql.init.mode:never", "spring.flyway.locations:classpath:db/city",
"spring.jpa.hibernate.ddl-auto:validate")
.withConfiguration(AutoConfigurations.of(FlywayAutoConfiguration.class))
.run((context) -> assertThat(context).hasNotFailed());
}
@Test
void testLiquibasePlusValidation() {
contextRunner()
.withPropertyValues("spring.liquibase.changeLog:classpath:db/changelog/db.changelog-city.yaml",
"spring.jpa.hibernate.ddl-auto:validate")
.withConfiguration(AutoConfigurations.of(LiquibaseAutoConfiguration.class))
.run((context) -> assertThat(context).hasNotFailed());
}
@Test
void hibernateDialectIsNotSetByDefault() {
contextRunner().run(assertJpaVendorAdapter(
(adapter) -> assertThat(adapter.getJpaPropertyMap()).doesNotContainKeys("hibernate.dialect")));
}
@Test
void hibernateDialectIsSetWhenDatabaseIsSet() {
contextRunner().withPropertyValues("spring.jpa.database=H2")
.run(assertJpaVendorAdapter((adapter) -> assertThat(adapter.getJpaPropertyMap())
.contains(entry("hibernate.dialect", H2Dialect.class.getName()))));
}
@Test
void hibernateDialectIsSetWhenDatabasePlatformIsSet() {
String databasePlatform = TestH2Dialect.class.getName();
contextRunner().withPropertyValues("spring.jpa.database-platform=" + databasePlatform)
.run(assertJpaVendorAdapter((adapter) -> assertThat(adapter.getJpaPropertyMap())
.contains(entry("hibernate.dialect", databasePlatform))));
}
private ContextConsumer<AssertableApplicationContext> assertJpaVendorAdapter(
Consumer<HibernateJpaVendorAdapter> adapter) {
return (context) -> {
assertThat(context).hasSingleBean(JpaVendorAdapter.class);
assertThat(context).hasSingleBean(HibernateJpaVendorAdapter.class);
adapter.accept(context.getBean(HibernateJpaVendorAdapter.class));
};
}
@Test
void jtaDefaultPlatform() {
contextRunner().withConfiguration(AutoConfigurations.of(JtaAutoConfiguration.class))
.run(assertJtaPlatform(SpringJtaPlatform.class));
}
@Test
void jtaCustomPlatform() {
contextRunner()
.withPropertyValues(
"spring.jpa.properties.hibernate.transaction.jta.platform:" + TestJtaPlatform.class.getName())
.withConfiguration(AutoConfigurations.of(JtaAutoConfiguration.class))
.run(assertJtaPlatform(TestJtaPlatform.class));
}
@Test
void jtaNotUsedByTheApplication() {
contextRunner().run(assertJtaPlatform(NoJtaPlatform.class));
}
private ContextConsumer<AssertableApplicationContext> assertJtaPlatform(Class<? extends JtaPlatform> expectedType) {
return (context) -> {
SessionFactoryImpl sessionFactory = context.getBean(LocalContainerEntityManagerFactoryBean.class)
.getNativeEntityManagerFactory().unwrap(SessionFactoryImpl.class);
assertThat(sessionFactory.getServiceRegistry().getService(JtaPlatform.class)).isInstanceOf(expectedType);
};
}
@Test
void jtaCustomTransactionManagerUsingProperties() {
contextRunner().withPropertyValues("spring.transaction.default-timeout:30",
"spring.transaction.rollback-on-commit-failure:true").run((context) -> {
JpaTransactionManager transactionManager = context.getBean(JpaTransactionManager.class);
assertThat(transactionManager.getDefaultTimeout()).isEqualTo(30);
assertThat(transactionManager.isRollbackOnCommitFailure()).isTrue();
});
}
@Test
void autoConfigurationBacksOffWithSeveralDataSources() {
contextRunner()
.withConfiguration(AutoConfigurations.of(DataSourceTransactionManagerAutoConfiguration.class,
XADataSourceAutoConfiguration.class, JtaAutoConfiguration.class))
.withUserConfiguration(TestTwoDataSourcesConfiguration.class).run((context) -> {
assertThat(context).hasNotFailed();
assertThat(context).doesNotHaveBean(EntityManagerFactory.class);
});
}
@Test
void providerDisablesAutoCommitIsConfigured() {
contextRunner().withPropertyValues("spring.datasource.type:" + HikariDataSource.class.getName(),
"spring.datasource.hikari.auto-commit:false").run((context) -> {
Map<String, Object> jpaProperties = context.getBean(LocalContainerEntityManagerFactoryBean.class)
.getJpaPropertyMap();
assertThat(jpaProperties)
.contains(entry("hibernate.connection.provider_disables_autocommit", "true"));
});
}
@Test
void providerDisablesAutoCommitIsNotConfiguredIfAutoCommitIsEnabled() {
contextRunner().withPropertyValues("spring.datasource.type:" + HikariDataSource.class.getName(),
"spring.datasource.hikari.auto-commit:true").run((context) -> {
Map<String, Object> jpaProperties = context.getBean(LocalContainerEntityManagerFactoryBean.class)
.getJpaPropertyMap();
assertThat(jpaProperties).doesNotContainKeys("hibernate.connection.provider_disables_autocommit");
});
}
@Test
void providerDisablesAutoCommitIsNotConfiguredIfPropertyIsSet() {
contextRunner()
.withPropertyValues("spring.datasource.type:" + HikariDataSource.class.getName(),
"spring.datasource.hikari.auto-commit:false",
"spring.jpa.properties.hibernate.connection.provider_disables_autocommit=false")
.run((context) -> {
Map<String, Object> jpaProperties = context.getBean(LocalContainerEntityManagerFactoryBean.class)
.getJpaPropertyMap();
assertThat(jpaProperties)
.contains(entry("hibernate.connection.provider_disables_autocommit", "false"));
});
}
@Test
void providerDisablesAutoCommitIsNotConfiguredWithJta() {
contextRunner().withConfiguration(AutoConfigurations.of(JtaAutoConfiguration.class))
.withPropertyValues("spring.datasource.type:" + HikariDataSource.class.getName(),
"spring.datasource.hikari.auto-commit:false")
.run((context) -> {
Map<String, Object> jpaProperties = context.getBean(LocalContainerEntityManagerFactoryBean.class)
.getJpaPropertyMap();
assertThat(jpaProperties).doesNotContainKeys("hibernate.connection.provider_disables_autocommit");
});
}
@Test
void customResourceMapping() {
contextRunner().withClassLoader(new HideDataScriptClassLoader())
.withPropertyValues("spring.sql.init.data-locations:classpath:/db/non-annotated-data.sql",
"spring.jpa.mapping-resources=META-INF/mappings/non-annotated.xml",
"spring.jpa.defer-datasource-initialization=true")
.run((context) -> {
EntityManager em = context.getBean(EntityManagerFactory.class).createEntityManager();
NonAnnotatedEntity found = em.find(NonAnnotatedEntity.class, 2000L);
assertThat(found).isNotNull();
assertThat(found.getValue()).isEqualTo("Test");
});
}
@Test
void physicalNamingStrategyCanBeUsed() {
contextRunner().withUserConfiguration(TestPhysicalNamingStrategyConfiguration.class).run((context) -> {
Map<String, Object> hibernateProperties = context.getBean(HibernateJpaConfiguration.class)
.getVendorProperties();
assertThat(hibernateProperties).contains(
entry("hibernate.physical_naming_strategy", context.getBean("testPhysicalNamingStrategy")));
assertThat(hibernateProperties).doesNotContainKeys("hibernate.ejb.naming_strategy");
});
}
@Test
void implicitNamingStrategyCanBeUsed() {
contextRunner().withUserConfiguration(TestImplicitNamingStrategyConfiguration.class).run((context) -> {
Map<String, Object> hibernateProperties = context.getBean(HibernateJpaConfiguration.class)
.getVendorProperties();
assertThat(hibernateProperties).contains(
entry("hibernate.implicit_naming_strategy", context.getBean("testImplicitNamingStrategy")));
assertThat(hibernateProperties).doesNotContainKeys("hibernate.ejb.naming_strategy");
});
}
@Test
void namingStrategyInstancesTakePrecedenceOverNamingStrategyProperties() {
contextRunner()
.withUserConfiguration(TestPhysicalNamingStrategyConfiguration.class,
TestImplicitNamingStrategyConfiguration.class)
.withPropertyValues("spring.jpa.hibernate.naming.physical-strategy:com.example.Physical",
"spring.jpa.hibernate.naming.implicit-strategy:com.example.Implicit")
.run((context) -> {
Map<String, Object> hibernateProperties = context.getBean(HibernateJpaConfiguration.class)
.getVendorProperties();
assertThat(hibernateProperties).contains(
entry("hibernate.physical_naming_strategy", context.getBean("testPhysicalNamingStrategy")),
entry("hibernate.implicit_naming_strategy", context.getBean("testImplicitNamingStrategy")));
assertThat(hibernateProperties).doesNotContainKeys("hibernate.ejb.naming_strategy");
});
}
@Test
void hibernatePropertiesCustomizerTakesPrecedenceOverStrategyInstancesAndNamingStrategyProperties() {
contextRunner()
.withUserConfiguration(TestHibernatePropertiesCustomizerConfiguration.class,
TestPhysicalNamingStrategyConfiguration.class, TestImplicitNamingStrategyConfiguration.class)
.withPropertyValues("spring.jpa.hibernate.naming.physical-strategy:com.example.Physical",
"spring.jpa.hibernate.naming.implicit-strategy:com.example.Implicit")
.run((context) -> {
Map<String, Object> hibernateProperties = context.getBean(HibernateJpaConfiguration.class)
.getVendorProperties();
TestHibernatePropertiesCustomizerConfiguration configuration = context
.getBean(TestHibernatePropertiesCustomizerConfiguration.class);
assertThat(hibernateProperties).contains(
entry("hibernate.physical_naming_strategy", configuration.physicalNamingStrategy),
entry("hibernate.implicit_naming_strategy", configuration.implicitNamingStrategy));
assertThat(hibernateProperties).doesNotContainKeys("hibernate.ejb.naming_strategy");
});
}
@Test
void eventListenerCanBeRegisteredAsBeans() {
contextRunner().withUserConfiguration(TestInitializedJpaConfiguration.class)
.withClassLoader(new HideDataScriptClassLoader())
.withPropertyValues("spring.jpa.show-sql=true", "spring.jpa.hibernate.ddl-auto:create-drop",
"spring.sql.init.data-locations:classpath:/city.sql",
"spring.jpa.defer-datasource-initialization=true")
.run((context) -> {
// See CityListener
assertThat(context).hasSingleBean(City.class);
assertThat(context.getBean(City.class).getName()).isEqualTo("Washington");
});
}
@Test
void hibernatePropertiesCustomizerCanDisableBeanContainer() {
contextRunner().withUserConfiguration(DisableBeanContainerConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(City.class));
}
@Test
void vendorPropertiesWithEmbeddedDatabaseAndNoDdlProperty() {
contextRunner().run(vendorProperties((vendorProperties) -> {
assertThat(vendorProperties).doesNotContainKeys(AvailableSettings.HBM2DDL_DATABASE_ACTION);
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_AUTO)).isEqualTo("create-drop");
}));
}
@Test
void vendorPropertiesWhenDdlAutoPropertyIsSet() {
contextRunner().withPropertyValues("spring.jpa.hibernate.ddl-auto=update")
.run(vendorProperties((vendorProperties) -> {
assertThat(vendorProperties).doesNotContainKeys(AvailableSettings.HBM2DDL_DATABASE_ACTION);
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_AUTO)).isEqualTo("update");
}));
}
@Test
void vendorPropertiesWhenDdlAutoPropertyAndHibernatePropertiesAreSet() {
contextRunner()
.withPropertyValues("spring.jpa.hibernate.ddl-auto=update",
"spring.jpa.properties.hibernate.hbm2ddl.auto=create-drop")
.run(vendorProperties((vendorProperties) -> {
assertThat(vendorProperties).doesNotContainKeys(AvailableSettings.HBM2DDL_DATABASE_ACTION);
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_AUTO)).isEqualTo("create-drop");
}));
}
@Test
void vendorPropertiesWhenDdlAutoPropertyIsSetToNone() {
contextRunner().withPropertyValues("spring.jpa.hibernate.ddl-auto=none")
.run(vendorProperties((vendorProperties) -> assertThat(vendorProperties).doesNotContainKeys(
AvailableSettings.HBM2DDL_DATABASE_ACTION, AvailableSettings.HBM2DDL_AUTO)));
}
@Test
void vendorPropertiesWhenJpaDdlActionIsSet() {
contextRunner()
.withPropertyValues("spring.jpa.properties.javax.persistence.schema-generation.database.action=create")
.run(vendorProperties((vendorProperties) -> {
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_DATABASE_ACTION)).isEqualTo("create");
assertThat(vendorProperties).doesNotContainKeys(AvailableSettings.HBM2DDL_AUTO);
}));
}
@Test
void vendorPropertiesWhenBothDdlAutoPropertiesAreSet() {
contextRunner()
.withPropertyValues("spring.jpa.properties.javax.persistence.schema-generation.database.action=create",
"spring.jpa.hibernate.ddl-auto=create-only")
.run(vendorProperties((vendorProperties) -> {
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_DATABASE_ACTION)).isEqualTo("create");
assertThat(vendorProperties.get(AvailableSettings.HBM2DDL_AUTO)).isEqualTo("create-only");
}));
}
private ContextConsumer<AssertableApplicationContext> vendorProperties(
Consumer<Map<String, Object>> vendorProperties) {
return (context) -> vendorProperties
.accept(context.getBean(HibernateJpaConfiguration.class).getVendorProperties());
}
@Test
void withSyncBootstrappingAnApplicationListenerThatUsesJpaDoesNotTriggerABeanCurrentlyInCreationException() {
contextRunner().withUserConfiguration(JpaUsingApplicationListenerConfiguration.class).run((context) -> {
assertThat(context).hasNotFailed();
EventCapturingApplicationListener listener = context.getBean(EventCapturingApplicationListener.class);
assertThat(listener.events).hasSize(1);
assertThat(listener.events).hasOnlyElementsOfType(ContextRefreshedEvent.class);
});
}
@Test
void withAsyncBootstrappingAnApplicationListenerThatUsesJpaDoesNotTriggerABeanCurrentlyInCreationException() {
contextRunner().withUserConfiguration(AsyncBootstrappingConfiguration.class,
JpaUsingApplicationListenerConfiguration.class).run((context) -> {
assertThat(context).hasNotFailed();
EventCapturingApplicationListener listener = context
.getBean(EventCapturingApplicationListener.class);
assertThat(listener.events).hasSize(1);
assertThat(listener.events).hasOnlyElementsOfType(ContextRefreshedEvent.class);
// createEntityManager requires Hibernate bootstrapping to be complete
assertThatNoException()
.isThrownBy(() -> context.getBean(EntityManagerFactory.class).createEntityManager());
});
}
@Test
void whenLocalContainerEntityManagerFactoryBeanHasNoJpaVendorAdapterAutoConfigurationSucceeds() {
contextRunner()
.withUserConfiguration(
TestConfigurationWithLocalContainerEntityManagerFactoryBeanWithNoJpaVendorAdapter.class)
.run((context) -> {
EntityManagerFactory factoryBean = context.getBean(EntityManagerFactory.class);
Map<String, Object> map = factoryBean.getProperties();
assertThat(map.get("configured")).isEqualTo("manually");
});
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(City.class)
@DependsOnDatabaseInitialization
static class TestInitializedJpaConfiguration {
private boolean called;
@Autowired
void validateDataSourceIsInitialized(EntityManagerFactory entityManagerFactory) {
// Inject the entity manager to validate it is initialized at the injection
// point
EntityManager entityManager = entityManagerFactory.createEntityManager();
City city = entityManager.find(City.class, 2000L);
assertThat(city).isNotNull();
assertThat(city.getName()).isEqualTo("Washington");
this.called = true;
}
}
@Configuration(proxyBeanMethods = false)
static class TestImplicitNamingStrategyConfiguration {
@Bean
ImplicitNamingStrategy testImplicitNamingStrategy() {
return new SpringImplicitNamingStrategy();
}
}
@Configuration(proxyBeanMethods = false)
static class TestPhysicalNamingStrategyConfiguration {
@Bean
PhysicalNamingStrategy testPhysicalNamingStrategy() {
return new CamelCaseToUnderscoresNamingStrategy();
}
}
@Configuration(proxyBeanMethods = false)
static class TestHibernatePropertiesCustomizerConfiguration {
private final PhysicalNamingStrategy physicalNamingStrategy = new CamelCaseToUnderscoresNamingStrategy();
private final ImplicitNamingStrategy implicitNamingStrategy = new SpringImplicitNamingStrategy();
@Bean
HibernatePropertiesCustomizer testHibernatePropertiesCustomizer() {
return (hibernateProperties) -> {
hibernateProperties.put("hibernate.physical_naming_strategy", this.physicalNamingStrategy);
hibernateProperties.put("hibernate.implicit_naming_strategy", this.implicitNamingStrategy);
};
}
}
@Configuration(proxyBeanMethods = false)
static class DisableBeanContainerConfiguration {
@Bean
HibernatePropertiesCustomizer disableBeanContainerHibernatePropertiesCustomizer() {
return (hibernateProperties) -> hibernateProperties.remove(AvailableSettings.BEAN_CONTAINER);
}
}
public static class TestJtaPlatform implements JtaPlatform {
@Override
public TransactionManager retrieveTransactionManager() {
return mock(TransactionManager.class);
}
@Override
public UserTransaction retrieveUserTransaction() {
throw new UnsupportedOperationException();
}
@Override
public Object getTransactionIdentifier(Transaction transaction) {
throw new UnsupportedOperationException();
}
@Override
public boolean canRegisterSynchronization() {
throw new UnsupportedOperationException();
}
@Override
public void registerSynchronization(Synchronization synchronization) {
throw new UnsupportedOperationException();
}
@Override
public int getCurrentStatus() {
throw new UnsupportedOperationException();
}
}
static class HideDataScriptClassLoader extends URLClassLoader {
private static final List<String> HIDDEN_RESOURCES = Arrays.asList("schema-all.sql", "schema.sql");
HideDataScriptClassLoader() {
super(new URL[0], HideDataScriptClassLoader.class.getClassLoader());
}
@Override
public Enumeration<URL> getResources(String name) throws IOException {
if (HIDDEN_RESOURCES.contains(name)) {
return Collections.emptyEnumeration();
}
return super.getResources(name);
}
}
@org.springframework.context.annotation.Configuration(proxyBeanMethods = false)
static class JpaUsingApplicationListenerConfiguration {
@Bean
EventCapturingApplicationListener jpaUsingApplicationListener(EntityManagerFactory emf) {
return new EventCapturingApplicationListener();
}
static class EventCapturingApplicationListener implements ApplicationListener<ApplicationEvent> {
private final List<ApplicationEvent> events = new ArrayList<>();
@Override
public void onApplicationEvent(ApplicationEvent event) {
this.events.add(event);
}
}
}
@Configuration(proxyBeanMethods = false)
static class AsyncBootstrappingConfiguration {
@Bean
ThreadPoolTaskExecutor threadPoolTaskExecutor() {
return new ThreadPoolTaskExecutor();
}
@Bean
EntityManagerFactoryBuilderCustomizer asyncBootstrappingCustomizer(ThreadPoolTaskExecutor executor) {
return (builder) -> builder.setBootstrapExecutor(executor);
}
}
@Configuration(proxyBeanMethods = false)
static class TestConfigurationWithLocalContainerEntityManagerFactoryBeanWithNoJpaVendorAdapter
extends TestConfiguration {
@Bean
LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource) {
LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
factoryBean.setDataSource(dataSource);
factoryBean.setPersistenceUnitName("manually-configured");
factoryBean.setPersistenceProviderClass(HibernatePersistenceProvider.class);
Map<String, Object> properties = new HashMap<>();
properties.put("configured", "manually");
properties.put("hibernate.transaction.jta.platform", NoJtaPlatform.INSTANCE);
factoryBean.setJpaPropertyMap(properties);
return factoryBean;
}
}
public static class TestH2Dialect extends H2Dialect {
}
}
|
|
package io.datakernel.http.decoder;
import io.datakernel.common.collection.Either;
import io.datakernel.common.tuple.*;
import io.datakernel.http.HttpRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* A high-level API that allows declarative definition of HTTP parsers
* that can convert incoming requests to concrete objects.
* This allows complex parsers to be algebraically built from simple ones.
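* <p>A hypothetical composition sketch ({@code name}, {@code age}, {@code Person} and
* {@code request} are placeholders assumed for illustration; they are not part of this interface):
* <pre>{@code
* Decoder<String> name = ...;   // some leaf decoder, e.g. reading a query parameter (placeholder)
* Decoder<Integer> age = ...;
*
* Decoder<Person> person = Decoder.of(Person::new, name, age)
*     .validate(p -> p.getAge() >= 0, "age must not be negative");
*
* Either<Person, DecodeErrors> result = person.decode(request); // request: an incoming HttpRequest
* }</pre>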
*/
public interface Decoder<T> {
/**
* Either returns the decoded value or the errors collected while decoding the request.
*/
Either<T, DecodeErrors> decode(@NotNull HttpRequest request);
@Nullable
default T decodeOrNull(@NotNull HttpRequest request) {
return decode(request).getLeftOrNull();
}
default T decodeOrThrow(@NotNull HttpRequest request) throws DecodeException {
Either<T, DecodeErrors> either = decode(request);
if (either.isLeft()) return either.getLeft();
throw new DecodeException(either.getRight());
}
/**
* An id that is going to be used in the error-tree if at some point the whole parser fails
*/
String getId();
default Decoder<T> withId(String id) {
return new Decoder<T>() {
@Override
public Either<T, DecodeErrors> decode(@NotNull HttpRequest request) {
return Decoder.this.decode(request);
}
@Override
public String getId() {
return id;
}
};
}
default <V> Decoder<V> map(Function<T, V> fn) {
return mapEx(Mapper.of(fn));
}
default <V> Decoder<V> map(Function<T, V> fn, String message) {
return mapEx(Mapper.of(fn, message));
}
/**
* Enhanced functional 'map' operation.
* If the mapper returns errors, then the returned decoder fails with those errors.
*/
default <V> Decoder<V> mapEx(Mapper<T, V> fn) {
return new AbstractDecoder<V>(getId()) {
@Override
public Either<V, DecodeErrors> decode(@NotNull HttpRequest request) {
return Decoder.this.decode(request)
.flatMapLeft(value ->
fn.map(value)
.mapRight(DecodeErrors::of));
}
};
}
default Decoder<T> validate(Predicate<T> predicate, String error) {
return validate(Validator.of(predicate, error));
}
/**
* Enhanced functional 'filter' operation.
* If the validator returns a non-empty list of errors,
* then the returned decoder fails with these errors.
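* For example (illustrative only), {@code decoder.validate(s -> !s.isEmpty(), "must not be empty")}
* rejects empty strings with that message and passes non-empty values through unchanged.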
*/
default Decoder<T> validate(Validator<T> validator) {
return new AbstractDecoder<T>(getId()) {
@Override
public Either<T, DecodeErrors> decode(@NotNull HttpRequest request) {
Either<T, DecodeErrors> decodedValue = Decoder.this.decode(request);
if (decodedValue.isRight()) return decodedValue;
List<DecodeError> errors = validator.validate(decodedValue.getLeft());
if (errors.isEmpty()) return decodedValue;
return Either.right(DecodeErrors.of(errors));
}
};
}
@NotNull
static <V> Decoder<V> create(Function<Object[], V> constructor, String message, Decoder<?>... decoders) {
return createEx(Mapper.of(constructor, message), decoders);
}
@NotNull
static <V> Decoder<V> create(Function<Object[], V> constructor, Decoder<?>... decoders) {
return createEx(Mapper.of(constructor), decoders);
}
/**
* Plainly combines given decoders (they are called on the same request) into one, mapping the result
* with the supplied mapper.
*/
@NotNull
static <V> Decoder<V> createEx(Mapper<Object[], V> constructor, Decoder<?>... decoders) {
return new AbstractDecoder<V>("") {
@Override
public Either<V, DecodeErrors> decode(@NotNull HttpRequest request) {
Object[] args = new Object[decoders.length];
DecodeErrors errors = DecodeErrors.create();
for (int i = 0; i < decoders.length; i++) {
Decoder<?> decoder = decoders[i];
Either<?, DecodeErrors> decoded = decoder.decode(request);
if (decoded.isLeft()) {
args[i] = decoded.getLeft();
} else {
errors.with(decoder.getId(), decoded.getRight());
}
}
if (errors.hasErrors()) {
return Either.right(errors);
}
return constructor.map(args)
.mapRight(DecodeErrors::of);
}
};
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1> Decoder<R> of(TupleConstructor1<T1, R> constructor, Decoder<T1> param1) {
return create(params -> constructor.create((T1) params[0]),
param1);
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1, T2> Decoder<R> of(TupleConstructor2<T1, T2, R> constructor,
Decoder<T1> param1,
Decoder<T2> param2) {
return create(params -> constructor.create((T1) params[0], (T2) params[1]),
param1, param2);
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1, T2, T3> Decoder<R> of(TupleConstructor3<T1, T2, T3, R> constructor,
Decoder<T1> param1,
Decoder<T2> param2,
Decoder<T3> param3) {
return create(params -> constructor.create((T1) params[0], (T2) params[1], (T3) params[2]),
param1, param2, param3);
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1, T2, T3, T4> Decoder<R> of(TupleConstructor4<T1, T2, T3, T4, R> constructor,
Decoder<T1> param1,
Decoder<T2> param2,
Decoder<T3> param3,
Decoder<T4> param4) {
return create(params -> constructor.create((T1) params[0], (T2) params[1], (T3) params[2], (T4) params[3]),
param1, param2, param3, param4);
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1, T2, T3, T4, T5> Decoder<R> of(TupleConstructor5<T1, T2, T3, T4, T5, R> constructor,
Decoder<T1> param1,
Decoder<T2> param2,
Decoder<T3> param3,
Decoder<T4> param4,
Decoder<T5> param5) {
return create(params -> constructor.create((T1) params[0], (T2) params[1], (T3) params[2], (T4) params[3], (T5) params[4]),
param1, param2, param3, param4, param5);
}
@SuppressWarnings("unchecked")
@NotNull
static <R, T1, T2, T3, T4, T5, T6> Decoder<R> of(TupleConstructor6<T1, T2, T3, T4, T5, T6, R> constructor,
Decoder<T1> param1,
Decoder<T2> param2,
Decoder<T3> param3,
Decoder<T4> param4,
Decoder<T5> param5,
Decoder<T6> param6) {
return create(params -> constructor.create((T1) params[0], (T2) params[1], (T3) params[2], (T4) params[3], (T5) params[4], (T6) params[5]),
param1, param2, param3, param4, param5, param6);
}
}
|
|
/*
* $Id$
*/
/*
Copyright (c) 2014-2016 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.util;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import com.ibm.icu.text.CharsetDetector;
import com.ibm.icu.text.CharsetMatch;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.lockss.plugin.*;
/**
* A class meant to encapsulate static character encoding/decoding using icu4j
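*
* <p>A minimal usage sketch ({@code rawStream} stands for any pre-existing byte stream; a
* {@link java.io.BufferedInputStream} is used because {@link #guessCharsetName(InputStream)}
* requires mark/reset support):
* <pre>{@code
* InputStream in = new BufferedInputStream(rawStream); // rawStream: placeholder for the source stream
* String charset = CharsetUtil.guessCharsetName(in);   // the stream is reset() after detection
* Reader reader = new InputStreamReader(in, charset);
* }</pre>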
*/
public class CharsetUtil {
private static final Logger log = Logger.getLogger(CharsetUtil.class);
public static final String PREFIX = org.lockss.config.Configuration.PREFIX + "crawler.";
/** If true, CharsetUtil will try to infer the proper charset to use,
* falling back to the specified one if it can't. */
public static final String PARAM_INFER_CHARSET = PREFIX + "inferCharset";
public static final boolean DEFAULT_INFER_CHARSET = true;
/** Number of bytes from the stream that will be searched for an HTML or
* XML charset spec, or fed to CharsetDetector (which may not look at all
* of them). */
public static final String PARAM_INFER_CHARSET_BUFSIZE =
PREFIX + "inferCharsetBufSize";
public static final int DEFAULT_INFER_CHARSET_BUFSIZE = 8192;
private static final String UTF8 = "UTF-8";
private static final String UTF16BE = "UTF-16BE";
private static final String UTF16LE = "UTF-16LE";
private static final String UTF32BE = "UTF-32BE";
private static final String UTF32LE = "UTF-32LE";
private static final String UTF7 = "UTF-7";
private static final String UTF1 = "UTF-1";
private static final String ISO_8859_1 = "ISO-8859-1";
private static boolean inferCharset = DEFAULT_INFER_CHARSET;
private static int inferCharsetBufSize = DEFAULT_INFER_CHARSET_BUFSIZE;
public static void setConfig(final org.lockss.config.Configuration config,
final org.lockss.config.Configuration oldConfig,
final org.lockss.config.Configuration.Differences diffs) {
inferCharset =
config.getBoolean(PARAM_INFER_CHARSET,DEFAULT_INFER_CHARSET);
inferCharsetBufSize =
config.getInt(PARAM_INFER_CHARSET_BUFSIZE,
DEFAULT_INFER_CHARSET_BUFSIZE);
}
public static boolean inferCharset() {return inferCharset;}
/**
* This will guess the charset of an input stream. The stream must support mark/reset.
* @param in an input stream which we will be checking
* @return the charset or null if nothing could be determined with greater
* than 50% accuracy
* @throws IOException if mark() not supported or read fails
*/
public static String guessCharsetName(InputStream in) throws IOException
{
if(!in.markSupported())
throw new IllegalArgumentException("InputStream must support mark.");
ByteArrayOutputStream buffered = new ByteArrayOutputStream();
byte[] buf = new byte[inferCharsetBufSize];
in.mark(inferCharsetBufSize + 1024);
int len = StreamUtil.readBytes(in, buf, buf.length);
if (len <= 0) {
return UTF8; // this is just a default for 0 len stream
}
// If the charset is specified in the document, use that.
String charset = findCharsetInText(buf, len);
if (charset == null) { // we didn't find it check BOM
if (hasUtf8BOM(buf, len)) {
charset = UTF8;
// Check UTF32 before UTF16 since a little endian UTF16 BOM is a prefix of
// a little endian UTF32 BOM.
} else if (hasUtf32BEBOM(buf, len)) {
charset = UTF32BE;
} else if (hasUtf32LEBOM(buf, len)) {
charset = UTF32LE;
} else if (hasUtf16BEBOM(buf, len)) {
charset = UTF16BE;
} else if (hasUtf16LEBOM(buf, len)) {
charset = UTF16LE;
} else if (hasUtf7BOM(buf, len)) {
charset = UTF7;
} else if (hasUtf1BOM(buf, len)) {
charset = UTF1;
} else {
// Use icu4j to guess an encoding.
charset = guessCharsetFromBytes(buf);
}
}
if (charset != null) { charset = supportedCharsetName(charset); }
if (charset == null) { charset = UTF8; }
in.reset();
return charset;
}
/**
* Given a byte stream, figure out an encoding and return a character stream
* and the encoding used to convert bytes to characters. This will look for a
* document based charset statement, then check for BOM, then use text
* analysis to 'guess' the encoding.
* @param inStream the InputStream from which to determine the encoding
* @return an InputStreamAndCharset holding a stream that rejoins the consumed bytes with the
* remaining input, together with the name of the detected character
* encoding.
* @throws IOException
*/
public static InputStreamAndCharset getCharsetStream(InputStream inStream)
throws IOException {
return getCharsetStream(inStream, UTF8);
}
  /**
   * Given a byte stream, figure out an encoding and return a character stream
   * and the encoding used to convert bytes to characters. This will look for a
   * document based charset statement, then check for a BOM, then use text
   * analysis to 'guess' the encoding.
   *
   * @param inStream the InputStream from which to determine the encoding
   * @param expectedCharset the expected charset, used as a hint and as the fallback
   * @return an InputStreamAndCharset holding a stream that replays the bytes
   * consumed during detection followed by the rest of the input, and the name
   * of the detected character encoding
   * @throws IOException if the stream cannot be read
   */
public static InputStreamAndCharset getCharsetStream(InputStream inStream,
String expectedCharset)
throws IOException {
if (!CharsetUtil.inferCharset()) {
return new InputStreamAndCharset(inStream, expectedCharset);
}
ByteArrayOutputStream buffered = new ByteArrayOutputStream();
int len = 0;
byte[] buf = new byte[inferCharsetBufSize];
if(inStream != null) {
len = StreamUtil.readBytes(inStream, buf, buf.length);
}
if (len <= 0) {
return new InputStreamAndCharset(inStream, expectedCharset);
}
String charset = findCharsetInText(buf, len);
if (charset != null) {
// If the charset is specified in the document, use that.
buffered.write(buf, 0, len);
// Otherwise, look for a BOM at the start of the content.
} else if (hasUtf8BOM(buf, len)) {
charset = UTF8;
buffered.write(buf, 3, len - 3);
// Check UTF32 before UTF16 since a little endian UTF16 BOM is a prefix of
// a little endian UTF32 BOM.
} else if (hasUtf32BEBOM(buf, len)) {
charset = UTF32BE;
buffered.write(buf, 4, len - 4);
} else if (hasUtf32LEBOM(buf, len)) {
charset = UTF32LE;
buffered.write(buf, 4, len - 4);
} else if (hasUtf16BEBOM(buf, len)) {
charset = UTF16BE;
buffered.write(buf, 2, len - 2);
} else if (hasUtf16LEBOM(buf, len)) {
charset = UTF16LE;
buffered.write(buf, 2, len - 2);
} else if (hasUtf7BOM(buf, len)) {
charset = UTF7;
buffered.write(buf, 4, len - 4);
} else if (hasUtf1BOM(buf, len)) {
charset = UTF1;
buffered.write(buf, 3, len - 3);
} else {
// Use icu4j to choose an encoding.
buffered.write(buf, 0, len);
charset = guessCharsetFromBytes(buf);
}
if (charset != null) { charset = supportedCharsetName(charset); }
if (charset == null) { charset = (expectedCharset == null) ? UTF8:expectedCharset; }
InputStream is = joinStreamsWithCharset(buffered.toByteArray(),
inStream,
charset);
return new InputStreamAndCharset(is, charset);
}
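  /**
   * Usage sketch (editor-added; not part of the original class): compares the
   * charset detected by getCharsetStream() with the charset claimed by, e.g., an
   * HTTP header. The method name is hypothetical; the returned stream replays the
   * bytes consumed during detection, so nothing is lost by sniffing.
   */
  static boolean exampleHeaderCharsetWasWrong(InputStream body, String headerCharset)
      throws IOException {
    InputStreamAndCharset isc = getCharsetStream(body, headerCharset);
    // A caller would normally keep reading from isc.getInStream() with isc.getCharset().
    return headerCharset != null && !headerCharset.equalsIgnoreCase(isc.getCharset());
  }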
public static InputStreamAndCharset getCharsetStream(CachedUrl cu)
throws IOException {
return getCharsetStream(cu.getUncompressedInputStream(),
getAllegedCharset(cu));
}
static String getAllegedCharset(CachedUrl cu) {
return HeaderUtil.getCharsetOrDefaultFromContentType(cu.getContentType());
}
  /**
   * Given a byte stream, figure out an encoding and return a character stream
   * using that encoding to convert bytes to characters. This will look for a
   * document based charset statement, then check for a BOM, then use text
   * analysis to 'guess' the encoding.
   * @param inStream the InputStream from which to determine the encoding
   * @return an InputStreamReader over the stream, using the detected character
   * encoding (UTF-8 if nothing more specific could be determined)
   * @throws IOException if the stream cannot be read
   */
public static InputStreamReader getReader(InputStream inStream) throws IOException {
return getReader(inStream, UTF8);
}
  /**
   * Given a byte stream, figure out an encoding and return a character stream
   * using that encoding to convert bytes to characters. This will look for a
   * document based charset statement, then check for a BOM, then use text
   * analysis to 'guess' the encoding.
   * @param inStream the InputStream from which to determine the encoding
   * @param expectedCharset the expected charset, used as a hint and as the fallback
   * @return an InputStreamReader over the stream, using the detected character encoding
   * @throws IOException if the stream cannot be read
   */
public static InputStreamReader getReader(InputStream inStream,
String expectedCharset) throws IOException {
InputStreamAndCharset charsetStream = getCharsetStream(inStream, expectedCharset);
return new InputStreamReader(charsetStream.getInStream(),
charsetStream.getCharset());
}
public static InputStreamReader getReader(CachedUrl cu) throws IOException {
return (InputStreamReader)
getCharsetReader(cu.getUncompressedInputStream(),
getAllegedCharset(cu)).getLeft();
}
  /**
   * Given a byte stream, figure out an encoding and return a character stream
   * and the encoding used to convert bytes to characters. This will look for a
   * document based charset statement, then check for a BOM, then use text
   * analysis to 'guess' the encoding.
   * @param inStream the InputStream from which to determine the encoding
   * @return a Pair of a Reader over a stream that replays the consumed bytes
   * followed by the rest of the input, and a String containing the name of the
   * character encoding
   * @throws IOException
   * @deprecated
   */
public static Pair<Reader, String> getCharsetReader(InputStream inStream) throws IOException {
InputStreamAndCharset isc = getCharsetStream(inStream, UTF8);
Reader charsetReader = new InputStreamReader(isc.getInStream(),isc.getCharset());
return new ImmutablePair<>(charsetReader, isc.getCharset());
}
  /**
   * Given a byte stream, figure out an encoding and return a character stream
   * and the encoding used to convert bytes to characters. This will look for a
   * document based charset statement, then check for a BOM, then use text
   * analysis to 'guess' the encoding.
   * @param inStream the InputStream from which to determine the encoding
   * @param expectedCharset the expected charset, used as a hint and as the fallback
   * @return a Pair of a Reader over a stream that replays the consumed bytes
   * followed by the rest of the input, and a String containing the name of the
   * character encoding
   * @throws IOException
   * @deprecated
   */
public static Pair<java.io.Reader, String> getCharsetReader(InputStream inStream,
String expectedCharset)
throws IOException {
InputStreamAndCharset isc = getCharsetStream(inStream, expectedCharset);
Reader charsetReader = new InputStreamReader(isc.getInStream(),isc.getCharset());
return new ImmutablePair<>(charsetReader, isc.getCharset());
}
public static Pair<java.io.Reader, String> getCharsetReader(CachedUrl cu)
throws IOException {
return getCharsetReader(cu.getUncompressedInputStream(),
getAllegedCharset(cu));
}
  /**
   * Given a sampling of bytes, determine the charset with the best match.
   * @param bytes the bytes from which to make our guess
   * @return the name of the best-matching charset with more than 35% confidence,
   * or null
   */
public static String guessCharsetFromBytes(byte[] bytes) {
return guessCharsetFromBytes(bytes, null);
}
  /**
   * Given a sampling of bytes, determine the charset with the best match.
   * @param in the byte array containing a sampling of bytes
   * @param expected the encoding to give preference to when looking for a
   * match, or null if unknown
   * @return the name of the best-matching charset with more than 35% confidence,
   * or null
   */
public static String guessCharsetFromBytes(byte[] in, String expected)
{
CharsetDetector detector = new CharsetDetector();
if(expected != null) {
detector.setDeclaredEncoding(expected);
}
detector.setText(in);
CharsetMatch match = detector.detect();
if(match != null && match.getConfidence() > 35) {// we want at least a 35% match
return match.getName();
}
else {
return null;
}
}
  /**
   * Given an input stream, determine the charset with the best match.
   * @param inStream the input stream containing the bytes
   * @return the name of the best-matching charset with more than 50% confidence,
   * or null
   * @throws IOException if the InputStream cannot be read or reset
   */
public static String guessCharsetFromStream(InputStream inStream)
throws IOException{
return guessCharsetFromStream(inStream, null);
}
  /**
   * Given an input stream, determine the charset with the best match.
   * @param inStream the input stream containing the bytes
   * @param expected the anticipated match, which is given preference when
   * determining a match
   * @return the name of the best-matching charset with more than 50% confidence,
   * or expected
   * @throws IOException if the InputStream cannot be reset
   */
public static String guessCharsetFromStream(InputStream inStream,
String expected)
throws IOException
{
if(!inStream.markSupported())
throw new IllegalArgumentException("InputStream must support mark.");
CharsetDetector detector = new CharsetDetector();
if(expected != null) {
detector.setDeclaredEncoding(expected);
}
detector.setText(inStream);
CharsetMatch match = detector.detect();
if(match != null && match.getConfidence() > 50) {// we want at least a 50% match
return match.getName();
}
else {
return expected;
}
}
private static final byte[] CHARSET_BYTES;
private static final byte[] ENCODING_BYTES;
static {
try {
CHARSET_BYTES = "charset".getBytes(ISO_8859_1);
ENCODING_BYTES = "encoding".getBytes(ISO_8859_1);
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException("Unsupported Encoding: " +
ISO_8859_1 + " (shouldn't happen)");
}
}
/**
* Looks for sequences like {@code charset="..."} inside angle brackets to
* match {@code <meta value="text/html;charset=...">} and after {@code <?}
* sequences like {@code encoding="..."} to match XML prologs.
*/
public static String findCharsetInText(final byte[] buf, final int len) {
for (int i = 0; i < len; ++i) {
if ('<' != buf[i]) { continue; }
byte lastByte = '<';
byte[] attrBytes = CHARSET_BYTES;
// Now we're inside <, so look for attrBytes.
for (int j = i + 1, n = len; j < n; ++j) {
byte b = buf[j];
if (b == 0) { continue; }
if (b == '?' && lastByte == '<') { attrBytes = ENCODING_BYTES; }
if ((b | 0x20) == attrBytes[0] && !isAlnum(lastByte)) {
int wordLen = attrBytes.length;
int pos = j + 1, k = 1;
// Match attrBytes against buf[pos:]
while (pos < n && k < wordLen) {
b = buf[pos];
if (b == 0 || b == '-') { // Skip over NULs in UTF-16 and UTF-32.
++pos;
} else if ((b | 0x20) == attrBytes[k]) {
++k;
++pos;
} else {
break;
}
}
if (k == wordLen) {
// Now we've found the attribute or parameter name.
// Skip over spaces and NULs looking for '='
while (pos < len) {
b = buf[pos];
if (b == '=') {
// Skip over spaces and NULs looking for alnum or quote.
while (++pos < len) {
b = buf[pos];
if (b == 0 || isSpace(b)) { continue; }
int start;
if (b == '"' || b == '\'') {
start = pos + 1;
} else if (isAlnum(b)) {
start = pos;
} else {
break;
}
int end = start;
boolean sawLetter = false;
// Now, find the end of the charset.
while (end < len) {
b = buf[end];
if (b == 0 || b == '-' || b == '_') {
++end;
} else if (isAlnum(b)) {
sawLetter = true;
++end;
} else {
break;
}
}
if (sawLetter) {
StringBuilder sb = new StringBuilder(end - start);
for (int bi = start; bi < end; ++bi) {
if (buf[bi] != 0) { sb.append((char) buf[bi]); }
}
// Only use the charset if it's recognized.
// Otherwise, we continue looking.
String charset = supportedCharsetName(sb.toString());
if (charset != null) { return charset; }
}
}
break;
}
if (b != 0 && !isSpace(b)) {
break;
}
++pos;
}
}
if (b == '<' || b == '>') {
i = pos - 1;
break;
}
} else if (b == '<' || b == '>') {
i = j - 1;
break;
}
lastByte = buf[j];
}
}
return null;
}
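  /**
   * Illustration (editor-added; not part of the original class): findCharsetInText()
   * works on raw bytes, so a caller passes the sniffing buffer directly. For the
   * markup below the expected result would be the canonical name "ISO-8859-1",
   * since the declared charset is recognized by supportedCharsetName(). The method
   * name is hypothetical.
   */
  static String exampleFindCharsetInText() throws UnsupportedEncodingException {
    byte[] html = ("<html><head><meta http-equiv=\"Content-Type\""
        + " content=\"text/html; charset=iso-8859-1\"></head>").getBytes(ISO_8859_1);
    return findCharsetInText(html, html.length);
  }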
/**
* Produces a character stream from an underlying byte stream.
*
* @param buffered lookahead bytes read from tail.
* @param tail the unread portion of the stream
* @param charset the character set to use to decode the bytes in buffered and
* tail.
* @return a joined input stream.
* @throws IOException
*/
public static InputStream joinStreamsWithCharset(
byte[] buffered, InputStream tail, String charset)
throws IOException {
//return new SequenceInputStream(new ByteArrayInputStream(buffered),tail);
return new JoinedStream(buffered, tail);
}
public static boolean isAlnum(byte b) {
if (b < '0' || b > 'z') { return false; }
if (b < 'A') { return b <= '9'; }
return b >= 'a' || b <= 'Z';
}
public static boolean isSpace(byte b) {
return b <= ' '
&& (b == ' ' || b == '\r' || b == '\n' || b == '\t' || b == '\f');
}
/**
* Return the official java charset name for a string
* @param s the name of the charset
* @return the official name of the charset or null if unsupported or illegal
*/
static String supportedCharsetName(String s) {
try {
return Charset.forName(s).name();
} catch (UnsupportedCharsetException ex) {
return null;
} catch (IllegalCharsetNameException ex) {
return null;
}
}
public static final byte
_00 = (byte) 0,
_2B = (byte) 0x2b,
_2F = (byte) 0x2f,
_38 = (byte) 0x38,
_39 = (byte) 0x39,
_4C = (byte) 0x4c,
_64 = (byte) 0x64,
_76 = (byte) 0x76,
_BB = (byte) 0xbb,
_BF = (byte) 0xbf,
_EF = (byte) 0xef,
_F7 = (byte) 0xf7,
_FE = (byte) 0xfe,
_FF = (byte) 0xff;
// See http://en.wikipedia.org/wiki/Byte_order_mark for a table of byte
// sequences.
public static boolean hasUtf8BOM(byte[] b, int len) {
return len >= 3 && b[0] == _EF && b[1] == _BB && b[2] == _BF;
}
public static boolean hasUtf16BEBOM(byte[] b, int len) {
return len >= 2 && b[0] == _FE && b[1] == _FF;
}
public static boolean hasUtf16LEBOM(byte[] b, int len) {
return len >= 2 && b[0] == _FF && b[1] == _FE;
}
public static boolean hasUtf32BEBOM(byte[] b, int len) {
return len >= 4 && b[0] == _00 && b[1] == _00
&& b[2] == _FE && b[3] == _FF;
}
public static boolean hasUtf32LEBOM(byte[] b, int len) {
return len >= 4 && b[0] == _FF && b[1] == _FE
&& b[2] == _00 && b[3] == _00;
}
public static boolean hasUtf7BOM(byte[] b, int len) {
if (len < 4 || b[0] != _2B || b[1] != _2F || b[2] != _76) {
return false;
}
byte b3 = b[3];
return b3 == _38 || b3 == _39 || b3 == _2B || b3 == _2F;
}
public static boolean hasUtf1BOM(byte[] b, int len) {
return len >= 3 && b[0] == _F7 && b[1] == _64 && b[2] == _4C;
}
public static class JoinedStream extends InputStream {
byte[] buffered;
int pos;
final InputStream tail;
JoinedStream(byte[] buffered, InputStream tail) {
this.buffered = buffered;
this.tail = tail;
}
@Override
public int read() throws IOException {
if (buffered != null) {
      if (pos < buffered.length) {
        // Mask to 0..255: read() must never return a negative value except -1 at EOF.
        return buffered[pos++] & 0xff;
      }
buffered = null;
}
return tail.read();
}
@Override
public int available() throws IOException {
int avail = tail.available();
if (buffered != null) {
avail += Math.max(buffered.length - pos,0);
}
return avail;
}
@Override
public int read(byte[] out, int off, int len) throws IOException {
int nRead = 0;
if (buffered != null) {
int avail = buffered.length - pos;
if (avail != 0) {
int k = Math.min(len, avail);
int p1 = pos + k;
int p2 = off + k;
pos = p1;
while (--p2 >= off) { out[p2] = buffered[--p1]; }
off += k;
len -= k;
nRead = k;
} else {
buffered = null;
}
}
if (len == 0) { return nRead; }
int nFromTail = tail.read(out, off, len);
if (nFromTail > 0) { return nFromTail + nRead; }
return nRead != 0 ? nRead : -1;
}
@Override
public void close() throws IOException {
buffered = null;
tail.close();
}
}
public static class InputStreamAndCharset {
private String charset;
private InputStream inStream;
public InputStreamAndCharset(final InputStream inStream,
final String charset) {
this.charset = charset;
this.inStream = inStream;
}
public String getCharset() {
return charset;
}
public InputStream getInStream() {
return inStream;
}
}
}
|
|
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.longs;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.booleans.BooleanArrays;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Map;
import java.util.NoSuchElementException;
import it.unimi.dsi.fastutil.ints.IntCollection;
import it.unimi.dsi.fastutil.ints.AbstractIntCollection;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
/** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
* is specified at creation time.
*
* <P>Instances of this class use a hash table to represent a map. The table is
* enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
* smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
* methods} lets you control the size of the table; this is particularly useful
* if you reuse instances of this class.
*
* <p><strong>Warning:</strong> The implementation of this class has significantly
* changed in <code>fastutil</code> 6.1.0. Please read the
* comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
*
* @see Hash
* @see HashCommon
*/
public class Long2IntOpenCustomHashMap extends AbstractLong2IntMap implements java.io.Serializable, Cloneable, Hash {
private static final long serialVersionUID = 0L;
private static final boolean ASSERTS = false;
/** The array of keys. */
protected transient long key[];
/** The array of values. */
protected transient int value[];
/** The array telling whether a position is used. */
protected transient boolean used[];
/** The acceptable load factor. */
protected final float f;
/** The current table size. */
protected transient int n;
/** Threshold after which we rehash. It must be the table size times {@link #f}. */
protected transient int maxFill;
/** The mask for wrapping a position counter. */
protected transient int mask;
/** Number of entries in the set. */
protected int size;
/** Cached set of entries. */
protected transient volatile FastEntrySet entries;
/** Cached set of keys. */
protected transient volatile LongSet keys;
/** Cached collection of values. */
protected transient volatile IntCollection values;
/** The hash strategy of this custom map. */
protected it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy;
/** Creates a new hash map.
*
* <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
*
* @param expected the expected number of elements in the hash set.
* @param f the load factor.
* @param strategy the strategy.
*/
@SuppressWarnings("unchecked")
public Long2IntOpenCustomHashMap( final int expected, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this.strategy = strategy;
if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
this.f = f;
n = arraySize( expected, f );
mask = n - 1;
maxFill = maxFill( n, f );
key = new long[ n ];
value = new int[ n ];
used = new boolean[ n ];
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
*
* @param expected the expected number of elements in the hash map.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final int expected, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( expected, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
* and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map copying a given one.
*
* @param m a {@link Map} to be copied into the new hash map.
* @param f the load factor.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final Map<? extends Long, ? extends Integer> m, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m.size(), f, strategy );
putAll( m );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
*
* @param m a {@link Map} to be copied into the new hash map.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final Map<? extends Long, ? extends Integer> m, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map copying a given type-specific one.
*
* @param m a type-specific map to be copied into the new hash map.
* @param f the load factor.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final Long2IntMap m, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m.size(), f, strategy );
putAll( m );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
*
* @param m a type-specific map to be copied into the new hash map.
* @param strategy the strategy.
*/
public Long2IntOpenCustomHashMap( final Long2IntMap m, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map using the elements of two parallel arrays.
*
* @param k the array of keys of the new hash map.
* @param v the array of corresponding values in the new hash map.
* @param f the load factor.
* @param strategy the strategy.
* @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
*/
public Long2IntOpenCustomHashMap( final long[] k, final int v[], final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( k.length, f, strategy );
if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
*
* @param k the array of keys of the new hash map.
* @param v the array of corresponding values in the new hash map.
* @param strategy the strategy.
* @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
*/
public Long2IntOpenCustomHashMap( final long[] k, final int v[], final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( k, v, DEFAULT_LOAD_FACTOR, strategy );
}
/** Returns the hashing strategy.
*
* @return the hashing strategy of this custom hash map.
*/
public it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy() {
return strategy;
}
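 /** Usage sketch (editor-added; not part of the generated sources): a minimal
  * custom strategy that hashes and compares only the low 32 bits of each key,
  * so keys differing only in their high bits collide into the same entry. The
  * method name is hypothetical; the strategy methods follow the
  * it.unimi.dsi.fastutil.longs.LongHash.Strategy interface used by this class.
  */
 static Long2IntOpenCustomHashMap exampleLowBitsMap() {
  final it.unimi.dsi.fastutil.longs.LongHash.Strategy lowBits = new it.unimi.dsi.fastutil.longs.LongHash.Strategy() {
   public int hashCode( final long e ) { return (int)e; }
   public boolean equals( final long a, final long b ) { return (int)a == (int)b; }
  };
  final Long2IntOpenCustomHashMap m = new Long2IntOpenCustomHashMap( lowBits );
  m.put( 0x100000005L, 42 );
  return m; // under this strategy m.get( 5L ) also returns 42
 }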
/*
 * The following methods implement some basic building blocks used by
* all accessors. They are (and should be maintained) identical to those used in OpenHashSet.drv.
*/
public int put(final long k, final int v) {
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
final int oldValue = value[ pos ];
value[ pos ] = v;
return oldValue;
}
pos = ( pos + 1 ) & mask;
}
used[ pos ] = true;
key[ pos ] = k;
value[ pos ] = v;
if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
if ( ASSERTS ) checkTable();
return defRetValue;
}
public Integer put( final Long ok, final Integer ov ) {
final int v = ((ov).intValue());
final long k = ((ok).longValue());
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
final Integer oldValue = (Integer.valueOf(value[ pos ]));
value[ pos ] = v;
return oldValue;
}
pos = ( pos + 1 ) & mask;
}
used[ pos ] = true;
key[ pos ] = k;
value[ pos ] = v;
if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
if ( ASSERTS ) checkTable();
return (null);
}
 /** Adds an increment to the value currently associated with a key.
  *
  * @param k the key.
  * @param incr the increment.
  * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
  * @deprecated use <code>addTo()</code> instead; having the same name as a {@link java.util.Set} method turned out to be a recipe for disaster.
  */
@Deprecated
public int add(final long k, final int incr) {
return addTo( k, incr );
}
 /** Adds an increment to the value currently associated with a key.
  *
  * <P>Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when
  * called with a key that does not currently appear in the map, the key
  * will be associated with the default return value plus
  * the given increment.
  *
  * @param k the key.
  * @param incr the increment.
  * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
  */
public int addTo(final long k, final int incr) {
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
final int oldValue = value[ pos ];
value[ pos ] += incr;
return oldValue;
}
pos = ( pos + 1 ) & mask;
}
used[ pos ] = true;
key[ pos ] = k;
value[ pos ] = defRetValue + incr;
if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
if ( ASSERTS ) checkTable();
return defRetValue;
}
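 /** Illustration (editor-added; not part of the generated sources): with the
  * default return value left at 0, repeated addTo() calls behave like a counter.
  * The method name is hypothetical.
  */
 static int exampleAddTo( final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
  final Long2IntOpenCustomHashMap counts = new Long2IntOpenCustomHashMap( strategy );
  counts.addTo( 7L, 1 ); // key absent: stored as defaultReturnValue() + 1 = 1
  counts.addTo( 7L, 2 ); // key present: 1 + 2 = 3
  return counts.get( 7L ); // 3
 }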
/** Shifts left entries with the specified hash code, starting at the specified position,
* and empties the resulting free entry.
*
* @param pos a starting position.
* @return the position cleared by the shifting process.
*/
protected final int shiftKeys( int pos ) {
// Shift entries with the same hash.
int last, slot;
for(;;) {
pos = ( ( last = pos ) + 1 ) & mask;
while( used[ pos ] ) {
slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
pos = ( pos + 1 ) & mask;
}
if ( ! used[ pos ] ) break;
key[ last ] = key[ pos ];
value[ last ] = value[ pos ];
}
used[ last ] = false;
return last;
}
@SuppressWarnings("unchecked")
public int remove( final long k ) {
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
size--;
final int v = value[ pos ];
shiftKeys( pos );
return v;
}
pos = ( pos + 1 ) & mask;
}
return defRetValue;
}
@SuppressWarnings("unchecked")
public Integer remove( final Object ok ) {
final long k = ((((Long)(ok)).longValue()));
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
size--;
final int v = value[ pos ];
shiftKeys( pos );
return (Integer.valueOf(v));
}
pos = ( pos + 1 ) & mask;
}
return (null);
}
public Integer get( final Long ok ) {
final long k = ((ok).longValue());
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode( k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), ( k) ) ) ) return (Integer.valueOf(value[ pos ]));
pos = ( pos + 1 ) & mask;
}
return (null);
}
@SuppressWarnings("unchecked")
public int get( final long k ) {
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return value[ pos ];
pos = ( pos + 1 ) & mask;
}
return defRetValue;
}
@SuppressWarnings("unchecked")
public boolean containsKey( final long k ) {
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return true;
pos = ( pos + 1 ) & mask;
}
return false;
}
public boolean containsValue( final int v ) {
final int value[] = this.value;
final boolean used[] = this.used;
for( int i = n; i-- != 0; ) if ( used[ i ] && ( (value[ i ]) == (v) ) ) return true;
return false;
}
/* Removes all elements from this map.
*
* <P>To increase object reuse, this method does not change the table size.
* If you want to reduce the table size, you must use {@link #trim()}.
*
*/
public void clear() {
if ( size == 0 ) return;
size = 0;
BooleanArrays.fill( used, false );
  // This primitive-type specialization holds no object entries, so nothing needs to be nulled for the garbage collector.
}
public int size() {
return size;
}
public boolean isEmpty() {
return size == 0;
}
/** A no-op for backward compatibility.
*
* @param growthFactor unused.
* @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
*/
@Deprecated
public void growthFactor( int growthFactor ) {}
 /** Gets the growth factor.
  *
  * @return the growth factor of this map; the value is fixed and kept only for backward compatibility.
* @see #growthFactor(int)
* @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
*/
@Deprecated
public int growthFactor() {
return 16;
}
/** The entry class for a hash map does not record key and value, but
* rather the position in the hash table of the corresponding entry. This
* is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
* the map */
private final class MapEntry implements Long2IntMap.Entry , Map.Entry<Long, Integer> {
// The table index this entry refers to, or -1 if this entry has been deleted.
private int index;
MapEntry( final int index ) {
this.index = index;
}
public Long getKey() {
return (Long.valueOf(key[ index ]));
}
public long getLongKey() {
return key[ index ];
}
public Integer getValue() {
return (Integer.valueOf(value[ index ]));
}
public int getIntValue() {
return value[ index ];
}
public int setValue( final int v ) {
final int oldValue = value[ index ];
value[ index ] = v;
return oldValue;
}
public Integer setValue( final Integer v ) {
return (Integer.valueOf(setValue( ((v).intValue()) )));
}
@SuppressWarnings("unchecked")
public boolean equals( final Object o ) {
if (!(o instanceof Map.Entry)) return false;
Map.Entry<Long, Integer> e = (Map.Entry<Long, Integer>)o;
return ( strategy.equals( (key[ index ]), (((e.getKey()).longValue())) ) ) && ( (value[ index ]) == (((e.getValue()).intValue())) );
}
public int hashCode() {
return ( strategy.hashCode(key[ index ]) ) ^ (value[ index ]);
}
public String toString() {
return key[ index ] + "=>" + value[ index ];
}
}
/** An iterator over a hash map. */
private class MapIterator {
/** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be
returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
int pos = Long2IntOpenCustomHashMap.this.n;
/** The index of the last entry that has been returned. It is -1 if either
we did not return an entry yet, or the last returned entry has been removed. */
int last = -1;
/** A downward counter measuring how many entries must still be returned. */
int c = size;
/** A lazily allocated list containing the keys of elements that have wrapped around the table because of removals; such elements
would not be enumerated (other elements would be usually enumerated twice in their place). */
LongArrayList wrapped;
{
final boolean used[] = Long2IntOpenCustomHashMap.this.used;
if ( c != 0 ) while( ! used[ --pos ] );
}
public boolean hasNext() {
return c != 0;
}
public int nextEntry() {
if ( ! hasNext() ) throw new NoSuchElementException();
c--;
// We are just enumerating elements from the wrapped list.
if ( pos < 0 ) {
final long k = wrapped.getLong( - ( last = --pos ) - 2 );
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return pos;
pos = ( pos + 1 ) & mask;
}
}
last = pos;
//System.err.println( "Count: " + c );
if ( c != 0 ) {
final boolean used[] = Long2IntOpenCustomHashMap.this.used;
while ( pos-- != 0 && !used[ pos ] );
// When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
}
return last;
}
/** Shifts left entries with the specified hash code, starting at the specified position,
* and empties the resulting free entry. If any entry wraps around the table, instantiates
* lazily {@link #wrapped} and stores the entry key.
*
* @param pos a starting position.
* @return the position cleared by the shifting process.
*/
protected final int shiftKeys( int pos ) {
// Shift entries with the same hash.
int last, slot;
for(;;) {
pos = ( ( last = pos ) + 1 ) & mask;
while( used[ pos ] ) {
slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
pos = ( pos + 1 ) & mask;
}
if ( ! used[ pos ] ) break;
if ( pos < last ) {
// Wrapped entry.
if ( wrapped == null ) wrapped = new LongArrayList ();
wrapped.add( key[ pos ] );
}
key[ last ] = key[ pos ];
value[ last ] = value[ pos ];
}
used[ last ] = false;
return last;
}
@SuppressWarnings("unchecked")
public void remove() {
if ( last == -1 ) throw new IllegalStateException();
if ( pos < -1 ) {
// We're removing wrapped entries.
Long2IntOpenCustomHashMap.this.remove( wrapped.getLong( - pos - 2 ) );
last = -1;
return;
}
size--;
if ( shiftKeys( last ) == pos && c > 0 ) {
c++;
nextEntry();
}
last = -1; // You can no longer remove this entry.
if ( ASSERTS ) checkTable();
}
public int skip( final int n ) {
int i = n;
while( i-- != 0 && hasNext() ) nextEntry();
return n - i - 1;
}
}
private class EntryIterator extends MapIterator implements ObjectIterator<Long2IntMap.Entry > {
private MapEntry entry;
public Long2IntMap.Entry next() {
return entry = new MapEntry( nextEntry() );
}
@Override
public void remove() {
super.remove();
entry.index = -1; // You cannot use a deleted entry.
}
}
private class FastEntryIterator extends MapIterator implements ObjectIterator<Long2IntMap.Entry > {
final BasicEntry entry = new BasicEntry ( ((long)0), (0) );
public BasicEntry next() {
final int e = nextEntry();
entry.key = key[ e ];
entry.value = value[ e ];
return entry;
}
}
private final class MapEntrySet extends AbstractObjectSet<Long2IntMap.Entry > implements FastEntrySet {
public ObjectIterator<Long2IntMap.Entry > iterator() {
return new EntryIterator();
}
public ObjectIterator<Long2IntMap.Entry > fastIterator() {
return new FastEntryIterator();
}
@SuppressWarnings("unchecked")
public boolean contains( final Object o ) {
if ( !( o instanceof Map.Entry ) ) return false;
final Map.Entry<Long, Integer> e = (Map.Entry<Long, Integer>)o;
final long k = ((e.getKey()).longValue());
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return ( (value[ pos ]) == (((e.getValue()).intValue())) );
pos = ( pos + 1 ) & mask;
}
return false;
}
@SuppressWarnings("unchecked")
public boolean remove( final Object o ) {
if ( !( o instanceof Map.Entry ) ) return false;
final Map.Entry<Long, Integer> e = (Map.Entry<Long, Integer>)o;
final long k = ((e.getKey()).longValue());
// The starting point.
int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
// There's always an unused entry.
while( used[ pos ] ) {
if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
Long2IntOpenCustomHashMap.this.remove( e.getKey() );
return true;
}
pos = ( pos + 1 ) & mask;
}
return false;
}
public int size() {
return size;
}
public void clear() {
Long2IntOpenCustomHashMap.this.clear();
}
}
public FastEntrySet long2IntEntrySet() {
if ( entries == null ) entries = new MapEntrySet();
return entries;
}
/** An iterator on keys.
*
* <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
* (and possibly their type-specific counterparts) so that they return keys
* instead of entries.
*/
private final class KeyIterator extends MapIterator implements LongIterator {
public KeyIterator() { super(); }
public long nextLong() { return key[ nextEntry() ]; }
public Long next() { return (Long.valueOf(key[ nextEntry() ])); }
}
private final class KeySet extends AbstractLongSet {
public LongIterator iterator() {
return new KeyIterator();
}
public int size() {
return size;
}
public boolean contains( long k ) {
return containsKey( k );
}
public boolean remove( long k ) {
final int oldSize = size;
Long2IntOpenCustomHashMap.this.remove( k );
return size != oldSize;
}
public void clear() {
Long2IntOpenCustomHashMap.this.clear();
}
}
public LongSet keySet() {
if ( keys == null ) keys = new KeySet();
return keys;
}
/** An iterator on values.
*
* <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
* (and possibly their type-specific counterparts) so that they return values
* instead of entries.
*/
private final class ValueIterator extends MapIterator implements IntIterator {
public ValueIterator() { super(); }
public int nextInt() { return value[ nextEntry() ]; }
public Integer next() { return (Integer.valueOf(value[ nextEntry() ])); }
}
public IntCollection values() {
if ( values == null ) values = new AbstractIntCollection () {
public IntIterator iterator() {
return new ValueIterator();
}
public int size() {
return size;
}
public boolean contains( int v ) {
return containsValue( v );
}
public void clear() {
Long2IntOpenCustomHashMap.this.clear();
}
};
return values;
}
/** A no-op for backward compatibility. The kind of tables implemented by
 * this class never needs rehashing.
*
* <P>If you need to reduce the table size to fit exactly
* this set, use {@link #trim()}.
*
* @return true.
* @see #trim()
* @deprecated A no-op.
*/
@Deprecated
public boolean rehash() {
return true;
}
/** Rehashes the map, making the table as small as possible.
*
* <P>This method rehashes the table to the smallest size satisfying the
* load factor. It can be used when the set will not be changed anymore, so
* to optimize access speed and size.
*
* <P>If the table size is already the minimum possible, this method
* does nothing.
*
* @return true if there was enough memory to trim the map.
* @see #trim(int)
*/
public boolean trim() {
final int l = arraySize( size, f );
if ( l >= n ) return true;
try {
rehash( l );
}
catch(OutOfMemoryError cantDoIt) { return false; }
return true;
}
/** Rehashes this map if the table is too large.
*
* <P>Let <var>N</var> be the smallest table size that can hold
* <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
* table size is smaller than or equal to <var>N</var>, this method does
* nothing. Otherwise, it rehashes this map in a table of size
* <var>N</var>.
*
* <P>This method is useful when reusing maps. {@linkplain #clear() Clearing a
* map} leaves the table size untouched. If you are reusing a map
* many times, you can call this method with a typical
* size to avoid keeping around a very large table just
* because of a few large transient maps.
*
* @param n the threshold for the trimming.
* @return true if there was enough memory to trim the map.
* @see #trim()
*/
public boolean trim( final int n ) {
final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
if ( this.n <= l ) return true;
try {
rehash( l );
}
catch( OutOfMemoryError cantDoIt ) { return false; }
return true;
}
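 /** Usage sketch (editor-added; not part of the generated sources): the reuse
  * pattern described above. clear() keeps the (possibly very large) table, so a
  * long-lived scratch map is trimmed back to a typical working size between uses.
  * The method name and the threshold of 1024 are illustrative only.
  */
 static void exampleReuse( final Long2IntOpenCustomHashMap scratch ) {
  scratch.clear();      // removes the entries but leaves the table size untouched
  scratch.trim( 1024 ); // shrink the table if it grew far beyond ~1024 entries
 }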
/** Resizes the map.
*
* <P>This method implements the basic rehashing strategy, and may be
 * overridden by subclasses implementing different rehashing strategies (e.g.,
* disk-based rehashing). However, you should not override this method
* unless you understand the internal workings of this class.
*
* @param newN the new size
*/
@SuppressWarnings("unchecked")
protected void rehash( final int newN ) {
int i = 0, pos;
final boolean used[] = this.used;
long k;
final long key[] = this.key;
final int value[] = this.value;
final int newMask = newN - 1;
final long newKey[] = new long[ newN ];
final int newValue[] = new int[newN];
final boolean newUsed[] = new boolean[ newN ];
for( int j = size; j-- != 0; ) {
while( ! used[ i ] ) i++;
k = key[ i ];
pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & newMask;
while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
newUsed[ pos ] = true;
newKey[ pos ] = k;
newValue[ pos ] = value[ i ];
i++;
}
n = newN;
mask = newMask;
maxFill = maxFill( n, f );
this.key = newKey;
this.value = newValue;
this.used = newUsed;
}
/** Returns a deep copy of this map.
*
* <P>This method performs a deep copy of this hash map; the data stored in the
* map, however, is not cloned. Note that this makes a difference only for object keys.
*
* @return a deep copy of this map.
*/
@SuppressWarnings("unchecked")
public Long2IntOpenCustomHashMap clone() {
Long2IntOpenCustomHashMap c;
try {
c = (Long2IntOpenCustomHashMap )super.clone();
}
catch(CloneNotSupportedException cantHappen) {
throw new InternalError();
}
c.keys = null;
c.values = null;
c.entries = null;
c.key = key.clone();
c.value = value.clone();
c.used = used.clone();
c.strategy = strategy;
return c;
}
/** Returns a hash code for this map.
*
* This method overrides the generic method provided by the superclass.
  * Since <code>equals()</code> is not overridden, it is important
  * that the value returned by this method is the same value as
  * the one returned by the overridden method.
*
* @return a hash code for this map.
*/
public int hashCode() {
int h = 0;
for( int j = size, i = 0, t = 0; j-- != 0; ) {
while( ! used[ i ] ) i++;
t = ( strategy.hashCode(key[ i ]) );
t ^= (value[ i ]);
h += t;
i++;
}
return h;
}
private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
final long key[] = this.key;
final int value[] = this.value;
final MapIterator i = new MapIterator();
s.defaultWriteObject();
for( int j = size, e; j-- != 0; ) {
e = i.nextEntry();
s.writeLong( key[ e ] );
s.writeInt( value[ e ] );
}
}
@SuppressWarnings("unchecked")
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
n = arraySize( size, f );
maxFill = maxFill( n, f );
mask = n - 1;
final long key[] = this.key = new long[ n ];
final int value[] = this.value = new int[ n ];
final boolean used[] = this.used = new boolean[ n ];
long k;
int v;
for( int i = size, pos = 0; i-- != 0; ) {
k = s.readLong();
v = s.readInt();
pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
used[ pos ] = true;
key[ pos ] = k;
value[ pos ] = v;
}
if ( ASSERTS ) checkTable();
}
private void checkTable() {}
}
|
|
package org.leores.net;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.leores.net.Link.Flag;
import org.leores.util.able.NewInstanceable;
import org.leores.util.able.Processable1;
public class Node extends Element {
private static final long serialVersionUID = -834866406425497004L;
public static class Degree implements Serializable {
private static final long serialVersionUID = -5298089483314315431L;
public Integer in = 0;
public Integer out = 0;
public Integer undirected = 0;
public int all() {
return in + out + undirected;
}
public int max() {
int rtn = -1;
if (in > rtn) {
rtn = in;
}
if (out > rtn) {
rtn = out;
}
if (undirected > rtn) {
rtn = undirected;
}
return rtn;
}
}
public static class NodeLinks implements Serializable {
private static final long serialVersionUID = 3531445922204864535L;
public List<Link> in;
public List<Link> out;
public List<Link> undirected;
}
public static class NewNode implements NewInstanceable<Node> {
private static final long serialVersionUID = 5446695065782547540L;
public static Node node;
		/**
		 * Cloning is faster than constructing a new object, because construction
		 * must invoke a whole chain of constructors: not only the constructor of
		 * the class itself but also the empty constructors of all its parent
		 * classes.
		 *
		 * @return a new Node cloned from a shared template instance
		 */
public Node newInstance() {
if (node == null) {
node = new Node(null);
}
return node.newClone();
}
}
public static Integer nNode = 0;
protected Integer id2;
protected List<NodeLinks> links;
public Node() {
this(null, null, null);
}
public Node(Integer id) {
this(id, null, null);
}
public Node(Integer id, Integer id2, String info) {// id could be null here
initial(id, id2, info);
links = new ArrayList<NodeLinks>();
nNode++;
}
	/**
	 * newClone sets the new copy's id to null and its links to a new ArrayList.
	 *
	 * @return a shallow clone with id reset and an empty links list
	 */
public Node newClone() {
Node rtn = null;
try {
rtn = (Node) super.clone();
rtn.id = null;
rtn.links = new ArrayList<NodeLinks>();
} catch (CloneNotSupportedException e) {
log(e);
}
return rtn;
}
public boolean initial(Integer id, Integer id2, String info) {
boolean rtn = true;
this.id = id;
this.id2 = id2;
lInfo(info);
return rtn;
}
public boolean initial() {
return initial(null, null, null);
}
public void clear() {
nNode = 0;
links.clear();
}
	/**
	 * Get the degree of the node.
	 *
	 * @param net
	 * @return a Degree object.
	 */
public Degree getDegree(Network net) {
Degree rtn = null;
if (net != null) {
NodeLinks nLinks = prepareNodeLinks(net.id, false);
if (nLinks != null) {
rtn = new Degree();
if (nLinks.in != null) {
rtn.in = nLinks.in.size();
}
if (nLinks.out != null) {
rtn.out = nLinks.out.size();
}
if (nLinks.undirected != null) {
rtn.undirected = nLinks.undirected.size();
}
}
}
return rtn;
}
protected NodeLinks prepareNodeLinks(Integer netId, boolean createIfNExist) {
NodeLinks rtn = null;
if (netId != null && netId >= 0) {
if (createIfNExist) {
while (links.size() <= netId) {
links.add(null);
}
NodeLinks nodeLinks = links.get(netId);
if (nodeLinks == null) {
nodeLinks = new NodeLinks();
links.set(netId, nodeLinks);
}
rtn = nodeLinks;
} else if (netId < links.size()) {
rtn = links.get(netId);
}
}
return rtn;
}
protected List<Link> prepareLinkList(NodeLinks nodeLinks, Link link, boolean createIfNExist) {
List<Link> rtn = null;
if (nodeLinks != null && link != null) {
if (!link.bDirected) {
if (this == link.from || this == link.to) {
if (createIfNExist && nodeLinks.undirected == null) {
nodeLinks.undirected = new ArrayList<Link>();
}
rtn = nodeLinks.undirected;
}
} else if (this == link.from) {
if (createIfNExist && nodeLinks.out == null) {
nodeLinks.out = new ArrayList<Link>();
}
rtn = nodeLinks.out;
} else if (this == link.to) {
if (createIfNExist && nodeLinks.in == null) {
nodeLinks.in = new ArrayList<Link>();
}
rtn = nodeLinks.in;
}
}
return rtn;
}
public Link getLink(Link link) {
Link rtn = null;
if (link != null && link.net != null) {
NodeLinks nLinks = prepareNodeLinks(link.net.id, false);
List<Link> lLink = prepareLinkList(nLinks, link, false);
if (lLink != null) {
for (int i = 0, size = lLink.size(); i < size; i++) {
Link eLink = lLink.get(i);
if (link.equals(eLink)) {
rtn = eLink;
break;
}
}
}
}
return rtn;
}
public boolean addLink(Link link) {
boolean rtn = false;
if (link != null && link.net != null && getLink(link) == null) {
NodeLinks nLinks = prepareNodeLinks(link.net.id, true);
List<Link> lLink = prepareLinkList(nLinks, link, true);
if (lLink != null) {
lLink.add(link);
rtn = true;
}
}
return rtn;
}
public boolean removeLink(Link link) {
boolean rtn = false;
if (link != null && link.net != null) {
NodeLinks nLinks = prepareNodeLinks(link.net.id, false);
List<Link> lLink = prepareLinkList(nLinks, link, false);
if (lLink != null) {
rtn = lLink.remove(link);
}
}
return rtn;
}
protected boolean removeLink(List<Link> lLink) {
boolean rtn = false;
if (lLink != null) {
for (int i = 0, size = lLink.size(); i < size; i++) {
Link link = lLink.get(i);
				//Here we cannot use link.net.removeLink(link), as it would also remove the link from this node,
				//causing a null pointer error because some items of lLink would be removed before this loop finishes.
Node node = link.getOtherNode(this);
if (node != null) {
node.removeLink(link);
rtn = true;
}
}
}
return rtn;
}
public boolean bInNetwork(Network net) {
boolean rtn = false;
if (net != null) {
NodeLinks nLinks = prepareNodeLinks(net.id, false);
if (nLinks != null) {
rtn = true;
}
}
return rtn;
}
public boolean addNetwork(Network net) {
boolean rtn = false;
if (net != null) {
NodeLinks nLinks = prepareNodeLinks(net.id, true);
if (nLinks != null) {
rtn = true;
}
}
return rtn;
}
public boolean removeNetwork(Network net) {
boolean rtn = false;
if (net != null) {
NodeLinks nLinks = prepareNodeLinks(net.id, false);
if (nLinks != null) {
removeLink(nLinks.in);
removeLink(nLinks.out);
removeLink(nLinks.undirected);
links.set(net.id, null);
rtn = true;
}
}
return rtn;
}
public boolean changeNetworkId(Integer from, Integer to) {
boolean rtn = false;
NodeLinks nLinks = links.get(from);
if (nLinks != null) {
links.set(from, null);
while (links.size() <= to) {
links.add(null);
}
NodeLinks nLinksTo = links.get(to);
if (nLinksTo == null) {
rtn = true;
links.set(to, nLinks);
} else {
rtn = false;
}
}
return rtn;
}
protected boolean getLinks(List<Link> lTo, List<Link> lFrom, Processable1<Boolean, Link> pa1) {
boolean rtn = false;
if (lFrom != null) {
for (int i = 0, size = lFrom.size(); i < size; i++) {
Link link = lFrom.get(i);
if (pa1 == null || pa1.process(link)) {
lTo.add(link);
}
}
rtn = true;
}
return rtn;
}
public List<Link> getLinks(Network net, Processable1<Boolean, Link> pa1) {
List<Link> rtn = null;
if (net != null) {
NodeLinks nLinks = prepareNodeLinks(net.id, false);
if (nLinks != null) {
rtn = new ArrayList<Link>();
getLinks(rtn, nLinks.in, pa1);
getLinks(rtn, nLinks.out, pa1);
getLinks(rtn, nLinks.undirected, pa1);
}
}
return rtn;
}
	/**
	 * Check whether the given tNode is linked by any of the links in lLink
	 * (tNode may be either the from or the to node of a link).
	 *
	 * @param lLink
	 * @param tNode
	 * @return
	 */
protected boolean bLinked(List<Link> lLink, Node tNode) {
boolean rtn = false;
if (lLink != null) {
for (int i = 0, size = lLink.size(); i < size; i++) {
Link link = lLink.get(i);
Node node = link.getOtherNode(this);
if (node == tNode) {
rtn = true;
break;
}
}
}
return rtn;
}
protected boolean bLinked(NodeLinks nLinks, Node tNode, int flags) {
boolean rtn = false;
if (nLinks != null) {
if ((!rtn) && (flags & Flag.IN) > 0) {
rtn = bLinked(nLinks.in, tNode);
}
if ((!rtn) && (flags & Flag.OUT) > 0) {
rtn = bLinked(nLinks.out, tNode);
}
if ((!rtn) && (flags & Flag.UNDIRECTED) > 0) {
rtn = bLinked(nLinks.undirected, tNode);
}
}
return rtn;
}
	/**
	 * According to flags, check whether this node has an IN/OUT/UNDIRECTED link
	 * from/to/with tNode in net.
	 *
	 * @param net
	 *            net=null means all networks
	 * @param tNode
	 * @param flags
	 *            Flag.IN, Flag.OUT, Flag.UNDIRECTED
	 * @return
	 */
public boolean bLinked(Network net, Node tNode, int flags) {
boolean rtn = false;
//flags > 0 means there is at least a IN/OUT/UNDIRECTED flag raised.
if (tNode != null && flags > 0) {
NodeLinks nLinks = null;
if (net != null) {
nLinks = prepareNodeLinks(net.id, false);
rtn = bLinked(nLinks, tNode, flags);
} else if (links != null) {
//search through all networks
for (int i = 0, size = links.size(); i < size; i++) {
nLinks = links.get(i);
rtn = bLinked(nLinks, tNode, flags);
if (rtn) {
break;
}
}
}
}
return rtn;
}
	/**
	 * Check whether the node nDst is reachable from the current node in net.
	 *
	 * @param net
	 *            net=null means all networks
	 * @param nDst
	 * @return
	 */
public boolean bReachable(Network net, Node nDst) {
boolean rtn = bLinked(net, nDst, Flag.OUT | Flag.UNDIRECTED);
return rtn;
}
	/**
	 * Check whether the node nSrc can reach the current node in net.
	 *
	 * @param net
	 *            net=null means all networks
	 * @param nSrc
	 * @return
	 */
public boolean bReachableToMe(Network net, Node nSrc) {
boolean rtn = bLinked(net, nSrc, Flag.IN | Flag.UNDIRECTED);
return rtn;
}
protected boolean getLinkedNodes(List<Node> lNode, List<Link> lLink, Processable1<Boolean, Node> pa1) {
boolean rtn = false;
if (lLink != null) {
for (int i = 0, size = lLink.size(); i < size; i++) {
Link link = lLink.get(i);
Node node = link.getOtherNode(this);
if (pa1 == null || pa1.process(node)) {
lNode.add(node);
}
}
rtn = true;
}
return rtn;
}
protected boolean getLinkedNodes(List<Node> lNode, NodeLinks nLinks, Processable1<Boolean, Node> pa1, int flags) {
boolean rtn = false;
if (nLinks != null) {
if ((flags & Flag.IN) > 0) {
getLinkedNodes(lNode, nLinks.in, pa1);
}
if ((flags & Flag.OUT) > 0) {
getLinkedNodes(lNode, nLinks.out, pa1);
}
if ((flags & Flag.UNDIRECTED) > 0) {
getLinkedNodes(lNode, nLinks.undirected, pa1);
}
rtn = true;
}
return rtn;
}
	/**
	 * According to flags, get the nodes that this node has an IN/OUT/UNDIRECTED
	 * link from/to/with in net.
	 *
	 * @param net
	 *            net=null means all networks
	 * @param pa1
	 * @param flags
	 *            Flag.IN, Flag.OUT, Flag.UNDIRECTED
	 * @return
	 */
public List<Node> getLinkedNodes(Network net, Processable1<Boolean, Node> pa1, int flags) {
List<Node> rtn = null;
if (flags > 0) {
NodeLinks nLinks = null;
rtn = new ArrayList<Node>();
if (net != null) {
nLinks = prepareNodeLinks(net.id, false);
getLinkedNodes(rtn, nLinks, pa1, flags);
} else if (links != null) {
//search through all networks
for (int i = 0, size = links.size(); i < size; i++) {
nLinks = links.get(i);
getLinkedNodes(rtn, nLinks, pa1, flags);
}
}
}
return rtn;
}
	/**
	 * Get the nodes that are reachable from the current node, including out and
	 * undirected linked nodes, satisfying the condition that pa1 == null ||
	 * pa1.process(node) == true.
	 *
	 * @param net
	 *            net==null means all networks
	 * @param pa1
	 *            pa1==null means all reachable nodes.
	 * @return
	 */
public List<Node> getReachableNodes(Network net, Processable1<Boolean, Node> pa1) {
List<Node> rtn = getLinkedNodes(net, pa1, Flag.OUT | Flag.UNDIRECTED);
return rtn;
}
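	/**
	 * Usage sketch (editor-added; not part of the original class): filters the
	 * reachable nodes with a Processable1 callback. The method name is
	 * hypothetical, and the sketch assumes process(Node) returning Boolean is the
	 * single abstract method of Processable1, as inferred from how pa1 is invoked
	 * elsewhere in this class.
	 */
	public List<Node> getReachableNodesWithId2(Network net, final Integer tId2) {
		return getReachableNodes(net, new Processable1<Boolean, Node>() {
			public Boolean process(Node node) {
				// Keep only reachable nodes whose secondary id matches tId2.
				return node != null && tId2 != null && tId2.equals(node.id2);
			}
		});
	}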
	/**
	 * Get the nodes that can reach the current node, including in and
	 * undirected linked nodes, satisfying the condition that pa1 == null ||
	 * pa1.process(node) == true.
	 *
	 * @param net
	 *            net=null means all networks
	 * @param pa1
	 *            pa1==null means all nodes reachable TO me.
	 * @return
	 */
public List<Node> getReachableToMeNodes(Network net, Processable1<Boolean, Node> pa1) {
List<Node> rtn = getLinkedNodes(net, pa1, Flag.IN | Flag.UNDIRECTED);
return rtn;
}
public String toString() {
String rtn = "Node" + id;
return rtn;
}
public String toStr() {
String rtn = id + sDe + id2;
String sInfo = sInfo();
if (sInfo != null) {
rtn += sDe + sInfo;
}
return rtn;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.connectable.Connectable;
import org.apache.nifi.controller.scheduling.ScheduleState;
import org.apache.nifi.controller.scheduling.SchedulingAgent;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.controller.service.ControllerServiceProvider;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.registry.VariableRegistry;
import org.apache.nifi.scheduling.ExecutionNode;
import org.apache.nifi.scheduling.SchedulingStrategy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
public abstract class ProcessorNode extends AbstractConfiguredComponent implements Connectable {
private static final Logger logger = LoggerFactory.getLogger(ProcessorNode.class);
protected final AtomicReference<ScheduledState> scheduledState;
public ProcessorNode(final String id,
final ValidationContextFactory validationContextFactory, final ControllerServiceProvider serviceProvider,
final String componentType, final String componentCanonicalClass, final VariableRegistry variableRegistry,
final ReloadComponent reloadComponent, final boolean isExtensionMissing) {
super(id, validationContextFactory, serviceProvider, componentType, componentCanonicalClass, variableRegistry, reloadComponent, isExtensionMissing);
this.scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
}
public abstract boolean isIsolated();
public abstract boolean isTriggerWhenAnyDestinationAvailable();
@Override
public abstract boolean isSideEffectFree();
public abstract boolean isTriggeredSerially();
public abstract boolean isEventDrivenSupported();
public abstract boolean isHighThroughputSupported();
public abstract Requirement getInputRequirement();
@Override
public abstract boolean isValid();
public abstract void setBulletinLevel(LogLevel bulletinLevel);
public abstract LogLevel getBulletinLevel();
public abstract Processor getProcessor();
public abstract void setProcessor(LoggableComponent<Processor> processor);
public abstract void yield(long period, TimeUnit timeUnit);
public abstract void setAutoTerminatedRelationships(Set<Relationship> relationships);
public abstract Set<Relationship> getAutoTerminatedRelationships();
public abstract void setSchedulingStrategy(SchedulingStrategy schedulingStrategy);
@Override
public abstract SchedulingStrategy getSchedulingStrategy();
public abstract void setExecutionNode(ExecutionNode executionNode);
public abstract ExecutionNode getExecutionNode();
public abstract void setRunDuration(long duration, TimeUnit timeUnit);
public abstract long getRunDuration(TimeUnit timeUnit);
public abstract Map<String, String> getStyle();
public abstract void setStyle(Map<String, String> style);
/**
* @return the number of threads (concurrent tasks) currently being used by
* this Processor
*/
public abstract int getActiveThreadCount();
/**
* Verifies that this Processor can be started if the provided set of
* services are enabled. This is introduced because we need to verify that
* all components can be started before starting any of them. In order to do
* that, we need to know that this component can be started if the given
* services are enabled, as we will then enable the given services before
* starting this component.
*
* @param ignoredReferences to ignore
*/
public abstract void verifyCanStart(Set<ControllerServiceNode> ignoredReferences);
/**
 * Returns the logical scheduled state of this processor, mapping the transition
 * states STARTING and STOPPING to RUNNING and STOPPED respectively.
 */
@Override
public ScheduledState getScheduledState() {
ScheduledState sc = this.scheduledState.get();
if (sc == ScheduledState.STARTING) {
return ScheduledState.RUNNING;
} else if (sc == ScheduledState.STOPPING) {
return ScheduledState.STOPPED;
}
return sc;
}
/**
* Returns the physical state of this processor which includes transition
* states such as STOPPING and STARTING.
*
* @return the physical state of this processor [DISABLED, STOPPED, RUNNING,
 * STARTING, STOPPING]
*/
public ScheduledState getPhysicalScheduledState() {
return this.scheduledState.get();
}
/**
* Will start the {@link Processor} represented by this
 * {@link ProcessorNode}. Starting the processor typically means invoking its
 * method annotated with @OnScheduled and then executing a
 * callback provided by the {@link ProcessScheduler}, which typically
 * initiates the
 * {@link Processor#onTrigger(ProcessContext, org.apache.nifi.processor.ProcessSessionFactory)}
 * cycle.
*
* @param scheduler
* implementation of {@link ScheduledExecutorService} used to
* initiate processor <i>start</i> task
* @param administrativeYieldMillis
 * the number of milliseconds to wait for administrative yield
* @param processContext
* the instance of {@link ProcessContext} and
* {@link ControllerServiceLookup}
* @param schedulingAgentCallback
* the callback provided by the {@link ProcessScheduler} to
* execute upon successful start of the Processor
*/
public abstract <T extends ProcessContext & ControllerServiceLookup> void start(ScheduledExecutorService scheduler,
long administrativeYieldMillis, T processContext, SchedulingAgentCallback schedulingAgentCallback);
/**
* Will stop the {@link Processor} represented by this {@link ProcessorNode}.
 * Stopping the processor typically means invoking its methods
 * annotated with @OnUnscheduled and then @OnStopped.
*
* @param scheduler
* implementation of {@link ScheduledExecutorService} used to
* initiate processor <i>stop</i> task
* @param processContext
* the instance of {@link ProcessContext} and
* {@link ControllerServiceLookup}
* @param schedulingAgent
* the SchedulingAgent that is responsible for managing the scheduling of the ProcessorNode
* @param scheduleState
* the ScheduleState that can be used to ensure that the running state (STOPPED, RUNNING, etc.)
* as well as the active thread counts are kept in sync
*/
public abstract <T extends ProcessContext & ControllerServiceLookup> void stop(ScheduledExecutorService scheduler,
T processContext, SchedulingAgent schedulingAgent, ScheduleState scheduleState);
/**
 * Will set the state of the processor to STOPPED, which essentially implies
 * that this processor can be started. This is an idempotent operation and will
 * result in a WARN message if the processor cannot be enabled.
*/
public void enable() {
if (!this.scheduledState.compareAndSet(ScheduledState.DISABLED, ScheduledState.STOPPED)) {
logger.warn("Processor cannot be enabled because it is not disabled");
}
}
/**
 * Will set the state of the processor to DISABLED, which essentially implies
 * that this processor can NOT be started. This is an idempotent operation and
 * will result in a WARN message if the processor cannot be disabled.
*/
public void disable() {
if (!this.scheduledState.compareAndSet(ScheduledState.STOPPED, ScheduledState.DISABLED)) {
logger.warn("Processor cannot be disabled because its state is set to " + this.scheduledState);
}
}
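// State-transition sketch (comment only; a simplified illustration, not part of the NiFi API).
// A scheduling component might toggle a node roughly like this:
//
// ProcessorNode node = ...; // obtained elsewhere from the flow controller
// node.enable();  // DISABLED -> STOPPED; otherwise logs a WARN and leaves the state unchanged
// assert node.getScheduledState() == ScheduledState.STOPPED;
// node.disable(); // STOPPED -> DISABLED; otherwise logs a WARN and leaves the state unchanged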
}
|
|
package org.apereo.cas.adaptors.jdbc;
import com.google.common.collect.Multimap;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.HandlerResult;
import org.apereo.cas.authentication.PreventedException;
import org.apereo.cas.authentication.exceptions.AccountDisabledException;
import org.apereo.cas.authentication.exceptions.AccountPasswordMustChangeException;
import org.apereo.cas.configuration.support.Beans;
import org.apereo.cas.util.CollectionUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.security.auth.login.AccountNotFoundException;
import javax.security.auth.login.FailedLoginException;
import javax.sql.DataSource;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.Assert.*;
/**
 * Tests for {@link QueryDatabaseAuthenticationHandler}.
*
* @author Misagh Moayyed mmoayyed@unicon.net
* @since 4.0.0
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {RefreshAutoConfiguration.class})
@ContextConfiguration(locations = {"classpath:/jpaTestApplicationContext.xml"})
public class QueryDatabaseAuthenticationHandlerTests {
private static final String SQL = "SELECT * FROM casusers where username=?";
private static final String PASSWORD_FIELD = "password";
@Rule
public ExpectedException thrown = ExpectedException.none();
@Autowired
@Qualifier("dataSource")
private DataSource dataSource;
@Before
public void setUp() throws Exception {
final Connection c = this.dataSource.getConnection();
final Statement s = c.createStatement();
c.setAutoCommit(true);
s.execute(getSqlInsertStatementToCreateUserAccount(0, Boolean.FALSE.toString(), Boolean.FALSE.toString()));
for (int i = 0; i < 10; i++) {
s.execute(getSqlInsertStatementToCreateUserAccount(i, Boolean.FALSE.toString(), Boolean.FALSE.toString()));
}
s.execute(getSqlInsertStatementToCreateUserAccount(20, Boolean.TRUE.toString(), Boolean.FALSE.toString()));
s.execute(getSqlInsertStatementToCreateUserAccount(21, Boolean.FALSE.toString(), Boolean.TRUE.toString()));
c.close();
}
@After
public void tearDown() throws Exception {
final Connection c = this.dataSource.getConnection();
final Statement s = c.createStatement();
c.setAutoCommit(true);
for (int i = 0; i < 5; i++) {
s.execute("delete from casusers;");
}
c.close();
}
private static String getSqlInsertStatementToCreateUserAccount(final int i, final String expired, final String disabled) {
return String.format("insert into casusers (username, password, expired, disabled, phone) values('%s', '%s', '%s', '%s', '%s');",
"user" + i, "psw" + i, expired, disabled, "123456789");
}
@Entity(name = "casusers")
public static class UsersTable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column
private String username;
@Column
private String password;
@Column
private String expired;
@Column
private String disabled;
@Column
private String phone;
}
@Test
public void verifyAuthenticationFailsToFindUser() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL, PASSWORD_FIELD, null,
null, Collections.emptyMap());
this.thrown.expect(AccountNotFoundException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("usernotfound", "psw1"));
}
@Test
public void verifyPasswordInvalid() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL, PASSWORD_FIELD,
null, null, Collections.emptyMap());
this.thrown.expect(FailedLoginException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user1", "psw11"));
}
@Test
public void verifyMultipleRecords() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL, PASSWORD_FIELD,
null, null, Collections.emptyMap());
this.thrown.expect(FailedLoginException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user0", "psw0"));
}
@Test
public void verifyBadQuery() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL.replace("*", "error"),
PASSWORD_FIELD, null, null, Collections.emptyMap());
this.thrown.expect(PreventedException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user0", "psw0"));
}
@Test
public void verifySuccess() throws Exception {
final Multimap<String, String> map = Beans.transformPrincipalAttributesListIntoMultiMap(Arrays.asList("phone:phoneNumber"));
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null,
this.dataSource, SQL, PASSWORD_FIELD,
null, null,
CollectionUtils.wrap(map));
final HandlerResult result = q.authenticate(
CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user3", "psw3"));
assertNotNull(result);
assertNotNull(result.getPrincipal());
assertTrue(result.getPrincipal().getAttributes().containsKey("phoneNumber"));
}
@Test
public void verifyFindUserAndExpired() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL, PASSWORD_FIELD,
"expired", null, Collections.emptyMap());
this.thrown.expect(AccountPasswordMustChangeException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user20", "psw20"));
fail("Shouldn't get here");
}
@Test
public void verifyFindUserAndDisabled() throws Exception {
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, SQL, PASSWORD_FIELD,
null, "disabled", Collections.emptyMap());
this.thrown.expect(AccountDisabledException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user21", "psw21"));
fail("Shouldn't get here");
}
/**
 * This test proves that when BCRYPT is used, authenticating with the already-encoded password always fails
 * with FailedLoginException.
*
* @throws Exception in case encoding fails
*/
@Test
public void verifyBCryptFail() throws Exception {
final BCryptPasswordEncoder encoder = new BCryptPasswordEncoder(8, new SecureRandom("secret".getBytes(StandardCharsets.UTF_8)));
final String sql = SQL.replace("*", "'" + encoder.encode("pswbc1") + "' password");
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, sql, PASSWORD_FIELD,
null, null, Collections.emptyMap());
q.setPasswordEncoder(encoder);
this.thrown.expect(FailedLoginException.class);
q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user0", "pswbc1"));
}
/**
 * This test proves that when BCRYPT is used and
 * the raw password is supplied, authentication succeeds.
*/
@Test
public void verifyBCryptSuccess() throws Exception {
final BCryptPasswordEncoder encoder = new BCryptPasswordEncoder(6, new SecureRandom("secret2".getBytes(StandardCharsets.UTF_8)));
final String sql = SQL.replace("*", "'" + encoder.encode("pswbc2") + "' password");
final QueryDatabaseAuthenticationHandler q = new QueryDatabaseAuthenticationHandler("", null, null, null, this.dataSource, sql, PASSWORD_FIELD,
null, null, Collections.emptyMap());
q.setPasswordEncoder(encoder);
assertNotNull(q.authenticate(CoreAuthenticationTestUtils.getCredentialsWithDifferentUsernameAndPassword("user3", "pswbc2")));
}
}
|
|
package org.jabref.logic.bibtexkeypattern;
import java.util.Optional;
import org.jabref.model.bibtexkeypattern.DatabaseBibtexKeyPattern;
import org.jabref.model.bibtexkeypattern.GlobalBibtexKeyPattern;
import org.jabref.model.database.BibDatabase;
import org.jabref.model.entry.BibEntry;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.Assert.assertEquals;
public class MakeLabelWithDatabaseTest {
private BibDatabase database;
private BibtexKeyPatternPreferences preferences;
private GlobalBibtexKeyPattern pattern;
private DatabaseBibtexKeyPattern bibtexKeyPattern;
private BibEntry entry;
@BeforeEach
public void setUp() {
database = new BibDatabase();
entry = new BibEntry();
entry.setField("author", "John Doe");
entry.setField("year", "2016");
entry.setField("title", "An awesome paper on JabRef");
database.insertEntry(entry);
pattern = GlobalBibtexKeyPattern.fromPattern("[auth][year]");
bibtexKeyPattern = new DatabaseBibtexKeyPattern(pattern);
preferences = new BibtexKeyPatternPreferences("", "", false, true, true, pattern, ',');
}
@Test
public void generateDefaultKey() {
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyAlreadyExistsDuplicatesStartAtA() {
BibtexKeyGenerator keyGenerator = new BibtexKeyGenerator(bibtexKeyPattern, database, preferences);
keyGenerator.generateAndSetKey(entry);
BibEntry entry2 = new BibEntry();
entry2.setField("author", "John Doe");
entry2.setField("year", "2016");
keyGenerator.generateAndSetKey(entry2);
assertEquals(Optional.of("Doe2016a"), entry2.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyAlwaysLetter() {
preferences = new BibtexKeyPatternPreferences("", "", true, true, true, pattern, ',');
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016a"), entry.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyAlwaysLetterAlreadyExistsDuplicatesStartAtB() {
preferences = new BibtexKeyPatternPreferences("", "", true, true, true, pattern, ',');
BibtexKeyGenerator keyGenerator = new BibtexKeyGenerator(bibtexKeyPattern, database, preferences);
keyGenerator.generateAndSetKey(entry);
BibEntry entry2 = new BibEntry();
entry2.setField("author", "John Doe");
entry2.setField("year", "2016");
keyGenerator.generateAndSetKey(entry2);
assertEquals(Optional.of("Doe2016b"), entry2.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyStartDuplicatesAtB() {
preferences = new BibtexKeyPatternPreferences("", "", false, false, true, pattern, ',');
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyAlreadyExistsDuplicatesStartAtB() {
preferences = new BibtexKeyPatternPreferences("", "", false, false, true, pattern, ',');
BibtexKeyGenerator keyGenerator = new BibtexKeyGenerator(bibtexKeyPattern, database, preferences);
keyGenerator.generateAndSetKey(entry);
BibEntry entry2 = new BibEntry();
entry2.setField("author", "John Doe");
entry2.setField("year", "2016");
keyGenerator.generateAndSetKey(entry2);
assertEquals(Optional.of("Doe2016b"), entry2.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyAlreadyExistsManyDuplicates() {
BibtexKeyGenerator keyGenerator = new BibtexKeyGenerator(bibtexKeyPattern, database, preferences);
keyGenerator.generateAndSetKey(entry);
BibEntry entry2 = new BibEntry();
entry2.setField("author", "John Doe");
entry2.setField("year", "2016");
entry2.setCiteKey(entry.getCiteKeyOptional().get());
database.insertEntry(entry2);
BibEntry entry3 = new BibEntry();
entry3.setField("author", "John Doe");
entry3.setField("year", "2016");
entry3.setCiteKey(entry.getCiteKeyOptional().get());
database.insertEntry(entry3);
keyGenerator.generateAndSetKey(entry3);
assertEquals(Optional.of("Doe2016a"), entry3.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyFirstTwoAlreadyExists() {
BibtexKeyGenerator keyGenerator = new BibtexKeyGenerator(bibtexKeyPattern, database, preferences);
keyGenerator.generateAndSetKey(entry);
BibEntry entry2 = new BibEntry();
entry2.setField("author", "John Doe");
entry2.setField("year", "2016");
keyGenerator.generateAndSetKey(entry2);
database.insertEntry(entry2);
BibEntry entry3 = new BibEntry();
entry3.setField("author", "John Doe");
entry3.setField("year", "2016");
entry3.setCiteKey(entry.getCiteKeyOptional().get());
database.insertEntry(entry3);
keyGenerator.generateAndSetKey(entry3);
assertEquals(Optional.of("Doe2016b"), entry3.getCiteKeyOptional());
}
@Test
public void generateKeyAuthLowerModified() {
bibtexKeyPattern.setDefaultValue("[auth:lower][year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthUpperModified() {
bibtexKeyPattern.setDefaultValue("[auth:upper][year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("DOE2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthTitleCaseModified() {
bibtexKeyPattern.setDefaultValue("[auth:title_case][year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthSentenceCaseModified() {
bibtexKeyPattern.setDefaultValue("[auth:sentence_case][year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthCapitalizeModified() {
bibtexKeyPattern.setDefaultValue("[auth:capitalize][year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe2016"), entry.getCiteKeyOptional());
}
@Test
public void generateDefaultKeyFixedValue() {
bibtexKeyPattern.setDefaultValue("[auth]Test[year]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("DoeTest2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShortYear() {
bibtexKeyPattern.setDefaultValue("[shortyear]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("16"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthN() {
bibtexKeyPattern.setDefaultValue("[auth2]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Do"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthNShortName() {
bibtexKeyPattern.setDefaultValue("[auth10]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyEmptyField() {
entry = new BibEntry();
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.empty(), entry.getCiteKeyOptional());
}
@Test
public void generateKeyEmptyFieldDefaultText() {
bibtexKeyPattern.setDefaultValue("[author:(No Author Provided)]");
entry.clearField("author");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("NoAuthorProvided"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyEmptyFieldNoColonInDefaultText() {
bibtexKeyPattern.setDefaultValue("[author:(Problem:No Author Provided)]");
entry.clearField("author");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("ProblemNoAuthorProvided"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitle() {
bibtexKeyPattern.setDefaultValue("[title]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AnAwesomePaperonJabRef"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleAbbr() {
bibtexKeyPattern.setDefaultValue("[title:abbr]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AAPoJ"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitle() {
bibtexKeyPattern.setDefaultValue("[shorttitle]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("awesomepaperJabRef"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleLowerModified() {
bibtexKeyPattern.setDefaultValue("[shorttitle:lower]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("awesomepaperjabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleUpperModified() {
bibtexKeyPattern.setDefaultValue("[shorttitle:upper]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AWESOMEPAPERJABREF"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleTitleCaseModified() {
bibtexKeyPattern.setDefaultValue("[shorttitle:title_case]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AwesomePaperJabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleSentenceCaseModified() {
bibtexKeyPattern.setDefaultValue("[shorttitle:sentence_case]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Awesomepaperjabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleCapitalizeModified() {
bibtexKeyPattern.setDefaultValue("[shorttitle:capitalize]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AwesomePaperJabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitle() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("awesome"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitleLowerModified() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle:lower]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("awesome"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitleUpperModified() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle:upper]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AWESOME"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitleTitleCaseModified() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle:title_case]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Awesome"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitleSentenceCaseModified() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle:sentence_case]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Awesome"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyVeryshorttitleCapitalizeModified() {
bibtexKeyPattern.setDefaultValue("[veryshorttitle:capitalize]");
entry.setField("title", "An aweSOme Paper on JabRef");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Awesome"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyShorttitleINI() {
bibtexKeyPattern.setDefaultValue("[shorttitleINI]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Aap"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyCamel() {
bibtexKeyPattern.setDefaultValue("[camel]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AnAwesomePaperOnJabRef"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthNM() {
bibtexKeyPattern.setDefaultValue("[auth4_3]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Wond"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthNMLargeN() {
bibtexKeyPattern.setDefaultValue("[auth20_3]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Wonder"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthNMLargeM() {
bibtexKeyPattern.setDefaultValue("[auth2_4]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.empty(), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthNMLargeMReallyReturnsEmptyString() {
bibtexKeyPattern.setDefaultValue("[auth2_4][year]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("2016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyRegExReplace() {
preferences = new BibtexKeyPatternPreferences("2", "3", false, true, true, pattern, ',');
bibtexKeyPattern.setDefaultValue("[auth][year]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Doe3016"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthIni() {
bibtexKeyPattern.setDefaultValue("[authIni2]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("DS"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyAuthIniMany() {
bibtexKeyPattern.setDefaultValue("[authIni10]");
entry.setField("author", "John Doe and Donald Smith and Will Wonder");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("DoeSmiWon"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleRegexe() {
bibtexKeyPattern.setDefaultValue("[title:regex(\" \",\"-\")]");
entry.setField("title", "Please replace the spaces");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Please-Replace-the-Spaces"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleTitleCase() {
bibtexKeyPattern.setDefaultValue("[title:title_case]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AnAwesomePaperonJabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleCapitalize() {
bibtexKeyPattern.setDefaultValue("[title:capitalize]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AnAwesomePaperOnJabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleSentenceCase() {
bibtexKeyPattern.setDefaultValue("[title:sentence_case]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Anawesomepaperonjabref"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleTitleCaseAbbr() {
bibtexKeyPattern.setDefaultValue("[title:title_case:abbr]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AAPoJ"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleCapitalizeAbbr() {
bibtexKeyPattern.setDefaultValue("[title:capitalize:abbr]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("AAPOJ"), entry.getCiteKeyOptional());
}
@Test
public void generateKeyTitleSentenceCaseAbbr() {
bibtexKeyPattern.setDefaultValue("[title:sentence_case:abbr]");
new BibtexKeyGenerator(bibtexKeyPattern, database, preferences).generateAndSetKey(entry);
assertEquals(Optional.of("Aapoj"), entry.getCiteKeyOptional());
}
}
|
|
package org.mediterraneancoin.miner;
import static java.lang.System.arraycopy;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.crypto.Mac;
import org.mediterraneancoin.miner.scrypt.SCrypt;
/**
*
* @author test
*/
public class SuperHasher {
private Mac mac;
public SuperHasher() throws GeneralSecurityException {
mac = Mac.getInstance("HmacSHA256");
}
public byte [] subarray(byte [] b, int len) {
if (b.length < len)
throw new RuntimeException("b.length < len");
byte [] result = new byte[len];
arraycopy(b, 0, result, 0, len);
return result;
}
public byte [] xor(byte [] a, byte [] b) {
if (a.length != b.length)
throw new RuntimeException("a.length != b.length");
byte [] result = new byte[a.length];
for (int i = 0; i < result.length; i++) {
result[i] = (byte)((a[i] ^ b[i]) & 0xFF);
}
return result;
}
public byte [] and(byte [] a, byte [] b) {
if (a.length != b.length)
throw new RuntimeException("a.length != b.length");
byte [] result = new byte[a.length];
for (int i = 0; i < result.length; i++) {
result[i] = (byte)((a[i] & b[i]) & 0xFF);
}
return result;
}
public void checkLen(byte [] a, int len) {
if (a.length != len)
throw new RuntimeException("checkLen: a.length != len, " + a.length + ", " + len);
}
public static int countTopmostZeroBits(byte v) {
// Count the consecutive zero bits starting from the most significant (topmost) bit of v.
// (The bithacks reference below describes the mirror-image trailing-zero count.)
// http://graphics.stanford.edu/~seander/bithacks.html#ZerosOnRightLinear
if (v == 0)
return 8;
for (int x = 7; x >= 0; x--) {
if ((v & (1L << x)) != 0)
return (7-x);
}
return 8;
/*
if ((v & (1L << 7)) != 0)
return 0;
if ((v & (1L << 6)) != 0)
return 1;
*/
}
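// Worked example (comment only): for v = 0x1F (binary 0001 1111) the three most significant
// bits are zero, so countTopmostZeroBits((byte) 0x1F) returns 3; for v = 0 it returns 8, and
// for any byte with the top bit set (0x80..0xFF) it returns 0.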
// Mask built by countTopmostZeroBits(byte[]): 0xFF below the first significant byte,
// a partial mask on that byte itself, and 0x00 on the more significant (zero) bytes above it.
byte [] mask;
// Returns a byte mask with (8 - v) low bits set, where v is a count of topmost zero bits.
private int getMaskByte(int v) {
switch (v) {
case 8: return 0;
case 7: return 1;
case 6: return 3;
case 5: return 7;
case 4: return 15;
case 3: return 31;
case 2: return 63;
case 1: return 127;
case 0: return 255;
default: System.out.println("!!!!"); return 255;
}
}
public int countTopmostZeroBits(byte []arr) {
int result = 0;
mask = new byte[arr.length];
for (int i = arr.length-1; i >= 0; i--) {
int v = countTopmostZeroBits(arr[i]);
result += v;
if (v < 8) {
mask[i] = (byte) getMaskByte(v);
for (int h = i-1; h >= 0; h-- ) {
if (h >= 0)
mask[h] = (byte)255;
}
break;
}
}
return result;
}
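// Worked example (comment only): the array is scanned from the last (most significant) byte
// downwards. For arr = {0x12, 0x34, 0x00, 0x00} the two trailing zero bytes contribute 8 bits
// each and 0x34 contributes 2 more, so the method returns 18 and leaves
// mask = {0xFF, 0x3F, 0x00, 0x00} for use by the masking step in the hash routines.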
public static String byteSwap(String datas) {
String newStr = "";
for (int i = 0; i < datas.length() ; i += 8) {
String s0 = datas.substring(i, i + 2);
String s1 = datas.substring(i + 2, i + 4);
String s2 = datas.substring(i + 4, i + 6);
String s3 = datas.substring(i + 6, i + 8);
newStr += s3 + s2 + s1 + s0;
}
return newStr;
}
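// Example (comment only): byteSwap reverses the byte order within each 4-byte (8 hex digit)
// word, e.g. byteSwap("01020304") returns "04030201" and
// byteSwap("0102030405060708") returns "0403020108070605".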
public byte [] hash256(byte [] a) throws NoSuchAlgorithmException {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
// hash1 size: 32 bytes
byte [] hash1 = digest.digest(a);
digest.reset();
// hash2 size: 32 bytes
byte [] hash2 = digest.digest(hash1);
return hash2;
//System.out.println("hash2 len: " + hash2.length);
}
public static byte [] swap(byte [] a) {
byte [] res = new byte[a.length];
for (int i = 0; i < a.length; i++) {
res[a.length - 1 - i] = a[i];
}
return res;
}
public static boolean DEBUG = false;
public byte [] firstPartHash(byte[] header) throws GeneralSecurityException {
if (header.length != 80)
throw new RuntimeException("wrong header len! must be 80");
//
byte a, b, c, d;
a = header[75];
b = header[74];
c = header[73];
d = header[72];
int nSize;
nSize = a & 0xFF;
boolean negative = (b & 0x80) != 0;
int nWord = ((b & 0x7F) << 16) + ((c & 0xFF) << 8) + (d & 0xFF);
if (DEBUG) {
System.out.println("size=" + nSize);
System.out.println("negative=" + negative);
System.out.println("nWord=" + nWord);
}
BigInteger hashTarget = new BigInteger("" + nWord, 10);
hashTarget = hashTarget.shiftLeft( 8 * (nSize -3));
double [] dd = HasherData.getRow(nSize);
int multiplier = (int) dd[0];
int rParam = (int) dd[1];
int pParam = (int) dd[2];
//
if (DEBUG)
System.out.println("multiplier: " + multiplier + ", rParam: " + rParam + ", pParam: " + pParam);
if (header.length != 80) {
throw new RuntimeException("header.length != 80");
}
// get first 68 bytes of array, out of 80
byte [] h68 = subarray(header,68);
//
if (DEBUG) {
System.out.println("h68");
printHex2(h68);
}
//
long t1,t2;
t1 = System.currentTimeMillis();
// s68 has a size of 68 bytes
byte [] s68;
s68 = SCrypt.scryptJ(h68, h68, 1024 * multiplier, rParam, pParam, 68);
if (DEBUG) {
System.out.print("scryptJ: ");
printHex2(s68);
}
t2 = System.currentTimeMillis();
checkLen(s68, 68);
//System.out.println("hashTarget: " + hashTarget);
//System.out.print("hashTarget(binary): ");
//print( swap( hashTarget.toByteArray() ) );
int topmostZeroBits1 = countTopmostZeroBits( swap( hashTarget.toByteArray() ) );
if (DEBUG) {
System.out.println("topmostZeroBits=" + topmostZeroBits1);
System.out.println("hashTarget bit len=" + hashTarget.bitLength());
}
// max length: 512 bit
if (DEBUG)
System.out.println("***dt(scrypt1)=" + (t2-t1));
//
// s68 size: 68 bytes
s68 = xor(h68, s68);
checkLen(s68, 68);
if (DEBUG) {
System.out.print("xor: ");
printHex2(s68);
}
//
byte [] s68nonce = new byte[80];
arraycopy(s68, 0, s68nonce, 0, 68);
// keep nTime and nBits fields
arraycopy(header,68, s68nonce, 68, 8);
return s68nonce;
}
public byte [] secondPartHash(byte[] header, byte [] targetBits) throws NoSuchAlgorithmException, GeneralSecurityException {
if (header.length != 80)
throw new RuntimeException("wrong header len! must be 80. instead it is " + header.length);
//
byte a, b, c, d;
a = targetBits[3];
b = targetBits[2];
c = targetBits[1];
d = targetBits[0];
int nSize;
nSize = a & 0xFF;
boolean negative = (b & 0x80) != 0;
int nWord = ((b & 0x7F) << 16) + ((c & 0xFF) << 8) + (d & 0xFF);
if (DEBUG) {
System.out.println("size=" + nSize);
System.out.println("negative=" + negative);
System.out.println("nWord=" + nWord);
}
BigInteger hashTarget = new BigInteger("" + nWord, 10);
hashTarget = hashTarget.shiftLeft( 8 * (nSize -3));
if (DEBUG) {
System.out.println("hashTarget: " + hashTarget.toString(16));
}
double [] dd = HasherData.getRow(nSize);
int multiplier = (int) dd[0];
int rParam = (int) dd[1];
int pParam = (int) dd[2];
//
byte [] s256 = hash256(header);
//BigInteger hash = new BigInteger( swap(s256) );
BigInteger hash = new BigInteger(1, swap(s256));
if (hash.compareTo(BigInteger.ZERO) <= 0) {
//System.out.println();
System.out.print("NEG: ");
printHex2(s256);
throw new RuntimeException();
}
if (DEBUG) {
System.out.print("hash256: ");
printHex2(s256);
}
//
// this prepares also the mask byte array
int topmostZeroBits = countTopmostZeroBits(s256);
//
if (DEBUG) {
System.out.println("mask");
printHex2(mask);
System.out.println("topmostZeroBits=" + topmostZeroBits);
}
byte [] sc256 = SCrypt.scryptJ(s256, s256, 1024 * multiplier, rParam, pParam, 32);
byte [] maskedSc256 = and(sc256, mask);
//
if (DEBUG) {
System.out.println("sc256 = SCrypt.scryptJ(s256, s256, 1024, 1, 4, 32)");
printHex2(sc256);
System.out.println("verify: topmostZeroBits=" + countTopmostZeroBits(maskedSc256));
}
byte [] finalHash = xor(s256, maskedSc256 );
return finalHash;
}
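// Usage sketch (comment only; variable names are placeholders). The two-part flow mirrors
// singleHash() below: firstPartHash scrypt-transforms the first 68 header bytes once per work
// unit, the nonce is then written into bytes 76..79, and secondPartHash finishes each candidate.
//
// byte[] prepared = hasher.firstPartHash(header80); // header80 is the 80-byte block header
// prepared[76] = (byte) (nonce >> 0);
// prepared[77] = (byte) (nonce >> 8);
// prepared[78] = (byte) (nonce >> 16);
// prepared[79] = (byte) (nonce >> 24);
// byte[] candidate = hasher.secondPartHash(prepared, targetBits); // targetBits = header bytes 72..75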
public byte[] singleHash(byte[] header, int nonce) throws GeneralSecurityException {
byte a, b, c, d;
a = header[75];
b = header[74];
c = header[73];
d = header[72];
int nSize;
nSize = a & 0xFF;
boolean negative = (b & 0x80) != 0;
int nWord = ((b & 0x7F) << 16) + ((c & 0xFF) << 8) + (d & 0xFF);
if (DEBUG) {
System.out.println("size=" + nSize);
System.out.println("negative=" + negative);
System.out.println("nWord=" + nWord);
}
BigInteger hashTarget = new BigInteger("" + nWord, 10);
hashTarget = hashTarget.shiftLeft( 8 * (nSize -3));
/////////////////////////////////////////////////////////////////////
double [] dd = HasherData.getRow(nSize);
int multiplier = (int) dd[0];
int rParam = (int) dd[1];
int pParam = (int) dd[2];
if (DEBUG)
System.out.println("multiplier: " + multiplier + ", rParam: " + rParam + ", pParam: " + pParam);
if (header.length != 80) {
throw new RuntimeException("header.length != 80");
}
if (DEBUG) {
System.out.println("header");
printHex2(header);
}
// get first 68 bytes of array, out of 80
byte [] h68 = subarray(header,68);
//
if (DEBUG) {
System.out.println("h68");
printHex2(h68);
}
//
long t1,t2;
t1 = System.currentTimeMillis();
// s68 has a size of 68 bytes
byte [] s68;
s68 = SCrypt.scryptJ(h68, h68, 1024 * multiplier, rParam, pParam, 68);
if (DEBUG) {
System.out.print("scryptJ: ");
printHex2(s68);
}
t2 = System.currentTimeMillis();
checkLen(s68, 68);
int topmostZeroBits1 = countTopmostZeroBits( swap( hashTarget.toByteArray() ) );
if (DEBUG) {
System.out.println("topmostZeroBits=" + topmostZeroBits1);
System.out.println("hashTarget bit len=" + hashTarget.bitLength());
}
// max length: 512 bit
if (DEBUG)
System.out.println("***dt(scrypt1)=" + (t2-t1));
//
// s68 size: 68 bytes
s68 = xor(h68, s68);
checkLen(s68, 68);
if (DEBUG) {
System.out.print("xor: ");
printHex2(s68);
}
//
byte [] s68nonce = new byte[80];
arraycopy(s68, 0, s68nonce, 0, 68);
// keep the nTime, nBits and nonce fields (the nonce bytes are overwritten below)
arraycopy(header,68, s68nonce, 68, 12);
// nTime field, keep it
/*
s76nonce[68] = header[68];
s76nonce[69] = header[69];
s76nonce[70] = header[70];
s76nonce[71] = header[71];
*/
if (DEBUG) {
System.out.print("s68nonce: ");
printHex2(s68nonce);
}
//
//
//System.out.println("s76nonce = SCrypt.scryptJ(h76, h76, 1024, 1, 1, 76) | nonce");
//print(s76nonce);
//
byte [] s256 = null;
//boolean found = false;
t1 = System.currentTimeMillis();
if (true) {
s68nonce[76] = (byte) (nonce >> 0);
s68nonce[77] = (byte) (nonce >> 8);
s68nonce[78] = (byte) (nonce >> 16);
s68nonce[79] = (byte) (nonce >> 24);
s256 = hash256(s68nonce);
//BigInteger hash = new BigInteger( swap(s256) );
BigInteger hash = new BigInteger(1, swap(s256));
if (hash.compareTo(BigInteger.ZERO) <= 0) {
//System.out.println();
System.out.print("NEG: ");
printHex2(s256);
throw new RuntimeException();
//continue;
}
}
t2 = System.currentTimeMillis();
if (DEBUG)
System.out.println("***dt(hash256)=" + (t2-t1));
//checkLen(s256, 32);
//
//System.out.println("s256 = hash256(s76nonce)");
//print(s256);
if (DEBUG) {
System.out.print("hash256: ");
printHex2(s256);
}
//
// this prepares also the mask byte array
int topmostZeroBits = countTopmostZeroBits(s256);
//
if (DEBUG) {
System.out.println("mask");
printHex2(mask);
System.out.println("topmostZeroBits=" + topmostZeroBits);
}
t1 = System.currentTimeMillis();
byte [] sc256 = SCrypt.scryptJ(s256, s256, 1024 * multiplier, rParam, pParam, 32);
t2 = System.currentTimeMillis();
if (DEBUG)
System.out.println("***dt(scrypt2)=" + (t2-t1));
byte [] maskedSc256 = and(sc256, mask);
//
if (DEBUG) {
System.out.println("sc256 = SCrypt.scryptJ(s256, s256, 1024, 1, 4, 32)");
printHex2(sc256);
System.out.println("verify: topmostZeroBits=" + countTopmostZeroBits(maskedSc256));
}
byte [] finalHash = xor(s256, maskedSc256 );
//
//System.out.println("s256");
//print(s256);
//
if (DEBUG) {
System.out.println("maskedSc256 = and(sc256, mask)");
printHex2(maskedSc256);
//
System.out.println("finalHash = xor(s256, maskedSc256 )");
printHex2(finalHash);
//print(finalHash);
System.out.println("hashTarget");
printHex2( swap(hashTarget.toByteArray()) );
}
return finalHash;
//////////////////////////////////////////////////////////////////////
}
public static BigInteger readCompact(byte a, byte b, byte c, byte d) {
int nSize = a & 0xFF;
boolean negative = (b & 0x80) != 0;
int nWord = ((b & 0x7F) << 16) + ((c & 0xFF) << 8) + (d & 0xFF);
if (DEBUG) {
System.out.println("size=" + nSize);
System.out.println("negative=" + negative);
System.out.println("nWord=" + nWord);
}
BigInteger result = new BigInteger("" + nWord, 10);
result = result.shiftLeft( 8 * (nSize -3));
if (DEBUG)
System.out.println("result=" + result.toString(16));
return result;
}
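// Worked example (comment only): for the compact value 0x1d00ffff (the familiar Bitcoin-style
// minimum-difficulty encoding) a = 0x1d, b = 0x00, c = 0xff, d = 0xff, so nWord = 0x00ffff and
// the result is 0xffff shifted left by 8 * (0x1d - 3) = 208 bits, i.e. the 256-bit target whose
// hex form starts 00000000ffff and is followed by 52 zeros.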
private static void printHex2(byte [] b) {
for (int i = 0; i < b.length ; i++) {
System.out.print( ff2( Integer.toHexString(((int) b[i]) & 0xFF )) + " ");
}
System.out.println();
}
private static String ff2(String a) {
while (a.length() < 2)
a = "0" + a;
return a;
}
}
|
|
package p2;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
/**
* This class displays a GUI for the Barbershop example, and also contains the
* startup method. It implements the Gui interface containing methods publicly
* available to be used by the Doorman, Barber and CustomerQueue classes.
*
* It should not be necessary to edit this class (but feel free to do so if you wish).
*/
public class BarbershopGui extends JFrame implements Gui, ChangeListener {
/** Various images used by the GUI */
public static Image tableImage;
public static Image deskImage;
public static Image loungeChairImage;
public static Image barberChairImage;
public static Image floorImage;
public static Image wallsImage;
public static Image barberImage;
public static Image sleepImage;
public static Image[] customerImages;
/** The text area displaying textual output to the user */
private TextArea display;
/** The panel showing the barbershop salon */
private RoomPanel roomPanel;
/** The panel containing sliders and the output area */
private JPanel controlPanel;
/** The sliders controlling the speeds of different tasks */
private JSlider barberSleepSlider, barberWorkSlider, doormanSleepSlider;
/** A reference to the doorman */
private Doorman doorman;
/** An array of references to the barbers */
private Barber barbers[];
/**
* Creates a new GUI.
 * @param title The title of the GUI window.
*/
public BarbershopGui(String title) {
super(title);
loadImages();
placeComponents();
setSize(706,427);
setResizable(false);
// Add an anonymous WindowListener which calls quit() when the window is closing
addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
quit();
}
});
setVisible(true);
}
/**
* Creates the customer queue, the doorman, and the barbers,
* and starts the simulation by starting the doorman and barber
* threads.
*/
public void startSimulation() {
CustomerQueue queue = new CustomerQueue(Constants.NOF_CHAIRS, this);
doorman = new Doorman(queue, this);
doorman.startThread();
barbers = new Barber[Constants.NOF_BARBERS];
for(int i = 0; i < Constants.NOF_BARBERS; i++) {
barbers[i] = new Barber(queue, this, i);
barbers[i].startThread();
}
}
/**
* Stops all threads and exits the program.
*/
private void quit() {
doorman.stopThread();
for(int i = 0; i < Constants.NOF_BARBERS; i++)
barbers[i].stopThread();
System.exit(0);
}
/**
* Loads an image from a file.
* @param tk The toolkit to be used to load the image.
* @param file The name of the file containing the image.
* @param tracker The media tracker tracking the progress of the load.
* @return The image that was loaded, as an Image object.
*/
private Image loadImage(Toolkit tk, String file, MediaTracker tracker) {
Image result = tk.createImage(file);
tracker.addImage(result, 0);
return result;
}
/**
* Loads all images to be used by the GUI, and waits for them to
* be fully loaded before returning.
*/
private void loadImages() {
MediaTracker tracker = new MediaTracker(this);
Toolkit tk = Toolkit.getDefaultToolkit();
wallsImage = loadImage(tk, "src/images/walls.gif", tracker);
floorImage = loadImage(tk, "src/images/floor.gif", tracker);
loungeChairImage = loadImage(tk, "src/images/loungechair.gif", tracker);
barberChairImage = loadImage(tk, "src/images/barberchair.gif", tracker);
barberImage = loadImage(tk, "src/images/barber.gif", tracker);
customerImages = new Image[Constants.NOF_CUSTOMER_LOOKS];
for(int i = 0; i < Constants.NOF_CUSTOMER_LOOKS; i++) {
customerImages[i] = loadImage(tk, "src/images/customer"+i+".gif", tracker);
}
tableImage = loadImage(tk, "src/images/table.gif", tracker);
deskImage = loadImage(tk, "src/images/desk.gif", tracker);
sleepImage = loadImage(tk, "src/images/sleep.gif", tracker);
try {
tracker.waitForID(0);
} catch (InterruptedException ie) {}
}
/**
* Creates and places all components of the GUI.
*/
private void placeComponents(){
display = new TextArea(4,30);
display.setEditable(false);
roomPanel = new RoomPanel();
controlPanel = new JPanel();
barberSleepSlider = new JSlider(Constants.MIN_BARBER_SLEEP, Constants.MAX_BARBER_SLEEP, Globals.barberSleep);
barberWorkSlider = new JSlider(Constants.MIN_BARBER_WORK, Constants.MAX_BARBER_WORK, Globals.barberWork);
doormanSleepSlider = new JSlider(Constants.MIN_DOORMAN_SLEEP, Constants.MAX_DOORMAN_SLEEP, Globals.doormanSleep);
barberSleepSlider.addChangeListener(this);
barberWorkSlider.addChangeListener(this);
doormanSleepSlider.addChangeListener(this);
controlPanel.setLayout(null);
controlPanel.add(doormanSleepSlider);
controlPanel.add(barberSleepSlider);
controlPanel.add(barberWorkSlider);
controlPanel.add(display);
addSliderLabels(controlPanel,10,10,280,20,Constants.MIN_DOORMAN_SLEEP,Constants.MAX_DOORMAN_SLEEP,"Doorman sleep time");
doormanSleepSlider.setBounds(10,30,280,20);
addSliderLabels(controlPanel,10,50,280,20,Constants.MIN_BARBER_SLEEP,Constants.MAX_BARBER_SLEEP,"Barber sleep time");
barberSleepSlider.setBounds(10,70,280,20);
addSliderLabels(controlPanel,10,90,280,20,Constants.MIN_BARBER_WORK,Constants.MAX_BARBER_WORK,"Barber work time");
barberWorkSlider.setBounds(10,110,280,20);
display.setBounds(10,150,280,240);
controlPanel.setPreferredSize(new Dimension(300,400));
Container cp = getContentPane();
cp.setLayout(new BorderLayout());
cp.add(roomPanel, BorderLayout.CENTER);
cp.add(controlPanel, BorderLayout.EAST);
}
/**
* Creates, adds and positions labels above a slider.
* @param p The panel to add the labels to.
* @param x The x position of the leftmost label.
* @param y The y position of the topmost label.
 * @param w The total width, from the left edge of the leftmost label to the right edge of the rightmost label.
* @param h The height of the labels.
* @param minValue The value to be displayed on the left label.
* @param maxValue The value to be displayed on the right label.
* @param text The text to be displayed in the central label.
*/
private void addSliderLabels(JPanel p, int x, int y, int w, int h, int minValue, int maxValue, String text) {
JLabel left, middle, right;
left = new JLabel(""+minValue);
left.setHorizontalAlignment(JLabel.LEFT);
left.setOpaque(false);
p.add(left);
left.setBounds(x,y,w,h);
middle = new JLabel(text);
middle.setHorizontalAlignment(JLabel.CENTER);
middle.setOpaque(false);
p.add(middle);
middle.setBounds(x,y,w,h);
right = new JLabel(""+maxValue);
right.setHorizontalAlignment(JLabel.RIGHT);
right.setOpaque(false);
p.add(right);
right.setBounds(x,y,w,h);
}
/**
* Called when one of the sliders' knobs has been moved.
* @param e The ChangeEvent describing the change.
*/
public void stateChanged(ChangeEvent e) {
Globals.barberWork = barberWorkSlider.getValue();
Globals.barberSleep = barberSleepSlider.getValue();
Globals.doormanSleep = doormanSleepSlider.getValue();
}
/**
* Outputs a text string to the user.
* @param text The text to be outputted.
*/
public synchronized void println(String text) {
display.append(text+"\n");
}
/**
* Shows a customer sitting in a waiting lounge chair.
* @param pos The position of the chair.
* @param customer The customer that is sitting in that chair.
*/
public void fillLoungeChair(int pos, Customer customer) {
roomPanel.fillLoungeChair(pos, customer);
repaint();
}
/**
* Shows a waiting lounge chair as being unoccupied.
* @param pos The position of the chair.
*/
public void emptyLoungeChair(int pos) {
roomPanel.emptyLoungeChair(pos);
repaint();
}
/**
* Shows a customer sitting in a barber's chair.
 * @param pos The position of the barber chair.
 * @param customer The customer that is sitting in that chair.
 */
public void fillBarberChair(int pos, Customer customer) {
roomPanel.fillBarberChair(pos, customer);
repaint();
}
/**
* Shows a barber chair as being unoccupied.
* @param pos The position of the barber chair.
*/
public void emptyBarberChair(int pos) {
roomPanel.emptyBarberChair(pos);
repaint();
}
/**
* Shows a barber sleeping.
* @param pos The position of the barber's chair.
*/
public void barberIsSleeping(int pos) {
roomPanel.setBarberSleep(pos, true);
repaint();
}
/**
* Shows a barber as being awake.
* @param pos The position of the barber's chair.
*/
public void barberIsAwake(int pos) {
roomPanel.setBarberSleep(pos, false);
repaint();
}
/**
* The startup method.
* @param args Parameters passed to the program from the command line, none expected.
*/
public static void main(String args[]) {
BarbershopGui gui = new BarbershopGui("Solution to P2");
gui.startSimulation();
}
}
|
|
package org.tensorflow.demo;
/*
* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
public class LegacyCameraConnectionFragment extends Fragment {
private Camera camera;
private static final Logger LOGGER = new Logger();
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/**
* The layout identifier to inflate for this Fragment.
*/
private int layout;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView textureView;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread backgroundThread;
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
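  /**
   * Stops the preview, clears the callback and releases the camera so other apps can use it.
   */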
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
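  /**
   * Returns the id of the first back-facing camera, or -1 if none is found.
   */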
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
return i;
}
return -1; // No camera found
}
}
|
|
/*
* Copyright (C) 2014 Kalin Maldzhanski
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apptik.json.test;
import junit.framework.TestCase;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import io.apptik.json.JsonArray;
import io.apptik.json.JsonObject;
import io.apptik.json.exception.JsonException;
import static io.apptik.json.JsonNull.JSON_NULL;
/**
 * This black-box test was written without inspecting the non-free org.json source code.
*/
@RunWith(JUnit4.class)
public class JsonArrayTest extends TestCase {
@Test
public void testEmptyArray() throws JsonException {
JsonArray array = new JsonArray();
assertEquals(0, array.length());
try {
array.get(0);
fail();
} catch (JsonException e) {
}
try {
array.getBoolean(0);
fail();
} catch (JsonException e) {
}
assertEquals("[]", array.toString());
// out of bounds is co-opted with defaulting
assertTrue(array.isNull(0));
assertNull(array.opt(0));
assertNull(array.optBoolean(0));
assertTrue(array.optBoolean(0, Boolean.TRUE));
// bogus (but documented) behaviour: returns null rather than an empty object!
assertNull(array.toJsonObject(new JsonArray()));
}
@Test
public void testEqualsAndHashCode() throws JsonException {
JsonArray a = new JsonArray();
JsonArray b = new JsonArray();
assertTrue(a.equals(b));
assertEquals("equals() not consistent with hashCode()", a.hashCode(), b.hashCode());
a.put(Boolean.TRUE);
a.put(Boolean.FALSE);
b.put(Boolean.TRUE);
b.put(Boolean.FALSE);
assertTrue(a.equals(b));
assertEquals(a.hashCode(), b.hashCode());
b.put(Boolean.TRUE);
assertFalse(a.equals(b));
assertTrue(a.hashCode() != b.hashCode());
}
@Test
public void testBooleans() throws JsonException {
JsonArray array = new JsonArray();
array.put(Boolean.TRUE);
array.put(Boolean.FALSE);
array.put(2, Boolean.FALSE);
array.put(3, Boolean.FALSE);
array.put(2, Boolean.TRUE);
assertEquals("[true,false,true,false]", array.toString());
assertEquals(4, array.length());
assertTrue(array.getBoolean(0));
assertFalse(array.getBoolean(1));
assertTrue(array.getBoolean(2));
assertFalse(array.getBoolean(3));
assertFalse(array.isNull(0));
assertFalse(array.isNull(1));
assertFalse(array.isNull(2));
assertFalse(array.isNull(3));
assertEquals(Boolean.TRUE, array.optBoolean(0));
assertEquals(Boolean.FALSE, array.optBoolean(1, Boolean.TRUE));
assertEquals(Boolean.TRUE, array.optBoolean(2, Boolean.FALSE));
assertEquals(Boolean.FALSE, array.optBoolean(3));
assertEquals("true", array.getString(0));
assertEquals("false", array.getString(1));
assertEquals("true", array.optString(2));
assertEquals("false", array.optString(3, "x"));
JsonArray other = new JsonArray();
other.put(Boolean.TRUE);
other.put(Boolean.FALSE);
other.put(Boolean.TRUE);
other.put(Boolean.FALSE);
assertTrue(array.equals(other));
other.put(Boolean.TRUE);
assertFalse(array.equals(other));
other = new JsonArray();
other.put("true");
other.put("false");
other.put("truE");
other.put("FALSE");
assertFalse(array.equals(other));
assertFalse(other.equals(array));
assertEquals(Boolean.TRUE, other.getBoolean(0));
assertEquals(Boolean.FALSE, other.optBoolean(1, Boolean.TRUE));
assertEquals(Boolean.TRUE, other.optBoolean(2));
assertEquals(Boolean.FALSE, other.getBoolean(3));
}
// http://code.google.com/p/android/issues/detail?id=16411
@Test
public void testCoerceStringToBoolean() throws JsonException {
JsonArray array = new JsonArray();
array.put("maybe");
try {
array.getBoolean(0);
fail();
} catch (JsonException expected) {
}
assertNull(array.optBoolean(0));
assertTrue(array.optBoolean(0, Boolean.TRUE));
}
@Test
public void testNulls() throws JsonException {
JsonArray array = new JsonArray();
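        // Putting a value at index 3 pads the intervening slots with nulls.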
array.put(3, JSON_NULL);
array.put(0, JSON_NULL);
assertEquals(4, array.length());
assertEquals("[null,null,null,null]", array.toString());
assertEquals(array.get(0), null);
assertEquals(array.get(1), null);
assertEquals(array.get(2), null);
assertEquals(array.get(3), null);
assertEquals(array.opt(0), null);
assertEquals(array.opt(1), null);
assertEquals(array.opt(2), null);
assertEquals(array.opt(3), null);
assertTrue(array.isNull(0));
assertTrue(array.isNull(1));
assertTrue(array.isNull(2));
assertTrue(array.isNull(3));
assertEquals("null", array.optString(0));
}
@Test
public void testParseNullDoNotYieldJsonObjectNull() throws JsonException, IOException {
JsonArray array = JsonArray.readFrom( "[\"null\",null]").asJsonArray();
array.put(null);
assertEquals(array.get(0),"null");
assertEquals(array.get(1), null);
assertEquals(array.get(1), JSON_NULL);
assertEquals(2,array.length());
assertEquals("null", array.get(0).toString());
assertEquals("null", array.get(1).toString());
}
@Test
public void testNumbers() throws JsonException {
JsonArray array = new JsonArray();
array.put(Double.MIN_VALUE);
array.put(9223372036854775806L);
array.put(Double.MAX_VALUE);
array.put(-0d);
assertEquals(4, array.length());
// toString() and getString(int) return the same values for -0d
assertEquals("[4.9E-324,9223372036854775806,1.7976931348623157E308,-0.0]", array.toString());
assertEquals(array.get(0),Double.MIN_VALUE);
assertEquals(array.get(1),9223372036854775806L);
assertEquals(array.get(2), Double.MAX_VALUE);
assertEquals(array.get(3), -0d);
assertEquals(array.getDouble(0), Double.MIN_VALUE);
assertEquals(array.getDouble(1), 9.223372036854776E18);
assertEquals(array.getDouble(2), Double.MAX_VALUE);
assertEquals(array.getDouble(3), -0d);
assertEquals(Long.valueOf(0), array.getLong(0));
assertEquals(Long.valueOf(9223372036854775806l), array.getLong(1));
assertEquals((Long)Long.MAX_VALUE, array.getLong(2));
assertEquals(Long.valueOf(0), array.getLong(3));
assertEquals((Integer)0, array.getInt(0));
assertEquals(Integer.valueOf(-2), array.getInt(1));
assertEquals((Integer)Integer.MAX_VALUE, array.getInt(2));
assertEquals(Integer.valueOf(0), array.getInt(3));
assertEquals(array.opt(0), Double.MIN_VALUE);
assertEquals(Double.MIN_VALUE, array.optDouble(0));
assertEquals(Long.valueOf(0), array.optLong(0, 1L));
assertEquals(Integer.valueOf(0), array.optInt(0, 1));
assertEquals("4.9E-324", array.getString(0));
assertEquals("9223372036854775806", array.getString(1));
assertEquals("1.7976931348623157E308", array.getString(2));
assertEquals("-0.0", array.getString(3));
JsonArray other = new JsonArray();
other.put(Double.MIN_VALUE);
other.put(9223372036854775806L);
other.put(Double.MAX_VALUE);
other.put(-0d);
assertTrue(array.equals(other));
other.put(0, 0L);
assertFalse(array.equals(other));
}
@Test
public void testStrings() throws JsonException {
JsonArray array = new JsonArray();
array.put("true");
array.put("5.5");
array.put("9223372036854775806");
array.put("null");
array.put("5\"8' tall");
assertEquals(5, array.length());
assertEquals("[\"true\",\"5.5\",\"9223372036854775806\",\"null\",\"5\\\"8' tall\"]",
array.toString());
assertEquals(array.get(0), "true");
assertEquals("null", array.getString(3));
assertEquals("5\"8' tall", array.getString(4));
assertEquals(array.opt(0), "true");
assertEquals("5.5", array.optString(1));
assertEquals("9223372036854775806", array.optString(2, null));
assertEquals("null", array.optString(3, "-1"));
assertFalse(array.isNull(0));
assertFalse(array.isNull(3));
assertEquals(Boolean.TRUE, array.getBoolean(0));
assertEquals(Boolean.TRUE, array.optBoolean(0));
assertEquals(Boolean.TRUE, array.optBoolean(0, Boolean.FALSE));
assertNull(array.optInt(0));
assertEquals(Integer.valueOf(-2), array.optInt(0, -2));
assertEquals(5.5d, array.getDouble(1));
assertEquals(Long.valueOf(5L), array.getLong(1));
assertEquals(Integer.valueOf(5), array.getInt(1));
assertEquals(Integer.valueOf(5), array.optInt(1, 3));
// The last digit of the string is a 6 but getLong returns a 7. It's probably parsing as a
// double and then converting that to a long. This is consistent with JavaScript.
assertEquals(Long.valueOf(9223372036854775807L), array.getLong(2));
assertEquals(9.223372036854776E18, array.getDouble(2));
assertEquals((Integer)Integer.MAX_VALUE, array.getInt(2));
assertFalse(array.isNull(3));
try {
array.getDouble(3);
fail();
} catch (JsonException e) {
}
assertNull(array.optDouble(3));
assertEquals(-1.0d, array.optDouble(3, -1.0d));
}
@Test
public void testToJsonObject() throws JsonException {
JsonArray keys = new JsonArray();
keys.put("a");
keys.put("b");
JsonArray values = new JsonArray();
values.put(5.5d);
values.put(Boolean.FALSE);
JsonObject object = values.toJsonObject(keys);
assertEquals(object.get("a"), 5.5d);
assertEquals(object.get("b"), Boolean.FALSE);
keys.put(0, "a");
values.put(0, 11.0d);
assertEquals(object.get("a"), 5.5d);
}
@Test
public void testToJsonObjectWithNulls() throws JsonException {
JsonArray keys = new JsonArray();
keys.put("a");
keys.put("b");
JsonArray values = new JsonArray();
values.put(5.5d);
values.put(null);
// null values are stripped!
JsonObject object = values.toJsonObject(keys);
assertEquals(1, object.length());
assertFalse(object.has("b"));
assertEquals("{\"a\":5.5}", object.toString());
}
@Test
public void testToJsonObjectMoreNamesThanValues() throws JsonException {
JsonArray keys = new JsonArray();
keys.put("a");
keys.put("b");
JsonArray values = new JsonArray();
values.put(5.5d);
JsonObject object = values.toJsonObject(keys);
assertEquals(1, object.length());
assertEquals(object.get("a"), 5.5d);
}
@Test
public void testToJsonObjectMoreValuesThanNames() throws JsonException {
JsonArray keys = new JsonArray();
keys.put("a");
JsonArray values = new JsonArray();
values.put(5.5d);
values.put(11.0d);
JsonObject object = values.toJsonObject(keys);
assertEquals(1, object.length());
assertEquals(object.get("a"), 5.5d);
}
@Test
public void testToJsonObjectNullKey() throws JsonException {
JsonArray keys = new JsonArray();
keys.put(JSON_NULL);
JsonArray values = new JsonArray();
values.put(5.5d);
JsonObject object = values.toJsonObject(keys);
assertEquals(1, object.length());
assertEquals(5.5d, object.getDouble("null"));
}
@Test
public void testPutUnsupportedNumbers() throws JsonException {
JsonArray array = new JsonArray();
try {
array.put(Double.NaN);
fail();
} catch (IllegalArgumentException e) {
}
try {
array.put(0, Double.NEGATIVE_INFINITY);
fail();
} catch (IllegalArgumentException e) {
}
try {
array.put(0, Double.POSITIVE_INFINITY);
fail();
} catch (IllegalArgumentException e) {
}
}
@Test(expected=IllegalArgumentException.class)
public void testPutUnsupportedNumbersAsObject() throws JsonException {
JsonArray array = new JsonArray();
array.put(Double.valueOf(Double.NaN));
array.put(Double.valueOf(Double.NEGATIVE_INFINITY));
array.put(Double.valueOf(Double.POSITIVE_INFINITY));
assertEquals(null, array.toString());
}
    /**
     * The JsonArray constructor itself rejects unsupported numbers such as NaN.
     */
@Test(expected=IllegalArgumentException.class)
public void testCreateWithUnsupportedNumbers() throws JsonException {
JsonArray array = new JsonArray(Arrays.asList(5.5, Double.NaN));
}
@Test(expected=IllegalArgumentException.class)
public void testToStringWithUnsupportedNumbers() throws JsonException {
JsonArray array = new JsonArray(Arrays.asList(5.5, Double.NaN));
// when the array contains an unsupported number, toString fails
array.toString();
}
@Test
public void testListConstructorCopiesContents() throws JsonException {
List<Object> contents = Arrays.<Object>asList(5);
JsonArray array = new JsonArray(contents);
contents.set(0, 10);
assertEquals(array.get(0), 5);
}
@Test
public void testCreate() throws JsonException {
JsonArray array = new JsonArray(Arrays.asList(5.5, Boolean.TRUE));
assertEquals(2, array.length());
assertEquals(5.5, array.getDouble(0));
assertEquals(array.get(1), Boolean.TRUE);
assertEquals("[5.5,true]", array.toString());
}
@Test
public void testAccessOutOfBounds() throws JsonException {
JsonArray array = new JsonArray();
array.put("foo");
assertNull(array.opt(3));
assertNull(array.opt(-3));
assertNull(array.optString(3));
assertNull(array.optString(-3));
try {
array.get(3);
fail();
} catch (JsonException e) {
}
try {
array.get(-3);
fail();
} catch (JsonException e) {
}
try {
array.getString(3);
fail();
} catch (JsonException e) {
}
try {
array.getString(-3);
fail();
} catch (JsonException e) {
}
}
@Test
public void test_remove() throws Exception {
JsonArray a = new JsonArray();
assertEquals(a.remove(-1), null);
assertEquals(a.remove(0), null);
a.put("hello");
assertEquals(a.remove(-1), null);
assertEquals(a.remove(1), null);
assertEquals(a.remove(0), "hello");
assertEquals(a.remove(0), null);
}
enum MyEnum { A, B, C }
// https://code.google.com/p/android/issues/detail?id=62539
@Test
public void testEnums() throws Exception {
// This works because it's in java.* and any class in there falls back to toString.
JsonArray a1 = new JsonArray(java.lang.annotation.RetentionPolicy.values());
assertEquals("[\"SOURCE\",\"CLASS\",\"RUNTIME\"]", a1.toString());
        // This should also work for user-defined enums outside java.*.
JsonArray a2 = new JsonArray(MyEnum.values());
assertEquals("[\"A\",\"B\",\"C\"]", a2.toString());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.rest.controller;
import java.util.List;
import java.util.Set;
import javax.persistence.EntityExistsException;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.syncope.common.to.BulkAction;
import org.apache.syncope.common.to.BulkActionRes;
import org.apache.syncope.common.to.ConnObjectTO;
import org.apache.syncope.common.to.ResourceTO;
import org.apache.syncope.common.types.AttributableType;
import org.apache.syncope.common.types.AuditElements;
import org.apache.syncope.common.types.AuditElements.Category;
import org.apache.syncope.common.types.AuditElements.ResourceSubCategory;
import org.apache.syncope.common.types.AuditElements.Result;
import org.apache.syncope.common.types.MappingPurpose;
import org.apache.syncope.common.types.SyncopeClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientCompositeErrorException;
import org.apache.syncope.common.validation.SyncopeClientException;
import org.apache.syncope.core.audit.AuditManager;
import org.apache.syncope.core.connid.ConnObjectUtil;
import org.apache.syncope.core.init.ImplementationClassNamesLoader;
import org.apache.syncope.core.persistence.beans.AbstractAttributable;
import org.apache.syncope.core.persistence.beans.ConnInstance;
import org.apache.syncope.core.persistence.beans.ExternalResource;
import org.apache.syncope.core.persistence.dao.ConnInstanceDAO;
import org.apache.syncope.core.persistence.dao.NotFoundException;
import org.apache.syncope.core.persistence.dao.ResourceDAO;
import org.apache.syncope.core.persistence.dao.RoleDAO;
import org.apache.syncope.core.persistence.dao.UserDAO;
import org.apache.syncope.core.propagation.ConnectorFactory;
import org.apache.syncope.core.propagation.Connector;
import org.apache.syncope.core.rest.data.ResourceDataBinder;
import org.apache.syncope.core.util.AttributableUtil;
import org.apache.syncope.core.util.MappingUtil;
import org.identityconnectors.framework.common.objects.Attribute;
import org.identityconnectors.framework.common.objects.AttributeUtil;
import org.identityconnectors.framework.common.objects.ConnectorObject;
import org.identityconnectors.framework.common.objects.Name;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.identityconnectors.framework.common.objects.Uid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping("/resource")
public class ResourceController extends AbstractController {
@Autowired
private AuditManager auditManager;
@Autowired
private ResourceDAO resourceDAO;
@Autowired
private ConnInstanceDAO connInstanceDAO;
@Autowired
private UserDAO userDAO;
@Autowired
private RoleDAO roleDAO;
@Autowired
private ResourceDataBinder binder;
@Autowired
private ImplementationClassNamesLoader classNamesLoader;
/**
* ConnectorObject util.
*/
@Autowired
private ConnObjectUtil connObjectUtil;
@Autowired
private ConnectorFactory connFactory;
@PreAuthorize("hasRole('RESOURCE_CREATE')")
@RequestMapping(method = RequestMethod.POST, value = "/create")
public ResourceTO create(final HttpServletResponse response, @RequestBody final ResourceTO resourceTO) {
LOG.debug("Resource creation: {}", resourceTO);
if (StringUtils.isBlank(resourceTO.getName())) {
SyncopeClientCompositeErrorException sccee =
new SyncopeClientCompositeErrorException(HttpStatus.BAD_REQUEST);
SyncopeClientException sce = new SyncopeClientException(SyncopeClientExceptionType.RequiredValuesMissing);
sce.addElement("Resource name");
sccee.addException(sce);
throw sccee;
}
if (resourceDAO.find(resourceTO.getName()) != null) {
throw new EntityExistsException("Resource '" + resourceTO.getName() + "'");
}
ExternalResource resource = resourceDAO.save(binder.create(resourceTO));
auditManager.audit(Category.resource, ResourceSubCategory.create, Result.success,
"Successfully created resource: " + resource.getName());
response.setStatus(HttpServletResponse.SC_CREATED);
return binder.getResourceTO(resource);
}
@PreAuthorize("hasRole('RESOURCE_UPDATE')")
@RequestMapping(method = RequestMethod.POST, value = "/update")
public ResourceTO update(@RequestBody final ResourceTO resourceTO) {
LOG.debug("Role update request: {}", resourceTO);
ExternalResource resource = resourceDAO.find(resourceTO.getName());
if (resource == null) {
throw new NotFoundException("Resource '" + resourceTO.getName() + "'");
}
resource = binder.update(resource, resourceTO);
resource = resourceDAO.save(resource);
auditManager.audit(Category.resource, ResourceSubCategory.update, Result.success,
"Successfully updated resource: " + resource.getName());
return binder.getResourceTO(resource);
}
@PreAuthorize("hasRole('RESOURCE_DELETE')")
@RequestMapping(method = RequestMethod.GET, value = "/delete/{resourceName}")
public ResourceTO delete(@PathVariable("resourceName") final String resourceName) {
ExternalResource resource = resourceDAO.find(resourceName);
if (resource == null) {
throw new NotFoundException("Resource '" + resourceName + "'");
}
ResourceTO resourceToDelete = binder.getResourceTO(resource);
auditManager.audit(Category.resource, ResourceSubCategory.delete, Result.success,
"Successfully deleted resource: " + resource.getName());
resourceDAO.delete(resourceName);
return resourceToDelete;
}
@PreAuthorize("hasRole('RESOURCE_READ')")
@Transactional(readOnly = true)
@RequestMapping(method = RequestMethod.GET, value = "/read/{resourceName}")
public ResourceTO read(@PathVariable("resourceName") final String resourceName) {
ExternalResource resource = resourceDAO.find(resourceName);
if (resource == null) {
throw new NotFoundException("Resource '" + resourceName + "'");
}
auditManager.audit(Category.resource, ResourceSubCategory.read, Result.success,
"Successfully read resource: " + resource.getName());
return binder.getResourceTO(resource);
}
@PreAuthorize("hasRole('RESOURCE_READ')")
@RequestMapping(method = RequestMethod.GET, value = "/propagationActionsClasses")
public ModelAndView getPropagationActionsClasses() {
Set<String> actionsClasses = classNamesLoader.getClassNames(
ImplementationClassNamesLoader.Type.PROPAGATION_ACTIONS);
auditManager.audit(Category.resource, AuditElements.ResourceSubCategory.getPropagationActionsClasses,
Result.success, "Successfully listed all PropagationActions classes: " + actionsClasses.size());
return new ModelAndView().addObject(actionsClasses);
}
@Transactional(readOnly = true)
@RequestMapping(method = RequestMethod.GET, value = "/list")
public List<ResourceTO> list(@RequestParam(required = false, value = "connInstanceId") final Long connInstanceId) {
List<ExternalResource> resources;
if (connInstanceId == null) {
resources = resourceDAO.findAll();
} else {
ConnInstance connInstance = connInstanceDAO.find(connInstanceId);
resources = connInstance.getResources();
}
List<ResourceTO> result = binder.getResourceTOs(resources);
auditManager.audit(Category.resource, ResourceSubCategory.list, Result.success,
connInstanceId == null
? "Successfully listed all resources: " + result.size()
: "Successfully listed resources for connector " + connInstanceId + ": " + result.size());
return result;
}
@PreAuthorize("hasRole('RESOURCE_GETCONNECTOROBJECT')")
@Transactional(readOnly = true)
@RequestMapping(method = RequestMethod.GET, value = "/{resourceName}/read/{type}/{id}")
public ConnObjectTO getConnectorObject(@PathVariable("resourceName") final String resourceName,
@PathVariable("type") final AttributableType type, @PathVariable("id") final Long id) {
ExternalResource resource = resourceDAO.find(resourceName);
if (resource == null) {
throw new NotFoundException("Resource '" + resourceName + "'");
}
AbstractAttributable attributable = null;
switch (type) {
case USER:
attributable = userDAO.find(id);
break;
case ROLE:
attributable = roleDAO.find(id);
break;
case MEMBERSHIP:
default:
throw new IllegalArgumentException("Not supported for MEMBERSHIP");
}
if (attributable == null) {
throw new NotFoundException(type + " '" + id + "'");
}
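        // Resolve the account id mapped on the external resource, then fetch the live object from the connector.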
final AttributableUtil attrUtil = AttributableUtil.getInstance(type);
final String accountIdValue =
MappingUtil.getAccountIdValue(attributable, resource, attrUtil.getAccountIdItem(resource));
final ObjectClass objectClass = AttributableType.USER == type ? ObjectClass.ACCOUNT : ObjectClass.GROUP;
final Connector connector = connFactory.getConnector(resource);
final ConnectorObject connectorObject = connector.getObject(objectClass, new Uid(accountIdValue),
connector.getOperationOptions(attrUtil.getMappingItems(resource, MappingPurpose.BOTH)));
if (connectorObject == null) {
throw new NotFoundException("Object " + accountIdValue + " with class " + objectClass
+ "not found on resource " + resourceName);
}
final Set<Attribute> attributes = connectorObject.getAttributes();
if (AttributeUtil.find(Uid.NAME, attributes) == null) {
attributes.add(connectorObject.getUid());
}
if (AttributeUtil.find(Name.NAME, attributes) == null) {
attributes.add(connectorObject.getName());
}
auditManager.audit(Category.resource, ResourceSubCategory.getObject, Result.success,
"Successfully read object " + accountIdValue + " with class " + objectClass
+ " from resource " + resourceName);
return connObjectUtil.getConnObjectTO(connectorObject);
}
@PreAuthorize("hasRole('CONNECTOR_READ')")
@RequestMapping(method = RequestMethod.POST, value = "/check")
@Transactional(readOnly = true)
public ModelAndView check(@RequestBody final ResourceTO resourceTO) {
final ConnInstance connInstance = binder.getConnInstance(resourceTO);
final Connector connector = connFactory.createConnector(connInstance, connInstance.getConfiguration());
boolean result;
try {
connector.test();
result = true;
auditManager.audit(Category.connector, AuditElements.ConnectorSubCategory.check, Result.success,
"Successfully checked connector: " + resourceTO);
} catch (Exception e) {
auditManager.audit(Category.connector, AuditElements.ConnectorSubCategory.check, Result.failure,
"Unsuccessful check for connector: " + resourceTO, e);
LOG.error("Test connection failure {}", e);
result = false;
}
return new ModelAndView().addObject(result);
}
@PreAuthorize("hasRole('RESOURCE_DELETE') and #bulkAction.operation == #bulkAction.operation.DELETE")
@RequestMapping(method = RequestMethod.POST, value = "/bulk")
public BulkActionRes bulkAction(@RequestBody final BulkAction bulkAction) {
LOG.debug("Bulk action '{}' called on '{}'", bulkAction.getOperation(), bulkAction.getTargets());
BulkActionRes res = new BulkActionRes();
switch (bulkAction.getOperation()) {
case DELETE:
for (String name : bulkAction.getTargets()) {
try {
res.add(delete(name).getName(), BulkActionRes.Status.SUCCESS);
} catch (Exception e) {
LOG.error("Error performing delete for resource {}", name, e);
res.add(name, BulkActionRes.Status.FAILURE);
}
}
break;
default:
}
return res;
}
}
|
|
package com.cloud.server;
import com.cloud.agent.AgentManager;
import com.cloud.cluster.ManagementServerHostVO;
import com.cloud.cluster.dao.ManagementServerHostDao;
import com.cloud.engine.subsystem.api.storage.DataStore;
import com.cloud.engine.subsystem.api.storage.DataStoreManager;
import com.cloud.engine.subsystem.api.storage.EndPoint;
import com.cloud.engine.subsystem.api.storage.EndPointSelector;
import com.cloud.framework.config.dao.ConfigurationDao;
import com.cloud.gpu.dao.HostGpuGroupsDao;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.legacymodel.communication.answer.Answer;
import com.cloud.legacymodel.communication.command.GetStorageStatsCommand;
import com.cloud.legacymodel.dc.HostStats;
import com.cloud.legacymodel.dc.HostStatsEntry;
import com.cloud.legacymodel.dc.HostStatus;
import com.cloud.legacymodel.exceptions.StorageUnavailableException;
import com.cloud.legacymodel.resource.ResourceState;
import com.cloud.legacymodel.storage.StorageStats;
import com.cloud.legacymodel.storage.VmDiskStatsEntry;
import com.cloud.legacymodel.storage.VolumeStats;
import com.cloud.legacymodel.vm.VgpuTypesInfo;
import com.cloud.legacymodel.vm.VmStats;
import com.cloud.legacymodel.vm.VmStatsEntry;
import com.cloud.common.managed.context.ManagedContextRunnable;
import com.cloud.model.enumeration.HostType;
import com.cloud.model.enumeration.HypervisorType;
import com.cloud.model.enumeration.VirtualMachineType;
import com.cloud.resource.ResourceManager;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.StorageManager;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.datastore.db.ImageStoreDao;
import com.cloud.storage.datastore.db.PrimaryDataStoreDao;
import com.cloud.storage.datastore.db.StoragePoolVO;
import com.cloud.user.VmDiskStatisticsVO;
import com.cloud.user.dao.VmDiskStatisticsDao;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.component.ComponentMethodInterceptable;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.db.TransactionCallbackNoReturn;
import com.cloud.utils.db.TransactionStatus;
import com.cloud.utils.graphite.GraphiteClient;
import com.cloud.utils.graphite.GraphiteException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.usage.UsageUtils;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
import javax.inject.Inject;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
 * Provides near real-time stats for various agent resources, collected at configurable intervals
*/
@Component
public class StatsCollector extends ManagerBase implements ComponentMethodInterceptable {
public static final Logger s_logger = LoggerFactory.getLogger(StatsCollector.class.getName());
private static final int ACQUIRE_GLOBAL_LOCK_TIMEOUT_FOR_COOPERATION = 5; // 5 seconds
private static StatsCollector s_instance = null;
private final ConcurrentHashMap<Long, VmStats> _VmStats = new ConcurrentHashMap<>();
private final ConcurrentHashMap<Long, VolumeStats> _volumeStats = new ConcurrentHashMap<>();
private final double _imageStoreCapacityThreshold = 0.90;
private final long mgmtSrvrId = MacAddress.getMacAddress().toLong();
long hostStatsInterval = -1L;
long hostAndVmStatsInterval = -1L;
long storageStatsInterval = -1L;
long volumeStatsInterval = -1L;
int vmDiskStatsInterval = 0;
List<Long> hostIds = null;
String externalStatsPrefix = "";
String externalStatsHost = null;
int externalStatsPort = -1;
boolean externalStatsEnabled = false;
ExternalStatsProtocol externalStatsType = ExternalStatsProtocol.NONE;
private ScheduledExecutorService _executor = null;
@Inject
private AgentManager _agentMgr;
@Inject
private UserVmManager _userVmMgr;
@Inject
private HostDao _hostDao;
@Inject
private UserVmDao _userVmDao;
@Inject
private VolumeDao _volsDao;
@Inject
private PrimaryDataStoreDao _storagePoolDao;
@Inject
private ImageStoreDao _imageStoreDao;
@Inject
private StorageManager _storageManager;
@Inject
private StoragePoolHostDao _storagePoolHostDao;
@Inject
private DataStoreManager _dataStoreMgr;
@Inject
private ResourceManager _resourceMgr;
@Inject
private ConfigurationDao _configDao;
@Inject
private EndPointSelector _epSelector;
@Inject
private VmDiskStatisticsDao _vmDiskStatsDao;
@Inject
private ManagementServerHostDao _msHostDao;
@Inject
private VMInstanceDao _vmInstance;
@Inject
private ServiceOfferingDao _serviceOfferingDao;
@Inject
private HostGpuGroupsDao _hostGpuGroupsDao;
private ConcurrentHashMap<Long, HostStats> _hostStats = new ConcurrentHashMap<>();
private ConcurrentHashMap<Long, StorageStats> _storageStats = new ConcurrentHashMap<>();
private ConcurrentHashMap<Long, StorageStats> _storagePoolStats = new ConcurrentHashMap<>();
private ScheduledExecutorService _diskStatsUpdateExecutor;
private int _usageAggregationRange = 1440;
private String _usageTimeZone = "GMT";
private boolean _dailyOrHourly = false;
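    // Constructed by Spring as a @Component; the constructor registers this instance as the process-wide singleton.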
public StatsCollector() {
s_instance = this;
}
//private final GlobalLock m_capacityCheckLock = GlobalLock.getInternLock("capacity.check");
public static StatsCollector getInstance() {
return s_instance;
}
public static StatsCollector getInstance(final Map<String, String> configs) {
s_instance.init(configs);
return s_instance;
}
private void init(final Map<String, String> configs) {
_executor = Executors.newScheduledThreadPool(4, new NamedThreadFactory("StatsCollector"));
hostStatsInterval = NumbersUtil.parseLong(configs.get("host.stats.interval"), 60000L);
hostAndVmStatsInterval = NumbersUtil.parseLong(configs.get("vm.stats.interval"), 60000L);
storageStatsInterval = NumbersUtil.parseLong(configs.get("storage.stats.interval"), 60000L);
volumeStatsInterval = NumbersUtil.parseLong(configs.get("volume.stats.interval"), -1L);
vmDiskStatsInterval = NumbersUtil.parseInt(configs.get("vm.disk.stats.interval"), 0);
/* URI to send statistics to. Currently only Graphite is supported */
final String externalStatsUri = configs.get("stats.output.uri");
if (externalStatsUri != null && !externalStatsUri.equals("")) {
try {
final URI uri = new URI(externalStatsUri);
final String scheme = uri.getScheme();
try {
externalStatsType = ExternalStatsProtocol.valueOf(scheme.toUpperCase());
} catch (final IllegalArgumentException e) {
                    s_logger.info(scheme + " is not a valid protocol for external statistics. No statistics will be sent.");
}
externalStatsHost = uri.getHost();
externalStatsPort = uri.getPort();
externalStatsPrefix = uri.getPath().substring(1);
/* Append a dot (.) to the prefix if it is set */
if (externalStatsPrefix != null && !externalStatsPrefix.equals("")) {
externalStatsPrefix += ".";
} else {
externalStatsPrefix = "";
}
externalStatsEnabled = true;
} catch (final URISyntaxException e) {
s_logger.debug("Failed to parse external statistics URI: " + e.getMessage());
}
}
if (hostStatsInterval > 0) {
_executor.scheduleWithFixedDelay(new HostCollector(), 15000L, hostStatsInterval, TimeUnit.MILLISECONDS);
}
if (hostAndVmStatsInterval > 0) {
_executor.scheduleWithFixedDelay(new VmStatsCollector(), 15000L, hostAndVmStatsInterval, TimeUnit.MILLISECONDS);
}
if (storageStatsInterval > 0) {
_executor.scheduleWithFixedDelay(new StorageCollector(), 15000L, storageStatsInterval, TimeUnit.MILLISECONDS);
}
if (vmDiskStatsInterval > 0) {
if (vmDiskStatsInterval < 300) {
vmDiskStatsInterval = 300;
}
_executor.scheduleAtFixedRate(new VmDiskStatsTask(), vmDiskStatsInterval, vmDiskStatsInterval, TimeUnit.SECONDS);
}
//Schedule disk stats update task
_diskStatsUpdateExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("DiskStatsUpdater"));
final String aggregationRange = configs.get("usage.stats.job.aggregation.range");
_usageAggregationRange = NumbersUtil.parseInt(aggregationRange, 1440);
_usageTimeZone = configs.get("usage.aggregation.timezone");
if (_usageTimeZone == null) {
_usageTimeZone = "GMT";
}
final TimeZone usageTimezone = TimeZone.getTimeZone(_usageTimeZone);
final Calendar cal = Calendar.getInstance(usageTimezone);
cal.setTime(new Date());
final long endDate;
final int HOURLY_TIME = 60;
final int DAILY_TIME = 60 * 24;
if (_usageAggregationRange == DAILY_TIME) {
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
cal.roll(Calendar.DAY_OF_YEAR, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
_dailyOrHourly = true;
} else if (_usageAggregationRange == HOURLY_TIME) {
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
cal.roll(Calendar.HOUR_OF_DAY, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
_dailyOrHourly = true;
} else {
endDate = cal.getTime().getTime();
_dailyOrHourly = false;
}
if (_usageAggregationRange < UsageUtils.USAGE_AGGREGATION_RANGE_MIN) {
s_logger.warn("Usage stats job aggregation range is to small, using the minimum value of " + UsageUtils.USAGE_AGGREGATION_RANGE_MIN);
_usageAggregationRange = UsageUtils.USAGE_AGGREGATION_RANGE_MIN;
}
_diskStatsUpdateExecutor.scheduleAtFixedRate(new VmDiskStatsUpdaterTask(), (endDate - System.currentTimeMillis()), (_usageAggregationRange * 60 * 1000),
TimeUnit.MILLISECONDS);
}
@Override
public boolean start() {
init(_configDao.getConfiguration());
return true;
}
public VmStats getVmStats(final long id) {
return _VmStats.get(id);
}
public boolean imageStoreHasEnoughCapacity(final DataStore imageStore) {
final StorageStats imageStoreStats = _storageStats.get(imageStore.getId());
if (imageStoreStats != null && (imageStoreStats.getByteUsed() / (imageStoreStats.getCapacityBytes() * 1.0)) <= _imageStoreCapacityThreshold) {
return true;
}
return false;
}
public StorageStats getStorageStats(final long id) {
return _storageStats.get(id);
}
public HostStats getHostStats(final long hostId) {
return _hostStats.get(hostId);
}
public StorageStats getStoragePoolStats(final long id) {
return _storagePoolStats.get(id);
}
public enum ExternalStatsProtocol {
NONE("none"), GRAPHITE("graphite");
String _type;
ExternalStatsProtocol(final String type) {
_type = type;
}
@Override
public String toString() {
return _type;
}
}
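    // Periodically polls host statistics and GPU group details from all eligible hosts.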
class HostCollector extends ManagedContextRunnable {
@Override
protected void runInContext() {
s_logger.debug("HostStatsCollector is running...");
final SearchCriteria<HostVO> sc = _hostDao.createSearchCriteria();
sc.addAnd("status", SearchCriteria.Op.EQ, HostStatus.Up.toString());
sc.addAnd("resourceState", SearchCriteria.Op.NIN, ResourceState.Maintenance, ResourceState.PrepareForMaintenance, ResourceState.ErrorInMaintenance);
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.Storage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.ConsoleProxy.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.SecondaryStorage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.LocalSecondaryStorage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.TrafficMonitor.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.SecondaryStorageVM.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.ExternalLoadBalancer.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.L2Networking.toString());
final ConcurrentHashMap<Long, HostStats> hostStats = new ConcurrentHashMap<>();
final List<HostVO> hosts = _hostDao.search(sc, null);
for (final HostVO host : hosts) {
final HostStatsEntry stats = (HostStatsEntry) _resourceMgr.getHostStatistics(host.getId());
if (stats != null) {
hostStats.put(host.getId(), stats);
} else {
s_logger.warn("Received invalid host stats for host: " + host.getId());
}
}
_hostStats = hostStats;
// Get a subset of hosts with GPU support from the list of "hosts"
List<HostVO> gpuEnabledHosts = new ArrayList<>();
if (hostIds != null) {
for (final HostVO host : hosts) {
if (hostIds.contains(host.getId())) {
gpuEnabledHosts.add(host);
}
}
} else {
// Check for all the hosts managed by CloudStack.
gpuEnabledHosts = hosts;
}
for (final HostVO host : gpuEnabledHosts) {
final HashMap<String, HashMap<String, VgpuTypesInfo>> groupDetails = _resourceMgr.getGPUStatistics(host);
if (groupDetails != null) {
_resourceMgr.updateGPUDetails(host.getId(), groupDetails);
}
}
hostIds = _hostGpuGroupsDao.listHostIds();
}
}
class VmStatsCollector extends ManagedContextRunnable {
@Override
protected void runInContext() {
s_logger.debug("VmStatsCollector is running...");
final SearchCriteria<HostVO> sc = _hostDao.createSearchCriteria();
sc.addAnd("status", SearchCriteria.Op.EQ, HostStatus.Up.toString());
sc.addAnd("resourceState", SearchCriteria.Op.NIN, ResourceState.Maintenance, ResourceState.PrepareForMaintenance, ResourceState.ErrorInMaintenance);
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.Storage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.ConsoleProxy.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.SecondaryStorage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.LocalSecondaryStorage.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.TrafficMonitor.toString());
sc.addAnd("type", SearchCriteria.Op.NEQ, HostType.SecondaryStorageVM.toString());
final List<HostVO> hosts = _hostDao.search(sc, null);
            /* HashMap for metrics to be sent to Graphite */
final HashMap metrics = new HashMap<>();
for (final HostVO host : hosts) {
final List<UserVmVO> vms = _userVmDao.listRunningByHostId(host.getId());
final List<Long> vmIds = new ArrayList<>();
for (final UserVmVO vm : vms) {
vmIds.add(vm.getId());
}
final HashMap<Long, VmStatsEntry> vmStatsById = _userVmMgr.getVirtualMachineStatistics(host.getId(), host.getName(), vmIds);
if (vmStatsById != null) {
VmStatsEntry statsInMemory;
final Set<Long> vmIdSet = vmStatsById.keySet();
for (final Long vmId : vmIdSet) {
final VmStatsEntry statsForCurrentIteration = vmStatsById.get(vmId);
statsInMemory = (VmStatsEntry) _VmStats.get(vmId);
if (statsInMemory == null) {
//no stats exist for this vm, directly persist
_VmStats.put(vmId, statsForCurrentIteration);
} else {
//update each field
statsInMemory.setCPUUtilization(statsForCurrentIteration.getCPUUtilization());
statsInMemory.setNumCPUs(statsForCurrentIteration.getNumCPUs());
statsInMemory.setNetworkReadKBs(statsInMemory.getNetworkReadKBs() + statsForCurrentIteration.getNetworkReadKBs());
statsInMemory.setNetworkWriteKBs(statsInMemory.getNetworkWriteKBs() + statsForCurrentIteration.getNetworkWriteKBs());
statsInMemory.setDiskWriteKBs(statsInMemory.getDiskWriteKBs() + statsForCurrentIteration.getDiskWriteKBs());
statsInMemory.setDiskReadIOs(statsInMemory.getDiskReadIOs() + statsForCurrentIteration.getDiskReadIOs());
statsInMemory.setDiskWriteIOs(statsInMemory.getDiskWriteIOs() + statsForCurrentIteration.getDiskWriteIOs());
statsInMemory.setDiskReadKBs(statsInMemory.getDiskReadKBs() + statsForCurrentIteration.getDiskReadKBs());
_VmStats.put(vmId, statsInMemory);
}
                        /**
                         * Add statistics to the HashMap only when they should be sent to an external stats collector.
                         * Performance-wise it seems best to only append to the HashMap when needed.
                         */
if (externalStatsEnabled) {
final VMInstanceVO vmVO = _vmInstance.findById(vmId);
final String vmName = vmVO.getUuid();
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".cpu.num", statsForCurrentIteration.getNumCPUs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".cpu.utilization", statsForCurrentIteration.getCPUUtilization());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".network.read_kbs", statsForCurrentIteration.getNetworkReadKBs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".network.write_kbs", statsForCurrentIteration.getNetworkWriteKBs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".disk.write_kbs", statsForCurrentIteration.getDiskWriteKBs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".disk.read_kbs", statsForCurrentIteration.getDiskReadKBs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".disk.write_iops", statsForCurrentIteration.getDiskWriteIOs());
metrics.put(externalStatsPrefix + "cloudstack.stats.instances." + vmName + ".disk.read_iops", statsForCurrentIteration.getDiskReadIOs());
}
}
                    /**
                     * Send the metrics to an external stats collector.
                     * We send them on a per-host basis to avoid flooding the collector host.
                     * Currently only Graphite is supported.
                     */
if (!metrics.isEmpty()) {
if (externalStatsType != null && externalStatsType == ExternalStatsProtocol.GRAPHITE) {
if (externalStatsPort == -1) {
externalStatsPort = 2003;
}
s_logger.debug("Sending VmStats of host " + host.getId() + " to Graphite host " + externalStatsHost + ":" + externalStatsPort);
try {
final GraphiteClient g = new GraphiteClient(externalStatsHost, externalStatsPort);
g.sendMetrics(metrics);
} catch (final GraphiteException e) {
s_logger.debug("Failed sending VmStats to Graphite host " + externalStatsHost + ":" + externalStatsPort + ": " + e.getMessage());
}
metrics.clear();
}
}
}
}
}
}
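    // Aggregates per-VM disk statistics; only the management server host with the lowest id runs this job.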
class VmDiskStatsUpdaterTask extends ManagedContextRunnable {
@Override
protected void runInContext() {
final GlobalLock scanLock = GlobalLock.getInternLock("vm.disk.stats");
try {
if (scanLock.lock(ACQUIRE_GLOBAL_LOCK_TIMEOUT_FOR_COOPERATION)) {
//Check for ownership
//msHost in UP state with min id should run the job
final ManagementServerHostVO msHost = _msHostDao.findOneInUpState(new Filter(ManagementServerHostVO.class, "id", true, 0L, 1L));
if (msHost == null || (msHost.getMsid() != mgmtSrvrId)) {
s_logger.debug("Skipping aggregate disk stats update");
scanLock.unlock();
return;
}
try {
Transaction.execute(new TransactionCallbackNoReturn() {
@Override
public void doInTransactionWithoutResult(final TransactionStatus status) {
//get all stats with delta > 0
final List<VmDiskStatisticsVO> updatedVmNetStats = _vmDiskStatsDao.listUpdatedStats();
for (final VmDiskStatisticsVO stat : updatedVmNetStats) {
if (_dailyOrHourly) {
//update agg bytes
stat.setAggBytesRead(stat.getCurrentBytesRead() + stat.getNetBytesRead());
stat.setAggBytesWrite(stat.getCurrentBytesWrite() + stat.getNetBytesWrite());
stat.setAggIORead(stat.getCurrentIORead() + stat.getNetIORead());
stat.setAggIOWrite(stat.getCurrentIOWrite() + stat.getNetIOWrite());
_vmDiskStatsDao.update(stat.getId(), stat);
}
}
s_logger.debug("Successfully updated aggregate vm disk stats");
}
});
} catch (final Exception e) {
s_logger.debug("Failed to update aggregate disk stats", e);
} finally {
scanLock.unlock();
}
}
} catch (final Exception e) {
s_logger.debug("Exception while trying to acquire disk stats lock", e);
} finally {
scanLock.releaseRef();
}
}
}
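    // Pulls per-VM disk I/O counters from the hypervisors (KVM only) and persists the deltas.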
class VmDiskStatsTask extends ManagedContextRunnable {
@Override
protected void runInContext() {
            // Collect the VM disk statistics (totals) from the hypervisor. Added by weizhou, 2013.03.
try {
Transaction.execute(new TransactionCallbackNoReturn() {
@Override
public void doInTransactionWithoutResult(final TransactionStatus status) {
final SearchCriteria<HostVO> sc = _hostDao.createSearchCriteria();
sc.addAnd("status", SearchCriteria.Op.EQ, HostStatus.Up.toString());
sc.addAnd("resourceState", SearchCriteria.Op.NIN, ResourceState.Maintenance, ResourceState.PrepareForMaintenance,
ResourceState.ErrorInMaintenance);
sc.addAnd("type", SearchCriteria.Op.EQ, HostType.Routing.toString());
sc.addAnd("hypervisorType", SearchCriteria.Op.EQ, HypervisorType.KVM); // support KVM only util 2013.06.25
final List<HostVO> hosts = _hostDao.search(sc, null);
for (final HostVO host : hosts) {
final List<UserVmVO> vms = _userVmDao.listRunningByHostId(host.getId());
final List<Long> vmIds = new ArrayList<>();
for (final UserVmVO vm : vms) {
if (vm.getType() == VirtualMachineType.User) // user vm
{
vmIds.add(vm.getId());
}
}
final HashMap<Long, List<VmDiskStatsEntry>> vmDiskStatsById = _userVmMgr.getVmDiskStatistics(host.getId(), host.getName(), vmIds);
if (vmDiskStatsById == null) {
continue;
}
final Set<Long> vmIdSet = vmDiskStatsById.keySet();
for (final Long vmId : vmIdSet) {
final List<VmDiskStatsEntry> vmDiskStats = vmDiskStatsById.get(vmId);
if (vmDiskStats == null) {
continue;
}
final UserVmVO userVm = _userVmDao.findById(vmId);
for (final VmDiskStatsEntry vmDiskStat : vmDiskStats) {
final SearchCriteria<VolumeVO> sc_volume = _volsDao.createSearchCriteria();
sc_volume.addAnd("path", SearchCriteria.Op.EQ, vmDiskStat.getPath());
final List<VolumeVO> volumes = _volsDao.search(sc_volume, null);
if ((volumes == null) || (volumes.size() == 0)) {
break;
}
final VolumeVO volume = volumes.get(0);
final VmDiskStatisticsVO previousVmDiskStats =
_vmDiskStatsDao.findBy(userVm.getAccountId(), userVm.getDataCenterId(), vmId, volume.getId());
final VmDiskStatisticsVO vmDiskStat_lock = _vmDiskStatsDao.lock(userVm.getAccountId(), userVm.getDataCenterId(), vmId, volume.getId());
if ((vmDiskStat.getBytesRead() == 0) && (vmDiskStat.getBytesWrite() == 0) && (vmDiskStat.getIORead() == 0) &&
(vmDiskStat.getIOWrite() == 0)) {
s_logger.debug("IO/bytes read and write are all 0. Not updating vm_disk_statistics");
continue;
}
if (vmDiskStat_lock == null) {
s_logger.warn("unable to find vm disk stats from host for account: " + userVm.getAccountId() + " with vmId: " + userVm.getId() +
" and volumeId:" + volume.getId());
continue;
}
if (previousVmDiskStats != null &&
((previousVmDiskStats.getCurrentBytesRead() != vmDiskStat_lock.getCurrentBytesRead()) ||
(previousVmDiskStats.getCurrentBytesWrite() != vmDiskStat_lock.getCurrentBytesWrite()) ||
(previousVmDiskStats.getCurrentIORead() != vmDiskStat_lock.getCurrentIORead()) || (previousVmDiskStats.getCurrentIOWrite() !=
vmDiskStat_lock.getCurrentIOWrite()))) {
s_logger.debug("vm disk stats changed from the time GetVmDiskStatsCommand was sent. " + "Ignoring current answer. Host: " +
host.getName() + " . VM: " + vmDiskStat.getVmName() + " Read(Bytes): " + vmDiskStat.getBytesRead() + " write(Bytes): " +
vmDiskStat.getBytesWrite() + " Read(IO): " + vmDiskStat.getIORead() + " write(IO): " + vmDiskStat.getIOWrite());
continue;
}
if (vmDiskStat_lock.getCurrentBytesRead() > vmDiskStat.getBytesRead()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Read # of bytes that's less than the last one. " +
"Assuming something went wrong and persisting it. Host: " + host.getName() + " . VM: " + vmDiskStat.getVmName() +
" Reported: " + vmDiskStat.getBytesRead() + " Stored: " + vmDiskStat_lock.getCurrentBytesRead());
}
vmDiskStat_lock.setNetBytesRead(vmDiskStat_lock.getNetBytesRead() + vmDiskStat_lock.getCurrentBytesRead());
}
vmDiskStat_lock.setCurrentBytesRead(vmDiskStat.getBytesRead());
if (vmDiskStat_lock.getCurrentBytesWrite() > vmDiskStat.getBytesWrite()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Write # of bytes that's less than the last one. " +
"Assuming something went wrong and persisting it. Host: " + host.getName() + " . VM: " + vmDiskStat.getVmName() +
" Reported: " + vmDiskStat.getBytesWrite() + " Stored: " + vmDiskStat_lock.getCurrentBytesWrite());
}
vmDiskStat_lock.setNetBytesWrite(vmDiskStat_lock.getNetBytesWrite() + vmDiskStat_lock.getCurrentBytesWrite());
}
vmDiskStat_lock.setCurrentBytesWrite(vmDiskStat.getBytesWrite());
if (vmDiskStat_lock.getCurrentIORead() > vmDiskStat.getIORead()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Read # of IO that's less than the last one. " + "Assuming something went wrong and persisting it. Host: " +
host.getName() + " . VM: " + vmDiskStat.getVmName() + " Reported: " + vmDiskStat.getIORead() + " Stored: " +
vmDiskStat_lock.getCurrentIORead());
}
vmDiskStat_lock.setNetIORead(vmDiskStat_lock.getNetIORead() + vmDiskStat_lock.getCurrentIORead());
}
vmDiskStat_lock.setCurrentIORead(vmDiskStat.getIORead());
if (vmDiskStat_lock.getCurrentIOWrite() > vmDiskStat.getIOWrite()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Write # of IO that's less than the last one. " + "Assuming something went wrong and persisting it. Host: " +
host.getName() + " . VM: " + vmDiskStat.getVmName() + " Reported: " + vmDiskStat.getIOWrite() + " Stored: " +
vmDiskStat_lock.getCurrentIOWrite());
}
vmDiskStat_lock.setNetIOWrite(vmDiskStat_lock.getNetIOWrite() + vmDiskStat_lock.getCurrentIOWrite());
}
vmDiskStat_lock.setCurrentIOWrite(vmDiskStat.getIOWrite());
if (!_dailyOrHourly) {
//update agg bytes
vmDiskStat_lock.setAggBytesWrite(vmDiskStat_lock.getNetBytesWrite() + vmDiskStat_lock.getCurrentBytesWrite());
vmDiskStat_lock.setAggBytesRead(vmDiskStat_lock.getNetBytesRead() + vmDiskStat_lock.getCurrentBytesRead());
vmDiskStat_lock.setAggIOWrite(vmDiskStat_lock.getNetIOWrite() + vmDiskStat_lock.getCurrentIOWrite());
vmDiskStat_lock.setAggIORead(vmDiskStat_lock.getNetIORead() + vmDiskStat_lock.getCurrentIORead());
}
_vmDiskStatsDao.update(vmDiskStat_lock.getId(), vmDiskStat_lock);
}
}
}
}
});
} catch (final Exception e) {
s_logger.warn("Error while collecting vm disk stats from hosts", e);
}
}
}
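    // Gathers capacity and usage figures for image stores and primary storage pools.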
class StorageCollector extends ManagedContextRunnable {
@Override
protected void runInContext() {
if (s_logger.isDebugEnabled()) {
s_logger.debug("StorageCollector is running...");
}
final List<DataStore> stores = _dataStoreMgr.listImageStores();
final ConcurrentHashMap<Long, StorageStats> storageStats = new ConcurrentHashMap<>();
for (final DataStore store : stores) {
if (store.getUri() == null) {
continue;
}
final GetStorageStatsCommand command = new GetStorageStatsCommand(store.getTO());
final EndPoint ssAhost = _epSelector.select(store);
if (ssAhost == null) {
s_logger.debug("There is no secondary storage VM for secondary storage host " + store.getName());
continue;
}
final long storeId = store.getId();
final Answer answer;
try {
answer = ssAhost.sendMessageOrBreak(command);
if (answer != null && answer.getResult()) {
storageStats.put(storeId, (StorageStats) answer);
s_logger.trace("HostId: " + storeId + " Used: " + ((StorageStats) answer).getByteUsed() + " Total Available: " +
((StorageStats) answer).getCapacityBytes());
}
} catch (final Exception e) {
s_logger.warn("Unable to get stats for store: " + storeId, e);
}
}
_storageStats = storageStats;
final ConcurrentHashMap<Long, StorageStats> storagePoolStats = new ConcurrentHashMap<>();
final List<StoragePoolVO> storagePools = _storagePoolDao.listAll();
for (final StoragePoolVO pool : storagePools) {
// check if the pool has enabled hosts
final List<Long> hostIds = _storageManager.getUpHostsInPool(pool.getId());
if (hostIds == null || hostIds.isEmpty()) {
continue;
}
final GetStorageStatsCommand command = new GetStorageStatsCommand(pool.getUuid(), pool.getPoolType(), pool.getPath());
final long poolId = pool.getId();
try {
final Answer answer = _storageManager.sendToPool(pool, command);
if (answer != null && answer.getResult()) {
storagePoolStats.put(pool.getId(), (StorageStats) answer);
                        // The pool capacity appears to have changed dynamically since the stored and reported sizes differ; persist the new value
if (_storagePoolStats.get(poolId) != null && _storagePoolStats.get(poolId).getCapacityBytes() != ((StorageStats) answer).getCapacityBytes()) {
pool.setCapacityBytes(((StorageStats) answer).getCapacityBytes());
_storagePoolDao.update(pool.getId(), pool);
}
}
} catch (final StorageUnavailableException e) {
s_logger.info("Unable to reach pool: " + pool, e);
} catch (final Exception e) {
s_logger.warn("Unable to get stats for pool: " + pool, e);
}
}
_storagePoolStats = storagePoolStats;
}
}
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.coverage.view;
import com.intellij.CommonBundle;
import com.intellij.coverage.*;
import com.intellij.execution.configurations.RunConfigurationBase;
import com.intellij.execution.configurations.coverage.JavaCoverageEnabledConfiguration;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.java.coverage.JavaCoverageBundle;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.SlowOperations;
import com.intellij.util.ui.ColumnInfo;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class JavaCoverageViewExtension extends CoverageViewExtension {
private final JavaCoverageAnnotator myAnnotator;
public JavaCoverageViewExtension(JavaCoverageAnnotator annotator,
Project project,
CoverageSuitesBundle suitesBundle,
CoverageViewManager.StateBean stateBean) {
super(project, suitesBundle, stateBean);
myAnnotator = annotator;
}
@Override
public String getSummaryForNode(AbstractTreeNode node) {
if (!myCoverageViewManager.isReady()) return CommonBundle.getLoadingTreeNodeText();
if (myCoverageDataManager.isSubCoverageActive()) {
return showSubCoverageNotification();
}
PsiPackage aPackage = (PsiPackage)node.getValue();
final String coverageInformationString = myAnnotator
.getPackageCoverageInformationString(aPackage, null, myCoverageDataManager, myStateBean.myFlattenPackages);
return JavaCoverageBundle.message("coverage.view.node.summary", getNotCoveredMessage(coverageInformationString),
aPackage != null ? aPackage.getQualifiedName() : node.getName());
}
private static @Nls String showSubCoverageNotification() {
return JavaCoverageBundle.message("sub.coverage.notification");
}
@Override
public String getSummaryForRootNode(AbstractTreeNode childNode) {
if (myCoverageDataManager.isSubCoverageActive()) {
return showSubCoverageNotification();
}
final Object value = childNode.getValue();
String coverageInformationString = myAnnotator.getPackageCoverageInformationString((PsiPackage)value, null,
myCoverageDataManager);
if (coverageInformationString == null) {
if (!myCoverageViewManager.isReady()) return CommonBundle.getLoadingTreeNodeText();
PackageAnnotator.SummaryCoverageInfo info = new PackageAnnotator.PackageCoverageInfo();
final Collection children = childNode.getChildren();
for (Object child : children) {
final Object childValue = ((CoverageListNode)child).getValue();
PackageAnnotator.SummaryCoverageInfo childInfo = getSummaryCoverageForNodeValue((AbstractTreeNode<?>)childValue);
info = JavaCoverageAnnotator.merge(info, childInfo);
}
coverageInformationString = JavaCoverageAnnotator.getCoverageInformationString(info, false);
}
return JavaCoverageBundle.message("coverage.view.root.node.summary", getNotCoveredMessage(coverageInformationString));
}
private static String getNotCoveredMessage(String coverageInformationString) {
if (coverageInformationString == null) {
coverageInformationString = JavaCoverageBundle.message("coverage.view.no.coverage");
}
return coverageInformationString;
}
@Override
public String getPercentage(int columnIndex, @NotNull AbstractTreeNode node) {
final PackageAnnotator.SummaryCoverageInfo info = getSummaryCoverageForNodeValue(node);
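// Column indices follow the order used in createColumnInfos(): 1 = class, 2 = method, 3 = line, 4 = branch coverage.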
if (columnIndex == 1) {
return myAnnotator.getClassCoveredPercentage(info);
}
if (columnIndex == 2) {
return myAnnotator.getMethodCoveredPercentage(info);
}
if (columnIndex == 3) {
return myAnnotator.getLineCoveredPercentage(info);
}
if (columnIndex == 4) {
return myAnnotator.getBranchCoveredPercentage(info);
}
return "";
}
private PackageAnnotator.SummaryCoverageInfo getSummaryCoverageForNodeValue(AbstractTreeNode<?> node) {
if (node instanceof CoverageListRootNode) {
return myAnnotator.getPackageCoverageInfo("", myStateBean.myFlattenPackages);
}
final JavaCoverageNode javaNode = (JavaCoverageNode)node;
if (javaNode.isClassCoverage()) {
return myAnnotator.getClassCoverageInfo(javaNode.getQualifiedName());
}
if (javaNode.isPackageCoverage()) {
return myAnnotator.getPackageCoverageInfo(javaNode.getQualifiedName(), myStateBean.myFlattenPackages);
}
final Object value = SlowOperations.allowSlowOperations(() -> javaNode.getValue());
if (value instanceof PsiNamedElement) {
return myAnnotator.getExtensionCoverageInfo((PsiNamedElement)value);
}
return null;
}
@Override
public PsiElement getElementToSelect(Object object) {
PsiElement psiElement = super.getElementToSelect(object);
if (psiElement != null) {
final PsiFile containingFile = psiElement.getContainingFile();
if (containingFile instanceof PsiClassOwner) {
final PsiClass[] classes = ((PsiClassOwner)containingFile).getClasses();
if (classes.length == 1) return classes[0];
for (PsiClass aClass : classes) {
if (PsiTreeUtil.isAncestor(aClass, psiElement, false)) return aClass;
}
}
}
return psiElement;
}
@Override
public VirtualFile getVirtualFile(Object object) {
if (object instanceof PsiPackage) {
final PsiDirectory[] directories = ((PsiPackage)object).getDirectories();
return directories.length > 0 ? directories[0].getVirtualFile() : null;
}
return super.getVirtualFile(object);
}
@Nullable
@Override
public PsiElement getParentElement(PsiElement element) {
if (element instanceof PsiClass) {
final PsiDirectory containingDirectory = element.getContainingFile().getContainingDirectory();
return containingDirectory != null ? JavaDirectoryService.getInstance().getPackage(containingDirectory) : null;
}
return ((PsiPackage)element).getParentPackage();
}
@NotNull
@Override
public AbstractTreeNode<?> createRootNode() {
return new JavaCoverageRootNode(myProject, JavaPsiFacade.getInstance(myProject).findPackage(""), mySuitesBundle, myStateBean);
}
@NotNull
@Override
public List<AbstractTreeNode<?>> createTopLevelNodes() {
final LinkedHashSet<PsiPackage> packages = new LinkedHashSet<>();
final LinkedHashSet<PsiClass> classes = new LinkedHashSet<>();
for (CoverageSuite suite : mySuitesBundle.getSuites()) {
packages.addAll(((JavaCoverageSuite)suite).getCurrentSuitePackages(myProject));
classes.addAll(((JavaCoverageSuite)suite).getCurrentSuiteClasses(myProject));
}
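// Collect every package that is nested under another package in the set, then drop them so only the top-most packages remain as root candidates.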
final Set<PsiPackage> packs = new HashSet<>();
for (PsiPackage aPackage : packages) {
final String qualifiedName = aPackage.getQualifiedName();
for (PsiPackage psiPackage : packages) {
if (psiPackage.getQualifiedName().startsWith(qualifiedName + ".")) {
packs.add(psiPackage);
break;
}
}
}
packages.removeAll(packs);
final List<AbstractTreeNode<?>> topLevelNodes = new ArrayList<>();
final GlobalSearchScope searchScope = mySuitesBundle.getSearchScope(myProject);
for (PsiPackage aPackage : packages) {
processSubPackage(aPackage, topLevelNodes, searchScope);
}
for (PsiClass aClass : classes) {
topLevelNodes.add(new JavaCoverageNode(myProject, aClass, mySuitesBundle, myStateBean));
}
return topLevelNodes;
}
private void collectSubPackages(List<AbstractTreeNode<?>> children,
final PsiPackage rootPackage,
GlobalSearchScope searchScope) {
final PsiPackage[] subPackages = getSubpackages(rootPackage, searchScope);
for (final PsiPackage aPackage : subPackages) {
processSubPackage(aPackage, children, searchScope);
}
}
private void processSubPackage(final PsiPackage aPackage,
List<AbstractTreeNode<?>> children,
GlobalSearchScope searchScope) {
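// A package inside the coverage scope becomes a node of its own; otherwise its sub-packages are scanned. In flatten-packages mode sub-packages are always scanned so each covered package appears as a separate node.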
if (shouldIncludePackage(aPackage, searchScope)) {
final JavaCoverageNode node = new JavaCoverageNode(aPackage.getProject(), aPackage, mySuitesBundle, myStateBean);
children.add(node);
}
else if (!myStateBean.myFlattenPackages) {
collectSubPackages(children, aPackage, searchScope);
}
if (myStateBean.myFlattenPackages) {
collectSubPackages(children, aPackage, searchScope);
}
}
private boolean shouldIncludePackage(PsiPackage aPackage, GlobalSearchScope searchScope) {
return ReadAction.compute(() -> isInCoverageScope(aPackage)
&& (myAnnotator.isLoading() || getPackageCoverageInfo(aPackage) != null)
&& (!myStateBean.myFlattenPackages || aPackage.getClasses(searchScope).length != 0));
}
private boolean shouldIncludeClass(PsiClass aClass) {
return myAnnotator.isLoading() || getClassCoverageInfo(aClass) != null;
}
@Override
public List<AbstractTreeNode<?>> getChildrenNodes(final AbstractTreeNode node) {
List<AbstractTreeNode<?>> children = new ArrayList<>();
if (node instanceof CoverageListNode) {
final Object val = node.getValue();
if (val instanceof PsiClass) return Collections.emptyList();
//append package classes
if (val instanceof PsiPackage) {
final PsiPackage psiPackage = (PsiPackage)val;
final GlobalSearchScope searchScope = mySuitesBundle.getSearchScope(myProject);
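// For a package inside the coverage scope, add its sub-package nodes (unless packages are flattened) and the classes contained in its files; otherwise keep descending to look for covered sub-packages.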
if (ReadAction.compute(() -> isInCoverageScope(psiPackage))) {
if (!myStateBean.myFlattenPackages) {
collectSubPackages(children, psiPackage, searchScope);
}
final PsiFile[] childFiles = getFiles(psiPackage, searchScope);
for (final PsiFile file : childFiles) {
collectFileChildren(file, children);
}
}
else if (!myStateBean.myFlattenPackages) {
collectSubPackages(children, (PsiPackage)val, searchScope);
}
}
if (node instanceof CoverageListRootNode) {
for (CoverageSuite suite : mySuitesBundle.getSuites()) {
final List<PsiClass> classes = ((JavaCoverageSuite)suite).getCurrentSuiteClasses(myProject);
for (PsiClass aClass : classes) {
children.add(new JavaCoverageNode(myProject, aClass, mySuitesBundle, myStateBean));
}
}
}
for (AbstractTreeNode<?> childNode : children) {
childNode.setParent(node);
}
}
return children;
}
private static PsiFile[] getFiles(PsiPackage psiPackage, GlobalSearchScope searchScope) {
return ReadAction.compute(() -> psiPackage.isValid() ? psiPackage.getFiles(searchScope) : PsiFile.EMPTY_ARRAY);
}
private static PsiPackage[] getSubpackages(PsiPackage psiPackage, GlobalSearchScope searchScope) {
return ReadAction.compute(() -> psiPackage.isValid() ? psiPackage.getSubPackages(searchScope) : PsiPackage.EMPTY_ARRAY);
}
protected void collectFileChildren(final PsiFile file, List<? super AbstractTreeNode<?>> children) {
if (file instanceof PsiClassOwner) {
PsiClass[] classes = ReadAction.compute(() -> file.isValid() ? ((PsiClassOwner)file).getClasses() : PsiClass.EMPTY_ARRAY);
for (PsiClass aClass : classes) {
if (shouldIncludeClass(aClass)) {
children.add(new JavaCoverageNode(myProject, aClass, mySuitesBundle, myStateBean));
}
}
}
}
@Nullable
private PackageAnnotator.ClassCoverageInfo getClassCoverageInfo(final PsiClass aClass) {
return myAnnotator.getClassCoverageInfo(ReadAction.compute(() -> aClass.isValid() ? aClass.getQualifiedName() : null));
}
@Nullable
private PackageAnnotator.PackageCoverageInfo getPackageCoverageInfo(final PsiPackage aPackage) {
return ReadAction.compute(() -> myAnnotator.getPackageCoverageInfo(aPackage.getQualifiedName(), myStateBean.myFlattenPackages));
}
@Override
public ColumnInfo[] createColumnInfos() {
ArrayList<ColumnInfo> infos = new ArrayList<>();
infos.add(new ElementColumnInfo());
infos.add(new PercentageCoverageColumnInfo(1, JavaCoverageBundle.message("coverage.view.column.class"), mySuitesBundle, myStateBean));
infos.add(new PercentageCoverageColumnInfo(2, JavaCoverageBundle.message("coverage.view.column.method"), mySuitesBundle, myStateBean));
infos.add(new PercentageCoverageColumnInfo(3, JavaCoverageBundle.message("coverage.view.column.line"), mySuitesBundle, myStateBean));
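// The branch column (index 4) is only added when the coverage runner reports branch information for the effective sampling mode.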
RunConfigurationBase<?> runConfiguration = mySuitesBundle.getRunConfiguration();
if (runConfiguration != null) {
JavaCoverageEnabledConfiguration coverageEnabledConfiguration = JavaCoverageEnabledConfiguration.getFrom(runConfiguration);
if (coverageEnabledConfiguration != null) {
isBranchColumnAvailable(infos, coverageEnabledConfiguration.getCoverageRunner(), coverageEnabledConfiguration.isSampling());
}
}
else {
for (CoverageSuite suite : mySuitesBundle.getSuites()) {
CoverageRunner runner = suite.getRunner();
if (isBranchColumnAvailable(infos, runner, true)) {
break;
}
}
}
return infos.toArray(ColumnInfo.EMPTY_ARRAY);
}
private boolean isBranchColumnAvailable(ArrayList<? super ColumnInfo> infos, CoverageRunner coverageRunner, boolean sampling) {
if (coverageRunner instanceof JavaCoverageRunner && ((JavaCoverageRunner)coverageRunner).isBranchInfoAvailable(sampling)) {
infos.add(new PercentageCoverageColumnInfo(4, JavaCoverageBundle.message("coverage.view.column.branch"), mySuitesBundle, myStateBean));
return true;
}
return false;
}
private boolean isInCoverageScope(PsiElement element) {
if (element instanceof PsiPackage) {
final PsiPackage psiPackage = (PsiPackage)element;
final String qualifiedName = psiPackage.getQualifiedName();
for (CoverageSuite suite : mySuitesBundle.getSuites()) {
if (((JavaCoverageSuite)suite).isPackageFiltered(qualifiedName)) return true;
}
}
return false;
}
@Override
public boolean canSelectInCoverageView(Object object) {
final PsiFile psiFile = object instanceof VirtualFile ? PsiManager.getInstance(myProject).findFile((VirtualFile)object) : null;
if (psiFile instanceof PsiClassOwner) {
final String packageName = ((PsiClassOwner)psiFile).getPackageName();
return isInCoverageScope(JavaPsiFacade.getInstance(myProject).findPackage(packageName));
}
if (object instanceof PsiPackage) {
return isInCoverageScope((PsiElement)object);
}
return false;
}
@Override
public boolean supportFlattenPackages() {
return true;
}
}
|
|
/*
Derby - Class org.apache.derby.impl.sql.catalog.SYSFILESRowFactory
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.catalog;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.db.Database;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.dictionary.CatalogRowFactory;
import org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;
import org.apache.derby.iapi.sql.dictionary.FileInfoDescriptor;
import org.apache.derby.iapi.sql.dictionary.SystemColumn;
import org.apache.derby.iapi.sql.dictionary.TupleDescriptor;
import org.apache.derby.iapi.types.SQLChar;
import org.apache.derby.iapi.types.SQLLongint;
import org.apache.derby.iapi.types.SQLVarchar;
import org.apache.derby.iapi.types.TypeId;
import org.apache.derby.iapi.types.DataValueFactory;
import org.apache.derby.iapi.types.RowLocation;
import org.apache.derby.iapi.sql.execute.ExecIndexRow;
import org.apache.derby.iapi.sql.execute.ExecRow;
import org.apache.derby.iapi.sql.execute.ExecutionContext;
import org.apache.derby.iapi.sql.execute.ExecutionFactory;
import org.apache.derby.iapi.types.DataTypeDescriptor;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.services.uuid.UUIDFactory;
import org.apache.derby.catalog.TypeDescriptor;
import org.apache.derby.catalog.UUID;
import java.sql.Types;
import java.util.Properties;
/**
* Factory for creating a SYSFILES row.
*
*
* @version 0.1
*/
class SYSFILESRowFactory extends CatalogRowFactory
{
private static final String TABLENAME_STRING = "SYSFILES";
private static final int SYSFILES_COLUMN_COUNT = 4;
/* Column #s (1 based) */
private static final int ID_COL_NUM = 1;
private static final String ID_COL_NAME = "FILEID";
private static final int SCHEMA_ID_COL_NUM = 2;
private static final String SCHEMA_ID_COL_NAME = "SCHEMAID";
private static final int NAME_COL_NUM = 3;
private static final String NAME_COL_NAME = "FILENAME";
private static final int GENERATION_ID_COL_NUM = 4;
private static final String GENERATION_ID_COL_NAME = "GENERATIONID";
static final int SYSFILES_INDEX1_ID = 0;
static final int SYSFILES_INDEX2_ID = 1;
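// Column positions of the two indexes declared above: index 1 covers (FILENAME, SCHEMAID), index 2 covers (FILEID).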
private static final int[][] indexColumnPositions =
{
{NAME_COL_NUM, SCHEMA_ID_COL_NUM},
{ID_COL_NUM}
};
private static final boolean[] uniqueness = null;
private static final String[] uuids =
{
"80000000-00d3-e222-873f-000a0a0b1900", // catalog UUID
"80000000-00d3-e222-9920-000a0a0b1900", // heap UUID
"80000000-00d3-e222-a373-000a0a0b1900", // SYSSQLFILES_INDEX1
"80000000-00d3-e222-be7b-000a0a0b1900" // SYSSQLFILES_INDEX2
};
/////////////////////////////////////////////////////////////////////////////
//
// CONSTRUCTORS
//
/////////////////////////////////////////////////////////////////////////////
SYSFILESRowFactory(UUIDFactory uuidf, ExecutionFactory ef, DataValueFactory dvf)
{
super(uuidf,ef,dvf);
initInfo(SYSFILES_COLUMN_COUNT, TABLENAME_STRING,
indexColumnPositions, uniqueness, uuids );
}
/////////////////////////////////////////////////////////////////////////////
//
// METHODS
//
/////////////////////////////////////////////////////////////////////////////
/**
* Make a SYSFILES row
*
* @return Row suitable for inserting into SYSFILES
*
* @exception StandardException thrown on failure
*/
public ExecRow makeRow(TupleDescriptor td, TupleDescriptor parent)
throws StandardException
{
String id_S = null;
String schemaId_S = null;
String SQLname = null;
long generationId = 0;
ExecRow row;
if (td != null)
{
FileInfoDescriptor descriptor = (FileInfoDescriptor)td;
id_S = descriptor.getUUID().toString();
schemaId_S = descriptor.getSchemaDescriptor().getUUID().toString();
SQLname = descriptor.getName();
generationId = descriptor.getGenerationId();
}
/* Build the row to insert */
row = getExecutionFactory().getValueRow(SYSFILES_COLUMN_COUNT);
/* 1st column is ID (UUID - char(36)) */
row.setColumn(ID_COL_NUM, new SQLChar(id_S));
/* 2nd column is SCHEMAID (UUID - char(36)) */
row.setColumn(SCHEMA_ID_COL_NUM, new SQLChar(schemaId_S));
/* 3rd column is NAME (varchar(128)) */
row.setColumn(NAME_COL_NUM, new SQLVarchar(SQLname));
/* 4th column is GENERATIONID (long) */
row.setColumn(GENERATION_ID_COL_NUM, new SQLLongint(generationId));
return row;
}
///////////////////////////////////////////////////////////////////////////
//
// ABSTRACT METHODS TO BE IMPLEMENTED BY CHILDREN OF CatalogRowFactory
//
///////////////////////////////////////////////////////////////////////////
/**
* Make a descriptor out of a SYSFILES row
*
* @param row a row
* @param parentTupleDescriptor Null for this kind of descriptor.
* @param dd dataDictionary
*
* @return a descriptor equivalent to a row
*
* @exception StandardException thrown on failure
*/
public TupleDescriptor buildDescriptor(
ExecRow row,
TupleDescriptor parentTupleDescriptor,
DataDictionary dd )
throws StandardException
{
if (SanityManager.DEBUG)
{
if (row.nColumns() != SYSFILES_COLUMN_COUNT)
{
SanityManager.THROWASSERT("Wrong number of columns for a SYSFILES row: "+
row.nColumns());
}
}
DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
String id_S;
UUID id;
String schemaId_S;
UUID schemaId;
String name;
long generationId;
DataValueDescriptor col;
SchemaDescriptor schemaDescriptor;
FileInfoDescriptor result;
/* 1st column is ID (UUID - char(36)) */
col = row.getColumn(ID_COL_NUM);
id_S = col.getString();
id = getUUIDFactory().recreateUUID(id_S);
/* 2nd column is SchemaId */
col = row.getColumn(SCHEMA_ID_COL_NUM);
schemaId_S = col.getString();
schemaId = getUUIDFactory().recreateUUID(schemaId_S);
schemaDescriptor = dd.getSchemaDescriptor(schemaId, null);
if (SanityManager.DEBUG)
{
if (schemaDescriptor == null)
{
SanityManager.THROWASSERT("Missing schema for FileInfo: "+id_S);
}
}
/* 3rd column is NAME (varchar(128)) */
col = row.getColumn(NAME_COL_NUM);
name = col.getString();
/* 4th column is generationId (long) */
col = row.getColumn(GENERATION_ID_COL_NUM);
generationId = col.getLong();
result = ddg.newFileInfoDescriptor(id,schemaDescriptor,name,
generationId);
return result;
}
/**
* Builds a list of columns suitable for creating this Catalog.
*
*
* @return array of SystemColumn suitable for making this catalog.
*/
public SystemColumn[] buildColumnList()
throws StandardException
{
return new SystemColumn[] {
SystemColumnImpl.getUUIDColumn(ID_COL_NAME, false),
SystemColumnImpl.getUUIDColumn(SCHEMA_ID_COL_NAME, false),
SystemColumnImpl.getIdentifierColumn(NAME_COL_NAME, false),
SystemColumnImpl.getColumn(GENERATION_ID_COL_NAME, Types.BIGINT, false)
};
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.server.rest.profile;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.coord.ClusterCoordinator;
import org.apache.drill.exec.coord.store.TransientStore;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryInfo;
import org.apache.drill.exec.proto.UserBitShared.QueryProfile;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.server.rest.DrillRestServer.UserAuthEnabled;
import org.apache.drill.exec.server.QueryProfileStoreContext;
import org.apache.drill.exec.server.rest.ViewableWithPermissions;
import org.apache.drill.exec.server.rest.auth.DrillUserPrincipal;
import org.apache.drill.exec.store.sys.PersistentStore;
import org.apache.drill.exec.store.sys.PersistentStoreProvider;
import org.apache.drill.exec.work.WorkManager;
import org.apache.drill.exec.work.foreman.Foreman;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.glassfish.jersey.server.mvc.Viewable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.cache.Cache;
import org.apache.drill.shaded.guava.com.google.common.cache.CacheBuilder;
import static org.owasp.encoder.Encode.forHtml;
@Path("/")
@RolesAllowed(DrillUserPrincipal.AUTHENTICATED_ROLE)
public class ProfileResources {
private static final Logger logger = LoggerFactory.getLogger(ProfileResources.class);
@Inject
UserAuthEnabled authEnabled;
@Inject
WorkManager work;
@Inject
DrillUserPrincipal principal;
@Inject
SecurityContext sc;
@Inject
HttpServletRequest request;
public static class ProfileInfo implements Comparable<ProfileInfo> {
private static final int QUERY_SNIPPET_MAX_CHAR = 150;
private static final int QUERY_SNIPPET_MAX_LINES = 8;
public static final SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
private final String queryId;
private final long startTime;
private final long endTime;
private final Date time;
private final String link;
private final String foreman;
private final String query;
private final String state;
private final String user;
private final double totalCost;
private final String queueName;
public ProfileInfo(DrillConfig drillConfig, String queryId, long startTime, long endTime, String foreman, String query,
String state, String user, double totalCost, String queueName) {
this.queryId = queryId;
this.startTime = startTime;
this.endTime = endTime;
this.time = new Date(startTime);
this.foreman = foreman;
this.link = generateLink(drillConfig, foreman, queryId);
this.query = extractQuerySnippet(query);
this.state = state;
this.user = user;
this.totalCost = totalCost;
this.queueName = queueName;
}
public String getUser() { return user; }
public String getQuery() { return query; }
public String getQueryId() { return queryId; }
public String getTime() { return format.format(time); }
public long getStartTime() { return startTime; }
public long getEndTime() { return endTime; }
public String getDuration() {
return (new SimpleDurationFormat(startTime, endTime)).verbose();
}
public String getState() { return state; }
public String getLink() { return link; }
public String getForeman() { return foreman; }
public double getTotalCost() { return totalCost; }
public String getQueueName() { return queueName; }
@Override
public int compareTo(ProfileInfo other) {
return time.compareTo(other.time);
}
/**
* Generates a link that returns the query profile in JSON representation.
*
* @param drillConfig drill configuration
* @param foreman foreman hostname
* @param queryId query id
* @return link
*/
private String generateLink(DrillConfig drillConfig, String foreman, String queryId) {
StringBuilder sb = new StringBuilder();
if (drillConfig.getBoolean(ExecConstants.HTTP_ENABLE_SSL)) {
sb.append("https://");
} else {
sb.append("http://");
}
sb.append(foreman);
sb.append(":");
sb.append(drillConfig.getInt(ExecConstants.HTTP_PORT));
sb.append("/profiles/");
sb.append(queryId);
sb.append(".json");
return sb.toString();
}
/**
* Extracts only the first 150 characters of the query as a snippet.
* If the snippet spans more than 8 lines, the excess lines are truncated for the sake of readability.
* @param queryText full query text
* @return truncated query text
*/
private String extractQuerySnippet(String queryText) {
// Extract up to the max character limit as the snippet
String sizeCappedQuerySnippet = queryText.substring(0, Math.min(queryText.length(), QUERY_SNIPPET_MAX_CHAR));
String[] queryParts = sizeCappedQuerySnippet.split(System.lineSeparator());
//Trimming down based on line-count
if (QUERY_SNIPPET_MAX_LINES < queryParts.length) {
int linesConstructed = 0;
StringBuilder lineCappedQuerySnippet = new StringBuilder();
for (String qPart : queryParts) {
lineCappedQuerySnippet.append(qPart);
if (++linesConstructed < QUERY_SNIPPET_MAX_LINES) {
lineCappedQuerySnippet.append(System.lineSeparator());
} else {
lineCappedQuerySnippet.append(" ... ");
break;
}
}
return lineCappedQuerySnippet.toString();
}
return sizeCappedQuerySnippet;
}
}
protected PersistentStoreProvider getProvider() {
return work.getContext().getStoreProvider();
}
protected ClusterCoordinator getCoordinator() {
return work.getContext().getClusterCoordinator();
}
@XmlRootElement
public class QProfilesBase {
private final List<String> errors;
public QProfilesBase(List<String> errors) {
this.errors = errors;
}
public List<String> getErrors() {
return errors;
}
public int getMaxFetchedQueries() {
return work.getContext().getConfig().getInt(ExecConstants.HTTP_MAX_PROFILES);
}
public String getQueriesPerPage() {
List<Integer> queriesPerPageOptions = work.getContext().getConfig().getIntList(ExecConstants.HTTP_PROFILES_PER_PAGE);
Collections.sort(queriesPerPageOptions);
return Joiner.on(",").join(queriesPerPageOptions);
}
}
@XmlRootElement
public class QProfiles extends QProfilesBase {
private final List<ProfileInfo> runningQueries;
private final List<ProfileInfo> finishedQueries;
public QProfiles(List<ProfileInfo> runningQueries, List<ProfileInfo> finishedQueries, List<String> errors) {
super(errors);
this.runningQueries = runningQueries;
this.finishedQueries = finishedQueries;
}
public List<ProfileInfo> getRunningQueries() {
return runningQueries;
}
public List<ProfileInfo> getFinishedQueries() {
return finishedQueries;
}
}
@XmlRootElement
public class QProfilesRunning extends QProfilesBase {
private final List<ProfileInfo> runningQueries;
public QProfilesRunning(List<ProfileInfo> runningQueries, List<String> errors) {
super(errors);
this.runningQueries = runningQueries;
}
public List<ProfileInfo> getRunningQueries() {
return runningQueries;
}
}
@XmlRootElement
public class QProfilesCompleted extends QProfilesBase {
private final List<ProfileInfo> finishedQueries;
public QProfilesCompleted(List<ProfileInfo> finishedQueries, List<String> errors) {
super(errors);
this.finishedQueries = finishedQueries;
}
public List<ProfileInfo> getFinishedQueries() {
return finishedQueries;
}
}
// Name of the query parameter used to cap the number of profiles listed
private static final String MAX_QPROFILES_PARAM = "max";
private static final Cache<String, String> PROFILE_CACHE = CacheBuilder
.newBuilder().expireAfterAccess(1, TimeUnit.MINUTES).build();
@GET
@Path("/profiles.json")
@Produces(MediaType.APPLICATION_JSON)
public Response getProfilesJSON(@Context UriInfo uriInfo) {
QProfilesRunning runningResults = (QProfilesRunning) getRunningProfilesJSON(uriInfo).getEntity();
QProfilesCompleted completedResults = (QProfilesCompleted) getCompletedProfilesJSON(uriInfo).getEntity();
final List<String> totalErrors = Lists.newArrayList();
totalErrors.addAll(runningResults.getErrors());
totalErrors.addAll(completedResults.getErrors());
QProfiles finalResults = new QProfiles(runningResults.runningQueries, completedResults.finishedQueries, totalErrors);
return totalErrors.size() == 0
? Response.ok().entity(finalResults).build()
: Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(finalResults)
.build();
}
@GET
@Path("/profiles/json")
@Produces(MediaType.APPLICATION_JSON)
public Response getSpecificJSON(@Context UriInfo uriInfo, @QueryParam("status") String status) {
switch (status) {
case "running":
return getRunningProfilesJSON(uriInfo);
case "completed":
return getCompletedProfilesJSON(uriInfo);
case "all":
default:
return getProfilesJSON(uriInfo);
}
}
@GET
@Path("/profiles/running.json")
@Produces(MediaType.APPLICATION_JSON)
public Response getRunningProfilesJSON(@Context UriInfo uriInfo) {
try {
final QueryProfileStoreContext profileStoreContext = work.getContext().getProfileStoreContext();
final TransientStore<QueryInfo> running = profileStoreContext.getRunningProfileStore();
final List<String> errors = Lists.newArrayList();
final List<ProfileInfo> runningQueries = Lists.newArrayList();
final Iterator<Map.Entry<String, QueryInfo>> runningEntries = running.entries();
while (runningEntries.hasNext()) {
try {
final Map.Entry<String, QueryInfo> runningEntry = runningEntries.next();
final QueryInfo profile = runningEntry.getValue();
if (principal.canManageProfileOf(profile.getUser())) {
runningQueries.add(
new ProfileInfo(work.getContext().getConfig(),
runningEntry.getKey(), profile.getStart(),
System.currentTimeMillis(), profile.getForeman().getAddress(),
profile.getQuery(),
ProfileUtil.getQueryStateDisplayName(profile.getState()),
profile.getUser(), profile.getTotalCost(),
profile.getQueueName()));
}
} catch (Exception e) {
errors.add(e.getMessage());
logger.error("Error getting running query info.", e);
}
}
Collections.sort(runningQueries, Collections.reverseOrder());
QProfilesRunning rProf = new QProfilesRunning(runningQueries, errors);
return errors.size() == 0
? Response.ok().entity(rProf).build()
: Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(rProf)
.build();
} catch (Exception e) {
throw UserException.resourceError(e).message("Failed to get running profiles from ephemeral store.").build(logger);
}
}
@GET
@Path("/profiles/completed.json")
@Produces(MediaType.APPLICATION_JSON)
public Response getCompletedProfilesJSON(@Context UriInfo uriInfo) {
try {
final QueryProfileStoreContext profileStoreContext = work.getContext().getProfileStoreContext();
final PersistentStore<QueryProfile> completed = profileStoreContext.getCompletedProfileStore();
final List<String> errors = Lists.newArrayList();
final List<ProfileInfo> finishedQueries = Lists.newArrayList();
// Determine the number of profiles to load
int maxProfilesToLoad = work.getContext().getConfig().getInt(ExecConstants.HTTP_MAX_PROFILES);
String maxProfilesParams = uriInfo.getQueryParameters().getFirst(MAX_QPROFILES_PARAM);
if (maxProfilesParams != null && !maxProfilesParams.isEmpty()) {
maxProfilesToLoad = Integer.valueOf(maxProfilesParams);
}
final Iterator<Map.Entry<String, QueryProfile>> range = completed.getRange(0, maxProfilesToLoad);
while (range.hasNext()) {
try {
final Map.Entry<String, QueryProfile> profileEntry = range.next();
final QueryProfile profile = profileEntry.getValue();
if (principal.canManageProfileOf(profile.getUser())) {
finishedQueries.add(
new ProfileInfo(work.getContext().getConfig(),
profileEntry.getKey(), profile.getStart(), profile.getEnd(),
profile.getForeman().getAddress(), profile.getQuery(),
ProfileUtil.getQueryStateDisplayName(profile.getState()),
profile.getUser(), profile.getTotalCost(), profile.getQueueName()));
}
} catch (Exception e) {
errors.add(e.getMessage());
logger.error("Error getting finished query profile.", e);
}
}
Collections.sort(finishedQueries, Collections.reverseOrder());
QProfilesCompleted cProf = new QProfilesCompleted(finishedQueries, errors);
return errors.size() == 0
? Response.ok().entity(cProf).build()
: Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(cProf)
.build();
} catch (Exception e) {
throw UserException.resourceError(e).message("Failed to get completed profiles from persistent store.").build(logger);
}
}
@GET
@Path("/profiles")
@Produces(MediaType.TEXT_HTML)
public Viewable getProfiles(@Context UriInfo uriInfo) {
QProfiles profiles = (QProfiles) getProfilesJSON(uriInfo).getEntity();
return ViewableWithPermissions.create(authEnabled.get(), "/rest/profile/list.ftl", sc, profiles);
}
private QueryProfile getQueryProfile(String queryId) {
QueryId id = QueryIdHelper.getQueryIdFromString(queryId);
// first check local running
Foreman f = work.getBee().getForemanForQueryId(id);
if (f != null) {
QueryProfile queryProfile = f.getQueryManager().getQueryProfile();
checkOrThrowProfileViewAuthorization(queryProfile);
return queryProfile;
}
// then check remote running
try {
final TransientStore<QueryInfo> running = work.getContext().getProfileStoreContext().getRunningProfileStore();
final QueryInfo info = running.get(queryId);
if (info != null) {
QueryProfile queryProfile = work.getContext()
.getController()
.getTunnel(info.getForeman())
.requestQueryProfile(id)
.checkedGet(2, TimeUnit.SECONDS);
checkOrThrowProfileViewAuthorization(queryProfile);
return queryProfile;
}
} catch (Exception e) {
logger.trace("Failed to find query as running profile.", e);
}
// then check blob store
try {
final PersistentStore<QueryProfile> profiles = work.getContext().getProfileStoreContext().getCompletedProfileStore();
final QueryProfile queryProfile = profiles.get(queryId);
if (queryProfile != null) {
checkOrThrowProfileViewAuthorization(queryProfile);
return queryProfile;
}
} catch (final Exception e) {
throw new DrillRuntimeException("error while retrieving profile", e);
}
throw UserException.validationError()
.message("No profile with given query id '%s' exists. Please verify the query id.", queryId)
.build(logger);
}
@GET
@Path("/profiles/{queryid}.json")
@Produces(MediaType.APPLICATION_JSON)
public Response getProfileJSON(@PathParam("queryid") String queryId) {
try {
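// Profiles uploaded via /profiles/view are cached; serve the cached copy exactly once and evict it, otherwise serialize the profile straight from the store.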
String profileData = PROFILE_CACHE.getIfPresent(queryId);
if (profileData == null) {
profileData = new String(work.getContext().getProfileStoreContext()
.getProfileStoreConfig().getSerializer().serialize(getQueryProfile(queryId)));
} else {
PROFILE_CACHE.invalidate(queryId);
}
return Response.ok().entity(profileData).build();
} catch (Exception e) {
logger.debug("Failed to serialize profile for: " + queryId);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity("{ 'message' : 'error (unable to serialize profile)' }")
.build();
}
}
@GET
@Path("/profiles/{queryid}")
@Produces(MediaType.TEXT_HTML)
public Viewable getProfile(@PathParam("queryid") String queryId){
try {
ProfileWrapper wrapper = new ProfileWrapper(getQueryProfile(queryId), work.getContext().getConfig(), request);
return ViewableWithPermissions.create(authEnabled.get(), "/rest/profile/profile.ftl", sc, wrapper);
} catch (Exception | Error e) {
logger.error("Exception was thrown when fetching profile {} :\n{}", queryId, e);
return ViewableWithPermissions.create(authEnabled.get(), "/rest/errorMessage.ftl", sc, e);
}
}
@POST
@Path("/profiles/view")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.TEXT_HTML)
public Viewable viewProfile(@FormDataParam("profileData") String content) {
try {
QueryProfile profile = work.getContext().getProfileStoreContext()
.getProfileStoreConfig().getSerializer().deserialize(content.getBytes());
PROFILE_CACHE.put(profile.getQueryId(), content);
ProfileWrapper wrapper = new ProfileWrapper(profile,
work.getContext().getConfig(), request);
return ViewableWithPermissions.create(authEnabled.get(),
"/rest/profile/profile.ftl", sc, wrapper);
} catch (Exception | Error e) {
logger.error("Exception was thrown when parsing profile {} :\n{}",
content, e);
return ViewableWithPermissions.create(authEnabled.get(),
"/rest/errorMessage.ftl", sc, e);
}
}
@GET
@Path("/profiles/cancel/{queryid}")
@Produces(MediaType.TEXT_PLAIN)
public String cancelQuery(@PathParam("queryid") String queryId) {
QueryId id = QueryIdHelper.getQueryIdFromString(queryId);
// Prevent XSS
String encodedQueryID = forHtml(queryId);
// first check local running
if (work.getBee().cancelForeman(id, principal)) {
return String.format("Cancelled query %s on locally running node.", encodedQueryID);
}
// then check remote running
try {
final TransientStore<QueryInfo> running = work.getContext().getProfileStoreContext().getRunningProfileStore();
final QueryInfo info = running.get(queryId);
checkOrThrowQueryCancelAuthorization(info.getUser(), queryId);
Ack a = work.getContext().getController().getTunnel(info.getForeman()).requestCancelQuery(id).checkedGet(2, TimeUnit.SECONDS);
if (a.getOk()) {
return String.format("Query %s canceled on node %s.", encodedQueryID, info.getForeman().getAddress());
} else {
return String.format("Attempted to cancel query %s on %s but the query is no longer active on that node.", encodedQueryID, info.getForeman().getAddress());
}
} catch (Exception e) {
logger.debug("Failure to find query as running profile.", e);
return String.format("Failure attempting to cancel query %s. Unable to find information about where query is actively running.", encodedQueryID);
}
}
private void checkOrThrowProfileViewAuthorization(final QueryProfile profile) {
if (!principal.canManageProfileOf(profile.getUser())) {
throw UserException.permissionError()
.message("Not authorized to view the profile of query '%s'", profile.getId())
.build(logger);
}
}
private void checkOrThrowQueryCancelAuthorization(final String queryUser, final String queryId) {
if (!principal.canManageQueryOf(queryUser)) {
throw UserException.permissionError()
.message("Not authorized to cancel the query '%s'", queryId)
.build(logger);
}
}
}
|
|
/**
* Generated by Agitar build: AgitarOne Version 6.0.0.000018 (Build date: Sep 06, 2013) [6.0.0.000018]
* JDK Version: 1.6.0_14
*
* Generated on 2014-3-11 13:26:53
* Time to generate: 01:18.328 seconds
*
*
* agitar.src.md5=com.huawei.ism.openapi.nas.cifsauthclient.CIFSShareAuthClientHandlerImp:e2289452b9250154a532d356ef22349a
* agitar.src.md5=com.huawei.ism.openapi.common.DefaultCommHandler:b6802204ee7c3e439b341a5d028a1348
*
*/
package com.huawei.ism.openapi.nas.cifsauthclient;
import com.agitar.lib.junit.AgitarTestCase;
import com.agitar.lib.mockingbird.Mockingbird;
import com.huawei.ism.openapi.common.commu.rest.RestManager;
import com.huawei.ism.openapi.common.commu.rest.RestRequestHandler;
import com.huawei.ism.openapi.common.exception.ApiException;
import com.huawei.ism.openapi.common.exception.ApiMessage;
import com.huawei.ism.openapi.common.keydeifines.ConstantsDefine;
import com.huawei.ism.openapi.common.keydeifines.EnumDefine;
import com.huawei.ism.openapi.common.logmanager.LogUtil;
import com.huawei.ism.openapi.common.model.MO;
import com.huawei.ism.openapi.common.model.MOType;
import com.huawei.ism.openapi.common.utils.OpenApiUtils;
import java.net.URI;
import java.util.HashMap;
import org.json.JSONException;
import org.json.JSONObject;
public class CIFSShareAuthClientHandlerImpAgitarTest extends AgitarTestCase {
public Class getTargetClass() {
return CIFSShareAuthClientHandlerImp.class;
}
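// The *WithAggressiveMocks tests below record expected interactions on Mockingbird proxies and then replay them in test mode.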
public void testConstructor() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
public void testCreateWithAggressiveMocks() throws Throwable {
storeStaticField(EnumDefine.LOG_TYPE_E.class, "LOG_INFO");
storeStaticField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT");
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = (CIFSShareAuthClientHandlerImp) Mockingbird.getProxyObject(CIFSShareAuthClientHandlerImp.class, true);
CIFSShareAuthClientMO cIFSShareAuthClientMO = (CIFSShareAuthClientMO) Mockingbird.getProxyObject(CIFSShareAuthClientMO.class);
MO mO = (MO) Mockingbird.getProxyObject(MO.class);
OpenApiUtils openApiUtils = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
MOType mOType = (MOType) Mockingbird.getProxyObject(MOType.class);
OpenApiUtils openApiUtils2 = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
LogUtil logUtil = (LogUtil) Mockingbird.getProxyObject(LogUtil.class);
setPrivateField(cIFSShareAuthClientHandlerImp, "deviceID", "");
setPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler", null);
setPrivateField(cIFSShareAuthClientMO, "mo", mO);
Mockingbird.enterRecordingMode();
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils);
setPrivateField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT", mOType);
setPrivateField(mOType, "value", new Integer(0));
setPrivateField(mO, "properties", null);
setPrivateField(EnumDefine.LOG_TYPE_E.class, "LOG_INFO", null);
Mockingbird.setReturnValue(false, openApiUtils, "composeRelativeUri", "(java.lang.String[])java.lang.String", "", 1);
JSONObject jSONObject = (JSONObject) Mockingbird.getProxyObject(JSONObject.class);
Mockingbird.replaceObjectForRecording(JSONObject.class, "<init>()", jSONObject);
Mockingbird.setReturnValue(jSONObject.put("TYPE", 0), null);
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils2);
Mockingbird.setReturnValue(false, openApiUtils2, "composeFromMap", "(org.json.JSONObject,java.util.Map)void", null, 1);
Mockingbird.setReturnValue(LogUtil.getLogUtilInstance(), logUtil);
Mockingbird.setReturnValue(jSONObject.toString(), "");
Mockingbird.setReturnValue(false, Mockingbird.getProxyObject(StringBuilder.class), "toString", "()java.lang.String", "", 1);
Mockingbird.setReturnValue(false, logUtil, "showLog", "(com.huawei.ism.openapi.common.keydeifines.EnumDefine$LOG_TYPE_E,java.lang.String)void", null, 1);
RestManager restManager = (RestManager) Mockingbird.getProxyObject(RestManager.class);
Mockingbird.replaceObjectForRecording(RestManager.class, "<init>(java.lang.Class,com.huawei.ism.openapi.common.commu.rest.RestRequestHandler,java.lang.String,java.util.Map,org.json.JSONObject)", restManager);
Mockingbird.setReturnValue(restManager.getPostRequestMO(), null);
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
CIFSShareAuthClientMO result = cIFSShareAuthClientHandlerImp.create(cIFSShareAuthClientMO);
assertNull("result", result);
}
public void testDeleteWithAggressiveMocks() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = (CIFSShareAuthClientHandlerImp) Mockingbird.getProxyObject(CIFSShareAuthClientHandlerImp.class, true);
OpenApiUtils openApiUtils = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
setPrivateField(cIFSShareAuthClientHandlerImp, "deviceID", "");
setPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler", null);
Mockingbird.enterRecordingMode();
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils);
Mockingbird.setReturnValue(false, openApiUtils, "composeRelativeUri", "(java.lang.String[])java.lang.String", "", 1);
Mockingbird.replaceObjectForRecording(HashMap.class, "<init>()", Mockingbird.getProxyObject(HashMap.class));
RestManager restManager = (RestManager) Mockingbird.getProxyObject(RestManager.class);
Mockingbird.replaceObjectForRecording(RestManager.class, "<init>(java.lang.Class,com.huawei.ism.openapi.common.commu.rest.RestRequestHandler,java.lang.String,java.util.Map,org.json.JSONObject)", restManager);
Mockingbird.setReturnValue(restManager.getDelRequestMO(), null);
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
cIFSShareAuthClientHandlerImp.delete("");
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertNull("cIFSShareAuthClientHandlerImp.restRequestHandler", getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
public void testGetBatch() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100), "testCIFSShareAuthClientHandlerImpDeviceID");
CIFSShareAuthClientQuery cond = new CIFSShareAuthClientQuery(new CIFSShareAuthClientMO(new JSONObject()), 100L, 1000L);
CIFSShareAuthClientIterator batch = cIFSShareAuthClientHandlerImp.getBatch(cond);
CIFSShareAuthClientIterator result = cIFSShareAuthClientHandlerImp.getBatch(cond);
assertSame("result", batch, result);
}
public void testGetBatch1() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100), "testCIFSShareAuthClientHandlerImpDeviceID");
CIFSShareAuthClientQuery cond = new CIFSShareAuthClientQuery(new CIFSShareAuthClientMO(new JSONObject()), 100L, 1000L);
CIFSShareAuthClientIterator result = cIFSShareAuthClientHandlerImp.getBatch(cond);
assertSame("cIFSShareAuthClientHandlerImp.authClientIterator", result, getPrivateField(cIFSShareAuthClientHandlerImp, "authClientIterator"));
assertNull("result.getConcretFilterHeaders()", result.getConcretFilterHeaders());
}
public void testModifyWithAggressiveMocks() throws Throwable {
storeStaticField(EnumDefine.LOG_TYPE_E.class, "LOG_INFO");
storeStaticField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT");
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = (CIFSShareAuthClientHandlerImp) Mockingbird.getProxyObject(CIFSShareAuthClientHandlerImp.class, true);
CIFSShareAuthClientMO cIFSShareAuthClientMO = (CIFSShareAuthClientMO) Mockingbird.getProxyObject(CIFSShareAuthClientMO.class);
MO mO = (MO) Mockingbird.getProxyObject(MO.class);
OpenApiUtils openApiUtils = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
MOType mOType = (MOType) Mockingbird.getProxyObject(MOType.class);
OpenApiUtils openApiUtils2 = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
LogUtil logUtil = (LogUtil) Mockingbird.getProxyObject(LogUtil.class);
setPrivateField(cIFSShareAuthClientHandlerImp, "deviceID", "");
setPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler", null);
setPrivateField(cIFSShareAuthClientMO, "mo", mO);
Mockingbird.enterRecordingMode();
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils);
setPrivateField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT", mOType);
setPrivateField(mOType, "value", new Integer(0));
setPrivateField(mO, "properties", null);
setPrivateField(EnumDefine.LOG_TYPE_E.class, "LOG_INFO", null);
Mockingbird.setReturnValue(cIFSShareAuthClientMO.getId(), "");
Mockingbird.setReturnValue(false, openApiUtils, "composeRelativeUri", "(java.lang.String[])java.lang.String", "", 1);
JSONObject jSONObject = (JSONObject) Mockingbird.getProxyObject(JSONObject.class);
Mockingbird.replaceObjectForRecording(JSONObject.class, "<init>()", jSONObject);
Mockingbird.setReturnValue(jSONObject.put("TYPE", 0), null);
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils2);
Mockingbird.setReturnValue(false, openApiUtils2, "composeFromMap", "(org.json.JSONObject,java.util.Map)void", null, 1);
Mockingbird.setReturnValue(LogUtil.getLogUtilInstance(), logUtil);
Mockingbird.setReturnValue(jSONObject.toString(), "");
Mockingbird.setReturnValue(false, Mockingbird.getProxyObject(StringBuilder.class), "toString", "()java.lang.String", "", 1);
Mockingbird.setReturnValue(false, logUtil, "showLog", "(com.huawei.ism.openapi.common.keydeifines.EnumDefine$LOG_TYPE_E,java.lang.String)void", null, 1);
RestManager restManager = (RestManager) Mockingbird.getProxyObject(RestManager.class);
Mockingbird.replaceObjectForRecording(RestManager.class, "<init>(java.lang.Class,com.huawei.ism.openapi.common.commu.rest.RestRequestHandler,java.lang.String,java.util.Map,org.json.JSONObject)", restManager);
Mockingbird.setReturnValue(restManager.getPutRequestMO(), null);
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
cIFSShareAuthClientHandlerImp.modify(cIFSShareAuthClientMO);
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertNull("cIFSShareAuthClientHandlerImp.restRequestHandler", getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
public void testCreateThrowsApiException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(null, "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.create(new CIFSShareAuthClientMO());
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("ex.getMessage()", "the restRequestHandler is null", ex.getMessage());
assertThrownBy(RestManager.class, ex);
assertEquals("ex.getErrMessage().getErrorDescription()", "the restRequestHandler is null", ex.getErrMessage().getErrorDescription());
assertNull("ex.getException()", ex.getException());
}
}
public void testCreateThrowsApiExceptionWithAggressiveMocks() throws Throwable {
storeStaticField(ConstantsDefine.ExceptionConstant.class, "JASONEXPMSG");
storeStaticField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT");
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = (CIFSShareAuthClientHandlerImp) Mockingbird.getProxyObject(CIFSShareAuthClientHandlerImp.class, true);
OpenApiUtils openApiUtils = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
MOType mOType = (MOType) Mockingbird.getProxyObject(MOType.class);
JSONException jSONException = (JSONException) Mockingbird.getProxyObject(JSONException.class);
ApiMessage apiMessage = (ApiMessage) Mockingbird.getProxyObject(ApiMessage.class);
setPrivateField(cIFSShareAuthClientHandlerImp, "deviceID", "");
Mockingbird.enterRecordingMode();
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils);
setPrivateField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT", mOType);
setPrivateField(mOType, "value", new Integer(0));
setPrivateField(ConstantsDefine.ExceptionConstant.class, "JASONEXPMSG", apiMessage);
Mockingbird.setReturnValue(false, openApiUtils, "composeRelativeUri", "(java.lang.String[])java.lang.String", "", 1);
JSONObject jSONObject = (JSONObject) Mockingbird.getProxyObject(JSONObject.class);
Mockingbird.replaceObjectForRecording(JSONObject.class, "<init>()", jSONObject);
Mockingbird.setException(jSONObject.put("TYPE", 0), jSONException);
Mockingbird.setReturnValue(jSONException.getMessage(), "");
Mockingbird.replaceObjectForRecording(ApiException.class, "<init>(com.huawei.ism.openapi.common.exception.ApiMessage,java.lang.Exception)", Mockingbird.getProxyObject(ApiException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.create(null);
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertNull("cIFSShareAuthClientHandlerImp.restRequestHandler", getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testCreateThrowsIllegalArgumentException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100), "testCIFSShareAuthClientHandlerImp\rDeviceID");
try {
cIFSShareAuthClientHandlerImp.create(null);
fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException ex) {
assertNull("ex.getMessage()", ex.getMessage());
assertThrownBy(URI.class, ex);
}
}
public void testCreateThrowsNullPointerException() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
CIFSShareAuthClientMO mo = new CIFSShareAuthClientMO();
Mockingbird.enterRecordingMode();
Object postRequestMO = ((RestManager) Mockingbird.getProxyObject(RestManager.class)).getPostRequestMO();
Mockingbird.setException(true, postRequestMO, (Throwable) Mockingbird.getProxyObject(NullPointerException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.create(mo);
fail("Expected NullPointerException to be thrown");
} catch (NullPointerException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testCreateThrowsNullPointerException1() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
Mockingbird.enterRecordingMode();
Object postRequestMO = ((RestManager) Mockingbird.getProxyObject(RestManager.class)).getPostRequestMO();
Mockingbird.setException(true, postRequestMO, (Throwable) Mockingbird.getProxyObject(NullPointerException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.create(null);
fail("Expected NullPointerException to be thrown");
} catch (NullPointerException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testDeleteThrowsApiException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(null, "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.delete("testCIFSShareAuthClientHandlerImpId");
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("ex.getMessage()", "the restRequestHandler is null", ex.getMessage());
assertThrownBy(RestManager.class, ex);
assertEquals("ex.getErrMessage().getErrorDescription()", "the restRequestHandler is null", ex.getErrMessage().getErrorDescription());
assertNull("ex.getException()", ex.getException());
}
}
public void testDeleteThrowsIllegalArgumentException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100), "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.delete("testCIFSShareAuthClientHandlerImp\rId");
fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException ex) {
assertNull("ex.getMessage()", ex.getMessage());
assertThrownBy(URI.class, ex);
}
}
public void testDeleteThrowsNullPointerException() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
Mockingbird.enterRecordingMode();
Mockingbird.setException(true, ((RestManager) Mockingbird.getProxyObject(RestManager.class)).getDelRequestMO(), (Throwable) Mockingbird.getProxyObject(NullPointerException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.delete("testCIFSShareAuthClientHandlerImpId");
fail("Expected NullPointerException to be thrown");
} catch (NullPointerException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testGetThrowsApiException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(null, "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.get("testCIFSShareAuthClientHandlerImpId");
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("ex.getMessage()", "the restRequestHandler is null", ex.getMessage());
assertThrownBy(RestManager.class, ex);
assertEquals("ex.getErrMessage().getErrorDescription()", "the restRequestHandler is null", ex.getErrMessage().getErrorDescription());
assertNull("ex.getException()", ex.getException());
}
}
public void testGetThrowsIllegalArgumentException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100), "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.get("testCIFSShareAuthClientHandlerImp\rId");
fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException ex) {
assertNull("ex.getMessage()", ex.getMessage());
assertThrownBy(URI.class, ex);
}
}
public void testGetThrowsNullPointerException() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
Mockingbird.enterRecordingMode();
Mockingbird.setException(true, ((RestManager) Mockingbird.getProxyObject(RestManager.class)).getGetRequestMO(), (Throwable) Mockingbird.getProxyObject(NullPointerException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.get("testCIFSShareAuthClientHandlerImpId");
fail("Expected NullPointerException to be thrown");
} catch (NullPointerException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testModifyThrowsApiException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(null, "testCIFSShareAuthClientHandlerImpDeviceID");
CIFSShareAuthClientMO mo = new CIFSShareAuthClientMO();
try {
cIFSShareAuthClientHandlerImp.modify(mo);
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("ex.getMessage()", "the restRequestHandler is null", ex.getMessage());
assertThrownBy(RestManager.class, ex);
assertEquals("ex.getErrMessage().getErrorDescription()", "the restRequestHandler is null", ex.getErrMessage().getErrorDescription());
assertNull("ex.getException()", ex.getException());
assertNull("mo.getAccessId()", mo.getAccessId());
}
}
public void testModifyThrowsApiExceptionWithAggressiveMocks() throws Throwable {
storeStaticField(ConstantsDefine.ExceptionConstant.class, "JASONEXPMSG");
storeStaticField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT");
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = (CIFSShareAuthClientHandlerImp) Mockingbird.getProxyObject(CIFSShareAuthClientHandlerImp.class, true);
CIFSShareAuthClientMO cIFSShareAuthClientMO = (CIFSShareAuthClientMO) Mockingbird.getProxyObject(CIFSShareAuthClientMO.class);
OpenApiUtils openApiUtils = (OpenApiUtils) Mockingbird.getProxyObject(OpenApiUtils.class);
MOType mOType = (MOType) Mockingbird.getProxyObject(MOType.class);
JSONException jSONException = (JSONException) Mockingbird.getProxyObject(JSONException.class);
ApiMessage apiMessage = (ApiMessage) Mockingbird.getProxyObject(ApiMessage.class);
setPrivateField(cIFSShareAuthClientHandlerImp, "deviceID", "");
Mockingbird.enterRecordingMode();
Mockingbird.setReturnValue(OpenApiUtils.getOpenApiUtilsInstance(), openApiUtils);
setPrivateField(MOType.class, "SNAS_CIFS_SHARE_AUTH_CLIENT", mOType);
setPrivateField(mOType, "value", new Integer(0));
setPrivateField(ConstantsDefine.ExceptionConstant.class, "JASONEXPMSG", apiMessage);
Mockingbird.setReturnValue(cIFSShareAuthClientMO.getId(), "");
Mockingbird.setReturnValue(false, openApiUtils, "composeRelativeUri", "(java.lang.String[])java.lang.String", "", 1);
JSONObject jSONObject = (JSONObject) Mockingbird.getProxyObject(JSONObject.class);
Mockingbird.replaceObjectForRecording(JSONObject.class, "<init>()", jSONObject);
Mockingbird.setException(jSONObject.put("TYPE", 0), jSONException);
Mockingbird.setReturnValue(jSONException.getMessage(), "");
Mockingbird.replaceObjectForRecording(ApiException.class, "<init>(com.huawei.ism.openapi.common.exception.ApiMessage,java.lang.Exception)", Mockingbird.getProxyObject(ApiException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.modify(cIFSShareAuthClientMO);
fail("Expected ApiException to be thrown");
} catch (ApiException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertNull("cIFSShareAuthClientHandlerImp.restRequestHandler", getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
public void testModifyThrowsIllegalArgumentException() throws Throwable {
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(new RestRequestHandler("testCIFSShareAuthClientHandlerImp\rIp", 100), "testCIFSShareAuthClientHandlerImpDeviceID");
try {
cIFSShareAuthClientHandlerImp.modify(null);
fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException ex) {
assertNull("ex.getMessage()", ex.getMessage());
assertThrownBy(URI.class, ex);
}
}
public void testModifyThrowsNullPointerException() throws Throwable {
RestRequestHandler restRequestHandler = new RestRequestHandler("testCIFSShareAuthClientHandlerImpIp", 100);
CIFSShareAuthClientHandlerImp cIFSShareAuthClientHandlerImp = new CIFSShareAuthClientHandlerImp(restRequestHandler, "testCIFSShareAuthClientHandlerImpDeviceID");
Mockingbird.enterRecordingMode();
Mockingbird.setException(true, ((RestManager) Mockingbird.getProxyObject(RestManager.class)).getPutRequestMO(), (Throwable) Mockingbird.getProxyObject(NullPointerException.class));
Mockingbird.enterTestMode(CIFSShareAuthClientHandlerImp.class);
try {
cIFSShareAuthClientHandlerImp.modify(null);
fail("Expected NullPointerException to be thrown");
} catch (NullPointerException ex) {
assertEquals("cIFSShareAuthClientHandlerImp.deviceID", "testCIFSShareAuthClientHandlerImpDeviceID", getPrivateField(cIFSShareAuthClientHandlerImp, "deviceID"));
assertSame("cIFSShareAuthClientHandlerImp.restRequestHandler", restRequestHandler, getPrivateField(cIFSShareAuthClientHandlerImp, "restRequestHandler"));
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.net;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.NoRouteToHostException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.net.ConnectException;
import java.nio.channels.SocketChannel;
import java.util.Map.Entry;
import java.util.regex.Pattern;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import javax.net.SocketFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.net.util.SubnetUtils;
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.VersionedProtocol;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.common.base.Preconditions;
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Unstable
public class NetUtils {
private static final Log LOG = LogFactory.getLog(NetUtils.class);
private static Map<String, String> hostToResolved =
new HashMap<String, String>();
/** text to point users elsewhere: {@value} */
private static final String FOR_MORE_DETAILS_SEE
= " For more details see: ";
/** text included in wrapped exceptions if the host is null: {@value} */
public static final String UNKNOWN_HOST = "(unknown)";
/** Base URL of the Hadoop Wiki: {@value} */
public static final String HADOOP_WIKI = "http://wiki.apache.org/hadoop/";
/**
* Get the socket factory for the given class according to its
* configuration parameter
* <tt>hadoop.rpc.socket.factory.class.&lt;ClassName&gt;</tt>. When no
* such parameter exists then fall back on the default socket factory as
* configured by <tt>hadoop.rpc.socket.factory.class.default</tt>. If
* this default socket factory is not configured, then fall back on the JVM
* default socket factory.
*
* @param conf the configuration
* @param clazz the class (usually a {@link VersionedProtocol})
* @return a socket factory
*/
public static SocketFactory getSocketFactory(Configuration conf,
Class<?> clazz) {
SocketFactory factory = null;
String propValue =
conf.get("hadoop.rpc.socket.factory.class." + clazz.getSimpleName());
if ((propValue != null) && (propValue.length() > 0))
factory = getSocketFactoryFromProperty(conf, propValue);
if (factory == null)
factory = getDefaultSocketFactory(conf);
return factory;
}
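// Illustrative usage sketch (added for clarity; not part of the original API).
// Shows how an RPC client would obtain a per-protocol socket factory. The
// protocol class and factory class name below are example choices only; any
// SocketFactory configured under
// "hadoop.rpc.socket.factory.class.<ClassName>" would be returned instead.
private static SocketFactory exampleSocketFactoryLookup(Configuration conf) {
conf.set("hadoop.rpc.socket.factory.class.VersionedProtocol",
"org.apache.hadoop.net.StandardSocketFactory");
return getSocketFactory(conf, VersionedProtocol.class);
}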
/**
* Get the default socket factory as specified by the configuration
* parameter <tt>hadoop.rpc.socket.factory.class.default</tt>
*
* @param conf the configuration
* @return the default socket factory as specified in the configuration or
* the JVM default socket factory if the configuration does not
* contain a default socket factory property.
*/
public static SocketFactory getDefaultSocketFactory(Configuration conf) {
String propValue = conf.get(
CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT);
if ((propValue == null) || (propValue.length() == 0))
return SocketFactory.getDefault();
return getSocketFactoryFromProperty(conf, propValue);
}
/**
* Get the socket factory whose class name is given by the property value.
* The class is loaded through the configuration's class loader and
* instantiated reflectively; a RuntimeException is raised if the class
* cannot be found.
*
* @param propValue the property which is the class name of the
* SocketFactory to instantiate; assumed non null and non empty.
* @return a socket factory as defined in the property value.
*/
public static SocketFactory getSocketFactoryFromProperty(
Configuration conf, String propValue) {
try {
Class<?> theClass = conf.getClassByName(propValue);
return (SocketFactory) ReflectionUtils.newInstance(theClass, conf);
} catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Socket Factory class not found: " + cnfe);
}
}
/**
* Util method to build a socket addr from either:
* {@code <host>:<port>}
* {@code <fs>://<host>:<port>/<path>}
*/
public static InetSocketAddress createSocketAddr(String target) {
return createSocketAddr(target, -1);
}
/**
* Util method to build a socket addr from either:
* {@code <host>}
* {@code <host>:<port>}
* {@code <fs>://<host>:<port>/<path>}
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort) {
return createSocketAddr(target, defaultPort, null);
}
/**
* Create an InetSocketAddress from the given target string and
* default port. If the string cannot be parsed correctly, the
* <code>configName</code> parameter is used as part of the
* exception message, allowing the user to better diagnose
* the misconfiguration.
*
* @param target a string of either "host" or "host:port"
* @param defaultPort the default port if <code>target</code> does not
* include a port number
* @param configName the name of the configuration from which
* <code>target</code> was loaded. This is used in the
* exception message in the case that parsing fails.
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort,
String configName) {
String helpText = "";
if (configName != null) {
helpText = " (configuration property '" + configName + "')";
}
if (target == null) {
throw new IllegalArgumentException("Target address cannot be null." +
helpText);
}
boolean hasScheme = target.contains("://");
URI uri = null;
try {
uri = hasScheme ? URI.create(target) : URI.create("dummyscheme://"+target);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
"Does not contain a valid host:port authority: " + target + helpText
);
}
String host = uri.getHost();
int port = uri.getPort();
if (port == -1) {
port = defaultPort;
}
String path = uri.getPath();
if ((host == null) || (port < 0) ||
(!hasScheme && path != null && !path.isEmpty()))
{
throw new IllegalArgumentException(
"Does not contain a valid host:port authority: " + target + helpText
);
}
return createSocketAddrForHost(host, port);
}
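// Illustrative usage sketch (added for clarity; not part of the original API):
// parse a "host:port" value read from configuration, falling back to a default
// port when the value omits one. The key name and address are placeholders,
// not real Hadoop configuration properties.
private static InetSocketAddress exampleParseAddress(Configuration conf) {
String target = conf.get("example.rpc.address", "namenode.example.com:8020");
return createSocketAddr(target, 8020, "example.rpc.address");
}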
/**
* Create a socket address with the given host and port. The hostname
* might be replaced with another host that was set via
* {@link #addStaticResolution(String, String)}. The value of
* hadoop.security.token.service.use_ip will determine whether the
* standard java host resolver is used, or if the fully qualified resolver
* is used.
* @param host the hostname or IP use to instantiate the object
* @param port the port number
* @return InetSocketAddress
*/
public static InetSocketAddress createSocketAddrForHost(String host, int port) {
String staticHost = getStaticResolution(host);
String resolveHost = (staticHost != null) ? staticHost : host;
InetSocketAddress addr;
try {
InetAddress iaddr = SecurityUtil.getByName(resolveHost);
// if there is a static entry for the host, make the returned
// address look like the original given host
if (staticHost != null) {
iaddr = InetAddress.getByAddress(host, iaddr.getAddress());
}
addr = new InetSocketAddress(iaddr, port);
} catch (UnknownHostException e) {
addr = InetSocketAddress.createUnresolved(host, port);
}
return addr;
}
/**
* Resolve the uri's hostname and add the default port if not in the uri
* @param uri to resolve
* @param defaultPort if none is given
* @return URI
*/
public static URI getCanonicalUri(URI uri, int defaultPort) {
// skip if there is no authority, ie. "file" scheme or relative uri
String host = uri.getHost();
if (host == null) {
return uri;
}
String fqHost = canonicalizeHost(host);
int port = uri.getPort();
// short out if already canonical with a port
if (host.equals(fqHost) && port != -1) {
return uri;
}
// reconstruct the uri with the canonical host and port
try {
uri = new URI(uri.getScheme(), uri.getUserInfo(),
fqHost, (port == -1) ? defaultPort : port,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
return uri;
}
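// Illustrative usage sketch (added for clarity; not part of the original API):
// a URI without a port gets the default port filled in and its host
// canonicalized; an already-canonical URI is returned unchanged. The host name
// and port are example values.
private static URI exampleCanonicalUri() {
URI uri = URI.create("hdfs://nn1/user/data");
return getCanonicalUri(uri, 8020); // e.g. hdfs://nn1.example.com:8020/user/data
}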
// cache the canonicalized hostnames; the cache currently isn't expired,
// but the canonicals will only change if the host's resolver configuration
// changes
private static final ConcurrentHashMap<String, String> canonicalizedHostCache =
new ConcurrentHashMap<String, String>();
private static String canonicalizeHost(String host) {
// check if the host has already been canonicalized
String fqHost = canonicalizedHostCache.get(host);
if (fqHost == null) {
try {
fqHost = SecurityUtil.getByName(host).getHostName();
// slight race condition, but won't hurt
canonicalizedHostCache.put(host, fqHost);
} catch (UnknownHostException e) {
fqHost = host;
}
}
return fqHost;
}
/**
* Adds a static resolution for a host, mapping a fake hostname to a well
* known host. For example, some test cases need daemons with different
* hostnames running on the same machine; to create connections to those
* daemons, one can map each of the fake hostnames to "localhost".
* {@link NetUtils#getStaticResolution(String)} can be used to query for
* the actual hostname.
* @param host
* @param resolvedName
*/
public static void addStaticResolution(String host, String resolvedName) {
synchronized (hostToResolved) {
hostToResolved.put(host, resolvedName);
}
}
/**
* Retrieves the resolved name for the passed host. The resolved name must
* have been set earlier using
* {@link NetUtils#addStaticResolution(String, String)}
* @param host
* @return the resolution
*/
public static String getStaticResolution(String host) {
synchronized (hostToResolved) {
return hostToResolved.get(host);
}
}
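// Illustrative usage sketch (added for clarity; not part of the original API):
// the test-only pattern described above, mapping a fake hostname to
// "localhost" so that addresses built for it point at the local machine. The
// hostname and port are arbitrary example values.
private static InetSocketAddress exampleStaticResolution() {
addStaticResolution("fake-datanode-1", "localhost");
return createSocketAddrForHost("fake-datanode-1", 50010);
}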
/**
* This is used to get all the resolutions that were added using
* {@link NetUtils#addStaticResolution(String, String)}. The return
* value is a List each element of which contains an array of String
* of the form String[0]=hostname, String[1]=resolved-hostname
* @return the list of resolutions
*/
public static List <String[]> getAllStaticResolutions() {
synchronized (hostToResolved) {
Set <Entry <String, String>>entries = hostToResolved.entrySet();
if (entries.size() == 0) {
return null;
}
List <String[]> l = new ArrayList<String[]>(entries.size());
for (Entry<String, String> e : entries) {
l.add(new String[] {e.getKey(), e.getValue()});
}
return l;
}
}
/**
* Returns InetSocketAddress that a client can use to
* connect to the server. Server.getListenerAddress() is not correct when
* the server binds to "0.0.0.0". This returns "hostname:port" of the server,
* or "127.0.0.1:port" when the getListenerAddress() returns "0.0.0.0:port".
*
* @param server
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(Server server) {
return getConnectAddress(server.getListenerAddress());
}
/**
* Returns an InetSocketAddress that a client can use to connect to the
* given listening address.
*
* @param addr of a listener
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
if (!addr.isUnresolved() && addr.getAddress().isAnyLocalAddress()) {
try {
addr = new InetSocketAddress(InetAddress.getLocalHost(), addr.getPort());
} catch (UnknownHostException uhe) {
// shouldn't get here unless the host doesn't have a loopback iface
addr = createSocketAddrForHost("127.0.0.1", addr.getPort());
}
}
return addr;
}
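// Illustrative usage sketch (added for clarity; not part of the original API):
// a listener bound to the wildcard address ("0.0.0.0") is rewritten to a
// concrete local address a client can connect to. The port is an example value.
private static InetSocketAddress exampleConnectAddress() {
return getConnectAddress(new InetSocketAddress(8020));
}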
/**
* Same as <code>getInputStream(socket, socket.getSoTimeout()).</code>
* <br><br>
*
* @see #getInputStream(Socket, long)
*/
public static SocketInputWrapper getInputStream(Socket socket)
throws IOException {
return getInputStream(socket, socket.getSoTimeout());
}
/**
* Return a {@link SocketInputWrapper} for the socket and set the given
* timeout. If the socket does not have an associated channel, then its socket
* timeout will be set to the specified value. Otherwise, a
* {@link SocketInputStream} will be created which reads with the configured
* timeout.
*
* Any socket created using socket factories returned by {@link NetUtils}
* must use this interface instead of {@link Socket#getInputStream()}.
*
* In general, this should be called only once on each socket: see the note
* in {@link SocketInputWrapper#setTimeout(long)} for more information.
*
* @see Socket#getChannel()
*
* @param socket
* @param timeout timeout in milliseconds. zero for waiting as
* long as necessary.
* @return SocketInputWrapper for reading from the socket.
* @throws IOException
*/
public static SocketInputWrapper getInputStream(Socket socket, long timeout)
throws IOException {
InputStream stm = (socket.getChannel() == null) ?
socket.getInputStream() : new SocketInputStream(socket);
SocketInputWrapper w = new SocketInputWrapper(socket, stm);
w.setTimeout(timeout);
return w;
}
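// Illustrative usage sketch (added for clarity; not part of the original API):
// wrap the socket's input side with a read timeout. As noted above, this is
// normally done once per socket; the 30 second value is only an example.
private static SocketInputWrapper exampleInputWithTimeout(Socket socket)
throws IOException {
return getInputStream(socket, 30000L);
}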
/**
* Same as getOutputStream(socket, 0). Timeout of zero implies write will
* wait until data is available.<br><br>
*
* From documentation for {@link #getOutputStream(Socket, long)} : <br>
* Returns OutputStream for the socket. If the socket has an associated
* SocketChannel then it returns a
* {@link SocketOutputStream} with the given timeout. If the socket does not
* have a channel, {@link Socket#getOutputStream()} is returned. In the later
* case, the timeout argument is ignored and the write will wait until
* data is available.<br><br>
*
* Any socket created using socket factories returned by {@link NetUtils},
* must use this interface instead of {@link Socket#getOutputStream()}.
*
* @see #getOutputStream(Socket, long)
*
* @param socket
* @return OutputStream for writing to the socket.
* @throws IOException
*/
public static OutputStream getOutputStream(Socket socket)
throws IOException {
return getOutputStream(socket, 0);
}
/**
* Returns OutputStream for the socket. If the socket has an associated
* SocketChannel then it returns a
* {@link SocketOutputStream} with the given timeout. If the socket does not
* have a channel, {@link Socket#getOutputStream()} is returned. In the latter
* case, the timeout argument is ignored and the write will wait until
* data is available.<br><br>
*
* Any socket created using socket factories returned by {@link NetUtils},
* must use this interface instead of {@link Socket#getOutputStream()}.
*
* @see Socket#getChannel()
*
* @param socket
* @param timeout timeout in milliseconds. This may not always apply. zero
* for waiting as long as necessary.
* @return OutputStream for writing to the socket.
* @throws IOException
*/
public static OutputStream getOutputStream(Socket socket, long timeout)
throws IOException {
return (socket.getChannel() == null) ?
socket.getOutputStream() : new SocketOutputStream(socket, timeout);
}
/**
* This is a drop-in replacement for
* {@link Socket#connect(SocketAddress, int)}.
* In the case of normal sockets that don't have associated channels, this
* just invokes <code>socket.connect(endpoint, timeout)</code>. If
* <code>socket.getChannel()</code> returns a non-null channel,
* connect is implemented using Hadoop's selectors. This is done mainly
* to avoid Sun's connect implementation from creating thread-local
* selectors, since Hadoop does not have control on when these are closed
* and could end up taking all the available file descriptors.
*
* @see java.net.Socket#connect(java.net.SocketAddress, int)
*
* @param socket
* @param address the remote address
* @param timeout timeout in milliseconds
*/
public static void connect(Socket socket,
SocketAddress address,
int timeout) throws IOException {
connect(socket, address, null, timeout);
}
/**
* Like {@link NetUtils#connect(Socket, SocketAddress, int)} but
* also takes a local address and port to bind the socket to.
*
* @param socket
* @param endpoint the remote address
* @param localAddr the local address to bind the socket to
* @param timeout timeout in milliseconds
*/
public static void connect(Socket socket,
SocketAddress endpoint,
SocketAddress localAddr,
int timeout) throws IOException {
if (socket == null || endpoint == null || timeout < 0) {
throw new IllegalArgumentException("Illegal argument for connect()");
}
SocketChannel ch = socket.getChannel();
if (localAddr != null) {
Class<?> localClass = localAddr.getClass();
Class<?> remoteClass = endpoint.getClass();
Preconditions.checkArgument(localClass.equals(remoteClass),
"Local address %s must be of same family as remote address %s.",
localAddr, endpoint);
socket.bind(localAddr);
}
try {
if (ch == null) {
// let the default implementation handle it.
socket.connect(endpoint, timeout);
} else {
SocketIOWithTimeout.connect(ch, endpoint, timeout);
}
} catch (SocketTimeoutException ste) {
throw new ConnectTimeoutException(ste.getMessage());
}
// There is a very rare case allowed by the TCP specification, such that
// if we are trying to connect to an endpoint on the local machine,
// and we end up choosing an ephemeral port equal to the destination port,
// we will actually end up getting connected to ourself (ie any data we
// send just comes right back). This is only possible if the target
// daemon is down, so we'll treat it like connection refused.
if (socket.getLocalPort() == socket.getPort() &&
socket.getLocalAddress().equals(socket.getInetAddress())) {
LOG.info("Detected a loopback TCP socket, disconnecting it");
socket.close();
throw new ConnectException(
"Localhost targeted connection resulted in a loopback. " +
"No daemon is listening on the target port.");
}
}
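// Illustrative usage sketch (added for clarity; not part of the original API):
// the factory + connect pattern, creating an unconnected socket from the
// configured factory and connecting with a timeout. The host, port and the
// 10 second timeout are placeholder values.
private static Socket exampleConnectWithTimeout(Configuration conf)
throws IOException {
Socket socket = getDefaultSocketFactory(conf).createSocket();
connect(socket, new InetSocketAddress("datanode.example.com", 50010), 10000);
return socket;
}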
/**
* Given a string representation of a host, return its ip address
* in textual presentation.
*
* @param name a string representation of a host:
* either a textual representation of its IP address or its host name
* @return its IP address in the string format
*/
public static String normalizeHostName(String name) {
try {
return InetAddress.getByName(name).getHostAddress();
} catch (UnknownHostException e) {
return name;
}
}
/**
* Given a collection of string representation of hosts, return a list of
* corresponding IP addresses in the textual representation.
*
* @param names a collection of string representations of hosts
* @return a list of corresponding IP addresses in the string format
* @see #normalizeHostName(String)
*/
public static List<String> normalizeHostNames(Collection<String> names) {
List<String> hostNames = new ArrayList<String>(names.size());
for (String name : names) {
hostNames.add(normalizeHostName(name));
}
return hostNames;
}
/**
* Performs a sanity check on the list of hostnames/IPs to verify they at least
* appear to be valid.
* @param names - List of hostnames/IPs
* @throws UnknownHostException
*/
public static void verifyHostnames(String[] names) throws UnknownHostException {
for (String name: names) {
if (name == null) {
throw new UnknownHostException("null hostname found");
}
// The first check supports URL formats (e.g. hdfs://, etc.).
// java.net.URI requires a schema, so we add a dummy one if it doesn't
// have one already.
URI uri = null;
try {
uri = new URI(name);
if (uri.getHost() == null) {
uri = new URI("http://" + name);
}
} catch (URISyntaxException e) {
uri = null;
}
if (uri == null || uri.getHost() == null) {
throw new UnknownHostException(name + " is not a valid Inet address");
}
}
}
private static final Pattern ipPortPattern = // Pattern for matching ip[:port]
Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d+)?");
/**
* Attempt to obtain the host name of the given string which contains
* an IP address and an optional port.
*
* @param ipPort string of form ip[:port]
* @return Host name or null if the name can not be determined
*/
public static String getHostNameOfIP(String ipPort) {
if (null == ipPort || !ipPortPattern.matcher(ipPort).matches()) {
return null;
}
try {
int colonIdx = ipPort.indexOf(':');
String ip = (-1 == colonIdx) ? ipPort
: ipPort.substring(0, ipPort.indexOf(':'));
return InetAddress.getByName(ip).getHostName();
} catch (UnknownHostException e) {
return null;
}
}
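// Illustrative usage sketch (added for clarity; not part of the original API):
// an "ip" or "ip:port" string resolves to a host name, while any other input
// (host names, malformed strings) yields null. The IP is an arbitrary example.
private static String exampleHostNameOfIP() {
return getHostNameOfIP("10.0.0.1:50010");
}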
/**
* Return hostname without throwing exception.
* @return hostname
*/
public static String getHostname() {
try {return "" + InetAddress.getLocalHost();}
catch(UnknownHostException uhe) {return "" + uhe;}
}
/**
* Compose a "host:port" string from the address.
*/
public static String getHostPortString(InetSocketAddress addr) {
return addr.getHostName() + ":" + addr.getPort();
}
/**
* Checks if {@code host} is a local host name and return {@link InetAddress}
* corresponding to that address.
*
* @param host the specified host
* @return a valid local {@link InetAddress} or null
* @throws SocketException if an I/O error occurs
*/
public static InetAddress getLocalInetAddress(String host)
throws SocketException {
if (host == null) {
return null;
}
InetAddress addr = null;
try {
addr = SecurityUtil.getByName(host);
if (NetworkInterface.getByInetAddress(addr) == null) {
addr = null; // Not a local address
}
} catch (UnknownHostException ignore) { }
return addr;
}
/**
* Given an InetAddress, checks to see if the address is a local address, by
* comparing the address with all the interfaces on the node.
* @param addr address to check if it is local node's address
* @return true if the address corresponds to the local node
*/
public static boolean isLocalAddress(InetAddress addr) {
// Check if the address is any local or loop back
boolean local = addr.isAnyLocalAddress() || addr.isLoopbackAddress();
// Check if the address is defined on any interface
if (!local) {
try {
local = NetworkInterface.getByInetAddress(addr) != null;
} catch (SocketException e) {
local = false;
}
}
return local;
}
/**
* Take an IOException, together with the local and remote host and port details, and
* return an IOException with the input exception as the cause and also
* include the host details. The new exception provides the stack trace of the
* place where the exception is thrown and some extra diagnostics information.
* If the exception is BindException or ConnectException or
* UnknownHostException or SocketTimeoutException, return a new one of the
* same type; Otherwise return an IOException.
*
* @param destHost target host (nullable)
* @param destPort target port
* @param localHost local host (nullable)
* @param localPort local port
* @param exception the caught exception.
* @return an exception to throw
*/
public static IOException wrapException(final String destHost,
final int destPort,
final String localHost,
final int localPort,
final IOException exception) {
if (exception instanceof BindException) {
return new BindException(
"Problem binding to ["
+ localHost
+ ":"
+ localPort
+ "] "
+ exception
+ ";"
+ see("BindException"));
} else if (exception instanceof ConnectException) {
// connection refused; include the host:port in the error
return wrapWithMessage(exception,
"Call From "
+ localHost
+ " to "
+ destHost
+ ":"
+ destPort
+ " failed on connection exception: "
+ exception
+ ";"
+ see("ConnectionRefused"));
} else if (exception instanceof UnknownHostException) {
return wrapWithMessage(exception,
"Invalid host name: "
+ getHostDetailsAsString(destHost, destPort, localHost)
+ exception
+ ";"
+ see("UnknownHost"));
} else if (exception instanceof SocketTimeoutException) {
return wrapWithMessage(exception,
"Call From "
+ localHost + " to " + destHost + ":" + destPort
+ " failed on socket timeout exception: " + exception
+ ";"
+ see("SocketTimeout"));
} else if (exception instanceof NoRouteToHostException) {
return wrapWithMessage(exception,
"No Route to Host from "
+ localHost + " to " + destHost + ":" + destPort
+ " failed on socket timeout exception: " + exception
+ ";"
+ see("NoRouteToHost"));
}
else {
return (IOException) new IOException("Failed on local exception: "
+ exception
+ "; Host Details : "
+ getHostDetailsAsString(destHost, destPort, localHost))
.initCause(exception);
}
}
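// Illustrative usage sketch (added for clarity; not part of the original API):
// the intended call pattern, where an RPC client catches a low-level
// IOException and rethrows it enriched with both endpoints. The host names and
// ports are placeholders.
private static IOException exampleWrapException(IOException cause) {
return wrapException("namenode.example.com", 8020,
"client.example.com", 0, cause);
}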
private static String see(final String entry) {
return FOR_MORE_DETAILS_SEE + HADOOP_WIKI + entry;
}
@SuppressWarnings("unchecked")
private static <T extends IOException> T wrapWithMessage(
T exception, String msg) {
Class<? extends Throwable> clazz = exception.getClass();
try {
Constructor<? extends Throwable> ctor = clazz.getConstructor(String.class);
Throwable t = ctor.newInstance(msg);
return (T)(t.initCause(exception));
} catch (Throwable e) {
LOG.warn("Unable to wrap exception of type " +
clazz + ": it has no (String) constructor", e);
return exception;
}
}
/**
* Get the host details as a string
* @param destHost destination host (nullable)
* @param destPort destination port
* @param localHost local host (nullable)
* @return a string describing the destination host:port and the local host
*/
private static String getHostDetailsAsString(final String destHost,
final int destPort,
final String localHost) {
StringBuilder hostDetails = new StringBuilder(27);
hostDetails.append("local host is: ")
.append(quoteHost(localHost))
.append("; ");
hostDetails.append("destination host is: ").append(quoteHost(destHost))
.append(":")
.append(destPort).append("; ");
return hostDetails.toString();
}
/**
* Quote a hostname if it is not null
* @param hostname the hostname; nullable
* @return a quoted hostname or {@link #UNKNOWN_HOST} if the hostname is null
*/
private static String quoteHost(final String hostname) {
return (hostname != null) ?
("\"" + hostname + "\"")
: UNKNOWN_HOST;
}
/**
* @return true if the given string is a subnet specified
* using CIDR notation, false otherwise
*/
public static boolean isValidSubnet(String subnet) {
try {
new SubnetUtils(subnet);
return true;
} catch (IllegalArgumentException iae) {
return false;
}
}
/**
* Add all addresses associated with the given nif in the
* given subnet to the given list.
*/
private static void addMatchingAddrs(NetworkInterface nif,
SubnetInfo subnetInfo, List<InetAddress> addrs) {
Enumeration<InetAddress> ifAddrs = nif.getInetAddresses();
while (ifAddrs.hasMoreElements()) {
InetAddress ifAddr = ifAddrs.nextElement();
if (subnetInfo.isInRange(ifAddr.getHostAddress())) {
addrs.add(ifAddr);
}
}
}
/**
* Return an InetAddress for each interface that matches the
* given subnet specified using CIDR notation.
*
* @param subnet subnet specified using CIDR notation
* @param returnSubinterfaces
* whether to return IPs associated with subinterfaces
* @throws IllegalArgumentException if subnet is invalid
*/
public static List<InetAddress> getIPs(String subnet,
boolean returnSubinterfaces) {
List<InetAddress> addrs = new ArrayList<InetAddress>();
SubnetInfo subnetInfo = new SubnetUtils(subnet).getInfo();
Enumeration<NetworkInterface> nifs;
try {
nifs = NetworkInterface.getNetworkInterfaces();
} catch (SocketException e) {
LOG.error("Unable to get host interfaces", e);
return addrs;
}
while (nifs.hasMoreElements()) {
NetworkInterface nif = nifs.nextElement();
// NB: adding addresses even if the nif is not up
addMatchingAddrs(nif, subnetInfo, addrs);
if (!returnSubinterfaces) {
continue;
}
Enumeration<NetworkInterface> subNifs = nif.getSubInterfaces();
while (subNifs.hasMoreElements()) {
addMatchingAddrs(subNifs.nextElement(), subnetInfo, addrs);
}
}
return addrs;
}
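// Illustrative usage sketch (added for clarity; not part of the original API):
// validate a CIDR string before asking for the local addresses that fall
// inside it, including addresses bound to sub-interfaces. The subnet string is
// expected in CIDR notation, e.g. "192.168.0.0/24".
private static List<InetAddress> exampleAddressesInSubnet(String cidr) {
if (!isValidSubnet(cidr)) {
return new ArrayList<InetAddress>();
}
return getIPs(cidr, true);
}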
/**
* Return a free port number. There is no guarantee it will remain free, so
* it should be used immediately.
*
* @return a free port for binding a local socket
*/
public static int getFreeSocketPort() {
int port = 0;
try {
ServerSocket s = new ServerSocket(0);
port = s.getLocalPort();
s.close();
return port;
} catch (IOException e) {
// Could not get a free port. Return default port 0.
}
return port;
}
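// Illustrative usage sketch (added for clarity; not part of the original API):
// probe a free port for a test server and bind to it immediately, since the
// port is only guaranteed free at the instant it was probed.
private static ServerSocket exampleBindToFreePort() throws IOException {
return new ServerSocket(getFreeSocketPort());
}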
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileEditor.impl.text.TextEditorPsiDataProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerListener;
import com.intellij.openapi.util.*;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.IncorrectOperationException;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
final class TestEditorManagerImpl extends FileEditorManagerEx implements Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.idea.test.TestEditorManagerImpl");
private final TestEditorSplitter myTestEditorSplitter = new TestEditorSplitter();
private final Project myProject;
private int counter = 0;
private final Map<VirtualFile, Editor> myVirtualFile2Editor = new HashMap<>();
private VirtualFile myActiveFile;
private static final LightVirtualFile LIGHT_VIRTUAL_FILE = new LightVirtualFile("Dummy.java");
public TestEditorManagerImpl(@NotNull Project project) {
myProject = project;
registerExtraEditorDataProvider(new TextEditorPsiDataProvider(), null);
project.getMessageBus().connect().subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
@Override
public void projectClosed(Project project) {
if (project == myProject) {
closeAllFiles();
}
}
});
VirtualFileManager.getInstance().addVirtualFileListener(new VirtualFileListener() {
@Override
public void beforeFileDeletion(@NotNull VirtualFileEvent event) {
for (VirtualFile file : getOpenFiles()) {
if (VfsUtilCore.isAncestor(event.getFile(), file, false)) {
closeFile(file);
}
}
}
}, myProject);
}
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull final VirtualFile file,
final boolean focusEditor,
boolean searchForSplitter) {
final Ref<Pair<FileEditor[], FileEditorProvider[]>> result = new Ref<>();
CommandProcessor.getInstance().executeCommand(myProject, () -> result.set(openFileImpl3(file, focusEditor)), "", null);
return result.get();
}
private Pair<FileEditor[], FileEditorProvider[]> openFileImpl3(final VirtualFile file, boolean focusEditor) {
// for non-text editors (UML, etc.)
final FileEditorProvider provider = file.getUserData(FileEditorProvider.KEY);
if (provider != null && provider.accept(getProject(), file)) {
return Pair.create(new FileEditor[]{provider.createEditor(getProject(), file)}, new FileEditorProvider[]{provider});
}
//text editor
Editor editor = openTextEditor(new OpenFileDescriptor(myProject, file), focusEditor);
assert editor != null;
final FileEditor fileEditor = TextEditorProvider.getInstance().getTextEditor(editor);
final FileEditorProvider fileEditorProvider = getProvider();
Pair<FileEditor[], FileEditorProvider[]> result = Pair.create(new FileEditor[]{fileEditor}, new FileEditorProvider[]{fileEditorProvider});
modifyTabWell(() -> myTestEditorSplitter.openAndFocusTab(file, fileEditor, fileEditorProvider));
return result;
}
private void modifyTabWell(Runnable tabWellModification) {
if (myProject.isDisposed()) return;
FileEditor lastFocusedEditor = myTestEditorSplitter.getFocusedFileEditor();
VirtualFile lastFocusedFile = myTestEditorSplitter.getFocusedFile();
FileEditorProvider oldProvider = myTestEditorSplitter.getProviderFromFocused();
tabWellModification.run();
FileEditor currentlyFocusedEditor = myTestEditorSplitter.getFocusedFileEditor();
VirtualFile currentlyFocusedFile = myTestEditorSplitter.getFocusedFile();
FileEditorProvider newProvider = myTestEditorSplitter.getProviderFromFocused();
final FileEditorManagerEvent event =
new FileEditorManagerEvent(this, lastFocusedFile, lastFocusedEditor, oldProvider, currentlyFocusedFile, currentlyFocusedEditor, newProvider);
final FileEditorManagerListener publisher = getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER);
notifyPublisher(() -> publisher.selectionChanged(event));
}
@NotNull
@Override
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
boolean focusEditor,
@NotNull EditorWindow window) {
return openFileWithProviders(file, focusEditor, false);
}
@Override
public boolean isInsideChange() {
return false;
}
@NotNull
@Override
public ActionCallback notifyPublisher(@NotNull Runnable runnable) {
runnable.run();
return ActionCallback.DONE;
}
@Override
public EditorsSplitters getSplittersFor(Component c) {
return null;
}
@Override
public void createSplitter(int orientation, EditorWindow window) {
String containerName = createNewTabbedContainerName();
myTestEditorSplitter.setActiveTabGroup(containerName);
}
private String createNewTabbedContainerName() {
counter++;
return "SplitTabContainer" + ((Object) counter).toString();
}
@Override
public void changeSplitterOrientation() {
}
@Override
public void flipTabs() {
}
@Override
public boolean tabsMode() {
return false;
}
@Override
public boolean isInSplitter() {
return false;
}
@Override
public boolean hasOpenedFile() {
return false;
}
@Override
public VirtualFile getCurrentFile() {
return myActiveFile;
}
@Override
public Pair<FileEditor, FileEditorProvider> getSelectedEditorWithProvider(@NotNull VirtualFile file) {
return null;
}
@Override
public boolean isChanged(@NotNull EditorComposite editor) {
return false;
}
@Override
public EditorWindow getNextWindow(@NotNull EditorWindow window) {
return null;
}
@Override
public EditorWindow getPrevWindow(@NotNull EditorWindow window) {
return null;
}
@Override
public void addTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
}
@Override
public void removeTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
}
@Override
public void addBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
}
@Override
public void removeBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
}
@Override
public void closeAllFiles() {
for (VirtualFile file : getOpenFiles()) {
closeFile(file);
}
}
private static FileEditorProvider getProvider() {
return new FileEditorProvider() {
@Override
public boolean accept(@NotNull Project project, @NotNull VirtualFile file) {
return false;
}
@Override
@NotNull
public FileEditor createEditor(@NotNull Project project, @NotNull VirtualFile file) {
throw new IncorrectOperationException();
}
@Override
public void disposeEditor(@NotNull FileEditor editor) {
}
@Override
@NotNull
public FileEditorState readState(@NotNull Element sourceElement, @NotNull Project project, @NotNull VirtualFile file) {
throw new IncorrectOperationException();
}
@Override
@NotNull
public String getEditorTypeId() {
return "";
}
@Override
@NotNull
public FileEditorPolicy getPolicy() {
throw new IncorrectOperationException();
}
};
}
@Override
public EditorWindow getCurrentWindow() {
return null;
}
@NotNull
@Override
public AsyncResult<EditorWindow> getActiveWindow() {
return AsyncResult.done(null);
}
@Override
public void setCurrentWindow(EditorWindow window) {
}
@Override
public VirtualFile getFile(@NotNull FileEditor editor) {
return LIGHT_VIRTUAL_FILE;
}
@Override
public void updateFilePresentation(@NotNull VirtualFile file) {
}
@Override
public void unsplitWindow() {
}
@Override
public void unsplitAllWindow() {
}
@Override
@NotNull
public EditorWindow[] getWindows() {
return new EditorWindow[0];
}
@Override
public FileEditor getSelectedEditor(@NotNull VirtualFile file) {
final Editor editor = getEditor(file);
return editor == null ? null : TextEditorProvider.getInstance().getTextEditor(editor);
}
@Override
public boolean isFileOpen(@NotNull VirtualFile file) {
return getEditor(file) != null;
}
@Override
@NotNull
public FileEditor[] getEditors(@NotNull VirtualFile file) {
FileEditor e = getSelectedEditor(file);
if (e == null) return new FileEditor[0];
return new FileEditor[] {e};
}
@NotNull
@Override
public FileEditor[] getAllEditors(@NotNull VirtualFile file) {
return getEditors(file);
}
@Override
@NotNull
public VirtualFile[] getSiblings(@NotNull VirtualFile file) {
throw new UnsupportedOperationException();
}
@Override
public void dispose() {
closeAllFiles();
}
@Override
public void closeFile(@NotNull final VirtualFile file) {
Editor editor = myVirtualFile2Editor.remove(file);
if (editor != null){
TextEditorProvider editorProvider = TextEditorProvider.getInstance();
editorProvider.disposeEditor(editorProvider.getTextEditor(editor));
EditorFactory.getInstance().releaseEditor(editor);
}
if (Comparing.equal(file, myActiveFile)) {
myActiveFile = null;
}
modifyTabWell(() -> myTestEditorSplitter.closeFile(file));
}
@Override
public void closeFile(@NotNull VirtualFile file, @NotNull EditorWindow window) {
closeFile(file);
}
@Override
@NotNull
public VirtualFile[] getSelectedFiles() {
return myActiveFile == null ? VirtualFile.EMPTY_ARRAY : new VirtualFile[]{myActiveFile};
}
@Override
@NotNull
public FileEditor[] getSelectedEditors() {
return new FileEditor[0];
}
@Override
public Editor getSelectedTextEditor() {
return myActiveFile != null ? getEditor(myActiveFile) : null;
}
@Override
public JComponent getComponent() {
return new JLabel();
}
@Override
@NotNull
public VirtualFile[] getOpenFiles() {
return VfsUtilCore.toVirtualFileArray(myVirtualFile2Editor.keySet());
}
public Editor getEditor(VirtualFile file) {
return myVirtualFile2Editor.get(file);
}
@Override
@NotNull
public FileEditor[] getAllEditors() {
FileEditor[] result = new FileEditor[myVirtualFile2Editor.size()];
int i = 0;
for (Map.Entry<VirtualFile, Editor> entry : myVirtualFile2Editor.entrySet()) {
TextEditor textEditor = TextEditorProvider.getInstance().getTextEditor(entry.getValue());
result[i++] = textEditor;
}
return result;
}
@Override
public void showEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
}
@Override
public void removeEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
}
@Override
public Editor openTextEditor(@NotNull OpenFileDescriptor descriptor, boolean focusEditor) {
final VirtualFile file = descriptor.getFile();
Editor editor = myVirtualFile2Editor.get(file);
if (editor == null) {
PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
LOG.assertTrue(psiFile != null, file);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(psiFile);
LOG.assertTrue(document != null, psiFile);
editor = EditorFactory.getInstance().createEditor(document, myProject);
final EditorHighlighter highlighter = HighlighterFactory.createHighlighter(myProject, file);
((EditorEx) editor).setHighlighter(highlighter);
((EditorEx) editor).setFile(file);
myVirtualFile2Editor.put(file, editor);
}
if (descriptor.getOffset() >= 0){
editor.getCaretModel().moveToOffset(descriptor.getOffset());
}
else if (descriptor.getLine() >= 0 && descriptor.getColumn() >= 0){
editor.getCaretModel().moveToLogicalPosition(new LogicalPosition(descriptor.getLine(), descriptor.getColumn()));
}
editor.getSelectionModel().removeSelection();
myActiveFile = file;
return editor;
}
@Override
public void addFileEditorManagerListener(@NotNull FileEditorManagerListener listener) {
}
@Override
public void addFileEditorManagerListener(@NotNull FileEditorManagerListener listener, @NotNull Disposable parentDisposable) {
}
@Override
public void removeFileEditorManagerListener(@NotNull FileEditorManagerListener listener) {
}
@Override
@NotNull
public List<FileEditor> openEditor(@NotNull OpenFileDescriptor descriptor, boolean focusEditor) {
FileEditor[] result = openFileWithProviders(descriptor.getFile(), focusEditor, false).getFirst();
for (FileEditor fileEditor : result) {
if (getSelectedEditor(descriptor.getFile()) == fileEditor) {
if (fileEditor instanceof NavigatableFileEditor) {
((NavigatableFileEditor)fileEditor).navigateTo(descriptor);
}
break;
}
}
return Arrays.asList(result);
}
@Override
@NotNull
public Project getProject() {
return myProject;
}
@Override
public JComponent getPreferredFocusedComponent() {
throw new UnsupportedOperationException();
}
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> getEditorsWithProviders(@NotNull VirtualFile file) {
Pair<FileEditor, FileEditorProvider> editorAndProvider = myTestEditorSplitter.getEditorAndProvider(file);
FileEditor[] fileEditor = new FileEditor[0];
FileEditorProvider[] fileEditorProvider= new FileEditorProvider[0];
if (editorAndProvider != null) {
fileEditor = new FileEditor[] {editorAndProvider.first};
fileEditorProvider = new FileEditorProvider[]{editorAndProvider.second};
}
return Pair.create(fileEditor, fileEditorProvider);
}
@Override
public int getWindowSplitCount() {
return 0;
}
@Override
public boolean hasSplitOrUndockedWindows() {
return false;
}
@NotNull
@Override
public EditorsSplitters getSplitters() {
throw new IncorrectOperationException();
}
@NotNull
@Override
public ActionCallback getReady(@NotNull Object requestor) {
return ActionCallback.DONE;
}
@Override
public void setSelectedEditor(@NotNull VirtualFile file, @NotNull String fileEditorProviderId) {
}
}
|
|
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.controller;
import com.linkedin.pinot.common.protocols.SegmentCompletionProtocol;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.StringUtil;
import com.linkedin.pinot.filesystem.LocalPinotFS;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ControllerConf extends PropertiesConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(ControllerConf.class);
private static final String CONTROLLER_VIP_HOST = "controller.vip.host";
private static final String CONTROLLER_VIP_PORT = "controller.vip.port";
private static final String CONTROLLER_VIP_PROTOCOL = "controller.vip.protocol";
private static final String CONTROLLER_HOST = "controller.host";
private static final String CONTROLLER_PORT = "controller.port";
private static final String DATA_DIR = "controller.data.dir";
// Potentially same as data dir if local
private static final String LOCAL_TEMP_DIR = "controller.local.temp.dir";
private static final String ZK_STR = "controller.zk.str";
private static final String UPDATE_SEGMENT_STATE_MODEL = "controller.update_segment_state_model"; // boolean: Update the statemodel on boot?
private static final String HELIX_CLUSTER_NAME = "controller.helix.cluster.name";
private static final String CLUSTER_TENANT_ISOLATION_ENABLE = "cluster.tenant.isolation.enable";
private static final String CONSOLE_WEBAPP_ROOT_PATH = "controller.query.console";
private static final String CONSOLE_WEBAPP_USE_HTTPS = "controller.query.console.useHttps";
private static final String EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT = "controller.upload.onlineToOfflineTimeout";
private static final String RETENTION_MANAGER_FREQUENCY_IN_SECONDS = "controller.retention.frequencyInSeconds";
private static final String VALIDATION_MANAGER_FREQUENCY_IN_SECONDS = "controller.validation.frequencyInSeconds";
private static final String STATUS_CHECKER_FREQUENCY_IN_SECONDS = "controller.statuschecker.frequencyInSeconds";
private static final String REALTIME_SEGMENT_RELOCATOR_FREQUENCY = "controller.realtime.segment.relocator.frequency";
private static final String STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS = "controller.statuschecker.waitForPushTimeInSeconds";
private static final String SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS = "server.request.timeoutSeconds";
private static final String SEGMENT_COMMIT_TIMEOUT_SECONDS = "controller.realtime.segment.commit.timeoutSeconds";
private static final String DELETED_SEGMENTS_RETENTION_IN_DAYS = "controller.deleted.segments.retentionInDays";
private static final String TASK_MANAGER_FREQUENCY_IN_SECONDS = "controller.task.frequencyInSeconds";
private static final String TABLE_MIN_REPLICAS = "table.minReplicas";
private static final String ENABLE_SPLIT_COMMIT = "controller.enable.split.commit";
private static final String JERSEY_ADMIN_API_PORT = "jersey.admin.api.port";
private static final String JERSEY_ADMIN_IS_PRIMARY = "jersey.admin.isprimary";
private static final String ACCESS_CONTROL_FACTORY_CLASS = "controller.admin.access.control.factory.class";
// Maximum amount of time a segment upload may take, from start to finish. Used when parallel push
// protection is enabled: if an upload does not finish within this timeout, the next upload can override the previous one.
private static final String SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS = "controller.segment.upload.timeoutInMillis";
private static final String REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS = "controller.realtime.segment.metadata.commit.numLocks";
private static final String ENABLE_STORAGE_QUOTA_CHECK = "controller.enable.storage.quota.check";
// Because segment level validation is expensive and requires heavy ZK access, we run segment level validation with a
// separate interval
private static final String SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS =
"controller.segment.level.validation.intervalInSeconds";
// Defines the kind of storage and the underlying PinotFS implementation
private static final String PINOT_FS_FACTORY_CLASS_PREFIX = "controller.storage.factory.class";
private static final String PINOT_FS_FACTORY_CLASS_LOCAL = "controller.storage.factory.class.file";
private static final int DEFAULT_RETENTION_CONTROLLER_FREQUENCY_IN_SECONDS = 6 * 60 * 60; // 6 Hours.
private static final int DEFAULT_VALIDATION_CONTROLLER_FREQUENCY_IN_SECONDS = 60 * 60; // 1 Hour.
private static final int DEFAULT_STATUS_CONTROLLER_FREQUENCY_IN_SECONDS = 5 * 60; // 5 minutes
private static final String DEFAULT_REALTIME_SEGMENT_RELOCATOR_FREQUENCY = "1h"; // 1 hour
private static final int DEFAULT_STATUS_CONTROLLER_WAIT_FOR_PUSH_TIME_IN_SECONDS = 10 * 60; // 10 minutes
private static final long DEFAULT_EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT_MILLIS = 120_000L; // 2 minutes
private static final int DEFAULT_SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS = 30;
private static final int DEFAULT_DELETED_SEGMENTS_RETENTION_IN_DAYS = 7;
private static final int DEFAULT_TASK_MANAGER_FREQUENCY_IN_SECONDS = -1; // Disabled
private static final int DEFAULT_TABLE_MIN_REPLICAS = 1;
private static final boolean DEFAULT_ENABLE_SPLIT_COMMIT = false;
private static final int DEFAULT_JERSEY_ADMIN_PORT = 21000;
private static final String DEFAULT_ACCESS_CONTROL_FACTORY_CLASS =
"com.linkedin.pinot.controller.api.access.AllowAllAccessFactory";
private static final long DEFAULT_SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS = 600_000L; // 10 minutes
private static final int DEFAULT_REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS = 64;
private static final boolean DEFAULT_ENABLE_STORAGE_QUOTA_CHECK = true;
private static final int DEFAULT_SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS = 24 * 60 * 60;
private static final String DEFAULT_PINOT_FS_FACTORY_CLASS_LOCAL = LocalPinotFS.class.getName();
public ControllerConf(File file) throws ConfigurationException {
super(file);
}
public ControllerConf() {
super();
}
/**
* Returns the URI for the given path, appending the local (file) scheme if the path has no scheme.
*/
public static URI getUriFromPath(String path) {
try {
URI uri = new URI(path);
if (uri.getScheme() != null) {
return uri;
} else {
return new URI(CommonConstants.Segment.LOCAL_SEGMENT_SCHEME, path, null);
}
} catch (URISyntaxException e) {
LOGGER.error("Could not construct uri from path {}", path);
throw new RuntimeException(e);
}
}
public static URI constructSegmentLocation(String baseDataDir, String tableName, String segmentName) {
try {
return new URI(StringUtil.join(File.separator, baseDataDir, tableName, URLEncoder.encode(segmentName, "UTF-8")));
} catch (UnsupportedEncodingException | URISyntaxException e) {
LOGGER.error("Could not construct segment location with baseDataDir {}, tableName {}, segmentName {}",
baseDataDir, tableName, segmentName);
throw new RuntimeException(e);
}
}
public static String constructDownloadUrl(String tableName, String segmentName, String vip) {
try {
return StringUtil.join("/", vip, "segments", tableName, URLEncoder.encode(segmentName, "UTF-8"));
} catch (UnsupportedEncodingException e) {
// Shouldn't happen
throw new AssertionError("Encountered error while encoding in UTF-8 format", e);
}
}
public void setLocalTempDir(String localTempDir) {
setProperty(LOCAL_TEMP_DIR, localTempDir);
}
public String getLocalTempDir() {
return getString(LOCAL_TEMP_DIR, null);
}
public void setPinotFSFactoryClasses(Configuration pinotFSFactoryClasses) {
setProperty(PINOT_FS_FACTORY_CLASS_LOCAL, DEFAULT_PINOT_FS_FACTORY_CLASS_LOCAL);
if (pinotFSFactoryClasses != null) {
pinotFSFactoryClasses.getKeys()
.forEachRemaining(key -> setProperty((String) key, pinotFSFactoryClasses.getProperty((String) key)));
}
}
public void setSplitCommit(boolean isSplitCommit) {
setProperty(ENABLE_SPLIT_COMMIT, isSplitCommit);
}
public void setQueryConsolePath(String path) {
setProperty(CONSOLE_WEBAPP_ROOT_PATH, path);
}
public String getQueryConsoleWebappPath() {
if (containsKey(CONSOLE_WEBAPP_ROOT_PATH)) {
return (String) getProperty(CONSOLE_WEBAPP_ROOT_PATH);
}
return ControllerConf.class.getClassLoader().getResource("webapp").toExternalForm();
}
public void setQueryConsoleUseHttps(boolean useHttps) {
setProperty(CONSOLE_WEBAPP_USE_HTTPS, useHttps);
}
public boolean getQueryConsoleUseHttps() {
return containsKey(CONSOLE_WEBAPP_USE_HTTPS) && getBoolean(CONSOLE_WEBAPP_USE_HTTPS);
}
public void setJerseyAdminPrimary(String jerseyAdminPrimary) {
setProperty(JERSEY_ADMIN_IS_PRIMARY, jerseyAdminPrimary);
}
public void setHelixClusterName(String clusterName) {
setProperty(HELIX_CLUSTER_NAME, clusterName);
}
public void setControllerHost(String host) {
setProperty(CONTROLLER_HOST, host);
}
public void setControllerVipHost(String vipHost) {
setProperty(CONTROLLER_VIP_HOST, vipHost);
}
public void setControllerVipPort(String vipPort) {
setProperty(CONTROLLER_VIP_PORT, vipPort);
}
public void setControllerVipProtocol(String vipProtocol) {
setProperty(CONTROLLER_VIP_PROTOCOL, vipProtocol);
}
public void setControllerPort(String port) {
setProperty(CONTROLLER_PORT, port);
}
public void setDataDir(String dataDir) {
setProperty(DATA_DIR, dataDir);
}
public void setRealtimeSegmentCommitTimeoutSeconds(int timeoutSec) {
setProperty(SEGMENT_COMMIT_TIMEOUT_SECONDS, Integer.toString(timeoutSec));
}
public void setUpdateSegmentStateModel(String updateStateModel) {
setProperty(UPDATE_SEGMENT_STATE_MODEL, updateStateModel);
}
public void setZkStr(String zkStr) {
setProperty(ZK_STR, zkStr);
}
// A boolean to decide whether the Jersey admin API should be the primary one. The default here is true;
// it can be set to false if backward-compatibility problems with the Jersey API turn up.
public boolean isJerseyAdminPrimary() {
return getBoolean(JERSEY_ADMIN_IS_PRIMARY, true);
}
public String getHelixClusterName() {
return (String) getProperty(HELIX_CLUSTER_NAME);
}
public String getControllerHost() {
return (String) getProperty(CONTROLLER_HOST);
}
public String getControllerPort() {
return (String) getProperty(CONTROLLER_PORT);
}
public String getDataDir() {
return (String) getProperty(DATA_DIR);
}
public int getSegmentCommitTimeoutSeconds() {
if (containsKey(SEGMENT_COMMIT_TIMEOUT_SECONDS)) {
return Integer.parseInt((String) getProperty(SEGMENT_COMMIT_TIMEOUT_SECONDS));
}
return SegmentCompletionProtocol.getDefaultMaxSegmentCommitTimeSeconds();
}
public boolean isUpdateSegmentStateModel() {
if (containsKey(UPDATE_SEGMENT_STATE_MODEL)) {
return Boolean.parseBoolean(getProperty(UPDATE_SEGMENT_STATE_MODEL).toString());
}
return false; // Default is to leave the statemodel untouched.
}
public String generateVipUrl() {
return getControllerVipProtocol() + "://" + getControllerVipHost() + ":" + getControllerVipPort();
}
public String getZkStr() {
Object zkAddressObj = getProperty(ZK_STR);
// The setter converts a comma-separated string into an ArrayList, so we need to convert it back to a String here.
if (zkAddressObj instanceof ArrayList) {
List<String> zkAddressList = (ArrayList<String>) zkAddressObj;
String[] zkAddress = zkAddressList.toArray(new String[0]);
return StringUtil.join(",", zkAddress);
} else if (zkAddressObj instanceof String) {
return (String) zkAddressObj;
} else {
throw new RuntimeException("Unexpected data type for zkAddress PropertiesConfiguration, expecting String but got "
+ zkAddressObj.getClass().getName());
}
}
@Override
public String toString() {
return super.toString();
}
public Configuration getPinotFSFactoryClasses() {
return this.subset(PINOT_FS_FACTORY_CLASS_PREFIX);
}
public boolean getAcceptSplitCommit() {
return getBoolean(ENABLE_SPLIT_COMMIT, DEFAULT_ENABLE_SPLIT_COMMIT);
}
public String getControllerVipHost() {
if (containsKey(CONTROLLER_VIP_HOST) && ((String) getProperty(CONTROLLER_VIP_HOST)).length() > 0) {
return (String) getProperty(CONTROLLER_VIP_HOST);
}
return (String) getProperty(CONTROLLER_HOST);
}
public String getControllerVipPort() {
if (containsKey(CONTROLLER_VIP_PORT) && ((String) getProperty(CONTROLLER_VIP_PORT)).length() > 0) {
return (String) getProperty(CONTROLLER_VIP_PORT);
}
return getControllerPort();
}
public String getControllerVipProtocol() {
if (containsKey(CONTROLLER_VIP_PROTOCOL) && getProperty(CONTROLLER_VIP_PROTOCOL).equals("https")) {
return "https";
}
return "http";
}
public int getRetentionControllerFrequencyInSeconds() {
if (containsKey(RETENTION_MANAGER_FREQUENCY_IN_SECONDS)) {
return Integer.parseInt((String) getProperty(RETENTION_MANAGER_FREQUENCY_IN_SECONDS));
}
return DEFAULT_RETENTION_CONTROLLER_FREQUENCY_IN_SECONDS;
}
public void setRetentionControllerFrequencyInSeconds(int retentionFrequencyInSeconds) {
setProperty(RETENTION_MANAGER_FREQUENCY_IN_SECONDS, Integer.toString(retentionFrequencyInSeconds));
}
public int getValidationControllerFrequencyInSeconds() {
if (containsKey(VALIDATION_MANAGER_FREQUENCY_IN_SECONDS)) {
return Integer.parseInt((String) getProperty(VALIDATION_MANAGER_FREQUENCY_IN_SECONDS));
}
return DEFAULT_VALIDATION_CONTROLLER_FREQUENCY_IN_SECONDS;
}
public void setValidationControllerFrequencyInSeconds(int validationFrequencyInSeconds) {
setProperty(VALIDATION_MANAGER_FREQUENCY_IN_SECONDS, Integer.toString(validationFrequencyInSeconds));
}
public int getStatusCheckerFrequencyInSeconds() {
if (containsKey(STATUS_CHECKER_FREQUENCY_IN_SECONDS)) {
return Integer.parseInt((String) getProperty(STATUS_CHECKER_FREQUENCY_IN_SECONDS));
}
return DEFAULT_STATUS_CONTROLLER_FREQUENCY_IN_SECONDS;
}
public void setStatusCheckerFrequencyInSeconds(int statusCheckerFrequencyInSeconds) {
setProperty(STATUS_CHECKER_FREQUENCY_IN_SECONDS, Integer.toString(statusCheckerFrequencyInSeconds));
}
public String getRealtimeSegmentRelocatorFrequency() {
if (containsKey(REALTIME_SEGMENT_RELOCATOR_FREQUENCY)) {
return (String) getProperty(REALTIME_SEGMENT_RELOCATOR_FREQUENCY);
}
return DEFAULT_REALTIME_SEGMENT_RELOCATOR_FREQUENCY;
}
public void setRealtimeSegmentRelocatorFrequency(String relocatorFrequency) {
setProperty(REALTIME_SEGMENT_RELOCATOR_FREQUENCY, relocatorFrequency);
}
public int getStatusCheckerWaitForPushTimeInSeconds() {
if (containsKey(STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS)) {
return Integer.parseInt((String) getProperty(STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS));
}
return DEFAULT_STATUS_CONTROLLER_WAIT_FOR_PUSH_TIME_IN_SECONDS;
}
public void setStatusCheckerWaitForPushTimeInSeconds(int statusCheckerWaitForPushTimeInSeconds) {
setProperty(STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS, Integer.toString(statusCheckerWaitForPushTimeInSeconds));
}
public long getExternalViewOnlineToOfflineTimeout() {
if (containsKey(EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT)) {
return Integer.parseInt((String) getProperty(EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT));
}
return DEFAULT_EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT_MILLIS;
}
public void setExternalViewOnlineToOfflineTimeout(long timeout) {
setProperty(EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT, timeout);
}
public boolean tenantIsolationEnabled() {
if (containsKey(CLUSTER_TENANT_ISOLATION_ENABLE)) {
return Boolean.parseBoolean(getProperty(CLUSTER_TENANT_ISOLATION_ENABLE).toString());
}
return true;
}
public void setTenantIsolationEnabled(boolean isSingleTenant) {
setProperty(CLUSTER_TENANT_ISOLATION_ENABLE, isSingleTenant);
}
public void setServerAdminRequestTimeoutSeconds(int timeoutSeconds) {
setProperty(SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS, timeoutSeconds);
}
public int getServerAdminRequestTimeoutSeconds() {
return getInt(SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS, DEFAULT_SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS);
}
public int getDeletedSegmentsRetentionInDays() {
return getInt(DELETED_SEGMENTS_RETENTION_IN_DAYS, DEFAULT_DELETED_SEGMENTS_RETENTION_IN_DAYS);
}
public void setDeletedSegmentsRetentionInDays(int retentionInDays) {
setProperty(DELETED_SEGMENTS_RETENTION_IN_DAYS, retentionInDays);
}
public int getTaskManagerFrequencyInSeconds() {
return getInt(TASK_MANAGER_FREQUENCY_IN_SECONDS, DEFAULT_TASK_MANAGER_FREQUENCY_IN_SECONDS);
}
public void setTaskManagerFrequencyInSeconds(int frequencyInSeconds) {
setProperty(TASK_MANAGER_FREQUENCY_IN_SECONDS, Integer.toString(frequencyInSeconds));
}
public int getDefaultTableMinReplicas() {
return getInt(TABLE_MIN_REPLICAS, DEFAULT_TABLE_MIN_REPLICAS);
}
public void setTableMinReplicas(int minReplicas) {
setProperty(TABLE_MIN_REPLICAS, minReplicas);
}
public String getJerseyAdminApiPort() {
return getString(JERSEY_ADMIN_API_PORT, String.valueOf(DEFAULT_JERSEY_ADMIN_PORT));
}
public String getAccessControlFactoryClass() {
return getString(ACCESS_CONTROL_FACTORY_CLASS, DEFAULT_ACCESS_CONTROL_FACTORY_CLASS);
}
public void setAccessControlFactoryClass(String accessControlFactoryClass) {
setProperty(ACCESS_CONTROL_FACTORY_CLASS, accessControlFactoryClass);
}
public long getSegmentUploadTimeoutInMillis() {
return getLong(SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS, DEFAULT_SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS);
}
public void setSegmentUploadTimeoutInMillis(long segmentUploadTimeoutInMillis) {
setProperty(SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS, segmentUploadTimeoutInMillis);
}
public int getRealtimeSegmentMetadataCommitNumLocks() {
return getInt(REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS, DEFAULT_REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS);
}
public void setRealtimeSegmentMetadataCommitNumLocks(int realtimeSegmentMetadataCommitNumLocks) {
setProperty(REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS, realtimeSegmentMetadataCommitNumLocks);
}
public boolean getEnableStorageQuotaCheck() {
return getBoolean(ENABLE_STORAGE_QUOTA_CHECK, DEFAULT_ENABLE_STORAGE_QUOTA_CHECK);
}
public int getSegmentLevelValidationIntervalInSeconds() {
return getInt(SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS, DEFAULT_SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS);
}
}
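A minimal usage sketch (not part of the original file), assuming ControllerConf and its configuration superclass are on the classpath; the host, port, paths and ZooKeeper address below are illustrative values only.

public class ControllerConfDemo {
  public static void main(String[] args) {
    ControllerConf conf = new ControllerConf();
    conf.setControllerHost("localhost");
    conf.setControllerPort("9000");
    conf.setDataDir("/tmp/pinotController/data");
    conf.setZkStr("localhost:2181");

    // Getters fall back to the documented defaults when a key was never set.
    System.out.println(conf.getServerAdminRequestTimeoutSeconds()); // 30
    System.out.println(conf.getDeletedSegmentsRetentionInDays());   // 7
    System.out.println(conf.generateVipUrl());                      // http://localhost:9000

    // getUriFromPath() adds the local scheme when the path has none.
    System.out.println(ControllerConf.getUriFromPath(conf.getDataDir()));
  }
}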
|
|
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.schema;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import org.h2.api.ErrorCode;
import org.h2.api.TableEngine;
import org.h2.command.ddl.CreateTableData;
import org.h2.constraint.Constraint;
import org.h2.engine.Database;
import org.h2.engine.DbObject;
import org.h2.engine.DbObjectBase;
import org.h2.engine.FunctionAlias;
import org.h2.engine.Session;
import org.h2.engine.SysProperties;
import org.h2.engine.User;
import org.h2.index.Index;
import org.h2.message.DbException;
import org.h2.message.Trace;
import org.h2.mvstore.db.MVTableEngine;
import org.h2.table.RegularTable;
import org.h2.table.Table;
import org.h2.table.TableLink;
import org.h2.util.JdbcUtils;
import org.h2.util.New;
/**
* A schema as created by the SQL statement
* CREATE SCHEMA
*/
public class Schema extends DbObjectBase {
private User owner;
private final boolean system;
private final HashMap<String, Table> tablesAndViews;
private final HashMap<String, Index> indexes;
private final HashMap<String, Sequence> sequences;
private final HashMap<String, TriggerObject> triggers;
private final HashMap<String, Constraint> constraints;
private final HashMap<String, Constant> constants;
private final HashMap<String, FunctionAlias> functions;
/**
* The set of returned unique names that are not yet stored. It is used to
* avoid returning the same unique name twice when multiple threads
* concurrently create objects.
*/
private final HashSet<String> temporaryUniqueNames = New.hashSet();
/**
* Create a new schema object.
*
* @param database the database
* @param id the object id
* @param schemaName the schema name
* @param owner the owner of the schema
* @param system if this is a system schema (such a schema can not be
* dropped)
*/
public Schema(Database database, int id, String schemaName, User owner,
boolean system) {
tablesAndViews = database.newStringMap();
indexes = database.newStringMap();
sequences = database.newStringMap();
triggers = database.newStringMap();
constraints = database.newStringMap();
constants = database.newStringMap();
functions = database.newStringMap();
initDbObjectBase(database, id, schemaName, Trace.SCHEMA);
this.owner = owner;
this.system = system;
}
/**
* Check if this schema can be dropped. System schemas can not be dropped.
*
* @return true if it can be dropped
*/
public boolean canDrop() {
return !system;
}
@Override
public String getCreateSQLForCopy(Table table, String quotedName) {
throw DbException.throwInternalError();
}
@Override
public String getDropSQL() {
return null;
}
@Override
public String getCreateSQL() {
if (system) {
return null;
}
return "CREATE SCHEMA IF NOT EXISTS " +
getSQL() + " AUTHORIZATION " + owner.getSQL();
}
@Override
public int getType() {
return DbObject.SCHEMA;
}
@Override
public void removeChildrenAndResources(Session session) {
while (triggers != null && triggers.size() > 0) {
TriggerObject obj = (TriggerObject) triggers.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
while (constraints != null && constraints.size() > 0) {
Constraint obj = (Constraint) constraints.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
// There can be dependencies between tables e.g. using computed columns,
// so we might need to loop over them multiple times.
boolean runLoopAgain = false;
do {
runLoopAgain = false;
if (tablesAndViews != null) {
// Loop over a copy because the map is modified underneath us.
for (Table obj : New.arrayList(tablesAndViews.values())) {
// Check for null because multiple tables might be deleted
// in one go underneath us.
if (obj.getName() != null) {
if (database.getDependentTable(obj, obj) == null) {
database.removeSchemaObject(session, obj);
} else {
runLoopAgain = true;
}
}
}
}
} while (runLoopAgain);
while (indexes != null && indexes.size() > 0) {
Index obj = (Index) indexes.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
while (sequences != null && sequences.size() > 0) {
Sequence obj = (Sequence) sequences.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
while (constants != null && constants.size() > 0) {
Constant obj = (Constant) constants.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
while (functions != null && functions.size() > 0) {
FunctionAlias obj = (FunctionAlias) functions.values().toArray()[0];
database.removeSchemaObject(session, obj);
}
database.removeMeta(session, getId());
owner = null;
invalidate();
}
@Override
public void checkRename() {
// ok
}
/**
* Get the owner of this schema.
*
* @return the owner
*/
public User getOwner() {
return owner;
}
@SuppressWarnings("unchecked")
private HashMap<String, SchemaObject> getMap(int type) {
HashMap<String, ? extends SchemaObject> result;
switch (type) {
case DbObject.TABLE_OR_VIEW:
result = tablesAndViews;
break;
case DbObject.SEQUENCE:
result = sequences;
break;
case DbObject.INDEX:
result = indexes;
break;
case DbObject.TRIGGER:
result = triggers;
break;
case DbObject.CONSTRAINT:
result = constraints;
break;
case DbObject.CONSTANT:
result = constants;
break;
case DbObject.FUNCTION_ALIAS:
result = functions;
break;
default:
throw DbException.throwInternalError("type=" + type);
}
return (HashMap<String, SchemaObject>) result;
}
/**
* Add an object to this schema.
* This method must not be called within CreateSchemaObject;
* use Database.addSchemaObject() instead
*
* @param obj the object to add
*/
public void add(SchemaObject obj) {
if (SysProperties.CHECK && obj.getSchema() != this) {
DbException.throwInternalError("wrong schema");
}
String name = obj.getName();
HashMap<String, SchemaObject> map = getMap(obj.getType());
if (SysProperties.CHECK && map.get(name) != null) {
DbException.throwInternalError("object already exists: " + name);
}
map.put(name, obj);
freeUniqueName(name);
}
/**
* Rename an object.
*
* @param obj the object to rename
* @param newName the new name
*/
public void rename(SchemaObject obj, String newName) {
int type = obj.getType();
HashMap<String, SchemaObject> map = getMap(type);
if (SysProperties.CHECK) {
if (!map.containsKey(obj.getName())) {
DbException.throwInternalError("not found: " + obj.getName());
}
if (obj.getName().equals(newName) || map.containsKey(newName)) {
DbException.throwInternalError("object already exists: " + newName);
}
}
obj.checkRename();
map.remove(obj.getName());
freeUniqueName(obj.getName());
obj.rename(newName);
map.put(newName, obj);
freeUniqueName(newName);
}
/**
* Try to find a table or view with this name. This method returns null if
* no object with this name exists. Local temporary tables are also
* returned.
*
* @param session the session
* @param name the object name
* @return the object or null
*/
public Table findTableOrView(Session session, String name) {
Table table = tablesAndViews.get(name);
if (table == null && session != null) {
table = session.findLocalTempTable(name);
}
return table;
}
/**
* Try to find an index with this name. This method returns null if
* no object with this name exists.
*
* @param session the session
* @param name the object name
* @return the object or null
*/
public Index findIndex(Session session, String name) {
Index index = indexes.get(name);
if (index == null) {
index = session.findLocalTempTableIndex(name);
}
return index;
}
/**
* Try to find a trigger with this name. This method returns null if
* no object with this name exists.
*
* @param name the object name
* @return the object or null
*/
public TriggerObject findTrigger(String name) {
return triggers.get(name);
}
/**
* Try to find a sequence with this name. This method returns null if
* no object with this name exists.
*
* @param sequenceName the object name
* @return the object or null
*/
public Sequence findSequence(String sequenceName) {
return sequences.get(sequenceName);
}
/**
* Try to find a constraint with this name. This method returns null if no
* object with this name exists.
*
* @param session the session
* @param name the object name
* @return the object or null
*/
public Constraint findConstraint(Session session, String name) {
Constraint constraint = constraints.get(name);
if (constraint == null) {
constraint = session.findLocalTempTableConstraint(name);
}
return constraint;
}
/**
* Try to find a user defined constant with this name. This method returns
* null if no object with this name exists.
*
* @param constantName the object name
* @return the object or null
*/
public Constant findConstant(String constantName) {
return constants.get(constantName);
}
/**
* Try to find a user defined function with this name. This method returns
* null if no object with this name exists.
*
* @param functionAlias the object name
* @return the object or null
*/
public FunctionAlias findFunction(String functionAlias) {
return functions.get(functionAlias);
}
/**
* Release a unique object name.
*
* @param name the object name
*/
public void freeUniqueName(String name) {
if (name != null) {
synchronized (temporaryUniqueNames) {
temporaryUniqueNames.remove(name);
}
}
}
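/**
 * Generate a unique name for the given object. Candidate names are built
 * from an upper-case hex prefix of the object's name hash and, failing
 * that, from a numeric suffix; the chosen name is reserved in
 * temporaryUniqueNames until it is stored or released via freeUniqueName.
 */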
private String getUniqueName(DbObject obj,
HashMap<String, ? extends SchemaObject> map, String prefix) {
String hash = Integer.toHexString(obj.getName().hashCode()).toUpperCase();
String name = null;
synchronized (temporaryUniqueNames) {
for (int i = 1, len = hash.length(); i < len; i++) {
name = prefix + hash.substring(0, i);
if (!map.containsKey(name) && !temporaryUniqueNames.contains(name)) {
break;
}
name = null;
}
if (name == null) {
prefix = prefix + hash + "_";
for (int i = 0;; i++) {
name = prefix + i;
if (!map.containsKey(name) && !temporaryUniqueNames.contains(name)) {
break;
}
}
}
temporaryUniqueNames.add(name);
}
return name;
}
/**
* Create a unique constraint name.
*
* @param session the session
* @param table the constraint table
* @return the unique name
*/
public String getUniqueConstraintName(Session session, Table table) {
HashMap<String, Constraint> tableConstraints;
if (table.isTemporary() && !table.isGlobalTemporary()) {
tableConstraints = session.getLocalTempTableConstraints();
} else {
tableConstraints = constraints;
}
return getUniqueName(table, tableConstraints, "CONSTRAINT_");
}
/**
* Create a unique index name.
*
* @param session the session
* @param table the indexed table
* @param prefix the index name prefix
* @return the unique name
*/
public String getUniqueIndexName(Session session, Table table, String prefix) {
HashMap<String, Index> tableIndexes;
if (table.isTemporary() && !table.isGlobalTemporary()) {
tableIndexes = session.getLocalTempTableIndexes();
} else {
tableIndexes = indexes;
}
return getUniqueName(table, tableIndexes, prefix);
}
/**
* Get the table or view with the given name.
* Local temporary tables are also returned.
*
* @param session the session
* @param name the table or view name
* @return the table or view
* @throws DbException if no such object exists
*/
public Table getTableOrView(Session session, String name) {
Table table = tablesAndViews.get(name);
if (table == null) {
if (session != null) {
table = session.findLocalTempTable(name);
}
if (table == null) {
throw DbException.get(ErrorCode.TABLE_OR_VIEW_NOT_FOUND_1, name);
}
}
return table;
}
/**
* Get the index with the given name.
*
* @param name the index name
* @return the index
* @throws DbException if no such object exists
*/
public Index getIndex(String name) {
Index index = indexes.get(name);
if (index == null) {
throw DbException.get(ErrorCode.INDEX_NOT_FOUND_1, name);
}
return index;
}
/**
* Get the constraint with the given name.
*
* @param name the constraint name
* @return the constraint
* @throws DbException if no such object exists
*/
public Constraint getConstraint(String name) {
Constraint constraint = constraints.get(name);
if (constraint == null) {
throw DbException.get(ErrorCode.CONSTRAINT_NOT_FOUND_1, name);
}
return constraint;
}
/**
* Get the user defined constant with the given name.
*
* @param constantName the constant name
* @return the constant
* @throws DbException if no such object exists
*/
public Constant getConstant(String constantName) {
Constant constant = constants.get(constantName);
if (constant == null) {
throw DbException.get(ErrorCode.CONSTANT_NOT_FOUND_1, constantName);
}
return constant;
}
/**
* Get the sequence with the given name.
*
* @param sequenceName the sequence name
* @return the sequence
* @throws DbException if no such object exists
*/
public Sequence getSequence(String sequenceName) {
Sequence sequence = sequences.get(sequenceName);
if (sequence == null) {
throw DbException.get(ErrorCode.SEQUENCE_NOT_FOUND_1, sequenceName);
}
return sequence;
}
/**
* Get all objects.
*
* @return a (possibly empty) list of all objects
*/
public ArrayList<SchemaObject> getAll() {
ArrayList<SchemaObject> all = New.arrayList();
all.addAll(getMap(DbObject.TABLE_OR_VIEW).values());
all.addAll(getMap(DbObject.SEQUENCE).values());
all.addAll(getMap(DbObject.INDEX).values());
all.addAll(getMap(DbObject.TRIGGER).values());
all.addAll(getMap(DbObject.CONSTRAINT).values());
all.addAll(getMap(DbObject.CONSTANT).values());
all.addAll(getMap(DbObject.FUNCTION_ALIAS).values());
return all;
}
/**
* Get all objects of the given type.
*
* @param type the object type
* @return a (possibly empty) list of all objects
*/
public ArrayList<SchemaObject> getAll(int type) {
HashMap<String, SchemaObject> map = getMap(type);
return New.arrayList(map.values());
}
/**
* Get all tables and views.
*
* @return a (possibly empty) list of all objects
*/
public ArrayList<Table> getAllTablesAndViews() {
synchronized (database) {
return New.arrayList(tablesAndViews.values());
}
}
/**
* Remove an object from this schema.
*
* @param obj the object to remove
*/
public void remove(SchemaObject obj) {
String objName = obj.getName();
HashMap<String, SchemaObject> map = getMap(obj.getType());
if (SysProperties.CHECK && !map.containsKey(objName)) {
DbException.throwInternalError("not found: " + objName);
}
map.remove(objName);
freeUniqueName(objName);
}
/**
* Add a table to the schema.
*
* @param data the create table information
* @return the created {@link Table} object
*/
public Table createTable(CreateTableData data) {
synchronized (database) {
if (!data.temporary || data.globalTemporary) {
database.lockMeta(data.session);
}
data.schema = this;
if (data.tableEngine == null) {
if (database.getSettings().mvStore) {
data.tableEngine = MVTableEngine.class.getName();
}
}
if (data.tableEngine != null) {
TableEngine engine;
try {
engine = (TableEngine) JdbcUtils.loadUserClass(data.tableEngine).newInstance();
} catch (Exception e) {
throw DbException.convert(e);
}
return engine.createTable(data);
}
return new RegularTable(data);
}
}
/**
* Add a linked table to the schema.
*
* @param id the object id
* @param tableName the table name of the alias
* @param driver the driver class name
* @param url the database URL
* @param user the user name
* @param password the password
* @param originalSchema the schema name of the target table
* @param originalTable the table name of the target table
* @param emitUpdates if updates should be emitted instead of delete/insert
* @param force create the object even if the database can not be accessed
* @return the {@link TableLink} object
*/
public TableLink createTableLink(int id, String tableName, String driver,
String url, String user, String password, String originalSchema,
String originalTable, boolean emitUpdates, boolean force) {
synchronized (database) {
return new TableLink(this, id, tableName,
driver, url, user, password,
originalSchema, originalTable, emitUpdates, force);
}
}
}
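The unique-name generation in getUniqueName() above grows a hex prefix of the object's name hash and reserves each handed-out name until it is stored or released. The standalone sketch below reproduces that idea with plain collections in place of the schema's object maps; UniqueNameDemo and its method names are made up for illustration and are not H2 API, and the synchronization of the real implementation is omitted.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class UniqueNameDemo {
  // Names handed out but not yet stored, mirroring temporaryUniqueNames above.
  private static final Set<String> reserved = new HashSet<>();

  // Grow a hex prefix of the base name's hash until the name is free,
  // then fall back to a numeric suffix.
  static String uniqueName(String baseName, Map<String, ?> existing, String prefix) {
    String hash = Integer.toHexString(baseName.hashCode()).toUpperCase();
    for (int i = 1; i <= hash.length(); i++) {
      String name = prefix + hash.substring(0, i);
      if (!existing.containsKey(name) && !reserved.contains(name)) {
        reserved.add(name);
        return name;
      }
    }
    for (int i = 0; ; i++) {
      String name = prefix + hash + "_" + i;
      if (!existing.containsKey(name) && !reserved.contains(name)) {
        reserved.add(name);
        return name;
      }
    }
  }

  public static void main(String[] args) {
    Map<String, Object> constraints = new HashMap<>();
    String first = uniqueName("MY_TABLE", constraints, "CONSTRAINT_");
    constraints.put(first, new Object());
    String second = uniqueName("MY_TABLE", constraints, "CONSTRAINT_");
    // The second request for the same base name gets a longer, still-unique prefix.
    System.out.println(first + " / " + second);
  }
}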
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.ColumnHandle;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.Partition;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.SerializableNativeValue;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.split.SplitManager;
import com.facebook.presto.sql.planner.DeterminismEvaluator;
import com.facebook.presto.sql.planner.LiteralInterpreter;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanRewriter;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Converts cardinality-insensitive aggregations (max, min, "distinct") over partition keys
* into simple metadata queries
*/
public class MetadataQueryOptimizer
extends PlanOptimizer
{
private static final Set<String> ALLOWED_FUNCTIONS = ImmutableSet.of("max", "min", "approx_distinct");
private final Metadata metadata;
private final SplitManager splitManager;
public MetadataQueryOptimizer(Metadata metadata, SplitManager splitManager)
{
checkNotNull(metadata, "metadata is null");
checkNotNull(splitManager, "splitManager is null");
this.metadata = metadata;
this.splitManager = splitManager;
}
@Override
public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
return PlanRewriter.rewriteWith(new Optimizer(metadata, splitManager, idAllocator), plan, null);
}
private static class Optimizer
extends PlanRewriter<Void>
{
private final PlanNodeIdAllocator idAllocator;
private final Metadata metadata;
private final SplitManager splitManager;
private Optimizer(Metadata metadata, SplitManager splitManager, PlanNodeIdAllocator idAllocator)
{
this.metadata = metadata;
this.splitManager = splitManager;
this.idAllocator = idAllocator;
}
@Override
public PlanNode visitAggregation(AggregationNode node, RewriteContext<Void> context)
{
// supported functions are only MIN/MAX/APPROX_DISTINCT or distinct aggregates
for (FunctionCall call : node.getAggregations().values()) {
if (!ALLOWED_FUNCTIONS.contains(call.getName().toString()) && !call.isDistinct()) {
return context.defaultRewrite(node);
}
}
Optional<TableScanNode> result = findTableScan(node.getSource());
if (!result.isPresent()) {
return context.defaultRewrite(node);
}
// verify all outputs of table scan are partition keys
TableScanNode tableScan = result.get();
ImmutableMap.Builder<Symbol, Type> typesBuilder = ImmutableMap.builder();
ImmutableMap.Builder<Symbol, ColumnHandle> columnBuilder = ImmutableMap.builder();
List<Symbol> inputs = tableScan.getOutputSymbols();
for (Symbol symbol : inputs) {
ColumnHandle column = tableScan.getAssignments().get(symbol);
ColumnMetadata columnMetadata = metadata.getColumnMetadata(tableScan.getTable(), column);
if (!columnMetadata.isPartitionKey()) {
// the optimization is only valid if the aggregation node only
// relies on partition keys
return context.defaultRewrite(node);
}
typesBuilder.put(symbol, columnMetadata.getType());
columnBuilder.put(symbol, column);
}
Map<Symbol, ColumnHandle> columns = columnBuilder.build();
Map<Symbol, Type> types = typesBuilder.build();
// Materialize the list of partitions and replace the TableScan node
// with a Values node
List<Partition> partitions;
if (tableScan.getGeneratedPartitions().isPresent()) {
partitions = tableScan.getGeneratedPartitions().get().getPartitions();
}
else {
partitions = splitManager.getPartitions(result.get().getTable(), Optional.of(tableScan.getPartitionsDomainSummary()))
.getPartitions();
}
ImmutableList.Builder<List<Expression>> rowsBuilder = ImmutableList.builder();
for (Partition partition : partitions) {
Map<ColumnHandle, SerializableNativeValue> entries = partition.getTupleDomain().extractNullableFixedValues();
ImmutableList.Builder<Expression> rowBuilder = ImmutableList.builder();
// for each input column, add a literal expression using the entry value
for (Symbol input : inputs) {
ColumnHandle column = columns.get(input);
Type type = types.get(input);
SerializableNativeValue value = entries.get(column);
if (value == null) {
// partition key does not have a single value, so bail out to be safe
return context.defaultRewrite(node);
}
else {
rowBuilder.add(LiteralInterpreter.toExpression(value.getValue(), type));
}
}
rowsBuilder.add(rowBuilder.build());
}
// replace the tablescan node with a values node
ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), inputs, rowsBuilder.build());
return PlanRewriter.rewriteWith(new Replacer(valuesNode), node);
}
private Optional<TableScanNode> findTableScan(PlanNode source)
{
while (true) {
// allow any chain of linear transformations
if (source instanceof MarkDistinctNode ||
source instanceof FilterNode ||
source instanceof LimitNode ||
source instanceof TopNNode ||
source instanceof SortNode) {
source = source.getSources().get(0);
}
else if (source instanceof ProjectNode) {
// verify projections are deterministic
ProjectNode project = (ProjectNode) source;
if (!Iterables.all(project.getExpressions(), DeterminismEvaluator::isDeterministic)) {
return Optional.empty();
}
source = project.getSource();
}
else if (source instanceof TableScanNode) {
return Optional.of((TableScanNode) source);
}
else {
return Optional.empty();
}
}
}
}
private static class Replacer
extends PlanRewriter<Void>
{
private final ValuesNode replacement;
private Replacer(ValuesNode replacement)
{
this.replacement = replacement;
}
@Override
public PlanNode visitTableScan(TableScanNode node, RewriteContext<Void> context)
{
return replacement;
}
}
}
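findTableScan() above descends through any chain of single-source filter/limit/sort-style nodes until it reaches a table scan and bails out on anything else. The standalone sketch below reproduces that descent over a hypothetical minimal node hierarchy; Node, Scan, Filter and Limit are made-up stand-ins for illustration, not the Presto plan classes.

import java.util.Collections;
import java.util.List;
import java.util.Optional;

public class ChainWalkDemo {
  interface Node { List<Node> sources(); }

  static class Scan implements Node {
    public List<Node> sources() { return Collections.emptyList(); }
  }
  static class Filter implements Node {
    final Node source;
    Filter(Node source) { this.source = source; }
    public List<Node> sources() { return Collections.singletonList(source); }
  }
  static class Limit implements Node {
    final Node source;
    Limit(Node source) { this.source = source; }
    public List<Node> sources() { return Collections.singletonList(source); }
  }

  // Walk down single-source nodes we understand; give up on anything else.
  static Optional<Scan> findScan(Node node) {
    while (true) {
      if (node instanceof Scan) {
        return Optional.of((Scan) node);
      }
      if (node instanceof Filter || node instanceof Limit) {
        node = node.sources().get(0);
      } else {
        return Optional.empty();
      }
    }
  }

  public static void main(String[] args) {
    Node plan = new Limit(new Filter(new Scan()));
    System.out.println(findScan(plan).isPresent()); // true
  }
}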
|
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview.robolectric;
import android.graphics.Rect;
import android.support.test.filters.SmallTest;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.chromium.android_webview.AwScrollOffsetManager;
import org.chromium.base.test.util.Feature;
import org.chromium.testing.local.LocalRobolectricTestRunner;
/**
* Integration tests for AwScrollOffsetManager.
*/
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class AwScrollOffsetManagerTest {
private static class TestScrollOffsetManagerDelegate implements AwScrollOffsetManager.Delegate {
private int mOverScrollDeltaX;
private int mOverScrollDeltaY;
private int mOverScrollCallCount;
private int mScrollX;
private int mScrollY;
private int mNativeScrollX;
private int mNativeScrollY;
private int mInvalidateCount;
public int getOverScrollDeltaX() {
return mOverScrollDeltaX;
}
public int getOverScrollDeltaY() {
return mOverScrollDeltaY;
}
public int getOverScrollCallCount() {
return mOverScrollCallCount;
}
public int getScrollX() {
return mScrollX;
}
public int getScrollY() {
return mScrollY;
}
public int getNativeScrollX() {
return mNativeScrollX;
}
public int getNativeScrollY() {
return mNativeScrollY;
}
public int getInvalidateCount() {
return mInvalidateCount;
}
@Override
public void overScrollContainerViewBy(int deltaX, int deltaY, int scrollX, int scrollY,
int scrollRangeX, int scrollRangeY, boolean isTouchEvent) {
mOverScrollDeltaX = deltaX;
mOverScrollDeltaY = deltaY;
mOverScrollCallCount += 1;
}
@Override
public void scrollContainerViewTo(int x, int y) {
mScrollX = x;
mScrollY = y;
}
@Override
public void scrollNativeTo(int x, int y) {
mNativeScrollX = x;
mNativeScrollY = y;
}
@Override
public int getContainerViewScrollX() {
return mScrollX;
}
@Override
public int getContainerViewScrollY() {
return mScrollY;
}
@Override
public void invalidate() {
mInvalidateCount += 1;
}
@Override
public void cancelFling() {}
@Override
public void smoothScroll(int targetX, int targetY, long durationMs) {}
}
private void simulateScrolling(AwScrollOffsetManager offsetManager,
TestScrollOffsetManagerDelegate delegate, int scrollX, int scrollY) {
// Scrolling is a two-phase action. First we ask the manager to scroll.
int callCount = delegate.getOverScrollCallCount();
offsetManager.scrollContainerViewTo(scrollX, scrollY);
// The manager then asks the delegate to overscroll the view.
Assert.assertEquals(callCount + 1, delegate.getOverScrollCallCount());
Assert.assertEquals(scrollX, delegate.getOverScrollDeltaX() + delegate.getScrollX());
Assert.assertEquals(scrollY, delegate.getOverScrollDeltaY() + delegate.getScrollY());
// In response, the manager expects the view to call back with the new scroll offset.
offsetManager.onContainerViewOverScrolled(scrollX, scrollY, false, false);
}
private void simulateOverScrollPropagation(
AwScrollOffsetManager offsetManager, TestScrollOffsetManagerDelegate delegate) {
Assert.assertTrue(delegate.getOverScrollCallCount() > 0);
offsetManager.onContainerViewOverScrolled(
delegate.getOverScrollDeltaX() + delegate.getScrollX(),
delegate.getOverScrollDeltaY() + delegate.getScrollY(), false, false);
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testWhenContentSizeMatchesView() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int width = 132;
final int height = 212;
final int scrollX = 11;
final int scrollY = 13;
offsetManager.setMaxScrollOffset(0, 0);
offsetManager.setContainerViewSize(width, height);
Assert.assertEquals(width, offsetManager.computeHorizontalScrollRange());
Assert.assertEquals(height, offsetManager.computeVerticalScrollRange());
// Since the view size and contents size are equal no scrolling should be possible.
Assert.assertEquals(0, offsetManager.computeMaximumHorizontalScrollOffset());
Assert.assertEquals(0, offsetManager.computeMaximumVerticalScrollOffset());
// Scrolling should generate overscroll but not update the scroll offset.
simulateScrolling(offsetManager, delegate, scrollX, scrollY);
Assert.assertEquals(scrollX, delegate.getOverScrollDeltaX());
Assert.assertEquals(scrollY, delegate.getOverScrollDeltaY());
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
// Scrolling to 0,0 should result in no deltas.
simulateScrolling(offsetManager, delegate, 0, 0);
Assert.assertEquals(0, delegate.getOverScrollDeltaX());
Assert.assertEquals(0, delegate.getOverScrollDeltaY());
// Negative scrolling should result in negative deltas but no scroll offset update.
simulateScrolling(offsetManager, delegate, -scrollX, -scrollY);
Assert.assertEquals(-scrollX, delegate.getOverScrollDeltaX());
Assert.assertEquals(-scrollY, delegate.getOverScrollDeltaY());
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
}
private static final int VIEW_WIDTH = 211;
private static final int VIEW_HEIGHT = 312;
private static final int MAX_HORIZONTAL_OFFSET = 757;
private static final int MAX_VERTICAL_OFFSET = 127;
private static final int CONTENT_WIDTH = VIEW_WIDTH + MAX_HORIZONTAL_OFFSET;
private static final int CONTENT_HEIGHT = VIEW_HEIGHT + MAX_VERTICAL_OFFSET;
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testScrollRangeAndMaxOffset() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
Assert.assertEquals(CONTENT_WIDTH, offsetManager.computeHorizontalScrollRange());
Assert.assertEquals(CONTENT_HEIGHT, offsetManager.computeVerticalScrollRange());
Assert.assertEquals(
MAX_HORIZONTAL_OFFSET, offsetManager.computeMaximumHorizontalScrollOffset());
Assert.assertEquals(
MAX_VERTICAL_OFFSET, offsetManager.computeMaximumVerticalScrollOffset());
// Scrolling beyond the maximum should be clamped.
final int scrollX = MAX_HORIZONTAL_OFFSET + 10;
final int scrollY = MAX_VERTICAL_OFFSET + 11;
simulateScrolling(offsetManager, delegate, scrollX, scrollY);
Assert.assertEquals(scrollX, delegate.getOverScrollDeltaX());
Assert.assertEquals(scrollY, delegate.getOverScrollDeltaY());
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getScrollY());
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getNativeScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getNativeScrollY());
// Scrolling to negative coordinates should be clamped back to 0,0.
simulateScrolling(offsetManager, delegate, -scrollX, -scrollY);
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
// The onScrollChanged method is callable by third party code and should also be clamped
offsetManager.onContainerViewScrollChanged(scrollX, scrollY);
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getNativeScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getNativeScrollY());
offsetManager.onContainerViewScrollChanged(-scrollX, -scrollY);
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testDelegateCanOverrideScroll() {
final int overrideScrollX = 10;
final int overrideScrollY = 10;
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate() {
@Override
public int getContainerViewScrollX() {
return overrideScrollX;
}
@Override
public int getContainerViewScrollY() {
return overrideScrollY;
}
};
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.onContainerViewOverScrolled(0, 0, false, false);
Assert.assertEquals(overrideScrollX, delegate.getNativeScrollX());
Assert.assertEquals(overrideScrollY, delegate.getNativeScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testDelegateOverridenScrollsDontExceedBounds() {
final int overrideScrollX = MAX_HORIZONTAL_OFFSET + 10;
final int overrideScrollY = MAX_VERTICAL_OFFSET + 20;
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate() {
@Override
public int getContainerViewScrollX() {
return overrideScrollX;
}
@Override
public int getContainerViewScrollY() {
return overrideScrollY;
}
};
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.onContainerViewOverScrolled(0, 0, false, false);
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getNativeScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getNativeScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testScrollContainerViewTo() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int scrollX = 31;
final int scrollY = 41;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
Assert.assertEquals(0, delegate.getOverScrollDeltaX());
Assert.assertEquals(0, delegate.getOverScrollDeltaY());
int callCount = delegate.getOverScrollCallCount();
offsetManager.scrollContainerViewTo(scrollX, scrollY);
Assert.assertEquals(callCount + 1, delegate.getOverScrollCallCount());
Assert.assertEquals(scrollX, delegate.getOverScrollDeltaX());
Assert.assertEquals(scrollY, delegate.getOverScrollDeltaY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testOnContainerViewOverScrolled() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int scrollX = 31;
final int scrollY = 41;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
offsetManager.onContainerViewOverScrolled(scrollX, scrollY, false, false);
Assert.assertEquals(scrollX, delegate.getScrollX());
Assert.assertEquals(scrollY, delegate.getScrollY());
Assert.assertEquals(scrollX, delegate.getNativeScrollX());
Assert.assertEquals(scrollY, delegate.getNativeScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testDefersScrollUntilTouchEnd() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int scrollX = 31;
final int scrollY = 41;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.setProcessingTouchEvent(true);
offsetManager.onContainerViewOverScrolled(scrollX, scrollY, false, false);
Assert.assertEquals(scrollX, delegate.getScrollX());
Assert.assertEquals(scrollY, delegate.getScrollY());
Assert.assertEquals(0, delegate.getNativeScrollX());
Assert.assertEquals(0, delegate.getNativeScrollY());
offsetManager.setProcessingTouchEvent(false);
Assert.assertEquals(scrollX, delegate.getScrollX());
Assert.assertEquals(scrollY, delegate.getScrollY());
Assert.assertEquals(scrollX, delegate.getNativeScrollX());
Assert.assertEquals(scrollY, delegate.getNativeScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testRequestChildRectangleOnScreenDontScrollIfAlreadyThere() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.requestChildRectangleOnScreen(
0, 0, new Rect(0, 0, VIEW_WIDTH / 4, VIEW_HEIGHT / 4), true);
Assert.assertEquals(0, delegate.getOverScrollDeltaX());
Assert.assertEquals(0, delegate.getOverScrollDeltaY());
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
offsetManager.requestChildRectangleOnScreen(3 * VIEW_WIDTH / 4, 3 * VIEW_HEIGHT / 4,
new Rect(0, 0, VIEW_WIDTH / 4, VIEW_HEIGHT / 4), true);
Assert.assertEquals(0, delegate.getOverScrollDeltaX());
Assert.assertEquals(0, delegate.getOverScrollDeltaY());
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testRequestChildRectangleOnScreenScrollToBottom() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int rectWidth = 2;
final int rectHeight = 3;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.requestChildRectangleOnScreen(CONTENT_WIDTH - rectWidth,
CONTENT_HEIGHT - rectHeight, new Rect(0, 0, rectWidth, rectHeight), true);
simulateOverScrollPropagation(offsetManager, delegate);
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getOverScrollDeltaX());
Assert.assertEquals(
CONTENT_HEIGHT - rectHeight - VIEW_HEIGHT / 3, delegate.getOverScrollDeltaY());
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testRequestChildRectangleOnScreenScrollToBottomLargeRect() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int rectWidth = VIEW_WIDTH;
final int rectHeight = VIEW_HEIGHT;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
offsetManager.requestChildRectangleOnScreen(CONTENT_WIDTH - rectWidth,
CONTENT_HEIGHT - rectHeight, new Rect(0, 0, rectWidth, rectHeight), true);
simulateOverScrollPropagation(offsetManager, delegate);
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getOverScrollDeltaX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getOverScrollDeltaY());
Assert.assertEquals(MAX_HORIZONTAL_OFFSET, delegate.getScrollX());
Assert.assertEquals(MAX_VERTICAL_OFFSET, delegate.getScrollY());
}
@Test
@SmallTest
@Feature({"AndroidWebView"})
public void testRequestChildRectangleOnScreenScrollToTop() {
TestScrollOffsetManagerDelegate delegate = new TestScrollOffsetManagerDelegate();
AwScrollOffsetManager offsetManager = new AwScrollOffsetManager(delegate);
final int rectWidth = 2;
final int rectHeight = 3;
offsetManager.setMaxScrollOffset(MAX_HORIZONTAL_OFFSET, MAX_VERTICAL_OFFSET);
offsetManager.setContainerViewSize(VIEW_WIDTH, VIEW_HEIGHT);
simulateScrolling(
offsetManager, delegate, CONTENT_WIDTH - VIEW_WIDTH, CONTENT_HEIGHT - VIEW_HEIGHT);
offsetManager.requestChildRectangleOnScreen(
0, 0, new Rect(0, 0, rectWidth, rectHeight), true);
simulateOverScrollPropagation(offsetManager, delegate);
Assert.assertEquals(-CONTENT_WIDTH + VIEW_WIDTH, delegate.getOverScrollDeltaX());
Assert.assertEquals(-CONTENT_HEIGHT + VIEW_HEIGHT, delegate.getOverScrollDeltaY());
Assert.assertEquals(0, delegate.getScrollX());
Assert.assertEquals(0, delegate.getScrollY());
}
}
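The assertions above repeatedly check that scroll offsets are clamped into the range [0, maxOffset] on each axis. The helper below isolates that clamping arithmetic as a standalone sketch; ScrollClampDemo and clampScroll are illustrative names and this is not the AwScrollOffsetManager implementation, only the behaviour the tests verify.

public class ScrollClampDemo {
  // Clamp a requested scroll offset into the valid range [0, maxOffset].
  static int clampScroll(int requested, int maxOffset) {
    return Math.max(0, Math.min(requested, maxOffset));
  }

  public static void main(String[] args) {
    int maxHorizontal = 757; // same magnitudes as the test constants above
    int maxVertical = 127;
    System.out.println(clampScroll(maxHorizontal + 10, maxHorizontal)); // 757
    System.out.println(clampScroll(-10, maxVertical));                  // 0
  }
}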
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.linq4j.tree;
import org.apache.calcite.linq4j.function.Function1;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
/**
* Entry point for optimizers that factor out deterministic expressions into
* static final fields.
* Instances of this class should not be reused, so a new visitor should be
* created for each new expression tree being optimized.
*/
public class ClassDeclarationFinder extends Shuttle {
protected final ClassDeclarationFinder parent;
/**
* The list of new final static fields to be added to the current class.
*/
protected final List<MemberDeclaration> addedDeclarations =
new ArrayList<MemberDeclaration>();
private final Function1<ClassDeclarationFinder, ClassDeclarationFinder> childFactory;
private static final Function1<ClassDeclarationFinder,
ClassDeclarationFinder> DEFAULT_CHILD_FACTORY =
DeterministicCodeOptimizer::new;
/**
* Creates visitor that uses default optimizer.
*
* @return optimizing visitor
*/
public static ClassDeclarationFinder create() {
return create(DEFAULT_CHILD_FACTORY);
}
/**
* Creates visitor that uses given class as optimizer.
* The implementation should support ({@code ClassDeclarationFinder})
* constructor.
*
* @param optimizingClass class that implements optimizations
* @return optimizing visitor
*/
public static ClassDeclarationFinder create(
final Class<? extends ClassDeclarationFinder> optimizingClass) {
return create(newChildCreator(optimizingClass));
}
/**
* Creates visitor that uses given factory to create optimizers.
*
* @param childFactory factory that creates optimizers
* @return optimizing visitor
*/
public static ClassDeclarationFinder create(
Function1<ClassDeclarationFinder, ClassDeclarationFinder> childFactory) {
return new ClassDeclarationFinder(childFactory);
}
/**
* Creates factory that creates instances of optimizing visitors.
* The implementation should support ({@code ClassDeclarationFinder})
* constructor.
*
* @param optimizingClass class that implements optimizations
* @return factory that creates instances of given classes
*/
private static Function1<ClassDeclarationFinder, ClassDeclarationFinder> newChildCreator(
Class<? extends ClassDeclarationFinder> optimizingClass) {
try {
final Constructor<? extends ClassDeclarationFinder> constructor =
optimizingClass.getConstructor(ClassDeclarationFinder.class);
return a0 -> {
try {
return constructor.newInstance(a0);
} catch (InstantiationException | IllegalAccessException
| InvocationTargetException e) {
throw new IllegalStateException(
"Unable to create optimizer via " + constructor, e);
}
};
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Given class " + optimizingClass
+ "does not support (ClassDeclarationFinder) constructor", e);
}
}
/**
* Creates optimizer with no parent.
*/
private ClassDeclarationFinder(
Function1<ClassDeclarationFinder, ClassDeclarationFinder> childFactory) {
this.parent = null;
this.childFactory = childFactory;
}
/**
* Creates a child optimizer.
* Typically a child is created for each class declaration,
* so each optimizer collects fields for exactly one class.
*
* @param parent parent optimizer
*/
protected ClassDeclarationFinder(ClassDeclarationFinder parent) {
this.parent = parent;
this.childFactory = parent.childFactory;
}
/**
* Creates optimizer local to the newly generated anonymous class.
*
* @param newExpression expression to optimize
* @return nested visitor if anonymous class is given
*/
@Override public Shuttle preVisit(NewExpression newExpression) {
if (newExpression.memberDeclarations == null) {
return this;
}
ClassDeclarationFinder visitor = goDeeper();
visitor.learnFinalStaticDeclarations(newExpression.memberDeclarations);
return visitor;
}
/**
* Creates optimizer local to the newly generated class.
*
* @param classDeclaration expression to optimize
* @return nested visitor
*/
@Override public Shuttle preVisit(ClassDeclaration classDeclaration) {
ClassDeclarationFinder visitor = goDeeper();
visitor.learnFinalStaticDeclarations(classDeclaration.memberDeclarations);
return visitor;
}
@Override public Expression visit(NewExpression newExpression,
List<Expression> arguments, List<MemberDeclaration> memberDeclarations) {
if (parent == null) {
// Unable to optimize since no wrapper class exists to put fields to.
arguments = newExpression.arguments;
} else if (memberDeclarations != null) {
// Arguments to new Test(1+2) { ... } should be optimized via parent
// optimizer.
arguments = Expressions.acceptExpressions(newExpression.arguments,
parent);
}
Expression result =
super.visit(newExpression, arguments, memberDeclarations);
if (memberDeclarations == null) {
return tryOptimizeNewInstance((NewExpression) result);
}
memberDeclarations = optimizeDeclarations(memberDeclarations);
return super.visit((NewExpression) result, arguments,
memberDeclarations);
}
  /**
   * Processes the list of declarations when a class expression is detected.
   * Sub-classes might record the existing fields for later reuse.
   *
   * @param memberDeclarations list of declarations to process
   */
protected void learnFinalStaticDeclarations(
List<MemberDeclaration> memberDeclarations) {
}
/**
* Optimizes {@code new Type()} constructs.
*
* @param newExpression expression to optimize
* @return always returns un-optimized expression
*/
protected Expression tryOptimizeNewInstance(NewExpression newExpression) {
return newExpression;
}
@Override public ClassDeclaration visit(ClassDeclaration classDeclaration,
List<MemberDeclaration> memberDeclarations) {
memberDeclarations = optimizeDeclarations(memberDeclarations);
return super.visit(classDeclaration, memberDeclarations);
}
/**
* Adds new declarations (e.g. final static fields) to the list of existing
* ones.
*
* @param memberDeclarations existing list of declarations
* @return new list of declarations or the same if no modifications required
*/
protected List<MemberDeclaration> optimizeDeclarations(
List<MemberDeclaration> memberDeclarations) {
if (addedDeclarations.isEmpty()) {
return memberDeclarations;
}
List<MemberDeclaration> newDecls =
new ArrayList<>(memberDeclarations.size()
+ addedDeclarations.size());
newDecls.addAll(memberDeclarations);
newDecls.addAll(addedDeclarations);
return newDecls;
}
/**
* Verifies if the expression is effectively constant.
* This method should be overridden in sub-classes.
*
* @param expression expression to test
* @return always returns false
*/
protected boolean isConstant(Expression expression) {
return false;
}
/**
* Verifies if all the expressions in given list are effectively constant.
*
* @param list list of expressions to test
* @return true when all the expressions are known to be constant
*/
protected boolean isConstant(Iterable<? extends Expression> list) {
for (Expression expression : list) {
if (!isConstant(expression)) {
return false;
}
}
return true;
}
  /**
   * Finds whether a ready-for-reuse declaration already exists for the given
   * expression. This method should be overridden in sub-classes.
   *
   * @param expression input expression
   * @return always returns null
   */
protected ParameterExpression findDeclaredExpression(Expression expression) {
return null;
}
/**
* Verifies if the variable name is already in use.
* This method should be overridden in sub-classes.
*
* @param name name of the variable to test
* @return always returns false
*/
protected boolean hasField(String name) {
return false;
}
/**
* Creates child visitor. It is used to traverse nested class declarations.
*
* @return new {@code Visitor} that is used to optimize class declarations
*/
protected ClassDeclarationFinder goDeeper() {
return childFactory.apply(this);
}
}
// End ClassDeclarationFinder.java
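/*
 * Editor's note: the snippet below is an illustrative usage sketch, not part of
 * the original Calcite source. It relies only on the factory methods defined
 * above; the accept(...) call assumes the usual linq4j Shuttle contract, and the
 * ClassDeclaration instance is assumed to have been built elsewhere.
 */
class ClassDeclarationFinderUsageSketch {
  static MemberDeclaration optimize(ClassDeclaration declaration) {
    // Default optimizer (DeterministicCodeOptimizer) behind the scenes.
    ClassDeclarationFinder finder = ClassDeclarationFinder.create();
    // Equivalent, but naming the optimizing class explicitly; the class must
    // expose a (ClassDeclarationFinder) constructor, as documented above.
    ClassDeclarationFinder explicit =
        ClassDeclarationFinder.create(DeterministicCodeOptimizer.class);
    // Walking the tree factors deterministic sub-expressions into the
    // "final static" fields collected in addedDeclarations.
    return declaration.accept(finder);
  }
}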
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.scalar;
import com.facebook.presto.operator.Description;
import com.google.common.base.Ascii;
import com.google.common.base.Charsets;
import com.google.common.primitives.Ints;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkArgument;
public final class StringFunctions
{
private StringFunctions() {}
@Description("convert ASCII character code to string")
@ScalarFunction
public static Slice chr(long n)
{
Slice slice = Slices.allocate(1);
slice.setByte(0, Ints.saturatedCast(n));
return slice;
}
@Description("concatenates given strings")
@ScalarFunction
public static Slice concat(Slice str1, Slice str2)
{
Slice concat = Slices.allocate(str1.length() + str2.length());
concat.setBytes(0, str1);
concat.setBytes(str1.length(), str2);
return concat;
}
@Description("length of the given string")
@ScalarFunction
public static long length(Slice slice)
{
return slice.length();
}
@Description("greedily removes occurrences of a pattern in a string")
@ScalarFunction
public static Slice replace(Slice str, Slice search)
{
return replace(str, search, Slices.EMPTY_SLICE);
}
@Description("greedily replaces occurrences of a pattern with a string")
@ScalarFunction
public static Slice replace(Slice str, Slice search, Slice replace)
{
String replaced = str.toString(Charsets.UTF_8).replace(
search.toString(Charsets.UTF_8),
replace.toString(Charsets.UTF_8));
return Slices.copiedBuffer(replaced, Charsets.UTF_8);
}
@Description("reverses the given string")
@ScalarFunction
public static Slice reverse(Slice slice)
{
Slice reverse = Slices.allocate(slice.length());
for (int i = 0, j = slice.length() - 1; i < slice.length(); i++, j--) {
reverse.setByte(j, slice.getByte(i));
}
return reverse;
}
@Description("returns index of first occurrence of a substring (or 0 if not found)")
@ScalarFunction("strpos")
public static long stringPosition(Slice string, Slice substring)
{
if (substring.length() > string.length()) {
return 0;
}
for (int i = 0; i <= (string.length() - substring.length()); i++) {
if (string.equals(i, substring.length(), substring, 0, substring.length())) {
return i + 1;
}
}
return 0;
}
@Description("suffix starting at given index")
@ScalarFunction
public static Slice substr(Slice slice, long start)
{
return substr(slice, start, slice.length());
}
@Description("substring of given length starting at an index")
@ScalarFunction
public static Slice substr(Slice slice, long start, long length)
{
if ((start == 0) || (length <= 0)) {
return Slices.EMPTY_SLICE;
}
if (start > 0) {
// make start zero-based
start--;
}
else {
// negative start is relative to end of string
start += slice.length();
if (start < 0) {
return Slices.EMPTY_SLICE;
}
}
if ((start + length) > slice.length()) {
length = slice.length() - start;
}
if (start >= slice.length()) {
return Slices.EMPTY_SLICE;
}
return slice.slice((int) start, (int) length);
}
// TODO: Implement a more efficient string search
@Nullable
@Description("splits a string by a delimiter and returns the specified field (counting from one)")
@ScalarFunction
public static Slice splitPart(Slice string, Slice delimiter, long index)
{
checkArgument(index > 0, "Index must be greater than zero");
if (delimiter.length() == 0) {
if (index > string.length()) {
// index is too big, null is returned
return null;
}
return string.slice((int) (index - 1), 1);
}
int previousIndex = 0;
int matchCount = 0;
for (int i = 0; i <= (string.length() - delimiter.length()); i++) {
if (string.equals(i, delimiter.length(), delimiter, 0, delimiter.length())) {
matchCount++;
if (matchCount == index) {
return string.slice(previousIndex, i - previousIndex);
}
// noinspection AssignmentToForLoopParameter
i += (delimiter.length() - 1);
previousIndex = i + 1;
}
}
if (matchCount == index - 1) {
// returns last section of the split
return string.slice(previousIndex, string.length() - previousIndex);
}
// index is too big, null is returned
return null;
}
@Description("removes spaces from the beginning of a string")
@ScalarFunction("ltrim")
public static Slice leftTrim(Slice slice)
{
int start = firstNonSpace(slice);
return slice.slice(start, slice.length() - start);
}
@Description("removes spaces from the end of a string")
@ScalarFunction("rtrim")
public static Slice rightTrim(Slice slice)
{
int end = lastNonSpace(slice);
return slice.slice(0, end + 1);
}
@Description("removes spaces from the beginning and end of a string")
@ScalarFunction
public static Slice trim(Slice slice)
{
int start = firstNonSpace(slice);
if (start == slice.length()) {
return Slices.EMPTY_SLICE;
}
int end = lastNonSpace(slice);
assert (end >= 0) && (end >= start);
return slice.slice(start, (end - start) + 1);
}
private static int firstNonSpace(Slice slice)
{
for (int i = 0; i < slice.length(); i++) {
if (slice.getByte(i) != ' ') {
return i;
}
}
return slice.length();
}
private static int lastNonSpace(Slice slice)
{
for (int i = slice.length() - 1; i >= 0; i--) {
if (slice.getByte(i) != ' ') {
return i;
}
}
return -1;
}
@Description("converts the alphabets in a string to lower case")
@ScalarFunction
public static Slice lower(Slice slice)
{
Slice upper = Slices.allocate(slice.length());
for (int i = 0; i < slice.length(); i++) {
upper.setByte(i, Ascii.toLowerCase((char) slice.getByte(i)));
}
return upper;
}
@Description("converts all the alphabets in the string to upper case")
@ScalarFunction
public static Slice upper(Slice slice)
{
Slice upper = Slices.allocate(slice.length());
for (int i = 0; i < slice.length(); i++) {
upper.setByte(i, Ascii.toUpperCase((char) slice.getByte(i)));
}
return upper;
}
}
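/*
 * Editor's note: illustrative sketch only, not part of the original Presto file.
 * It exercises the functions defined above directly; Slices.copiedBuffer and
 * Charsets.UTF_8 are the same helpers already used in replace(...).
 */
class StringFunctionsUsageSketch {
    public static void main(String[] args)
    {
        Slice padded = Slices.copiedBuffer("  hello world  ", Charsets.UTF_8);
        // trim strips leading and trailing spaces: "hello world"
        Slice trimmed = StringFunctions.trim(padded);
        // substr uses 1-based indexing: characters 1..5 -> "hello"
        Slice head = StringFunctions.substr(trimmed, 1, 5);
        // strpos is also 1-based and returns 0 when the substring is absent
        long pos = StringFunctions.stringPosition(trimmed,
                Slices.copiedBuffer("world", Charsets.UTF_8));
        System.out.println(head.toString(Charsets.UTF_8) + " / " + pos); // hello / 7
    }
}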
|
|
/*
* Copyright (c) 2010, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of California, Berkeley
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package radlab.rain.util;
import java.util.Collections;
import java.util.LinkedList;
public class PoissonSamplingStrategy implements ISamplingStrategy
{
public static long getNthPercentile( int pct, LinkedList<Long> samples )
{
if( samples.size() == 0 )
return 0;
Collections.sort( samples );
int index = (int) Math.round( (double) ( pct*( samples.size()+1 ) )/100.0 );
if( index < samples.size() )
return samples.get( index ).longValue();
		else return samples.get( samples.size() - 1 ); // Return the last sample
}
private LinkedList<Long> _samples = new LinkedList<Long>();
private int _nextSampleToAccept = 1;
private int _currentSample = 0;
private double _meanSamplingInterval = 1.0;
private NegativeExponential _expRandom = null;
private long _sampleSum = 0;
public PoissonSamplingStrategy(double meanSamplingInterval)
{
this._meanSamplingInterval = meanSamplingInterval;
this._expRandom = new NegativeExponential(this._meanSamplingInterval);
this.reset();
}
public double getMeanSamplingInterval()
{
return this._meanSamplingInterval;
}
public void setMeanSamplingInterval( double val )
{
this._meanSamplingInterval = val;
}
public void reset()
{
this._currentSample = 0;
this._nextSampleToAccept = 1;
this._samples.clear();
this._sampleSum = 0;
}
public int getSamplesCollected()
{
return this._samples.size();
}
public int getSamplesSeen()
{
return this._currentSample;
}
public long getNthPercentile( int pct )
{
return PoissonSamplingStrategy.getNthPercentile( pct, this._samples );
}
public double getSampleMean()
{
long samples = this.getSamplesCollected();
if( samples == 0 )
return 0.0;
else return (double) this._sampleSum / (double) samples;
}
public double getSampleStandardDeviation()
{
long samples = this.getSamplesCollected();
if( samples == 0 || samples == 1 )
return 0.0;
double sampleMean = this.getSampleMean();
// Sum the deviations from the mean for all items
double deviationSqSum = 0.0;
for( Long value : this._samples )
{
// Print out value so we can debug the sd computation
//System.out.println( value );
deviationSqSum += Math.pow( (double)(value - sampleMean), 2 );
}
// Divide deviationSqSum by N-1 then return the square root
return Math.sqrt( deviationSqSum/(double)(samples - 1) );
}
public double getTvalue( double populationMean )
{
long samples = this.getSamplesCollected();
if( samples == 0 || samples == 1 )
return 0.0;
return ( this.getSampleMean() - populationMean ) / ( this.getSampleStandardDeviation()/Math.sqrt( this.getSamplesCollected() ) );
}
public boolean accept(long value)
{
this._currentSample++;
if (this._currentSample == this._nextSampleToAccept)
{
this._sampleSum += value;
this._samples.add(value);
// Update the nextSampleToAccept
double randExp = this._expRandom.nextDouble();
//System.out.println( "Random exp: " + randExp );
this._nextSampleToAccept = this._currentSample + (int) Math.ceil( randExp ) ;
//System.out.println("Next sample to accept: " + this._nextSampleToAccept);
return true;
}
return false;
}
public LinkedList<Long> getRawSamples() { return this._samples; };
/*
public static void main(String[] args)
{
// Generate 11,000,000 numbers - try uniform random and expRandom numbers
// Sort them and compute the 90th and 99th percentiles for the ground truth
// Pass each random number generated to a sampler and let it keep the ones it chooses and compute the percentiles on the captured samples
// Compare the ground truth 90th and 99th percentiles with those computed by the sampler
int maxNumbers = 1000000; //100000;//1000000;
double meanSamplingInterval = 1000.0;
LinkedList<Long> allSamplesUniform = new LinkedList<Long>();
LinkedList<Long> allSamplesExp = new LinkedList<Long>();
// Sampling strategies to compare
PoissonSamplingStrategy expSamplerAllUniform = new PoissonSamplingStrategy( meanSamplingInterval );
PoissonSamplingStrategy expSamplerExp = new PoissonSamplingStrategy( meanSamplingInterval );
// Random number generators
double populationSampleMean = 1000.0;
Random random = new Random();
NegativeExponential expRandom = new NegativeExponential( populationSampleMean );
long totalUniform = 0;
long maxUniform = Long.MIN_VALUE;
long minUniform = Long.MAX_VALUE;
long totalExp = 0;
long maxExp = Long.MIN_VALUE;
long minExp = Long.MAX_VALUE;
// Generate numbers (according to a uniform and exponential distribution) with same mean
for( int i = 0; i < maxNumbers; i++ )
{
long valUniform = Math.round( random.nextDouble()*(2*populationSampleMean) );
totalUniform += valUniform;
if( valUniform > maxUniform )
maxUniform = valUniform;
if( valUniform < minUniform )
minUniform = valUniform;
// Sample uniformly distributed numbers
// expSamplerAllUniform.accept( valUniform );
allSamplesUniform.add( valUniform );
long valExp = (long) Math.ceil( expRandom.nextDouble() );
totalExp += valExp;
if( valExp > maxExp )
maxExp = valExp;
if( valExp < minExp )
minExp = valExp;
// Sample exp distributed numbers
// expSamplerExp.accept( valExp );
allSamplesExp.add( valExp );
}
double meanUniform = (double)totalUniform/(double)maxNumbers;
double meanExp = (double)totalExp/(double)maxNumbers;
// Compute percentiles for normally distributed numbers and exponentially distributed numbers
long uniform90th = PoissonSamplingStrategy.getNthPercentile( 90, allSamplesUniform );
		long uniform99th = PoissonSamplingStrategy.getNthPercentile( 99, allSamplesUniform );
long exp90th = PoissonSamplingStrategy.getNthPercentile( 90, allSamplesExp );
long exp99th = PoissonSamplingStrategy.getNthPercentile( 99, allSamplesExp );
System.out.println( "Max uniform : " + maxUniform );
System.out.println( "Mean uniform : " + meanUniform );
System.out.println( "Min uniform : " + minUniform );
System.out.println( "90th uniform gt : " + uniform90th );
System.out.println( "90th uniform smp : " + expSamplerAllUniform.getNthPercentile( 90 ) );
System.out.println( "99th uniform gt : " + uniform99th );
System.out.println( "99th uniform smp : " + expSamplerAllUniform.getNthPercentile( 99 ) );
System.out.println( "# samples seen : " + expSamplerAllUniform.getSamplesSeen() );
System.out.println( "# samples saved : " + expSamplerAllUniform.getSamplesCollected() );
System.out.println( "" );
System.out.println( "Max exp : " + maxExp );
System.out.println( "Mean exp : " + meanExp );
System.out.println( "Min exp : " + minExp );
System.out.println( "90th exp gt : " + exp90th );
System.out.println( "90th uniform smp : " + expSamplerExp.getNthPercentile( 90 ) );
System.out.println( "99th exp gt : " + exp99th );
System.out.println( "99th uniform smp : " + expSamplerExp.getNthPercentile( 99 ) );
System.out.println( "# samples seen : " + expSamplerExp.getSamplesSeen() );
System.out.println( "# samples saved : " + expSamplerExp.getSamplesCollected() );
// Use bootstrapping to quantify the variance in our samples
int numTrials = 1000;
long[] arrU90 = new long[numTrials];
long[] arrU99 = new long[numTrials];
long[] arrE90 = new long[numTrials];
long[] arrE99 = new long[numTrials];
long[] arrE90Usample = new long[numTrials];
long[] arrE99Usample = new long[numTrials];
LinkedList<Long> randomSamples = new LinkedList<Long>();
System.out.println( "Starting bootstrapping..." );
for( int i = 0; i < numTrials; i++ )
{
System.out.println( "Trial: " + i );
// Create samplers
randomSamples.clear();
expSamplerAllUniform.reset();
expSamplerExp.reset();
// Go through the datasets allSamplesUniform and allSamplesExp and select a sample,
for( int j = 0; j < maxNumbers; j++ )
{
if( j%100000 == 0 )
System.out.println( "Number: " + j );
expSamplerAllUniform.accept( allSamplesUniform.get(j) );
expSamplerExp.accept( allSamplesExp.get( j ) );
double randomVal = random.nextDouble();
if( randomVal <= 0.001 )
randomSamples.add( allSamplesExp.get( j ) );
}
System.out.println( "Computing percentiles for trial: " + i );
arrU90[i] = expSamplerAllUniform.getNthPercentile( 90 );
arrU99[i] = expSamplerAllUniform.getNthPercentile( 99 );
arrE90[i] = expSamplerExp.getNthPercentile( 90 );
arrE99[i] = expSamplerExp.getNthPercentile( 99 );
arrE90Usample[i] = PoissonSamplingStrategy.getNthPercentile( 90, randomSamples );
arrE99Usample[i] = PoissonSamplingStrategy.getNthPercentile( 99, randomSamples );
System.out.println( "Poisson sampler uniform data : " + expSamplerAllUniform.getSamplesCollected() );
System.out.println( "Poisson sampler exp data : " + expSamplerExp.getSamplesCollected() );
System.out.println( "Uniform sampler exp data : " + randomSamples.size() );
System.out.println( arrU90[i] + " " + arrU99[i] + " " + arrE90[i] + " " + arrE99[i] + " " + arrE90Usample[i] + " " + arrE99Usample[i] );
}
System.out.println( "Final results..." );
System.out.println( "Poisson sampler uniform data : " + expSamplerAllUniform.getSamplesCollected() );
System.out.println( "Poisson sampler exp data : " + expSamplerExp.getSamplesCollected() );
System.out.println( "Uniform sampler exp data : " + randomSamples.size() );
for( int i = 0; i < numTrials; i++ )
{
System.out.println( arrU90[i] + " " + arrU99[i] + " " + arrE90[i] + " " + arrE99[i] + " " + arrE90Usample[i] + " " + arrE99Usample[i] );
}
}*/
}
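/*
 * Editor's note: illustrative usage sketch, not part of the original file. It
 * feeds synthetic response times into the sampler defined above and reads the
 * summary statistics back. Besides java.util.Random, it only needs the
 * NegativeExponential class already referenced by PoissonSamplingStrategy.
 */
class PoissonSamplingStrategyUsageSketch
{
	public static void main( String[] args )
	{
		java.util.Random random = new java.util.Random();
		// On average keep roughly one out of every 100 observations.
		PoissonSamplingStrategy sampler = new PoissonSamplingStrategy( 100.0 );
		for( int i = 0; i < 100000; i++ )
		{
			long responseTimeMs = Math.round( random.nextDouble() * 2000.0 );
			sampler.accept( responseTimeMs ); // returns true only when the sample is kept
		}
		System.out.println( "seen      : " + sampler.getSamplesSeen() );
		System.out.println( "collected : " + sampler.getSamplesCollected() );
		System.out.println( "mean      : " + sampler.getSampleMean() );
		System.out.println( "90th pct  : " + sampler.getNthPercentile( 90 ) );
	}
}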
|
|
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dot.junit.opcodes.invoke_super_range;
import dot.junit.DxTestCase;
import dot.junit.DxUtil;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_1;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_10;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_14;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_15;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_17;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_18;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_19;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_2;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_20;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_24;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_4;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_5;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_6;
import dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_7;
public class Test_invoke_super_range extends DxTestCase {
/**
* @title invoke method of superclass
*/
public void testN1() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_1
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
T_invoke_super_range_1 t = new T_invoke_super_range_1();
assertEquals(5, t.run());
}
/**
* @title Invoke protected method of superclass
*/
public void testN3() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_7
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
T_invoke_super_range_7 t = new T_invoke_super_range_7();
assertEquals(5, t.run());
}
/**
* @title Check that new frame is created by invoke_super_range and
* arguments are passed to method
*/
public void testN5() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_14
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
T_invoke_super_range_14 t = new T_invoke_super_range_14();
assertTrue(t.run());
}
/**
* @title Recursion of method lookup procedure
*/
public void testN6() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_17
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper2
T_invoke_super_range_17 t = new T_invoke_super_range_17();
assertEquals(5, t.run());
}
/**
* @title obj ref is null
*/
public void testE1() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_1
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
T_invoke_super_range_2 t = new T_invoke_super_range_2();
try {
t.run();
fail("expected NullPointerException");
} catch (NullPointerException npe) {
// expected
}
}
/**
* @title Native method can't be linked
*/
public void testE2() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_4
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
T_invoke_super_range_4 t = new T_invoke_super_range_4();
try {
t.run();
fail("expected UnsatisfiedLinkError");
} catch (UnsatisfiedLinkError ule) {
// expected
}
}
/**
* @title Attempt to invoke abstract method
*/
public void testE4() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_6
//@uses dot.junit.opcodes.invoke_super_range.ATest
T_invoke_super_range_6 t = new T_invoke_super_range_6();
try {
t.run();
fail("expected AbstractMethodError");
} catch (AbstractMethodError iae) {
// expected
}
}
/**
* @constraint A14
* @title invalid constant pool index
*/
public void testVFE1() {
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_8");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint A15
* @title <clinit> may not be called using invoke-super
*/
public void testVFE3() {
try {
new T_invoke_super_range_10().run();
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint B1
* @title number of arguments passed to method
*/
public void testVFE4() {
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_11");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint B9
* @title types of arguments passed to method.
*/
public void testVFE5() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_12
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_12");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint A15
* @title <init> may not be called using invoke_super_range
*/
public void testVFE6() {
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_16");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint B10
* @title assignment incompatible references when accessing
* protected method
*/
public void testVFE8() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_22
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
//@uses dot.junit.opcodes.invoke_super_range.d.TPlain
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_22");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint B10
* @title assignment incompatible references when accessing
* public method
*/
public void testVFE9() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_23
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper2
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_23");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint n/a
* @title Attempt to call static method.
*/
public void testVFE10() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_5
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
try {
new T_invoke_super_range_5().run();
fail("expected IncompatibleClassChangeError");
} catch (IncompatibleClassChangeError t) {
}
}
/**
* @constraint n/a
* @title Attempt to invoke non-existing method.
*/
public void testVFE12() {
try {
new T_invoke_super_range_15().run();
fail("expected NoSuchMethodError");
} catch (NoSuchMethodError t) {
}
}
/**
* @constraint n/a
* @title Attempt to invoke private method of other class.
*/
public void testVFE13() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_18
//@uses dot.junit.opcodes.invoke_super_range.TestStubs
try {
new T_invoke_super_range_18().run(new TestStubs());
fail("expected IllegalAccessError");
} catch (IllegalAccessError t) {
}
}
/**
* @constraint B12
* @title Attempt to invoke protected method of unrelated class.
*/
public void testVFE14() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_20
//@uses dot.junit.opcodes.invoke_super_range.TestStubs
try {
new T_invoke_super_range_20().run(new TestStubs());
fail("expected IllegalAccessError");
} catch (IllegalAccessError t) {
}
}
/**
* @constraint n/a
* @title Method has different signature.
*/
public void testVFE15() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_19
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
try {
new T_invoke_super_range_19().run();
fail("expected NoSuchMethodError");
} catch (NoSuchMethodError t) {
}
}
/**
* @constraint n/a
* @title invoke-super/range shall be used to invoke private methods
*/
public void testVFE16() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_13
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_13");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint A23
* @title number of registers
*/
public void testVFE17() {
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_9");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint A14
* @title attempt to invoke interface method
*/
public void testVFE18() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_24
try {
new T_invoke_super_range_24().run();
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
/**
* @constraint B6
* @title instance methods may only be invoked on already initialized instances.
*/
public void testVFE19() {
//@uses dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_25
//@uses dot.junit.opcodes.invoke_super_range.d.TSuper
try {
Class.forName("dot.junit.opcodes.invoke_super_range.d.T_invoke_super_range_25");
fail("expected a verification exception");
} catch (Throwable t) {
DxUtil.checkVerifyException(t);
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.reggie;
import com.sun.jini.proxy.MarshalledWrapper;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.rmi.MarshalException;
import java.rmi.UnmarshalException;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import net.jini.loader.ClassLoading;
/**
* An EntryClass is a descriptor for an entry class, packaged up for
* transmission between client-side proxies and the registrar server.
 * Instances are never visible to clients; they are private to the
 * communication between the proxies and the server. Note that we don't
* transmit information about interfaces implemented by the class, because it
* isn't necessary given the specific use of type information for entries.
* <p>
* This class only has a bare minimum of methods, to minimize
* the amount of code downloaded into clients.
* <p>
*
* @author Sun Microsystems, Inc.
*
* @see ClassMapper
*/
class EntryClass implements Serializable {
private static final long serialVersionUID = 2L;
/**
* Class name
*
* @serial
*/
protected String name;
/**
* Hash for the type
*
* @serial
*/
protected long hash;
/**
* Descriptor for the superclass
*
* @serial
*/
protected EntryClass superclass;
/**
* Number of public fields
*
* @serial
*/
protected int numFields;
/** Number of instances of this class in service registrations */
protected transient int numInstances;
/** Number of templates of this class in event registrations */
protected transient int numTemplates;
/**
* An instance containing only name and hash, no superclass info.
* This is only used on the registrar side, to minimize the amount
* of info transmitted back to clients.
*/
protected transient EntryClass replacement;
/**
* Flag set to true if this instance was unmarshalled from an
* integrity-protected stream, or false otherwise
*/
private transient boolean integrity = false;
/** Should only be called by ClassMapper */
public EntryClass(Class clazz, EntryClass superclass)
throws MarshalException
{
name = clazz.getName();
this.superclass = superclass;
ClassMapper.EntryField[] fields = ClassMapper.getFields(clazz);
numFields = fields.length;
computeHash(fields);
}
/**
* Constructor used for creating replacement instances,
* containing only name and hash.
*/
private EntryClass(EntryClass orig) {
name = orig.name;
hash = orig.hash;
}
/** Return the superclass descriptor */
public EntryClass getSuperclass() {
return superclass;
}
/** Return the number of public fields (including superclasses) */
public int getNumFields() {
return numFields;
}
/** Set the number of instances of this class */
public void setNumInstances(int numInstances) {
this.numInstances = numInstances;
}
/** Set the number of templates of this class */
public void setNumTemplates(int numTemplates) {
this.numTemplates = numTemplates;
}
    /** Return the replacement, if any, containing only name and hash. */
public synchronized EntryClass getReplacement() {
if (replacement == null)
replacement = new EntryClass(this);
return replacement;
}
/**
* This is really only needed in the registrar, but it's very
* convenient to have here.
* @see Class#isAssignableFrom
*/
public boolean isAssignableFrom(EntryClass cls) {
for (EntryClass sup = cls; sup != null; sup = sup.superclass) {
if (hash == sup.hash)
return true;
}
return false;
}
/**
* Returns the number of times this type is used in service
* registrations
* @return number of instances of this type in use in service
* registrations
*/
public int getNumInstances() {
return numInstances;
}
/**
* Returns the number of times this type is used in event
* registrations
* @return number of times this type is used in event registrations
*/
public int getNumTemplates() {
return numTemplates;
}
// Converts this type descriptor to a Class object
public Class toClass(String codebase)
throws IOException, ClassNotFoundException
{
Class cls =
ClassLoading.loadClass(codebase, name, null, integrity, null);
EntryClass local;
try {
local = ClassMapper.toEntryClassBase(cls).eclass;
} catch (MarshalException e) {
throw new UnmarshalException("problem obtaining local version of "
+ toString(), e);
}
if (hash != local.hash)
throw new UnmarshalException("incoming entry type: " + toString()
+ " is not assignable to the local"
+ " version of the type: " + local);
return cls;
}
/**
* Returns the name of this type
* @return the name of this type
*/
public String getName() {
return name;
}
/**
* Returns true if the object passed in is an instance of EntryClass
* with the same type hash as this object. Returns false otherwise.
* @param o object to compare this object against
* @return true if this object equals the object passed in; false
* otherwise.
*/
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof EntryClass))
return false;
EntryClass t = (EntryClass) o;
return hash == t.hash;
}
/**
* Return a hashcode for this type.
* @return int the hashcode for this type
*/
public int hashCode() {
return (int) (hash ^ (hash >>> 32));
}
/* Inherit javadoc */
public String toString() {
return getClass() + "[name=" + getName() + ", hash=" + hash + "]";
}
/**
* Computes a SHA-1 digest from the hash of the superclass, if there
* is a superclass, followed by the name of this class, followed by
* the name and type for each field, if any, declared by this class and
* ordered alphabetically by field name. The first 8 bytes of the digest
* are used to form the 64-bit hash value for this type.
*/
private void computeHash(ClassMapper.EntryField[] fields)
throws MarshalException
{
hash = 0;
try {
MessageDigest md = MessageDigest.getInstance("SHA");
DataOutputStream out = new DataOutputStream(
new DigestOutputStream(new ByteArrayOutputStream(127),md));
if (superclass != null)
out.writeLong(superclass.hash);
out.writeUTF(name);
int startDeclaredFields = superclass != null ?
superclass.numFields : 0;
for (int i = startDeclaredFields; i < fields.length; i++) {
out.writeUTF(fields[i].field.getName());
out.writeUTF(fields[i].field.getType().getName());
}
out.flush();
byte[] digest = md.digest();
for (int i = Math.min(8, digest.length); --i >= 0; ) {
hash += ((long) (digest[i] & 0xFF)) << (i * 8);
}
} catch (Exception e) {
throw new MarshalException("Unable to calculate type hash for "
+ name, e);
}
}
/**
* Samples integrity protection setting (if any) of the stream from which
* this instance is being deserialized and checks that valid values
* for this object have been read from the stream.
*/
private void readObject(ObjectInputStream in)
throws IOException, ClassNotFoundException
{
in.defaultReadObject();
if (name == null)
throw new InvalidObjectException("name cannot be null");
if (hash == 0)
throw new InvalidObjectException("hash cannot be zero");
integrity = MarshalledWrapper.integrityEnforced(in);
}
/**
* Throws InvalidObjectException, since data for this class is required.
*/
private void readObjectNoData() throws InvalidObjectException {
throw new InvalidObjectException("no data");
}
}
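/*
 * Editor's note: standalone sketch, not part of the original file. It mirrors
 * the digest-to-long folding used in computeHash(...) above: the first 8 bytes
 * of a SHA-1 digest over the written names become a 64-bit type hash (byte 0 is
 * the least significant). The class and field names below are hypothetical.
 */
class TypeHashSketch {
    static long fold(byte[] digest) {
        long hash = 0;
        for (int i = Math.min(8, digest.length); --i >= 0; ) {
            hash += ((long) (digest[i] & 0xFF)) << (i * 8);
        }
        return hash;
    }
    public static void main(String[] args) throws Exception {
        java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA");
        java.io.DataOutputStream out = new java.io.DataOutputStream(
            new java.security.DigestOutputStream(
                new java.io.ByteArrayOutputStream(127), md));
        out.writeUTF("com.example.SomeEntry"); // hypothetical entry class name
        out.writeUTF("value");                 // hypothetical field name
        out.writeUTF("java.lang.String");      // and its type
        out.flush();
        System.out.println("type hash = " + fold(md.digest()));
    }
}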
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.impl.v2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContextImpl;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.split.JobSplit;
import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader;
import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopExternalSplit;
import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock;
import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
import org.apache.ignite.hadoop.HadoopInputSplit;
import org.apache.ignite.internal.processors.hadoop.HadoopJobEx;
import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopJobProperty;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskType;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap;
import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1Splitter;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import static org.apache.ignite.internal.processors.hadoop.HadoopJobProperty.JOB_SHARED_CLASSLOADER;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.jobLocalDir;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.taskLocalDir;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.transformException;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.FsCacheKey;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching;
/**
* Hadoop job implementation for v2 API.
*/
public class HadoopV2Job extends HadoopJobEx {
/** */
private final JobConf jobConf;
/** */
private final JobContextImpl jobCtx;
/** */
private final HadoopHelper helper;
/** Hadoop job ID. */
private final HadoopJobId jobId;
/** Job info. */
protected final HadoopJobInfo jobInfo;
/** Native library names. */
private final String[] libNames;
/** */
private final JobID hadoopJobID;
/** */
private final HadoopV2JobResourceManager rsrcMgr;
/** */
private final ConcurrentMap<T2<HadoopTaskType, Integer>, GridFutureAdapter<HadoopTaskContext>> ctxs =
new ConcurrentHashMap8<>();
    /** Pool of task context classes (and thus of class loading environments). */
    private final Queue<Class<? extends HadoopTaskContext>> taskCtxClsPool = new ConcurrentLinkedQueue<>();
    /** All task context classes created so far. */
    private final Queue<Class<? extends HadoopTaskContext>> fullCtxClsQueue = new ConcurrentLinkedDeque<>();
/** File system cache map. */
private final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fsMap = createHadoopLazyConcurrentMap();
/** Logger. */
private final IgniteLogger log;
/** Shared class loader. */
private volatile HadoopClassLoader sharedClsLdr;
/** Local node ID */
private volatile UUID locNodeId;
/** Serialized JobConf. */
private volatile byte[] jobConfData;
/**
* Constructor.
*
* @param jobId Job ID.
* @param jobInfo Job info.
* @param log Logger.
* @param libNames Optional additional native library names.
* @param helper Hadoop helper.
*/
public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, IgniteLogger log,
@Nullable String[] libNames, HadoopHelper helper) {
assert jobId != null;
assert jobInfo != null;
this.jobId = jobId;
this.jobInfo = jobInfo;
this.libNames = libNames;
this.helper = helper;
this.log = log;
ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader());
try {
hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());
jobConf = new JobConf();
HadoopFileSystemsUtils.setupFileSystems(jobConf);
for (Map.Entry<String,String> e : jobInfo.properties().entrySet())
jobConf.set(e.getKey(), e.getValue());
jobCtx = new JobContextImpl(jobConf, hadoopJobID);
rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log, this);
}
finally {
HadoopCommonUtils.restoreContextClassLoader(oldLdr);
}
}
/** {@inheritDoc} */
@Override public HadoopJobId id() {
return jobId;
}
/** {@inheritDoc} */
@Override public HadoopJobInfo info() {
return jobInfo;
}
/** {@inheritDoc} */
@Override public Collection<HadoopInputSplit> input() {
ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf.getClassLoader());
try {
String jobDirPath = jobConf.get(MRJobConfig.MAPREDUCE_JOB_DIR);
            if (jobDirPath == null) { // The job was probably not submitted by a Hadoop client.
                // Assume that we have the needed classes and try to generate input splits ourselves.
if (jobConf.getUseNewMapper())
return HadoopV2Splitter.splitJob(jobCtx);
else
return HadoopV1Splitter.splitJob(jobConf);
}
Path jobDir = new Path(jobDirPath);
try {
FileSystem fs = fileSystem(jobDir.toUri(), jobConf);
JobSplit.TaskSplitMetaInfo[] metaInfos = SplitMetaInfoReader.readSplitMetaInfo(hadoopJobID, fs, jobConf,
jobDir);
if (F.isEmpty(metaInfos))
throw new IgniteCheckedException("No input splits found.");
Path splitsFile = JobSubmissionFiles.getJobSplitFile(jobDir);
try (FSDataInputStream in = fs.open(splitsFile)) {
Collection<HadoopInputSplit> res = new ArrayList<>(metaInfos.length);
for (JobSplit.TaskSplitMetaInfo metaInfo : metaInfos) {
long off = metaInfo.getStartOffset();
String[] hosts = metaInfo.getLocations();
in.seek(off);
String clsName = Text.readString(in);
HadoopFileBlock block = HadoopV1Splitter.readFileBlock(clsName, in, hosts);
if (block == null)
block = HadoopV2Splitter.readFileBlock(clsName, in, hosts);
res.add(block != null ? block : new HadoopExternalSplit(hosts, off));
}
return res;
}
}
catch (Throwable e) {
if (e instanceof Error)
throw (Error)e;
else
throw transformException(e);
}
}
catch (IgniteCheckedException e) {
throw new IgniteException(e);
}
finally {
HadoopCommonUtils.restoreContextClassLoader(oldLdr);
}
}
/** {@inheritDoc} */
@SuppressWarnings({"unchecked", "MismatchedQueryAndUpdateOfCollection" })
@Override public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
T2<HadoopTaskType, Integer> locTaskId = new T2<>(info.type(), info.taskNumber());
GridFutureAdapter<HadoopTaskContext> fut = ctxs.get(locTaskId);
if (fut != null)
return fut.get();
GridFutureAdapter<HadoopTaskContext> old = ctxs.putIfAbsent(locTaskId, fut = new GridFutureAdapter<>());
if (old != null)
return old.get();
Class<? extends HadoopTaskContext> cls = taskCtxClsPool.poll();
try {
if (cls == null) {
                // If there is no pooled class, then load a new one.
                // Note that the class loader is identified by the task it was initially created for,
                // but later it may be reused for other tasks.
HadoopClassLoader ldr = sharedClsLdr != null ?
sharedClsLdr : createClassLoader(HadoopClassLoader.nameForTask(info, false));
cls = (Class<? extends HadoopTaskContext>)ldr.loadClass(HadoopV2TaskContext.class.getName());
fullCtxClsQueue.add(cls);
}
Constructor<?> ctr = cls.getConstructor(HadoopTaskInfo.class, HadoopJobEx.class,
HadoopJobId.class, UUID.class, DataInput.class);
if (jobConfData == null)
synchronized(jobConf) {
if (jobConfData == null) {
ByteArrayOutputStream buf = new ByteArrayOutputStream();
jobConf.write(new DataOutputStream(buf));
jobConfData = buf.toByteArray();
}
}
HadoopTaskContext res = (HadoopTaskContext)ctr.newInstance(info, this, jobId, locNodeId,
new DataInputStream(new ByteArrayInputStream(jobConfData)));
fut.onDone(res);
return res;
}
catch (Throwable e) {
IgniteCheckedException te = transformException(e);
fut.onDone(te);
if (e instanceof Error)
throw (Error)e;
throw te;
}
}
/** {@inheritDoc} */
@Override public void initialize(boolean external, UUID locNodeId) throws IgniteCheckedException {
assert locNodeId != null;
this.locNodeId = locNodeId;
ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader());
try {
rsrcMgr.prepareJobEnvironment(!external, jobLocalDir(igniteWorkDirectory(), locNodeId, jobId));
if (HadoopJobProperty.get(jobInfo, JOB_SHARED_CLASSLOADER, true)) {
U.warn(log, JOB_SHARED_CLASSLOADER.propertyName() + " job property is set to true; please disable " +
"it if job tasks rely on mutable static state.");
sharedClsLdr = createClassLoader(HadoopClassLoader.nameForJob(jobId));
}
}
finally {
HadoopCommonUtils.restoreContextClassLoader(oldLdr);
}
}
/** {@inheritDoc} */
@SuppressWarnings("ThrowFromFinallyBlock")
@Override public void dispose(boolean external) throws IgniteCheckedException {
try {
if (rsrcMgr != null && !external) {
File jobLocDir = jobLocalDir(igniteWorkDirectory(), locNodeId, jobId);
if (jobLocDir.exists())
U.delete(jobLocDir);
}
}
finally {
taskCtxClsPool.clear();
Throwable err = null;
// Stop the daemon threads that have been created
// with the task class loaders:
while (true) {
Class<? extends HadoopTaskContext> cls = fullCtxClsQueue.poll();
if (cls == null)
break;
try {
final ClassLoader ldr = cls.getClassLoader();
try {
// Stop Hadoop daemons for this *task*:
stopHadoopFsDaemons(ldr);
}
catch (Exception e) {
if (err == null)
err = e;
}
// Also close all the FileSystems cached in
// HadoopLazyConcurrentMap for this *task* class loader:
closeCachedTaskFileSystems(ldr);
}
catch (Throwable e) {
if (err == null)
err = e;
if (e instanceof Error)
throw (Error)e;
}
}
assert fullCtxClsQueue.isEmpty();
try {
// Close all cached file systems for this *Job*:
fsMap.close();
}
catch (Exception e) {
if (err == null)
err = e;
}
if (err != null)
throw U.cast(err);
}
}
/**
* Stops Hadoop Fs daemon threads.
* @param ldr The task ClassLoader to stop the daemons for.
* @throws Exception On error.
*/
private void stopHadoopFsDaemons(ClassLoader ldr) throws Exception {
Class<?> daemonCls = ldr.loadClass(HadoopClassLoader.CLS_DAEMON);
Method m = daemonCls.getMethod("dequeueAndStopAll");
m.invoke(null);
}
/**
     * Closes all the file systems used by the task.
* @param ldr The task class loader.
* @throws Exception On error.
*/
private void closeCachedTaskFileSystems(ClassLoader ldr) throws Exception {
Class<?> clazz = ldr.loadClass(HadoopV2TaskContext.class.getName());
Method m = clazz.getMethod("close");
m.invoke(null);
}
/** {@inheritDoc} */
@Override public void prepareTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
rsrcMgr.prepareTaskWorkDir(taskLocalDir(igniteWorkDirectory(), locNodeId, info));
}
/** {@inheritDoc} */
@Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
HadoopTaskContext ctx = ctxs.remove(new T2<>(info.type(), info.taskNumber())).get();
taskCtxClsPool.add(ctx.getClass());
File locDir = taskLocalDir(igniteWorkDirectory(), locNodeId, info);
if (locDir.exists())
U.delete(locDir);
}
/** {@inheritDoc} */
@Override public void cleanupStagingDirectory() {
rsrcMgr.cleanupStagingDirectory();
}
/** {@inheritDoc} */
@Override public String igniteWorkDirectory() {
return helper.workDirectory();
}
/**
* Getter for job configuration.
* @return The job configuration.
*/
public JobConf jobConf() {
return jobConf;
}
/**
* Gets file system for this job.
* @param uri The uri.
* @param cfg The configuration.
* @return The file system.
* @throws IOException On error.
*/
public FileSystem fileSystem(@Nullable URI uri, Configuration cfg) throws IOException {
return fileSystemForMrUserWithCaching(uri, cfg, fsMap);
}
/**
* Create class loader with the given name.
*
* @param name Name.
* @return Class loader.
*/
private HadoopClassLoader createClassLoader(String name) {
return new HadoopClassLoader(rsrcMgr.classPath(), name, libNames, helper);
}
}
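/*
 * Editor's note: illustrative sketch, not part of the original Ignite file. It
 * mirrors the getTaskContext(...) idiom above -- publish a future with
 * putIfAbsent so that exactly one thread builds the value while later callers
 * block on the same future -- using only JDK types (CompletableFuture stands in
 * for GridFutureAdapter, and the key/value types are generic placeholders).
 */
class OncePerKeySketch<K, V> {
    private final java.util.concurrent.ConcurrentMap<K, java.util.concurrent.CompletableFuture<V>> cache =
        new java.util.concurrent.ConcurrentHashMap<>();

    V get(K key, java.util.function.Function<K, V> factory) throws Exception {
        java.util.concurrent.CompletableFuture<V> fut = cache.get(key);
        if (fut != null)
            return fut.get(); // Somebody already created (or is creating) the value.
        fut = new java.util.concurrent.CompletableFuture<>();
        java.util.concurrent.CompletableFuture<V> old = cache.putIfAbsent(key, fut);
        if (old != null)
            return old.get(); // Lost the race; wait for the winner's result.
        try {
            V val = factory.apply(key); // Only the winning thread runs the factory.
            fut.complete(val);
            return val;
        }
        catch (Throwable e) {
            fut.completeExceptionally(e); // Propagate the failure to waiters too.
            throw e;
        }
    }
}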
|
|
package org.apache.helix.integration;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.HashSet;
import java.util.UUID;
import org.apache.helix.Criteria;
import org.apache.helix.InstanceType;
import org.apache.helix.NotificationContext;
import org.apache.helix.Criteria.DataSource;
import org.apache.helix.messaging.AsyncCallback;
import org.apache.helix.messaging.handling.HelixTaskResult;
import org.apache.helix.messaging.handling.MessageHandler;
import org.apache.helix.messaging.handling.MessageHandlerFactory;
import org.apache.helix.model.Message;
import org.apache.helix.model.Message.MessageState;
import org.apache.helix.model.Message.MessageType;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;
public class TestMessagingService extends ZkStandAloneCMTestBase {
public static class TestMessagingHandlerFactory implements MessageHandlerFactory {
public static HashSet<String> _processedMsgIds = new HashSet<String>();
@Override
public MessageHandler createHandler(Message message, NotificationContext context) {
return new TestMessagingHandler(message, context);
}
@Override
public String getMessageType() {
return "TestExtensibility";
}
@Override
public void reset() {
// TODO Auto-generated method stub
}
public static class TestMessagingHandler extends MessageHandler {
public TestMessagingHandler(Message message, NotificationContext context) {
super(message, context);
// TODO Auto-generated constructor stub
}
@Override
public HelixTaskResult handleMessage() throws InterruptedException {
HelixTaskResult result = new HelixTaskResult();
result.setSuccess(true);
Thread.sleep(1000);
System.out.println("TestMessagingHandler " + _message.getMsgId());
_processedMsgIds.add(_message.getRecord().getSimpleField("TestMessagingPara"));
result.getTaskResultMap().put("ReplyMessage", "TestReplyMessage");
return result;
}
@Override
public void onError(Exception e, ErrorCode code, ErrorType type) {
// TODO Auto-generated method stub
}
}
}
@Test()
public void TestMessageSimpleSend() throws Exception {
String hostSrc = "localhost_" + START_PORT;
String hostDest = "localhost_" + (START_PORT + 1);
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
_participants[1].getMessagingService().registerMessageHandlerFactory(factory.getMessageType(),
factory);
String msgId = new UUID(123, 456).toString();
Message msg = new Message(factory.getMessageType(), msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName(hostDest);
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
// int nMsgs = _startCMResultMap.get(hostSrc)._manager.getMessagingService().send(cr, msg);
int nMsgs = _participants[0].getMessagingService().send(cr, msg);
AssertJUnit.assertTrue(nMsgs == 1);
Thread.sleep(2500);
// Thread.currentThread().join();
AssertJUnit.assertTrue(TestMessagingHandlerFactory._processedMsgIds.contains(para));
cr = new Criteria();
cr.setInstanceName(hostDest);
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setDataSource(DataSource.IDEALSTATES);
// nMsgs = _startCMResultMap.get(hostSrc)._manager.getMessagingService().send(cr, msg);
nMsgs = _participants[0].getMessagingService().send(cr, msg);
AssertJUnit.assertTrue(nMsgs == 1);
Thread.sleep(2500);
// Thread.currentThread().join();
AssertJUnit.assertTrue(TestMessagingHandlerFactory._processedMsgIds.contains(para));
}
public static class MockAsyncCallback extends AsyncCallback {
public MockAsyncCallback() {
}
@Override
public void onTimeOut() {
// Intentionally a no-op; timeouts are observed through isTimedOut() on the base class.
}
@Override
public void onReplyMessage(Message message) {
// Intentionally a no-op; replies are collected by the base AsyncCallback and read via getMessageReplied().
}
}
public static class TestAsyncCallback extends AsyncCallback {
public TestAsyncCallback(long timeout) {
super(timeout);
}
static HashSet<String> _replyedMessageContents = new HashSet<String>();
public boolean timeout = false;
@Override
public void onTimeOut() {
timeout = true;
}
@Override
public void onReplyMessage(Message message) {
// Record the reply content so the tests can assert on it.
System.out.println("OnreplyMessage: "
+ message.getRecord().getMapField(Message.Attributes.MESSAGE_RESULT.toString())
.get("ReplyMessage"));
if (message.getRecord().getMapField(Message.Attributes.MESSAGE_RESULT.toString())
.get("ReplyMessage") == null) {
}
_replyedMessageContents.add(message.getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ReplyMessage"));
}
}
@Test()
public void TestMessageSimpleSendReceiveAsync() throws Exception {
String hostSrc = "localhost_" + START_PORT;
String hostDest = "localhost_" + (START_PORT + 1);
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
_participants[1].getMessagingService().registerMessageHandlerFactory(factory.getMessageType(),
factory);
_participants[0].getMessagingService().registerMessageHandlerFactory(factory.getMessageType(),
factory);
String msgId = new UUID(123, 456).toString();
Message msg = new Message(factory.getMessageType(), msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName(hostDest);
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
TestAsyncCallback callback = new TestAsyncCallback(60000);
_participants[0].getMessagingService().send(cr, msg, callback, 60000);
Thread.sleep(2000);
// Thread.currentThread().join();
AssertJUnit.assertTrue(TestAsyncCallback._replyedMessageContents.contains("TestReplyMessage"));
AssertJUnit.assertTrue(callback.getMessageReplied().size() == 1);
TestAsyncCallback callback2 = new TestAsyncCallback(500);
_participants[0].getMessagingService().send(cr, msg, callback2, 500);
Thread.sleep(3000);
// Thread.currentThread().join();
AssertJUnit.assertTrue(callback2.isTimedOut());
cr = new Criteria();
cr.setInstanceName(hostDest);
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setDataSource(DataSource.IDEALSTATES);
callback = new TestAsyncCallback(60000);
_participants[0].getMessagingService().send(cr, msg, callback, 60000);
Thread.sleep(2000);
// Thread.currentThread().join();
AssertJUnit.assertTrue(TestAsyncCallback._replyedMessageContents.contains("TestReplyMessage"));
AssertJUnit.assertTrue(callback.getMessageReplied().size() == 1);
callback2 = new TestAsyncCallback(500);
_participants[0].getMessagingService().send(cr, msg, callback2, 500);
Thread.sleep(3000);
// Thread.currentThread().join();
AssertJUnit.assertTrue(callback2.isTimedOut());
}
@Test()
public void TestBlockingSendReceive() throws Exception {
String hostSrc = "localhost_" + START_PORT;
String hostDest = "localhost_" + (START_PORT + 1);
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
_participants[1].getMessagingService().registerMessageHandlerFactory(factory.getMessageType(),
factory);
String msgId = new UUID(123, 456).toString();
Message msg = new Message(factory.getMessageType(), msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName(hostDest);
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
AsyncCallback asyncCallback = new MockAsyncCallback();
int messagesSent =
_participants[0].getMessagingService().sendAndWait(cr, msg, asyncCallback, 60000);
AssertJUnit.assertTrue(asyncCallback.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ReplyMessage")
.equals("TestReplyMessage"));
AssertJUnit.assertTrue(asyncCallback.getMessageReplied().size() == 1);
AsyncCallback asyncCallback2 = new MockAsyncCallback();
messagesSent = _participants[0].getMessagingService().sendAndWait(cr, msg, asyncCallback2, 500);
AssertJUnit.assertTrue(asyncCallback2.isTimedOut());
}
@Test()
public void TestMultiMessageCriteria() throws Exception {
String hostSrc = "localhost_" + START_PORT;
for (int i = 0; i < NODE_NR; i++) {
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
String hostDest = "localhost_" + (START_PORT + i);
_participants[i].getMessagingService().registerMessageHandlerFactory(
factory.getMessageType(), factory);
}
String msgId = new UUID(123, 456).toString();
Message msg = new Message(new TestMessagingHandlerFactory().getMessageType(), msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName("%");
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
AsyncCallback callback1 = new MockAsyncCallback();
int messageSent1 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback1, 10000);
AssertJUnit.assertTrue(callback1.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ReplyMessage")
.equals("TestReplyMessage"));
AssertJUnit.assertTrue(callback1.getMessageReplied().size() == NODE_NR - 1);
AsyncCallback callback2 = new MockAsyncCallback();
int messageSent2 = _participants[0].getMessagingService().sendAndWait(cr, msg, callback2, 500);
AssertJUnit.assertTrue(callback2.isTimedOut());
cr.setPartition("TestDB_17");
AsyncCallback callback3 = new MockAsyncCallback();
int messageSent3 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback3, 10000);
AssertJUnit.assertTrue(callback3.getMessageReplied().size() == _replica - 1);
cr.setPartition("TestDB_15");
AsyncCallback callback4 = new MockAsyncCallback();
int messageSent4 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback4, 10000);
AssertJUnit.assertTrue(callback4.getMessageReplied().size() == _replica);
cr.setPartitionState("SLAVE");
AsyncCallback callback5 = new MockAsyncCallback();
int messageSent5 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback5, 10000);
AssertJUnit.assertTrue(callback5.getMessageReplied().size() == _replica - 1);
cr.setDataSource(DataSource.IDEALSTATES);
AsyncCallback callback6 = new MockAsyncCallback();
int messageSent6 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback6, 10000);
AssertJUnit.assertTrue(callback6.getMessageReplied().size() == _replica - 1);
}
@Test()
public void sendSelfMsg() {
String hostSrc = "localhost_" + START_PORT;
for (int i = 0; i < NODE_NR; i++) {
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
String hostDest = "localhost_" + (START_PORT + i);
_participants[i].getMessagingService().registerMessageHandlerFactory(
factory.getMessageType(), factory);
}
String msgId = new UUID(123, 456).toString();
Message msg = new Message(new TestMessagingHandlerFactory().getMessageType(), msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName("%");
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setSelfExcluded(false);
AsyncCallback callback1 = new MockAsyncCallback();
int messageSent1 =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback1, 10000);
AssertJUnit.assertTrue(callback1.getMessageReplied().size() == NODE_NR);
AssertJUnit.assertTrue(callback1.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ReplyMessage")
.equals("TestReplyMessage"));
}
@Test()
public void TestControllerMessage() throws Exception {
String hostSrc = "localhost_" + START_PORT;
for (int i = 0; i < NODE_NR; i++) {
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
String hostDest = "localhost_" + (START_PORT + i);
_participants[i].getMessagingService().registerMessageHandlerFactory(
factory.getMessageType(), factory);
}
String msgId = new UUID(123, 456).toString();
Message msg = new Message(MessageType.CONTROLLER_MSG, msgId);
msg.setMsgId(msgId);
msg.setSrcName(hostSrc);
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
String para = "Testing messaging para";
msg.getRecord().setSimpleField("TestMessagingPara", para);
Criteria cr = new Criteria();
cr.setInstanceName("*");
cr.setRecipientInstanceType(InstanceType.CONTROLLER);
cr.setSessionSpecific(false);
AsyncCallback callback1 = new MockAsyncCallback();
int messagesSent =
_participants[0].getMessagingService().sendAndWait(cr, msg, callback1, 10000);
AssertJUnit.assertTrue(callback1.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ControllerResult")
.indexOf(hostSrc) != -1);
AssertJUnit.assertTrue(callback1.getMessageReplied().size() == 1);
msgId = UUID.randomUUID().toString();
msg.setMsgId(msgId);
cr.setPartition("TestDB_17");
AsyncCallback callback2 = new MockAsyncCallback();
messagesSent = _participants[0].getMessagingService().sendAndWait(cr, msg, callback2, 10000);
AssertJUnit.assertTrue(callback2.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ControllerResult")
.indexOf(hostSrc) != -1);
AssertJUnit.assertTrue(callback2.getMessageReplied().size() == 1);
msgId = UUID.randomUUID().toString();
msg.setMsgId(msgId);
cr.setPartitionState("SLAVE");
AsyncCallback callback3 = new MockAsyncCallback();
messagesSent = _participants[0].getMessagingService().sendAndWait(cr, msg, callback3, 10000);
AssertJUnit.assertTrue(callback3.getMessageReplied().get(0).getRecord()
.getMapField(Message.Attributes.MESSAGE_RESULT.toString()).get("ControllerResult")
.indexOf(hostSrc) != -1);
AssertJUnit.assertTrue(callback3.getMessageReplied().size() == 1);
}
}
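// A minimal usage sketch outside the test harness (assumptions: "recipientManager"
// and "senderManager" are connected HelixManager instances for two participants,
// and the recipient instance is named "localhost_12919"):
//
//   // register the handler factory on the recipient side
//   recipientManager.getMessagingService().registerMessageHandlerFactory(
//       "TestExtensibility", new TestMessagingHandlerFactory());
//
//   // build and address the message on the sender side
//   Message msg = new Message("TestExtensibility", UUID.randomUUID().toString());
//   msg.setTgtSessionId("*");
//   msg.setMsgState(MessageState.NEW);
//
//   Criteria cr = new Criteria();
//   cr.setInstanceName("localhost_12919");
//   cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
//   cr.setSessionSpecific(false);
//
//   int sent = senderManager.getMessagingService().send(cr, msg); // number of messages routed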
|
|
package org.apache.maven.plugin.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.classrealm.ClassRealmManager;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.execution.scope.internal.MojoExecutionScopeModule;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.ContextEnabled;
import org.apache.maven.plugin.DebugConfigurationListener;
import org.apache.maven.plugin.ExtensionRealmCache;
import org.apache.maven.plugin.InvalidPluginDescriptorException;
import org.apache.maven.plugin.MavenPluginManager;
import org.apache.maven.plugin.Mojo;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.plugin.MojoNotFoundException;
import org.apache.maven.plugin.PluginArtifactsCache;
import org.apache.maven.plugin.PluginConfigurationException;
import org.apache.maven.plugin.PluginContainerException;
import org.apache.maven.plugin.PluginDescriptorCache;
import org.apache.maven.plugin.PluginDescriptorParsingException;
import org.apache.maven.plugin.PluginIncompatibleException;
import org.apache.maven.plugin.PluginManagerException;
import org.apache.maven.plugin.PluginParameterException;
import org.apache.maven.plugin.PluginParameterExpressionEvaluator;
import org.apache.maven.plugin.PluginRealmCache;
import org.apache.maven.plugin.PluginResolutionException;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.Parameter;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptorBuilder;
import org.apache.maven.plugin.version.DefaultPluginVersionRequest;
import org.apache.maven.plugin.version.PluginVersionRequest;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.plugin.version.PluginVersionResolver;
import org.apache.maven.project.ExtensionDescriptor;
import org.apache.maven.project.ExtensionDescriptorBuilder;
import org.apache.maven.project.MavenProject;
import org.apache.maven.rtinfo.RuntimeInformation;
import org.apache.maven.session.scope.internal.SessionScopeModule;
import org.codehaus.plexus.DefaultPlexusContainer;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.codehaus.plexus.component.composition.CycleDetectedInComponentGraphException;
import org.codehaus.plexus.component.configurator.ComponentConfigurationException;
import org.codehaus.plexus.component.configurator.ComponentConfigurator;
import org.codehaus.plexus.component.configurator.ConfigurationListener;
import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluationException;
import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluator;
import org.codehaus.plexus.component.repository.ComponentDescriptor;
import org.codehaus.plexus.component.repository.exception.ComponentLifecycleException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.configuration.PlexusConfiguration;
import org.codehaus.plexus.configuration.PlexusConfigurationException;
import org.codehaus.plexus.configuration.xml.XmlPlexusConfiguration;
import org.codehaus.plexus.util.ReaderFactory;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.graph.DependencyFilter;
import org.eclipse.aether.graph.DependencyNode;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.util.filter.AndDependencyFilter;
import org.eclipse.aether.util.graph.visitor.PreorderNodeListGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
/**
* Provides basic services to manage Maven plugins and their mojos. This component is kept general in its design such
* that the plugins/mojos can be used in arbitrary contexts. In particular, the mojos can be used for ordinary build
* plugins as well as special purpose plugins like reports.
*
* @author Benjamin Bentmann
* @since 3.0
*/
@Named
@Singleton
public class DefaultMavenPluginManager
implements MavenPluginManager
{
/**
* <p>
* PluginId => ExtensionRealmCache.CacheRecord map MavenProject context value key. The map is used to ensure the
* same class realm is used to load build extensions and load mojos for extensions=true plugins.
* </p>
* <strong>Note:</strong> This is part of internal implementation and may be changed or removed without notice
*
* @since 3.3.0
*/
public static final String KEY_EXTENSIONS_REALMS = DefaultMavenPluginManager.class.getName() + "/extensionsRealms";
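// The map is written and read through MavenProject context values; see
// setupExtensionsRealm below, e.g.:
//
//   @SuppressWarnings( "unchecked" )
//   Map<String, ExtensionRealmCache.CacheRecord> realms =
//       (Map<String, ExtensionRealmCache.CacheRecord>) project.getContextValue( KEY_EXTENSIONS_REALMS );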
private final Logger logger = LoggerFactory.getLogger( getClass() );
private PlexusContainer container;
private ClassRealmManager classRealmManager;
private PluginDescriptorCache pluginDescriptorCache;
private PluginRealmCache pluginRealmCache;
private PluginDependenciesResolver pluginDependenciesResolver;
private RuntimeInformation runtimeInformation;
private ExtensionRealmCache extensionRealmCache;
private PluginVersionResolver pluginVersionResolver;
private PluginArtifactsCache pluginArtifactsCache;
private MavenPluginValidator pluginValidator;
private final ExtensionDescriptorBuilder extensionDescriptorBuilder = new ExtensionDescriptorBuilder();
private final PluginDescriptorBuilder builder = new PluginDescriptorBuilder();
@Inject
public DefaultMavenPluginManager(
PlexusContainer container,
ClassRealmManager classRealmManager,
PluginDescriptorCache pluginDescriptorCache,
PluginRealmCache pluginRealmCache,
PluginDependenciesResolver pluginDependenciesResolver,
RuntimeInformation runtimeInformation,
ExtensionRealmCache extensionRealmCache,
PluginVersionResolver pluginVersionResolver,
PluginArtifactsCache pluginArtifactsCache,
MavenPluginValidator pluginValidator )
{
this.container = container;
this.classRealmManager = classRealmManager;
this.pluginDescriptorCache = pluginDescriptorCache;
this.pluginRealmCache = pluginRealmCache;
this.pluginDependenciesResolver = pluginDependenciesResolver;
this.runtimeInformation = runtimeInformation;
this.extensionRealmCache = extensionRealmCache;
this.pluginVersionResolver = pluginVersionResolver;
this.pluginArtifactsCache = pluginArtifactsCache;
this.pluginValidator = pluginValidator;
}
public synchronized PluginDescriptor getPluginDescriptor( Plugin plugin, List<RemoteRepository> repositories,
RepositorySystemSession session )
throws PluginResolutionException, PluginDescriptorParsingException, InvalidPluginDescriptorException
{
PluginDescriptorCache.Key cacheKey = pluginDescriptorCache.createKey( plugin, repositories, session );
PluginDescriptor pluginDescriptor = pluginDescriptorCache.get( cacheKey );
if ( pluginDescriptor == null )
{
org.eclipse.aether.artifact.Artifact artifact =
pluginDependenciesResolver.resolve( plugin, repositories, session );
Artifact pluginArtifact = RepositoryUtils.toArtifact( artifact );
pluginDescriptor = extractPluginDescriptor( pluginArtifact, plugin );
pluginDescriptor.setRequiredMavenVersion( artifact.getProperty( "requiredMavenVersion", null ) );
pluginDescriptorCache.put( cacheKey, pluginDescriptor );
}
pluginDescriptor.setPlugin( plugin );
return pluginDescriptor;
}
private PluginDescriptor extractPluginDescriptor( Artifact pluginArtifact, Plugin plugin )
throws PluginDescriptorParsingException, InvalidPluginDescriptorException
{
PluginDescriptor pluginDescriptor = null;
File pluginFile = pluginArtifact.getFile();
try
{
if ( pluginFile.isFile() )
{
try ( JarFile pluginJar = new JarFile( pluginFile, false ) )
{
ZipEntry pluginDescriptorEntry = pluginJar.getEntry( getPluginDescriptorLocation() );
if ( pluginDescriptorEntry != null )
{
InputStream is = pluginJar.getInputStream( pluginDescriptorEntry );
pluginDescriptor = parsePluginDescriptor( is, plugin, pluginFile.getAbsolutePath() );
}
}
}
else
{
File pluginXml = new File( pluginFile, getPluginDescriptorLocation() );
if ( pluginXml.isFile() )
{
try ( InputStream is = new BufferedInputStream( new FileInputStream( pluginXml ) ) )
{
pluginDescriptor = parsePluginDescriptor( is, plugin, pluginXml.getAbsolutePath() );
}
}
}
if ( pluginDescriptor == null )
{
throw new IOException( "No plugin descriptor found at " + getPluginDescriptorLocation() );
}
}
catch ( IOException e )
{
throw new PluginDescriptorParsingException( plugin, pluginFile.getAbsolutePath(), e );
}
List<String> errors = new ArrayList<>();
pluginValidator.validate( pluginArtifact, pluginDescriptor, errors );
if ( !errors.isEmpty() )
{
throw new InvalidPluginDescriptorException(
"Invalid plugin descriptor for " + plugin.getId() + " (" + pluginFile + ")", errors );
}
pluginDescriptor.setPluginArtifact( pluginArtifact );
return pluginDescriptor;
}
private String getPluginDescriptorLocation()
{
return "META-INF/maven/plugin.xml";
}
private PluginDescriptor parsePluginDescriptor( InputStream is, Plugin plugin, String descriptorLocation )
throws PluginDescriptorParsingException
{
try
{
Reader reader = ReaderFactory.newXmlReader( is );
return builder.build( reader, descriptorLocation );
}
catch ( IOException | PlexusConfigurationException e )
{
throw new PluginDescriptorParsingException( plugin, descriptorLocation, e );
}
}
public MojoDescriptor getMojoDescriptor( Plugin plugin, String goal, List<RemoteRepository> repositories,
RepositorySystemSession session )
throws MojoNotFoundException, PluginResolutionException, PluginDescriptorParsingException,
InvalidPluginDescriptorException
{
PluginDescriptor pluginDescriptor = getPluginDescriptor( plugin, repositories, session );
MojoDescriptor mojoDescriptor = pluginDescriptor.getMojo( goal );
if ( mojoDescriptor == null )
{
throw new MojoNotFoundException( goal, pluginDescriptor );
}
return mojoDescriptor;
}
public void checkRequiredMavenVersion( PluginDescriptor pluginDescriptor )
throws PluginIncompatibleException
{
String requiredMavenVersion = pluginDescriptor.getRequiredMavenVersion();
if ( StringUtils.isNotBlank( requiredMavenVersion ) )
{
try
{
if ( !runtimeInformation.isMavenVersion( requiredMavenVersion ) )
{
throw new PluginIncompatibleException( pluginDescriptor.getPlugin(),
"The plugin " + pluginDescriptor.getId()
+ " requires Maven version " + requiredMavenVersion );
}
}
catch ( RuntimeException e )
{
logger.warn( "Could not verify plugin's Maven prerequisite: " + e.getMessage() );
}
}
}
public synchronized void setupPluginRealm( PluginDescriptor pluginDescriptor, MavenSession session,
ClassLoader parent, List<String> imports, DependencyFilter filter )
throws PluginResolutionException, PluginContainerException
{
Plugin plugin = pluginDescriptor.getPlugin();
MavenProject project = session.getCurrentProject();
if ( plugin.isExtensions() )
{
ExtensionRealmCache.CacheRecord extensionRecord;
try
{
RepositorySystemSession repositorySession = session.getRepositorySession();
extensionRecord = setupExtensionsRealm( project, plugin, repositorySession );
}
catch ( PluginManagerException e )
{
// extensions realm is expected to be fully set up at this point
// any exception means a problem in maven code, not a user error
throw new IllegalStateException( e );
}
ClassRealm pluginRealm = extensionRecord.getRealm();
List<Artifact> pluginArtifacts = extensionRecord.getArtifacts();
for ( ComponentDescriptor<?> componentDescriptor : pluginDescriptor.getComponents() )
{
componentDescriptor.setRealm( pluginRealm );
}
pluginDescriptor.setClassRealm( pluginRealm );
pluginDescriptor.setArtifacts( pluginArtifacts );
}
else
{
Map<String, ClassLoader> foreignImports = calcImports( project, parent, imports );
PluginRealmCache.Key cacheKey = pluginRealmCache.createKey( plugin, parent, foreignImports, filter,
project.getRemotePluginRepositories(),
session.getRepositorySession() );
PluginRealmCache.CacheRecord cacheRecord = pluginRealmCache.get( cacheKey );
if ( cacheRecord != null )
{
pluginDescriptor.setClassRealm( cacheRecord.getRealm() );
pluginDescriptor.setArtifacts( new ArrayList<>( cacheRecord.getArtifacts() ) );
for ( ComponentDescriptor<?> componentDescriptor : pluginDescriptor.getComponents() )
{
componentDescriptor.setRealm( cacheRecord.getRealm() );
}
}
else
{
createPluginRealm( pluginDescriptor, session, parent, foreignImports, filter );
cacheRecord =
pluginRealmCache.put( cacheKey, pluginDescriptor.getClassRealm(), pluginDescriptor.getArtifacts() );
}
pluginRealmCache.register( project, cacheKey, cacheRecord );
}
}
private void createPluginRealm( PluginDescriptor pluginDescriptor, MavenSession session, ClassLoader parent,
Map<String, ClassLoader> foreignImports, DependencyFilter filter )
throws PluginResolutionException, PluginContainerException
{
Plugin plugin =
Objects.requireNonNull( pluginDescriptor.getPlugin(), "pluginDescriptor.plugin cannot be null" );
Artifact pluginArtifact = Objects.requireNonNull( pluginDescriptor.getPluginArtifact(),
"pluginDescriptor.pluginArtifact cannot be null" );
MavenProject project = session.getCurrentProject();
final ClassRealm pluginRealm;
final List<Artifact> pluginArtifacts;
RepositorySystemSession repositorySession = session.getRepositorySession();
DependencyFilter dependencyFilter = project.getExtensionDependencyFilter();
dependencyFilter = AndDependencyFilter.newInstance( dependencyFilter, filter );
DependencyNode root =
pluginDependenciesResolver.resolve( plugin, RepositoryUtils.toArtifact( pluginArtifact ), dependencyFilter,
project.getRemotePluginRepositories(), repositorySession );
PreorderNodeListGenerator nlg = new PreorderNodeListGenerator();
root.accept( nlg );
pluginArtifacts = toMavenArtifacts( root, nlg );
pluginRealm = classRealmManager.createPluginRealm( plugin, parent, null, foreignImports,
toAetherArtifacts( pluginArtifacts ) );
discoverPluginComponents( pluginRealm, plugin, pluginDescriptor );
pluginDescriptor.setClassRealm( pluginRealm );
pluginDescriptor.setArtifacts( pluginArtifacts );
}
private void discoverPluginComponents( final ClassRealm pluginRealm, Plugin plugin,
PluginDescriptor pluginDescriptor )
throws PluginContainerException
{
try
{
if ( pluginDescriptor != null )
{
for ( ComponentDescriptor<?> componentDescriptor : pluginDescriptor.getComponents() )
{
componentDescriptor.setRealm( pluginRealm );
container.addComponentDescriptor( componentDescriptor );
}
}
( (DefaultPlexusContainer) container ).discoverComponents( pluginRealm, new SessionScopeModule( container ),
new MojoExecutionScopeModule( container ) );
}
catch ( ComponentLookupException | CycleDetectedInComponentGraphException e )
{
throw new PluginContainerException( plugin, pluginRealm,
"Error in component graph of plugin " + plugin.getId() + ": "
+ e.getMessage(), e );
}
}
private List<org.eclipse.aether.artifact.Artifact> toAetherArtifacts( final List<Artifact> pluginArtifacts )
{
return new ArrayList<>( RepositoryUtils.toArtifacts( pluginArtifacts ) );
}
private List<Artifact> toMavenArtifacts( DependencyNode root, PreorderNodeListGenerator nlg )
{
List<Artifact> artifacts = new ArrayList<>( nlg.getNodes().size() );
RepositoryUtils.toArtifacts( artifacts, Collections.singleton( root ), Collections.emptyList(), null );
artifacts.removeIf( artifact -> artifact.getFile() == null );
return Collections.unmodifiableList( artifacts );
}
private Map<String, ClassLoader> calcImports( MavenProject project, ClassLoader parent, List<String> imports )
{
Map<String, ClassLoader> foreignImports = new HashMap<>();
ClassLoader projectRealm = project.getClassRealm();
if ( projectRealm != null )
{
foreignImports.put( "", projectRealm );
}
else
{
foreignImports.put( "", classRealmManager.getMavenApiRealm() );
}
if ( parent != null && imports != null )
{
for ( String parentImport : imports )
{
foreignImports.put( parentImport, parent );
}
}
return foreignImports;
}
public <T> T getConfiguredMojo( Class<T> mojoInterface, MavenSession session, MojoExecution mojoExecution )
throws PluginConfigurationException, PluginContainerException
{
MojoDescriptor mojoDescriptor = mojoExecution.getMojoDescriptor();
PluginDescriptor pluginDescriptor = mojoDescriptor.getPluginDescriptor();
ClassRealm pluginRealm = pluginDescriptor.getClassRealm();
if ( pluginRealm == null )
{
try
{
setupPluginRealm( pluginDescriptor, session, null, null, null );
}
catch ( PluginResolutionException e )
{
String msg = "Cannot setup plugin realm [mojoDescriptor=" + mojoDescriptor.getId()
+ ", pluginDescriptor=" + pluginDescriptor.getId() + "]";
throw new PluginConfigurationException( pluginDescriptor, msg, e );
}
pluginRealm = pluginDescriptor.getClassRealm();
}
if ( logger.isDebugEnabled() )
{
logger.debug( "Configuring mojo " + mojoDescriptor.getId() + " from plugin realm " + pluginRealm );
}
// We are forcing the use of the plugin realm for all lookups that might occur during
// the lifecycle that is part of the lookup. Here we are specifically trying to keep
// lookups that occur in contextualize calls in line with the right realm.
ClassRealm oldLookupRealm = container.setLookupRealm( pluginRealm );
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader( pluginRealm );
try
{
T mojo;
try
{
mojo = container.lookup( mojoInterface, mojoDescriptor.getRoleHint() );
}
catch ( ComponentLookupException e )
{
Throwable cause = e.getCause();
while ( cause != null && !( cause instanceof LinkageError )
&& !( cause instanceof ClassNotFoundException ) )
{
cause = cause.getCause();
}
if ( ( cause instanceof NoClassDefFoundError ) || ( cause instanceof ClassNotFoundException ) )
{
ByteArrayOutputStream os = new ByteArrayOutputStream( 1024 );
PrintStream ps = new PrintStream( os );
ps.println( "Unable to load the mojo '" + mojoDescriptor.getGoal() + "' in the plugin '"
+ pluginDescriptor.getId() + "'. A required class is missing: "
+ cause.getMessage() );
pluginRealm.display( ps );
throw new PluginContainerException( mojoDescriptor, pluginRealm, os.toString(), cause );
}
else if ( cause instanceof LinkageError )
{
ByteArrayOutputStream os = new ByteArrayOutputStream( 1024 );
PrintStream ps = new PrintStream( os );
ps.println( "Unable to load the mojo '" + mojoDescriptor.getGoal() + "' in the plugin '"
+ pluginDescriptor.getId() + "' due to an API incompatibility: "
+ e.getClass().getName() + ": " + cause.getMessage() );
pluginRealm.display( ps );
throw new PluginContainerException( mojoDescriptor, pluginRealm, os.toString(), cause );
}
throw new PluginContainerException( mojoDescriptor, pluginRealm,
"Unable to load the mojo '" + mojoDescriptor.getGoal()
+ "' (or one of its required components) from the plugin '"
+ pluginDescriptor.getId() + "'", e );
}
if ( mojo instanceof ContextEnabled )
{
MavenProject project = session.getCurrentProject();
Map<String, Object> pluginContext = session.getPluginContext( pluginDescriptor, project );
if ( pluginContext != null )
{
pluginContext.put( "project", project );
pluginContext.put( "pluginDescriptor", pluginDescriptor );
( (ContextEnabled) mojo ).setPluginContext( pluginContext );
}
}
if ( mojo instanceof Mojo )
{
Logger mojoLogger = LoggerFactory.getLogger( mojoDescriptor.getImplementation() );
( (Mojo) mojo ).setLog( new MojoLogWrapper( mojoLogger ) );
}
Xpp3Dom dom = mojoExecution.getConfiguration();
PlexusConfiguration pomConfiguration;
if ( dom == null )
{
pomConfiguration = new XmlPlexusConfiguration( "configuration" );
}
else
{
pomConfiguration = new XmlPlexusConfiguration( dom );
}
ExpressionEvaluator expressionEvaluator = new PluginParameterExpressionEvaluator( session, mojoExecution );
populatePluginFields( mojo, mojoDescriptor, pluginRealm, pomConfiguration, expressionEvaluator );
return mojo;
}
finally
{
Thread.currentThread().setContextClassLoader( oldClassLoader );
container.setLookupRealm( oldLookupRealm );
}
}
private void populatePluginFields( Object mojo, MojoDescriptor mojoDescriptor, ClassRealm pluginRealm,
PlexusConfiguration configuration, ExpressionEvaluator expressionEvaluator )
throws PluginConfigurationException
{
ComponentConfigurator configurator = null;
String configuratorId = mojoDescriptor.getComponentConfigurator();
if ( StringUtils.isEmpty( configuratorId ) )
{
configuratorId = "basic";
}
try
{
// TODO could the configuration be passed to lookup and the configurator known to plexus via the descriptor
// so that this method could entirely be handled by a plexus lookup?
configurator = container.lookup( ComponentConfigurator.class, configuratorId );
ConfigurationListener listener = new DebugConfigurationListener( logger );
ValidatingConfigurationListener validator =
new ValidatingConfigurationListener( mojo, mojoDescriptor, listener );
logger.debug(
"Configuring mojo '" + mojoDescriptor.getId() + "' with " + configuratorId + " configurator -->" );
configurator.configureComponent( mojo, configuration, expressionEvaluator, pluginRealm, validator );
logger.debug( "-- end configuration --" );
Collection<Parameter> missingParameters = validator.getMissingParameters();
if ( !missingParameters.isEmpty() )
{
if ( "basic".equals( configuratorId ) )
{
throw new PluginParameterException( mojoDescriptor, new ArrayList<>( missingParameters ) );
}
else
{
/*
* NOTE: Other configurators like the map-oriented one don't call into the listener, so do it the
* hard way.
*/
validateParameters( mojoDescriptor, configuration, expressionEvaluator );
}
}
}
catch ( ComponentConfigurationException e )
{
String message = "Unable to parse configuration of mojo " + mojoDescriptor.getId();
if ( e.getFailedConfiguration() != null )
{
message += " for parameter " + e.getFailedConfiguration().getName();
}
message += ": " + e.getMessage();
throw new PluginConfigurationException( mojoDescriptor.getPluginDescriptor(), message, e );
}
catch ( ComponentLookupException e )
{
throw new PluginConfigurationException( mojoDescriptor.getPluginDescriptor(),
"Unable to retrieve component configurator " + configuratorId
+ " for configuration of mojo " + mojoDescriptor.getId(), e );
}
catch ( NoClassDefFoundError e )
{
ByteArrayOutputStream os = new ByteArrayOutputStream( 1024 );
PrintStream ps = new PrintStream( os );
ps.println( "A required class was missing during configuration of mojo " + mojoDescriptor.getId() + ": "
+ e.getMessage() );
pluginRealm.display( ps );
throw new PluginConfigurationException( mojoDescriptor.getPluginDescriptor(), os.toString(), e );
}
catch ( LinkageError e )
{
ByteArrayOutputStream os = new ByteArrayOutputStream( 1024 );
PrintStream ps = new PrintStream( os );
ps.println(
"An API incompatibility was encountered during configuration of mojo " + mojoDescriptor.getId() + ": "
+ e.getClass().getName() + ": " + e.getMessage() );
pluginRealm.display( ps );
throw new PluginConfigurationException( mojoDescriptor.getPluginDescriptor(), os.toString(), e );
}
finally
{
if ( configurator != null )
{
try
{
container.release( configurator );
}
catch ( ComponentLifecycleException e )
{
logger.debug( "Failed to release mojo configurator - ignoring." );
}
}
}
}
private void validateParameters( MojoDescriptor mojoDescriptor, PlexusConfiguration configuration,
ExpressionEvaluator expressionEvaluator )
throws ComponentConfigurationException, PluginParameterException
{
if ( mojoDescriptor.getParameters() == null )
{
return;
}
List<Parameter> invalidParameters = new ArrayList<>();
for ( Parameter parameter : mojoDescriptor.getParameters() )
{
if ( !parameter.isRequired() )
{
continue;
}
Object value = null;
PlexusConfiguration config = configuration.getChild( parameter.getName(), false );
if ( config != null )
{
String expression = config.getValue( null );
try
{
value = expressionEvaluator.evaluate( expression );
if ( value == null )
{
value = config.getAttribute( "default-value", null );
}
}
catch ( ExpressionEvaluationException e )
{
String msg = "Error evaluating the expression '" + expression + "' for configuration value '"
+ configuration.getName() + "'";
throw new ComponentConfigurationException( configuration, msg, e );
}
}
if ( value == null && ( config == null || config.getChildCount() <= 0 ) )
{
invalidParameters.add( parameter );
}
}
if ( !invalidParameters.isEmpty() )
{
throw new PluginParameterException( mojoDescriptor, invalidParameters );
}
}
public void releaseMojo( Object mojo, MojoExecution mojoExecution )
{
if ( mojo != null )
{
try
{
container.release( mojo );
}
catch ( ComponentLifecycleException e )
{
String goalExecId = mojoExecution.getGoal();
if ( mojoExecution.getExecutionId() != null )
{
goalExecId += " {execution: " + mojoExecution.getExecutionId() + "}";
}
logger.debug( "Error releasing mojo for " + goalExecId, e );
}
}
}
public ExtensionRealmCache.CacheRecord setupExtensionsRealm( MavenProject project, Plugin plugin,
RepositorySystemSession session )
throws PluginManagerException
{
@SuppressWarnings( "unchecked" ) Map<String, ExtensionRealmCache.CacheRecord> pluginRealms =
(Map<String, ExtensionRealmCache.CacheRecord>) project.getContextValue( KEY_EXTENSIONS_REALMS );
if ( pluginRealms == null )
{
pluginRealms = new HashMap<>();
project.setContextValue( KEY_EXTENSIONS_REALMS, pluginRealms );
}
final String pluginKey = plugin.getId();
ExtensionRealmCache.CacheRecord extensionRecord = pluginRealms.get( pluginKey );
if ( extensionRecord != null )
{
return extensionRecord;
}
final List<RemoteRepository> repositories = project.getRemotePluginRepositories();
// resolve plugin version as necessary
if ( plugin.getVersion() == null )
{
PluginVersionRequest versionRequest = new DefaultPluginVersionRequest( plugin, session, repositories );
try
{
plugin.setVersion( pluginVersionResolver.resolve( versionRequest ).getVersion() );
}
catch ( PluginVersionResolutionException e )
{
throw new PluginManagerException( plugin, e.getMessage(), e );
}
}
// resolve plugin artifacts
List<Artifact> artifacts;
PluginArtifactsCache.Key cacheKey = pluginArtifactsCache.createKey( plugin, null, repositories, session );
PluginArtifactsCache.CacheRecord recordArtifacts;
try
{
recordArtifacts = pluginArtifactsCache.get( cacheKey );
}
catch ( PluginResolutionException e )
{
throw new PluginManagerException( plugin, e.getMessage(), e );
}
if ( recordArtifacts != null )
{
artifacts = recordArtifacts.getArtifacts();
}
else
{
try
{
artifacts = resolveExtensionArtifacts( plugin, repositories, session );
recordArtifacts = pluginArtifactsCache.put( cacheKey, artifacts );
}
catch ( PluginResolutionException e )
{
pluginArtifactsCache.put( cacheKey, e );
pluginArtifactsCache.register( project, cacheKey, recordArtifacts );
throw new PluginManagerException( plugin, e.getMessage(), e );
}
}
pluginArtifactsCache.register( project, cacheKey, recordArtifacts );
// create and cache extensions realms
final ExtensionRealmCache.Key extensionKey = extensionRealmCache.createKey( artifacts );
extensionRecord = extensionRealmCache.get( extensionKey );
if ( extensionRecord == null )
{
ClassRealm extensionRealm =
classRealmManager.createExtensionRealm( plugin, toAetherArtifacts( artifacts ) );
// TODO figure out how to use the same PluginDescriptor when running mojos
PluginDescriptor pluginDescriptor = null;
if ( plugin.isExtensions() && !artifacts.isEmpty() )
{
// ignore plugin descriptor parsing errors at this point
// these errors will be reported during calculation of the project build execution plan
try
{
pluginDescriptor = extractPluginDescriptor( artifacts.get( 0 ), plugin );
}
catch ( PluginDescriptorParsingException | InvalidPluginDescriptorException e )
{
// ignore, see above
}
}
discoverPluginComponents( extensionRealm, plugin, pluginDescriptor );
ExtensionDescriptor extensionDescriptor = null;
Artifact extensionArtifact = artifacts.get( 0 );
try
{
extensionDescriptor = extensionDescriptorBuilder.build( extensionArtifact.getFile() );
}
catch ( IOException e )
{
String message = "Invalid extension descriptor for " + plugin.getId() + ": " + e.getMessage();
if ( logger.isDebugEnabled() )
{
logger.error( message, e );
}
else
{
logger.error( message );
}
}
extensionRecord = extensionRealmCache.put( extensionKey, extensionRealm, extensionDescriptor, artifacts );
}
extensionRealmCache.register( project, extensionKey, extensionRecord );
pluginRealms.put( pluginKey, extensionRecord );
return extensionRecord;
}
private List<Artifact> resolveExtensionArtifacts( Plugin extensionPlugin, List<RemoteRepository> repositories,
RepositorySystemSession session )
throws PluginResolutionException
{
DependencyNode root = pluginDependenciesResolver.resolve( extensionPlugin, null, null, repositories, session );
PreorderNodeListGenerator nlg = new PreorderNodeListGenerator();
root.accept( nlg );
return toMavenArtifacts( root, nlg );
}
}
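// A hedged sketch of the typical call sequence from a caller such as the mojo
// executor ("plugin", "goal", "repositories", "repoSession", "session" and
// "mojoExecution" are assumed to be supplied by the caller):
//
//   MojoDescriptor descriptor =
//       pluginManager.getMojoDescriptor( plugin, goal, repositories, repoSession );
//   PluginDescriptor pluginDescriptor = descriptor.getPluginDescriptor();
//   pluginManager.checkRequiredMavenVersion( pluginDescriptor );
//   pluginManager.setupPluginRealm( pluginDescriptor, session, null, null, null );
//   Mojo mojo = pluginManager.getConfiguredMojo( Mojo.class, session, mojoExecution );
//   try
//   {
//       mojo.execute();
//   }
//   finally
//   {
//       pluginManager.releaseMojo( mojo, mojoExecution );
//   }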
|
|
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.impl.gui.main.call;
import java.awt.*;
import java.beans.*;
import java.net.*;
import java.util.*;
import java.util.List;
import javax.swing.*;
import javax.swing.text.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.plugin.desktoputil.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.media.*;
import net.java.sip.communicator.util.*;
import org.ice4j.ice.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.resources.*;
import org.jitsi.util.*;
import com.explodingpixels.macwidgets.*;
import java.security.cert.*;
import javax.swing.event.*;
/**
* The frame displaying the statistical information for a telephony conference.
*
* @author Vincent Lucas
* @author Yana Stamcheva
*/
public class CallInfoFrame
implements CallTitleListener,
PropertyChangeListener,
HyperlinkListener
{
/**
* The telephony conference to compute and display the statistics of.
*/
private CallConference callConference;
/**
* The call info window.
*/
private final JDialog callInfoWindow;
/**
* The information text pane.
*/
private final JEditorPane infoTextPane;
/**
* The font color.
*/
private String fontColor;
/**
* The resource management service.
*/
private final ResourceManagementService resources
= GuiActivator.getResources();
/**
* Indicates if the info window has any text to display.
*/
private boolean hasCallInfo;
/**
* Dummy URL to indicate that the certificate should be displayed.
*/
private final String CERTIFICATE_URL = "jitsi://viewCertificate";
/**
* Creates a new frame containing the statistical information for a specific
* telephony conference.
*
* @param callConference the telephony conference to compute and display the
* statistics of
*/
public CallInfoFrame(CallConference callConference)
{
this.callConference = callConference;
JScrollPane scrollPane = new JScrollPane();
scrollPane.setOpaque(false);
scrollPane.getViewport().setOpaque(false);
infoTextPane = createGeneralInfoPane();
Caret caret = infoTextPane.getCaret();
if (caret instanceof DefaultCaret)
{
((DefaultCaret) caret).setUpdatePolicy(DefaultCaret.NEVER_UPDATE);
}
scrollPane.getViewport().add(infoTextPane);
callInfoWindow
= createCallInfoWindow(
GuiActivator.getResources().getI18NString(
"service.gui.callinfo.TECHNICAL_CALL_INFO"));
callInfoWindow.getContentPane().add(scrollPane);
hasCallInfo = this.constructCallInfo();
}
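// Typical usage (a sketch; "conference" is assumed to be an existing
// CallConference for an active call):
//
//   CallInfoFrame infoFrame = new CallInfoFrame(conference);
//   if (infoFrame.hasCallInfo())
//       infoFrame.setVisible(true);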
/**
* Creates different types of windows depending on the operating system.
*
* @param title the title of the created window
* @return the newly created call info window
*/
private JDialog createCallInfoWindow(String title)
{
JDialog callInfoWindow = null;
if (OSUtils.IS_MAC)
{
HudWindow window = new HudWindow();
JDialog dialog = window.getJDialog();
dialog.setTitle(title);
callInfoWindow = window.getJDialog();
callInfoWindow.setResizable(true);
fontColor = "FFFFFF";
}
else
{
SIPCommDialog dialog = new SIPCommDialog(false);
callInfoWindow = dialog;
callInfoWindow.setTitle(title);
fontColor = "000000";
}
return callInfoWindow;
}
/**
* Create call info text pane.
*
* @return the created call info text pane
*/
private JEditorPane createGeneralInfoPane()
{
JEditorPane infoTextPane = new JEditorPane();
/*
* Make JEditorPane respect our default font because we will be using it
* to just display text.
*/
infoTextPane.putClientProperty(
JEditorPane.HONOR_DISPLAY_PROPERTIES,
true);
infoTextPane.setOpaque(false);
infoTextPane.setEditable(false);
infoTextPane.setContentType("text/html");
infoTextPane.addHyperlinkListener(this);
return infoTextPane;
}
/**
* Returns an HTML string corresponding to the given labelText and infoText,
* that could be easily added to the information text pane.
*
* @param labelText the label text that would be shown in bold
* @param infoText the info text that would be shown in plain text
* @return the newly constructed HTML string
*/
private String getLineString(String labelText, String infoText)
{
return "<b>" + labelText + "</b> : " + infoText + "<br/>";
}
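// For example, getLineString("Codec", "opus / 48000 Hz") produces
// "<b>Codec</b> : opus / 48000 Hz<br/>".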
/**
* Constructs the call info text.
* @return true if call info could be found, false otherwise
*/
private boolean constructCallInfo()
{
StringBuffer stringBuffer = new StringBuffer();
stringBuffer.append(
"<html><body><p align=\"left\">"
+ "<font color=\"" + fontColor + "\" size=\"3\">");
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.CALL_INFORMATION"), ""));
List<Call> calls = callConference.getCalls();
/*
* TODO A telephony conference may consist of a single Call with
* multiple CallPeers but it may as well consist of multiple Calls.
*/
if (calls.size() <= 0)
{
return false;
}
else
{
Call aCall = calls.get(0);
stringBuffer.append(
getLineString(
resources.getI18NString(
"service.gui.callinfo.CALL_IDENTITY"),
aCall.getProtocolProvider().getAccountID()
.getDisplayName()));
int callPeerCount = callConference.getCallPeerCount();
if (callPeerCount > 1)
{
stringBuffer.append(
getLineString(resources.getI18NString(
"service.gui.callinfo.PEER_COUNT"),
String.valueOf(callPeerCount)));
}
boolean isConfFocus = callConference.isConferenceFocus();
if (isConfFocus)
{
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.IS_CONFERENCE_FOCUS"),
String.valueOf(isConfFocus)));
}
TransportProtocol preferredTransport
= aCall.getProtocolProvider().getTransportProtocol();
if (preferredTransport != TransportProtocol.UNKNOWN)
stringBuffer.append(getLineString(
resources.getI18NString("service.gui.callinfo.CALL_TRANSPORT"),
preferredTransport.toString()));
final OperationSetTLS opSetTls = aCall.getProtocolProvider()
.getOperationSet(OperationSetTLS.class);
if (opSetTls != null)
{
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.TLS_PROTOCOL"),
opSetTls.getProtocol()));
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.TLS_CIPHER_SUITE"),
opSetTls.getCipherSuite()));
stringBuffer.append("<b><a href=\"")
.append(CERTIFICATE_URL)
.append("\">")
.append(resources.getI18NString(
"service.gui.callinfo.VIEW_CERTIFICATE"))
.append("</a></b><br/>");
}
constructCallPeersInfo(stringBuffer);
stringBuffer.append("</font></p></body></html>");
infoTextPane.setText(stringBuffer.toString());
infoTextPane.revalidate();
infoTextPane.repaint();
return true;
}
}
/**
* Constructs call peers' info.
*
* @param stringBuffer the <tt>StringBuffer</tt>, where call peer info will
* be added
*/
private void constructCallPeersInfo(StringBuffer stringBuffer)
{
for (CallPeer callPeer : callConference.getCallPeers())
{
if(callPeer instanceof MediaAwareCallPeer)
{
((MediaAwareCallPeer<?,?,?>) callPeer)
.getMediaHandler()
.addPropertyChangeListener(this);
}
stringBuffer.append("<br/>");
constructPeerInfo(callPeer, stringBuffer);
}
}
/**
* Constructs peer info.
*
* @param callPeer the <tt>CallPeer</tt>, for which we'll construct the info
* @param stringBuffer the <tt>StringBuffer</tt>, where call peer info will
* be added
*/
private void constructPeerInfo(CallPeer callPeer, StringBuffer stringBuffer)
{
stringBuffer.append(getLineString(callPeer.getAddress(), ""));
if(callPeer.getCallDurationStartTime() !=
CallPeer.CALL_DURATION_START_TIME_UNKNOWN)
{
Date startTime = new Date(callPeer.getCallDurationStartTime());
stringBuffer.append(getLineString(
resources.getI18NString("service.gui.callinfo.CALL_DURATION"),
GuiUtils.formatTime(startTime.getTime(),
System.currentTimeMillis())));
}
if(callPeer instanceof MediaAwareCallPeer)
{
CallPeerMediaHandler<?> callPeerMediaHandler
= ((MediaAwareCallPeer<?,?,?>) callPeer).getMediaHandler();
if(callPeerMediaHandler != null)
{
MediaStream mediaStream =
callPeerMediaHandler.getStream(MediaType.AUDIO);
if (mediaStream != null && mediaStream.isStarted())
{
stringBuffer.append("<br/>");
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.AUDIO_INFO"), ""));
this.appendStreamEncryptionMethod(
stringBuffer,
callPeerMediaHandler,
mediaStream,
MediaType.AUDIO);
constructAudioVideoInfo(
callPeerMediaHandler,
mediaStream,
stringBuffer,
MediaType.AUDIO);
}
mediaStream = callPeerMediaHandler.getStream(MediaType.VIDEO);
if (mediaStream != null && mediaStream.isStarted())
{
stringBuffer.append("<br/>");
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.VIDEO_INFO"), ""));
this.appendStreamEncryptionMethod(
stringBuffer,
callPeerMediaHandler,
mediaStream,
MediaType.VIDEO);
constructAudioVideoInfo(
callPeerMediaHandler,
mediaStream,
stringBuffer,
MediaType.VIDEO);
}
stringBuffer.append("<br/>");
// ICE state
String iceState = callPeerMediaHandler.getICEState();
if(iceState != null && !iceState.equals("Terminated"))
{
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.ICE_STATE"),
resources.getI18NString(
"service.gui.callinfo.ICE_STATE."
+ iceState.toUpperCase())));
}
stringBuffer.append("<br/>");
// Total harvesting time.
long harvestingTime
= callPeerMediaHandler.getTotalHarvestingTime();
if(harvestingTime != 0)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.TOTAL_HARVESTING_TIME"
),
harvestingTime
+ " "
+ resources.getI18NString(
"service.gui.callinfo.HARVESTING_MS_FOR")
+ " "
+ callPeerMediaHandler.getNbHarvesting()
+ " "
+ resources.getI18NString(
"service.gui.callinfo.HARVESTS")));
}
// Current harvester time if ICE agent is harvesting.
String[] harvesterNames =
{
"GoogleTurnCandidateHarvester",
"GoogleTurnSSLCandidateHarvester",
"HostCandidateHarvester",
"JingleNodesHarvester",
"StunCandidateHarvester",
"TurnCandidateHarvester",
"UPNPHarvester"
};
for(int i = 0; i < harvesterNames.length; ++i)
{
harvestingTime = callPeerMediaHandler.getHarvestingTime(
harvesterNames[i]);
if(harvestingTime != 0)
{
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.HARVESTING_TIME")
+ " " + harvesterNames[i],
harvestingTime
+ " "
+ resources.getI18NString(
"service.gui.callinfo.HARVESTING_MS_FOR"
)
+ " "
+ callPeerMediaHandler.getNbHarvesting(
harvesterNames[i])
+ " "
+ resources.getI18NString(
"service.gui.callinfo.HARVESTS")));
}
}
}
}
}
/**
* Constructs audio video peer info.
*
* @param callPeerMediaHandler The <tt>CallPeerMediaHandler</tt> containing
* the AUDIO/VIDEO stream.
* @param mediaStream the <tt>MediaStream</tt> that gives us access to
* audio video info
* @param stringBuffer the <tt>StringBuffer</tt>, where call peer info will
* be added
* @param mediaType The media type used to determine which stream of the
* media handler must return its encryption method.
*/
private void constructAudioVideoInfo(
CallPeerMediaHandler<?> callPeerMediaHandler,
MediaStream mediaStream,
StringBuffer stringBuffer,
MediaType mediaType)
{
MediaStreamStats mediaStreamStats
= mediaStream.getMediaStreamStats();
if(mediaStreamStats == null)
return;
mediaStreamStats.updateStats();
if(mediaType == MediaType.VIDEO)
{
Dimension downloadVideoSize =
mediaStreamStats.getDownloadVideoSize();
Dimension uploadVideoSize = mediaStreamStats.getUploadVideoSize();
// Checks that at least one video stream is active.
if(downloadVideoSize != null || uploadVideoSize != null)
{
stringBuffer.append(
getLineString(resources.getI18NString(
"service.gui.callinfo.VIDEO_SIZE"),
"↓ "
+ this.videoSizeToString(downloadVideoSize)
+ " ↑ "
+ this.videoSizeToString(uploadVideoSize)));
}
// Otherwise, skip the remaining stats for this video stream.
else
{
return;
}
}
stringBuffer.append(
getLineString(
resources.getI18NString("service.gui.callinfo.CODEC"),
mediaStreamStats.getEncoding()
+ " / " + mediaStreamStats.getEncodingClockRate() + " Hz"));
boolean displayedIpPort = false;
// ICE candidate type
String iceCandidateExtendedType =
callPeerMediaHandler.getICECandidateExtendedType(
mediaType.toString());
if(iceCandidateExtendedType != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_CANDIDATE_EXTENDED_TYPE"),
iceCandidateExtendedType));
displayedIpPort = true;
}
// Local host address
InetSocketAddress iceLocalHostAddress =
callPeerMediaHandler.getICELocalHostAddress(mediaType.toString());
if(iceLocalHostAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_LOCAL_HOST_ADDRESS"),
iceLocalHostAddress.getAddress().getHostAddress()
+ "/" + iceLocalHostAddress.getPort()));
displayedIpPort = true;
}
// Local reflexive address
InetSocketAddress iceLocalReflexiveAddress =
callPeerMediaHandler.getICELocalReflexiveAddress(
mediaType.toString());
if(iceLocalReflexiveAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_LOCAL_REFLEXIVE_ADDRESS"),
iceLocalReflexiveAddress.getAddress()
.getHostAddress()
+ "/" + iceLocalReflexiveAddress.getPort()));
displayedIpPort = true;
}
// Local relayed address
InetSocketAddress iceLocalRelayedAddress =
callPeerMediaHandler.getICELocalRelayedAddress(
mediaType.toString());
if(iceLocalRelayedAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_LOCAL_RELAYED_ADDRESS"),
iceLocalRelayedAddress.getAddress()
.getHostAddress()
+ "/" + iceLocalRelayedAddress.getPort()));
displayedIpPort = true;
}
// Remote relayed address
InetSocketAddress iceRemoteRelayedAddress =
callPeerMediaHandler.getICERemoteRelayedAddress(
mediaType.toString());
if(iceRemoteRelayedAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_REMOTE_RELAYED_ADDRESS"),
iceRemoteRelayedAddress.getAddress()
.getHostAddress()
+ "/" + iceRemoteRelayedAddress.getPort()));
displayedIpPort = true;
}
// Remote reflexive address
InetSocketAddress iceRemoteReflexiveAddress =
callPeerMediaHandler.getICERemoteReflexiveAddress(
mediaType.toString());
if(iceRemoteReflexiveAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_REMOTE_REFLEXIVE_ADDRESS"),
iceRemoteReflexiveAddress.getAddress()
.getHostAddress()
+ "/" + iceRemoteReflexiveAddress.getPort()));
displayedIpPort = true;
}
// Remote host address
InetSocketAddress iceRemoteHostAddress =
callPeerMediaHandler.getICERemoteHostAddress(mediaType.toString());
if(iceRemoteHostAddress != null)
{
stringBuffer.append(getLineString(resources.getI18NString(
"service.gui.callinfo.ICE_REMOTE_HOST_ADDRESS"),
iceRemoteHostAddress.getAddress().getHostAddress()
+ "/" + iceRemoteHostAddress.getPort()));
displayedIpPort = true;
}
// If the stream does not use ICE, then show the transport IP/port.
if(!displayedIpPort)
{
stringBuffer.append(
getLineString(
resources.getI18NString("service.gui.callinfo.LOCAL_IP"),
mediaStreamStats.getLocalIPAddress()
+ " / "
+ String.valueOf(mediaStreamStats.getLocalPort())));
stringBuffer.append(
getLineString(
resources.getI18NString("service.gui.callinfo.REMOTE_IP"),
mediaStreamStats.getRemoteIPAddress()
+ " / "
+ String.valueOf(mediaStreamStats.getRemotePort())));
}
stringBuffer.append(
getLineString(
resources.getI18NString(
"service.gui.callinfo.BANDWITH"),
"↓ "
+ (int) mediaStreamStats.getDownloadRateKiloBitPerSec()
+ " Kbps "
+ " ↑ "
+ (int) mediaStreamStats.getUploadRateKiloBitPerSec()
+ " Kbps"));
stringBuffer.append(
getLineString(
resources.getI18NString("service.gui.callinfo.LOSS_RATE"),
"↓ " + (int) mediaStreamStats.getDownloadPercentLoss()
+ "% ↑ "
+ (int) mediaStreamStats.getUploadPercentLoss()
+ "%"));
stringBuffer.append(
getLineString(
resources.getI18NString(
"service.gui.callinfo.DECODED_WITH_FEC"),
String.valueOf(mediaStreamStats.getNbFec())));
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.DISCARDED_PERCENT"),
String.valueOf((int)mediaStreamStats.getPercentDiscarded()
+ "%")));
stringBuffer.append(getLineString(
resources.getI18NString("service.gui.callinfo.DISCARDED_TOTAL"),
String.valueOf(mediaStreamStats.getNbDiscarded())
+ " (" + mediaStreamStats.getNbDiscardedLate() + " late, "
+ mediaStreamStats.getNbDiscardedFull() + " full, "
+ mediaStreamStats.getNbDiscardedShrink() + " shrink, "
+ mediaStreamStats.getNbDiscardedReset() + " reset)"));
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.ADAPTIVE_JITTER_BUFFER"),
mediaStreamStats.isAdaptiveBufferEnabled()
? "enabled" : "disabled"));
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.JITTER_BUFFER_DELAY"),
"~" + mediaStreamStats.getJitterBufferDelayMs()
+ "ms; currently in queue: "
+ mediaStreamStats.getPacketQueueCountPackets() + "/"
+ mediaStreamStats.getPacketQueueSize() + " packets"));
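        // getRttMs() returns -1 while no round-trip time measurement is
        // available yet, in which case the RTT line is simply omitted.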
long rttMs = mediaStreamStats.getRttMs();
if(rttMs != -1)
{
stringBuffer.append(
getLineString(resources.getI18NString(
"service.gui.callinfo.RTT"),
rttMs + " ms"));
}
stringBuffer.append(
getLineString(resources.getI18NString(
"service.gui.callinfo.JITTER"),
"↓ " + (int) mediaStreamStats.getDownloadJitterMs()
+ " ms ↑ "
+ (int) mediaStreamStats.getUploadJitterMs() + " ms"));
}
/**
* Called when the title of the given CallPanel changes.
*
     * @param callContainer the <tt>CallPanel</tt> whose title has changed
*/
public void callTitleChanged(CallPanel callContainer)
{
String selectedText = infoTextPane.getSelectedText();
// If there's a selection do not update call info, otherwise the user
// would not be able to copy the selected text.
if (selectedText != null && selectedText.length() > 0)
return;
hasCallInfo = this.constructCallInfo();
}
/**
* Shows/hides the corresponding window.
*
* @param isVisible <tt>true</tt> to show the window, <tt>false</tt> to
* hide it
*/
public void setVisible(boolean isVisible)
{
if (isVisible)
{
callInfoWindow.pack();
callInfoWindow.setPreferredSize(new Dimension(300, 450));
callInfoWindow.setSize(300, 450);
callInfoWindow.setLocationRelativeTo(null);
}
callInfoWindow.setVisible(isVisible);
}
/**
* Indicates if the corresponding window is visible.
*
* @return <tt>true</tt> if the window is visible, <tt>false</tt> -
* otherwise
*/
public boolean isVisible()
{
return callInfoWindow.isVisible();
}
/**
* Indicates if the call info window has any text to display
*
* @return <tt>true</tt> if the window contains call info,
* <tt>false</tt> otherwise
*/
public boolean hasCallInfo()
{
return hasCallInfo;
}
/**
* Disposes the corresponding window.
*/
public void dispose()
{
callInfoWindow.dispose();
}
/**
* Appends to the string buffer the stream encryption method (null, MIKEY,
* SDES, ZRTP) used for a given media stream (type AUDIO or VIDEO).
*
* @param stringBuffer The string buffer containing the call information
* statistics.
* @param callPeerMediaHandler The media handler containing the different
* media streams.
* @param mediaStream the <tt>MediaStream</tt> that gives us access to
* audio/video info.
* @param mediaType The media type used to determine which stream of the
     * media handler must return its encryption method.
*/
private void appendStreamEncryptionMethod(
StringBuffer stringBuffer,
CallPeerMediaHandler<?> callPeerMediaHandler,
MediaStream mediaStream,
MediaType mediaType)
{
String transportProtocolString = "";
StreamConnector.Protocol transportProtocol =
mediaStream.getTransportProtocol();
if(transportProtocol != null)
{
transportProtocolString = transportProtocol.toString();
}
String rtpType;
SrtpControl srtpControl
= callPeerMediaHandler.getEncryptionMethod(mediaType);
// If the stream is secured.
if(srtpControl != null)
{
String info;
if (srtpControl instanceof ZrtpControl)
{
info = "ZRTP " + ((ZrtpControl)srtpControl).getCipherString();
}
else
{
info = srtpControl.getSrtpControlType().toString();
}
rtpType = resources.getI18NString(
"service.gui.callinfo.MEDIA_STREAM_SRTP")
+ " ("
+ resources.getI18NString(
"service.gui.callinfo.KEY_EXCHANGE_PROTOCOL")
+ ": "
+ info
+ ")";
}
// If the stream is not secured.
else
{
rtpType = resources.getI18NString(
"service.gui.callinfo.MEDIA_STREAM_RTP");
}
// Appends the encryption status String.
stringBuffer.append(getLineString(
resources.getI18NString(
"service.gui.callinfo.MEDIA_STREAM_TRANSPORT_PROTOCOL"),
transportProtocolString + " / " + rtpType));
}
/**
* Listen for ice property change to trigger call info update.
* @param evt the event for state change.
*/
public void propertyChange(PropertyChangeEvent evt)
{
if(evt.getPropertyName().equals(Agent.PROPERTY_ICE_PROCESSING_STATE))
{
callTitleChanged(null);
}
}
/**
* Converts a video size Dimension into its String representation.
*
* @param videoSize The video size Dimension, containing the width and the
     * height of the video.
*
* @return The String representation of the video width and height, or a
* String with "Not Available (N.A.)" if the videoSize is null.
*/
private String videoSizeToString(Dimension videoSize)
{
if(videoSize == null)
{
return resources.getI18NString("service.gui.callinfo.NA");
}
return ((int) videoSize.getWidth()) + " x " + ((int) videoSize.getHeight());
}
/**
* Invoked when user clicks a link in the editor pane.
* @param e the event
*/
public void hyperlinkUpdate(HyperlinkEvent e)
{
// Handle "View certificate" link
if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED
&& CERTIFICATE_URL.equals(e.getDescription()))
{
List<Call> calls = callConference.getCalls();
if (!calls.isEmpty())
{
Call aCall = calls.get(0);
Certificate[] chain = aCall.getProtocolProvider()
.getOperationSet(OperationSetTLS.class)
.getServerCertificates();
ViewCertificateFrame certFrame =
new ViewCertificateFrame(chain, null,
resources.getI18NString(
"service.gui.callinfo.TLS_CERTIFICATE_CONTENT"));
certFrame.setVisible(true);
}
}
}
}
|
|
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.plugin.skinmanager;
import java.awt.event.*;
import java.io.*;
import java.net.*;
import java.util.zip.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.util.*;
import net.java.sip.communicator.plugin.desktoputil.*;
import org.osgi.framework.*;
/**
* Selection listener for the <tt>SkinSelector</tt>.
* @author Adam Netocny
*/
public class SkinSelectionListener implements ActionListener
{
/**
* The object used for logging.
*/
private Logger logger = Logger.getLogger(SkinSelectionListener.class);
/**
* Currently selected item.
*/
private Object current = null;
/**
     * Suppression flag for events. If true, incoming events will be ignored.
*/
private boolean suppressed = false;
/**
* Invoked when an action occurs.
* @param e <tt>ActionEvent</tt>
*/
public void actionPerformed(ActionEvent e)
{
SkinSelector selector = (SkinSelector) e.getSource();
if (current != null && current.equals(selector.getSelectedItem()))
return;
if(suppressed)
{
current = selector.getSelectedItem();
return;
}
if(selector.getSelectedItem() instanceof String)
{
String selectedItem = (String) selector.getSelectedItem();
if(selectedItem.equals(SkinSelector.ADD_TEXT))
{
selector.hidePopup();
SipCommFileChooser chooser = createFileChooser();
File newBundleFile = chooser.getFileFromDialog();
if(newBundleFile != null)
{
try
{
File jar = null;
try
{
jar = Resources.getResources()
.prepareSkinBundleFromZip(newBundleFile);
}
catch (Exception ex)
{
logger.info("Failed to load skin from zip.", ex);
SkinManagerActivator.getUIService().getPopupDialog()
.showMessagePopupDialog(ex.getClass() + ": "
+ ex.getMessage(), "Error",
PopupDialog.ERROR_MESSAGE);
}
if (jar != null)
{
try
{
Bundle newBundle = SkinManagerActivator
.bundleContext.installBundle(
jar.toURI().toURL().toString());
selector.selectNoSkin();
newBundle.start();
}
catch (MalformedURLException ex)
{
logger.info("Failed to load skin from zip.", ex);
}
}
}
catch (BundleException ex)
{
logger.info("Failed to install bundle.", ex);
SkinManagerActivator.getUIService().getPopupDialog()
.showMessagePopupDialog(ex.getMessage(), "Error",
PopupDialog.ERROR_MESSAGE);
}
catch (Throwable ex)
{
logger.info("Failed to install bundle.", ex);
}
}
else
{
if(current != null)
{
selector.setSelectedItem(current);
}
else
{
selector.setSelectedIndex(0);
}
}
}
else if(selectedItem.equals(SkinSelector.DEFAULT_TEXT))
{
selector.selectNoSkin();
}
else if(selectedItem.equals(SkinSelectorRenderer.SEPARATOR))
{
if(current != null)
{
selector.setSelectedItem(current);
}
else
{
selector.setSelectedIndex(0);
}
}
}
else if(selector.getSelectedItem() instanceof Bundle)
{
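            // An already installed skin bundle was selected: clear the current
            // skin first, then (re)start the chosen bundle so it becomes the
            // active skin.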
Bundle select = (Bundle)selector.getSelectedItem();
selector.selectNoSkin();
try
{
select.start();
}
catch (BundleException ex)
{
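                // Starting the selected skin bundle failed; the exception is
                // deliberately swallowed.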
// ex.printStackTrace();
}
}
current = selector.getSelectedItem();
}
/**
     * Sets whether the caught event should be ignored.
* @param supp If true the event will be ignored.
*/
public void suppressAction(boolean supp)
{
suppressed = supp;
}
/**
* Creates the file chooser used to install a new skin.
*
* @return the created file chooser
*/
private SipCommFileChooser createFileChooser()
{
SipCommFileChooser chooser = GenericFileDialog.create(
null, "New bundle...",
SipCommFileChooser.LOAD_FILE_OPERATION);
chooser.addFilter(new SipCommFileFilter()
{
@Override
public boolean accept(File f)
{
if (f.isDirectory())
return true;
boolean good = true;
try
{
new ZipFile(f);
}
catch (IOException ex)
{
good = false;
}
if (!f.getName().toLowerCase().endsWith(".zip"))
{
good = false;
}
return good;
}
@Override
public String getDescription()
{
return "Zip files (*.zip)";
}
});
return chooser;
}
}
|
|
/*******************************************************************************
* Copyright 2015, The IKANOW Open Source Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.ikanow.aleph2.analytics.storm.utils;
import static org.junit.Assert.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.StringScheme;
import storm.kafka.ZkHosts;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.TopologyBuilder;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.ikanow.aleph2.analytics.storm.data_model.IStormController;
import com.ikanow.aleph2.core.shared.utils.JarBuilderUtil;
import com.ikanow.aleph2.data_model.objects.shared.GlobalPropertiesBean;
//import com.ikanow.aleph2.storm.samples.bolts.SampleKafkaBolt;
//import com.ikanow.aleph2.storm.samples.bolts.SampleKafkaOutputFileBolt;
//import com.ikanow.aleph2.storm.samples.bolts.SampleWordParserBolt;
import com.ikanow.aleph2.data_model.utils.BeanTemplateUtils;
import com.ikanow.aleph2.data_model.utils.PropertiesUtils;
import com.ikanow.aleph2.distributed_services.data_model.DistributedServicesPropertyBean;
import com.ikanow.aleph2.distributed_services.services.CoreDistributedServices;
import com.ikanow.aleph2.distributed_services.services.ICoreDistributedServices;
import com.ikanow.aleph2.distributed_services.services.MockCoreDistributedServices;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
@SuppressWarnings("unused")
public class TestStormControllerUtil_Cache {
private static IStormController storm_cluster;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
storm_cluster = StormControllerUtil.getLocalStormController();
}
@Before
public void setupCoreDistributedServices() throws Exception {
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
@Before
public void setUp() throws Exception {
}
@After
public void tearDown() throws Exception {
}
// @Test
// public void test() throws Exception {
// TopologyBuilder builder = new TopologyBuilder();
// builder.setSpout("spout1", new SampleWebReaderSpout("http://lifehacker.com/the-best-board-games-for-developing-valuable-real-life-1714642211"));
// builder.setBolt("bolt1", new SampleWordParserBolt()).shuffleGrouping("spout1");
//
// StormControllerUtil.submitJob(storm_cluster, "test_job", null, builder.createTopology());
// //storm_cluster.submitJob("test_job", null, builder.createTopology());
//
// Thread.sleep(10000);
//
// assertTrue(true);
// }
//
// @Ignore
// @Test
// public void testKafkaSpout() throws Exception {
// final String TOPIC_NAME = "TEST_KAFKA_SPOUT";
// final String sample_jar_file = "C:/Users/Burch/Desktop/aleph2_storm_samples-0.0.1-SNAPSHOT-jar-with-dependencies.jar";
// //STEP 1: Run the CDS kafka test to create data in the kafka queue: TEST_KAFKA_SPOUT
// //TODO port that code over here (need to create a CDS)
//
// //STEP 2: create a topology and submit it to that cluster
// TopologyBuilder builder = new TopologyBuilder();
// BrokerHosts hosts = new ZkHosts("api001.dev.ikanow.com:2181");
// SpoutConfig spout_config = new SpoutConfig(hosts, TOPIC_NAME, "/" + TOPIC_NAME, "1");
// spout_config.scheme = new SchemeAsMultiScheme(new StringScheme());
// KafkaSpout kafka_spout = new KafkaSpout(spout_config);
// builder.setSpout("spout1", kafka_spout );
// builder.setBolt("bolt1", new SampleKafkaBolt()).shuffleGrouping("spout1");
// builder.setBolt("bolt2", new SampleKafkaOutputFileBolt()).shuffleGrouping("bolt1");
//
// String nimbus_host = "api001.dev.ikanow.com";
// int nimbus_thrift_port = 6627;
// String storm_thrift_transport_plugin = "backtype.storm.security.auth.SimpleTransportPlugin";
// IStormController storm = StormControllerUtil.getRemoteStormController(nimbus_host, nimbus_thrift_port, storm_thrift_transport_plugin);
// List<String> jars_to_merge = new ArrayList<String>();
// jars_to_merge.add(sample_jar_file);
// //TODO should merge in data_model
// StormControllerUtil.submitJob(storm, "test_kafka_spout", StormControllerUtil.buildStormTopologyJar(jars_to_merge), builder.createTopology());
// }
// @Test
// public void testKafkaSpout() throws JsonParseException, JsonMappingException, IOException {
// Map<String, Object> map = new HashMap<String, Object>();
// map.put("globals.local_yarn_config_dir", "C:/Users/Burch/Desktop/yarn_config/");
// Config config = ConfigFactory.parseMap(map);
// final Config subconfig = PropertiesUtils.getSubConfig(config, GlobalPropertiesBean.PROPERTIES_ROOT).orElse(null);
// final GlobalPropertiesBean globals = BeanTemplateUtils.from(subconfig, GlobalPropertiesBean.class);
// System.out.println(globals.local_yarn_config_dir());
//
// final String TOPIC_NAME = "TEST_KAFKA_SPOUT";
//
// //1. create local kafka
// //This is handled when MockCDS starts, should be fine
//
// //2. create local storm
// IStormController storm_controller = StormControllerUtil.getStormControllerFromYarnConfig(globals.local_yarn_config_dir());// new LocalStormController();
//
// //3. create kafka queue
//
// //4. create storm topology using kafka spout
//
// //5. produce to kafka queue
//
// //6. observe storm output receiving produced stuff
// }
// @Test
// public void testCaching() throws Exception {
// TopologyBuilder builder = new TopologyBuilder();
// builder.setSpout("spout1", new SampleWebReaderSpout("http://lifehacker.com/the-best-board-games-for-developing-valuable-real-life-1714642211"));
// builder.setBolt("bolt1", new SampleWordParserBolt()).shuffleGrouping("spout1");
//
// //StormControllerUtil.submitJob(storm_cluster, "test_job", null, builder.createTopology());
// //storm_cluster.submitJob("test_job", null, builder.createTopology());
//
// String nimbus_host = "api001.dev.ikanow.com";
// int nimbus_thrift_port = 6627;
// String storm_thrift_transport_plugin = "backtype.storm.security.auth.SimpleTransportPlugin";
// IStormController storm_controller = StormControllerUtil.getRemoteStormController(nimbus_host, nimbus_thrift_port, storm_thrift_transport_plugin);
// //StormControllerUtil.startJob(storm_controller, bucket, context, user_lib_paths, enrichment_toplogy);
//
// Thread.sleep(10000);
//
// assertTrue(true);
// }
@Test
public void testCache() throws IOException, InterruptedException, ExecutionException {
final String jar_location = System.getProperty("java.io.tmpdir");
File file1 = createFakeZipFile(null);//File.createTempFile("recent_date_test_", null);
Thread.sleep(1500);
File file2 = createFakeZipFile(null);//File.createTempFile("recent_date_test_", null);
Thread.sleep(1500);
File file3 = createFakeZipFile(null);//File.createTempFile("recent_date_test_", null);
List<String> files1 = Arrays.asList(file1.getCanonicalPath(),file2.getCanonicalPath(),file3.getCanonicalPath());
String input_jar_location = JarBuilderUtil.getHashedJarName(files1, jar_location);
File input_jar = new File(input_jar_location);
input_jar.delete();
assertFalse(input_jar.exists());
//first time it should create
final CompletableFuture<String> jar_future1 = StormControllerUtil.buildOrReturnCachedStormTopologyJar(files1, jar_location);
jar_future1.get();
assertTrue(input_jar.exists());
//second time it should cache
long file_mod_time = getFileModifiedTime(input_jar);
final CompletableFuture<String> jar_future2 = StormControllerUtil.buildOrReturnCachedStormTopologyJar(files1, jar_location);
jar_future2.get();
assertEquals(file_mod_time, getFileModifiedTime(input_jar));
//third time modify a file, it should no longer cache
		Thread.sleep(1500); //sleep briefly so the modified time updates
file1.delete();
file1 = createFakeZipFile(file2.getCanonicalPath());
final CompletableFuture<String> jar_future3 = StormControllerUtil.buildOrReturnCachedStormTopologyJar(files1, jar_location);
jar_future3.get();
assertNotEquals(file_mod_time, getFileModifiedTime(input_jar)); //original jar creation time should not match its current modified time (it should have been remade)
//cleanup
file1.delete();
file2.delete();
file3.delete();
new File(input_jar_location).delete();
}
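	/**
	 * Reads the last-modified timestamp of the first entry inside the given jar;
	 * the cache test uses this as a proxy for when the jar was last (re)built.
	 */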
private long getFileModifiedTime(File input_jar) throws IOException {
ZipInputStream inputZip = new ZipInputStream(new FileInputStream(input_jar));
ZipEntry e = inputZip.getNextEntry();
long time = e.getLastModifiedTime().toMillis();
inputZip.close();
return time;
}
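	/**
	 * Creates a small zip file with a single entry holding random content, used to
	 * simulate an input jar. If file_name is null a fresh temp file is created,
	 * otherwise the file at the given path is (re)written.
	 */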
private static File createFakeZipFile(String file_name) throws IOException {
File file;
if ( file_name == null )
file = File.createTempFile("recent_date_test_", ".zip");
else
file = new File(file_name);
Random r = new Random();
ZipOutputStream outputZip = new ZipOutputStream(new FileOutputStream(file));
ZipEntry e = new ZipEntry("some_file.tmp");
outputZip.putNextEntry(e);
outputZip.write(r.nextInt());
outputZip.close();
return file;
}
}
|
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.core.connectors.blackboard.service.impl;
import com.dytech.devlib.Base64;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.tle.annotation.NonNullByDefault;
import com.tle.annotation.Nullable;
import com.tle.beans.Institution;
import com.tle.beans.item.IItem;
import com.tle.beans.item.ViewableItemType;
import com.tle.common.PathUtils;
import com.tle.common.connectors.ConnectorContent;
import com.tle.common.connectors.ConnectorCourse;
import com.tle.common.connectors.ConnectorFolder;
import com.tle.common.connectors.ConnectorTerminology;
import com.tle.common.connectors.entity.Connector;
import com.tle.common.searching.SearchResults;
import com.tle.common.util.BlindSSLSocketFactory;
import com.tle.core.connectors.blackboard.BlackboardRESTConnectorConstants;
import com.tle.core.connectors.blackboard.beans.*;
import com.tle.core.connectors.blackboard.service.BlackboardRESTConnectorService;
import com.tle.core.connectors.exception.LmsUserNotFoundException;
import com.tle.core.connectors.service.AbstractIntegrationConnectorRespository;
import com.tle.core.connectors.service.ConnectorRepositoryService;
import com.tle.core.connectors.service.ConnectorService;
import com.tle.core.encryption.EncryptionService;
import com.tle.core.guice.Bind;
import com.tle.core.institution.InstitutionCache;
import com.tle.core.institution.InstitutionService;
import com.tle.core.plugins.AbstractPluginService;
import com.tle.core.services.HttpService;
import com.tle.core.services.http.Request;
import com.tle.core.services.http.Response;
import com.tle.core.settings.service.ConfigurationService;
import com.tle.web.integration.Integration;
import com.tle.web.selection.SelectedResource;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
@NonNullByDefault
@SuppressWarnings({"nls", "deprecation"})
@Bind(BlackboardRESTConnectorService.class)
@Singleton
public class BlackboardRESTConnectorServiceImpl extends AbstractIntegrationConnectorRespository
implements BlackboardRESTConnectorService {
private static final Logger LOGGER = Logger.getLogger(BlackboardRESTConnectorService.class);
private static final String KEY_PFX =
AbstractPluginService.getMyPluginId(BlackboardRESTConnectorService.class) + ".";
private static final String API_ROOT = "/learn/api/public/v1";
@Inject private HttpService httpService;
@Inject private ConfigurationService configService;
@Inject private ConnectorService connectorService;
@Inject private EncryptionService encryptionService;
private static final String TOKEN_KEY = "TOKEN";
private InstitutionCache<LoadingCache<String, LoadingCache<String, String>>> tokenCache;
private static final ObjectMapper jsonMapper = new ObjectMapper();
private static final ObjectMapper prettyJsonMapper = new ObjectMapper();
static {
jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
jsonMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
prettyJsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
prettyJsonMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
prettyJsonMapper.configure(SerializationFeature.INDENT_OUTPUT, true);
}
public BlackboardRESTConnectorServiceImpl() {
// Ewwww
BlindSSLSocketFactory.register();
// Turn off spurious Pre-emptive Authentication bollocks
Logger.getLogger("org.apache.commons.httpclient.HttpMethodDirector").setLevel(Level.ERROR);
}
@Inject
public void setInstitutionService(InstitutionService service) {
tokenCache =
service.newInstitutionAwareCache(
new CacheLoader<Institution, LoadingCache<String, LoadingCache<String, String>>>() {
@Override
public LoadingCache<String, LoadingCache<String, String>> load(Institution key) {
// MaximumSize is set to 200, which would allow for 200 Blackboard REST connectors,
// which should be more than enough for anyone.
return CacheBuilder.newBuilder()
.maximumSize(200)
.expireAfterAccess(60, TimeUnit.MINUTES)
.build(
new CacheLoader<String, LoadingCache<String, String>>() {
@Override
public LoadingCache<String, String> load(final String connectorUuid)
throws Exception {
// BB tokens last one hour, so no point holding onto it longer than
// that. Of course, we need to handle the case
// where we are still holding onto an expired token.
return CacheBuilder.newBuilder()
.expireAfterWrite(60, TimeUnit.MINUTES)
.build(
new CacheLoader<String, String>() {
@Override
public String load(String fixedKey) {
// fixedKey is ignored. It's always TOKEN
final Connector connector =
connectorService.getByUuid(connectorUuid);
final String apiKey =
connector.getAttribute(
BlackboardRESTConnectorConstants.FIELD_API_KEY);
final String apiSecret =
encryptionService.decrypt(
connector.getAttribute(
BlackboardRESTConnectorConstants
.FIELD_API_SECRET));
final String b64 =
new Base64()
.encode((apiKey + ":" + apiSecret).getBytes())
.replace("\n", "")
.replace("\r", "");
final Request req =
new Request(
PathUtils.urlPath(
connector.getServerUrl(),
"learn/api/public/v1/oauth2/token"));
req.setMethod(Request.Method.POST);
req.setMimeType("application/x-www-form-urlencoded");
req.addHeader("Authorization", "Basic " + b64);
req.setBody("grant_type=client_credentials");
try (final Response resp =
httpService.getWebContent(
req, configService.getProxyDetails())) {
final Token token =
jsonMapper.readValue(
resp.getInputStream(), Token.class);
return token.getAccessToken();
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
});
}
});
}
});
}
@Override
protected ViewableItemType getViewableItemType() {
return ViewableItemType.GENERIC;
}
@Override
protected String getIntegrationId() {
return "gen";
}
@Override
protected boolean isRelativeUrls() {
return false;
}
@Override
public boolean isRequiresAuthentication(Connector connector) {
return false;
}
@Override
public String getAuthorisationUrl(Connector connector, String forwardUrl, String authData) {
return null;
}
@Override
public String getCourseCode(Connector connector, String username, String courseId)
throws LmsUserNotFoundException {
return null;
}
@Override
public List<ConnectorCourse> getCourses(
Connector connector,
String username,
boolean editableOnly,
boolean archived,
boolean management)
throws LmsUserNotFoundException {
final List<ConnectorCourse> list = new ArrayList<>();
// FIXME: courses for current user...?
// TODO - since v3400.8.0, this endpoint should use v2
String url = API_ROOT + "/courses";
/*
if( !archived )
{
url += "&active=true";
}*/
final List<Course> allCourses = new ArrayList<>();
// TODO: a more generic way of doing paged results. Contents also does paging
Courses courses = sendBlackboardData(connector, url, Courses.class, null, Request.Method.GET);
allCourses.addAll(courses.getResults());
Paging paging = courses.getPaging();
while (paging != null && paging.getNextPage() != null) {
// FIXME: construct nextUrl from the base URL we know about and the relative URL from
// getNextPage
final String nextUrl = paging.getNextPage();
courses = sendBlackboardData(connector, nextUrl, Courses.class, null, Request.Method.GET);
allCourses.addAll(courses.getResults());
paging = courses.getPaging();
}
for (Course course : allCourses) {
// Display all courses if the archived flag is set, otherwise, just the 'available' ones
if (archived || Availability.YES.equals(course.getAvailability().getAvailable())) {
final ConnectorCourse cc = new ConnectorCourse(course.getId());
cc.setCourseCode(course.getCourseId());
cc.setName(course.getName());
cc.setAvailable(true);
list.add(cc);
}
}
return list;
}
private Course getCourseBean(Connector connector, String courseID) {
// FIXME: courses for current user...?
// TODO - since v3400.8.0, this endpoint should use v2
String url = API_ROOT + "/courses/" + courseID;
final Course course =
sendBlackboardData(connector, url, Course.class, null, Request.Method.GET);
return course;
}
private Content getContentBean(Connector connector, String courseID, String folderID) {
// FIXME: courses for current user...?
// TODO - since v3400.8.0, this endpoint should use v2
String url = API_ROOT + "/courses/" + courseID + "/contents/" + folderID;
final Content folder =
sendBlackboardData(connector, url, Content.class, null, Request.Method.GET);
return folder;
}
@Override
public List<ConnectorFolder> getFoldersForCourse(
Connector connector, String username, String courseId, boolean management)
throws LmsUserNotFoundException {
// FIXME: courses for current user...?
final String url = API_ROOT + "/courses/" + courseId + "/contents";
return retrieveFolders(connector, url, username, courseId, management);
}
@Override
public List<ConnectorFolder> getFoldersForFolder(
Connector connector, String username, String courseId, String folderId, boolean management)
throws LmsUserNotFoundException {
// FIXME: courses for current user...?
final String url = API_ROOT + "/courses/" + courseId + "/contents/" + folderId + "/children/";
return retrieveFolders(connector, url, username, courseId, management);
}
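  /**
   * Fetches the contents at the given URL (a single page; paging is still a TODO,
   * see getCourses) and maps entries whose content handler is a resource folder to
   * ConnectorFolder objects for the given course.
   */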
private List<ConnectorFolder> retrieveFolders(
Connector connector, String url, String username, String courseId, boolean management) {
final List<ConnectorFolder> list = new ArrayList<>();
final Contents contents =
sendBlackboardData(connector, url, Contents.class, null, Request.Method.GET);
final ConnectorCourse course = new ConnectorCourse(courseId);
final List<Content> results = contents.getResults();
for (Content content : results) {
final Content.ContentHandler handler = content.getContentHandler();
if (handler != null && Content.ContentHandler.RESOURCE_FOLDER.equals(handler.getId())) {
// Unavailable folders are inaccessible to students,
// but should be available for instructors to push content to.
final ConnectorFolder cc = new ConnectorFolder(content.getId(), course);
if (content.getAvailability() != null) {
cc.setAvailable(Availability.YES.equals(content.getAvailability().getAvailable()));
} else {
// FIXME: Is this an appropriate default?
cc.setAvailable(false);
}
cc.setName(content.getTitle());
cc.setLeaf(content.getHasChildren() != null && !content.getHasChildren());
list.add(cc);
}
}
return list;
}
@Override
public ConnectorFolder addItemToCourse(
Connector connector,
String username,
String courseId,
String folderId,
IItem<?> item,
SelectedResource selectedResource)
throws LmsUserNotFoundException {
final String url = API_ROOT + "/courses/" + courseId + "/contents/" + folderId + "/children";
final Integration.LmsLinkInfo linkInfo = getLmsLink(item, selectedResource);
final Integration.LmsLink lmsLink = linkInfo.getLmsLink();
final Content content = new Content();
content.setTitle(lmsLink.getName());
// TODO consider a nicer way to handle this. Bb needs the description to be 250 chars or less
// Using TextUtils.INSTANCE.ensureWrap(lmsLink.getDescription(),250, 250, true) doesn't work
    // because it can still produce a **raw** string longer than 250 characters. Bb content link
    // descriptions can handle html formatting. It doesn't look like there is a configuration to
// change the ensureWrap behavior - I'm reverting this so long descriptions won't block the
// integration.
final String lmsLinkDesc = lmsLink.getDescription();
content.setDescription(
lmsLinkDesc.substring(0, (lmsLinkDesc.length() > 250) ? 250 : lmsLinkDesc.length()));
final Content.ContentHandler contentHandler = new Content.ContentHandler();
contentHandler.setId(Content.ContentHandler.RESOURCE_LTI_LINK);
contentHandler.setUrl(lmsLink.getUrl());
content.setContentHandler(contentHandler);
final Availability availability = new Availability();
availability.setAvailable(Availability.YES);
availability.setAllowGuests(true);
content.setAvailability(availability);
sendBlackboardData(connector, url, null, content, Request.Method.POST);
LOGGER.trace("Returning a courseId = [" + courseId + "], and folderId = [" + folderId + "]");
ConnectorFolder cf = new ConnectorFolder(folderId, new ConnectorCourse(courseId));
// CB: Is there a better way to get the name of the folder and the course?
// AH: Unfortunately not. We could cache them, but it probably isn't worth the additional
// complexity
Content folder = getContentBean(connector, courseId, folderId);
cf.setName(folder.getTitle());
Course course = getCourseBean(connector, courseId);
cf.getCourse().setName(course.getName());
return cf;
}
@Override
public List<ConnectorContent> findUsages(
Connector connector,
String username,
String uuid,
int version,
boolean versionIsLatest,
boolean archived,
boolean allVersion)
throws LmsUserNotFoundException {
return null;
}
@Override
public SearchResults<ConnectorContent> findAllUsages(
Connector connector,
String username,
String query,
String courseId,
String folderId,
boolean archived,
int offset,
int count,
ConnectorRepositoryService.ExternalContentSortType sortType,
boolean reverseSort)
throws LmsUserNotFoundException {
return null;
}
@Override
public int getUnfilteredAllUsagesCount(
Connector connector, String username, String query, boolean archived)
throws LmsUserNotFoundException {
return 0;
}
@Override
public boolean deleteContent(Connector connector, String username, String contentId)
throws LmsUserNotFoundException {
return false;
}
@Override
public boolean editContent(
Connector connector, String username, String contentId, String title, String description)
throws LmsUserNotFoundException {
return false;
}
@Override
public boolean moveContent(
Connector connector, String username, String contentId, String courseId, String folderId)
throws LmsUserNotFoundException {
return false;
}
@Override
public ConnectorTerminology getConnectorTerminology() {
final ConnectorTerminology terms = new ConnectorTerminology();
terms.setShowArchived(getKey("finduses.showarchived"));
terms.setShowArchivedLocations(getKey("finduses.showarchived.courses"));
terms.setCourseHeading(getKey("finduses.course"));
terms.setLocationHeading(getKey("finduses.location"));
return terms;
}
@Override
public boolean supportsExport() {
return true;
}
@Override
public boolean supportsEdit() {
return true;
}
@Override
public boolean supportsView() {
return true;
}
@Override
public boolean supportsDelete() {
return true;
}
@Override
public boolean supportsCourses() {
return false;
}
@Override
public boolean supportsFindUses() {
return false;
}
@Override
public boolean supportsReverseSort() {
return false;
}
@Nullable
private <T> T sendBlackboardData(
Connector connector,
String path,
@Nullable Class<T> returnType,
@Nullable Object data,
Request.Method method) {
return sendBlackboardData(connector, path, returnType, data, method, true);
}
@Nullable
private <T> T sendBlackboardData(
Connector connector,
String path,
@Nullable Class<T> returnType,
@Nullable Object data,
Request.Method method,
boolean firstTime) {
try {
final URI uri = URI.create(PathUtils.urlPath(connector.getServerUrl(), path));
final Request request = new Request(uri.toString());
request.setMethod(method);
request.addHeader("Accept", "application/json");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace(method + " to Blackboard: " + request.getUrl());
}
final String body;
if (data != null) {
body = jsonMapper.writeValueAsString(data);
} else {
body = "";
}
request.setBody(body);
if (body.length() > 0) {
request.addHeader("Content-Type", "application/json");
}
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Sending " + prettyJson(body));
}
// attach cached token. (Cache knows how to get a new one)
request.addHeader("Authorization", "Bearer " + getToken(connector.getUuid()));
try (Response response =
httpService.getWebContent(request, configService.getProxyDetails())) {
final String responseBody = response.getBody();
final int code = response.getCode();
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Received from Blackboard (" + code + "):");
LOGGER.trace(prettyJson(responseBody));
}
if (code == 401 && firstTime) {
// Unauthorized request. Retry once to obtain a new token (assumes the current token is
// expired)
LOGGER.trace(
"Received a 401 from Blackboard. Token for connector ["
+ connector.getUuid()
+ "] is likely expired. Retrying...");
tokenCache.getCache().get(connector.getUuid()).invalidate(TOKEN_KEY);
return sendBlackboardData(connector, path, returnType, data, method, false);
}
if (code >= 300) {
throw new RuntimeException(
"Received " + code + " from Blackboard. Body = " + responseBody);
}
if (returnType != null) {
final T content = jsonMapper.readValue(responseBody, returnType);
return content;
}
return null;
}
} catch (ExecutionException | IOException ex) {
throw Throwables.propagate(ex);
}
}
@Nullable
private String prettyJson(@Nullable String json) {
if (Strings.isNullOrEmpty(json)) {
return json;
}
try {
return prettyJsonMapper.writeValueAsString(prettyJsonMapper.readTree(json));
} catch (IOException io) {
return json;
}
}
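  /**
   * Returns the OAuth access token cached for the given connector; if none is
   * cached, the loader configured in setInstitutionService requests a new one from
   * Blackboard.
   */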
private String getToken(String connectorUuid) {
try {
return tokenCache.getCache().get(connectorUuid).get(TOKEN_KEY);
} catch (ExecutionException e) {
throw Throwables.propagate(e);
}
}
private String getKey(String partKey) {
return KEY_PFX + "blackboardrest." + partKey;
}
}
|
|
/* TQuotYear.java - year of quotation,
* SQL operations with the table 'quot_year' in Wiktionary parsed database.
*
* Copyright (c) 2011 Andrew Krizhanovsky <andrew.krizhanovsky at gmail.com>
* Distributed under EPL/LGPL/GPL/AL/BSD multi-license.
*/
package wikokit.base.wikt.sql.quote;
import java.sql.*;
import wikokit.base.wikipedia.sql.Connect;
/** Year of quotation and
* operations with the table 'quot_year' in MySQL Wiktionary parsed database. */
public class TQuotYear {
    /** Unique identifier of the year(s). */
private int id;
    /** Start date of writing of the book containing the quote. */
private int from;
    /** End date of writing of the book containing the quote;
     * if the quote contains only one date, then to = from. */
private int to;
public TQuotYear(int _id,int _from,int _to)
{
id = _id;
from = _from;
to = _to;
}
public TQuotYear(int _id,int _from)
{
id = _id;
from = _from;
to = _from;
}
/** Gets unique ID from database */
public int getID() {
return id;
}
    /** Gets the start date of writing of the book containing the quote. */
public int getFrom() {
return from;
}
    /** Gets the end date of writing of the book containing the quote. */
public int getTo() {
return to;
}
/** Inserts record into the table 'quot_year'.<br><br>
* INSERT INTO quot_year (`from`,`to`) VALUES (1956,1956);
*
     * @param _from start date of writing of the book containing the quote;
     *              the end date is set equal to the start date
* @return inserted record, or null if the insertion failed
*/
public static TQuotYear insert (Connect connect,int _from) {
return insert(connect, _from, _from);
}
/** Inserts record into the table 'quot_year'.<br><br>
* INSERT INTO quot_year (`from`,`to`) VALUES (1956,1988);
*
     * @param _from start date of writing of the book containing the quote
     * @param _to finish date of writing of the book containing the quote
* @return inserted record, or null if the insertion failed
*/
public static TQuotYear insert (Connect connect,int _from,int _to) {
if(-1 == _from || -1 == _to) // it means that there is no info about years
return null;
if(_from < 0 || _to < 0 || _from > _to) {
System.out.println("Warning (TQuotYear.insert()):: invalid years: from='"+_from+"', to='"+_to+"'.");
return null;
}
StringBuilder str_sql = new StringBuilder();
str_sql.append("INSERT INTO quot_year (`from`,`to`) VALUES (");
str_sql.append(_from);
str_sql.append(",");
str_sql.append(_to);
str_sql.append(")");
TQuotYear result = null;
try
{
Statement s = connect.conn.createStatement ();
try {
s.executeUpdate (str_sql.toString());
} finally {
s.close();
}
s = connect.conn.createStatement ();
try {
ResultSet rs = s.executeQuery ("SELECT LAST_INSERT_ID() as id");
try {
if (rs.next ())
result = new TQuotYear(rs.getInt("id"), _from, _to);
} finally {
rs.close();
}
} finally {
s.close();
}
}catch(SQLException ex) {
System.out.println("SQLException (TQuotYear.insert):: _from="+_from+"; _to="+_to+"; sql='" + str_sql.toString() + "' error=" + ex.getMessage());
}
return result;
}
/** Selects row from the table 'quot_year' by ID.<br><br>
*
* SELECT `from`,`to` FROM quot_year WHERE id=1
*
* @return null if data is absent
*/
public static TQuotYear getByID (Connect connect,int id) {
StringBuilder str_sql = new StringBuilder();
str_sql.append("SELECT `from`,`to` FROM quot_year WHERE id=");
str_sql.append(id);
TQuotYear quot_year = null;
try {
Statement s = connect.conn.createStatement ();
try {
ResultSet rs = s.executeQuery (str_sql.toString());
try {
if (rs.next ())
{
int _from = rs.getInt("from");
int _to = rs.getInt("to");
quot_year = new TQuotYear(id, _from, _to);
}
} finally {
rs.close();
}
} finally {
s.close();
}
} catch(SQLException ex) {
System.out.println("SQLException (TQuotYear.getByID()):: sql='" + str_sql.toString() + "' " + ex.getMessage());
}
return quot_year;
}
    /** Gets a record from the table 'quot_year' by the date of the book containing the quote.<br><br>
* SELECT id FROM quot_year WHERE `from`=1956 AND `to`=1956;
*
     * @param page_title word which is described in this article
* @return NULL if data is absent
*/
public static TQuotYear get (Connect connect,int _from, String page_title) {
return get(connect, _from, _from, page_title);
}
    /** Gets a record from the table 'quot_year' by the dates of the book containing the quote.<br><br>
* SELECT id FROM quot_year WHERE `from`=1956 AND `to`=1988;
*
     * @param page_title word which is described in this article
* @return NULL if data is absent
*/
public static TQuotYear get (Connect connect,int _from,int _to, String page_title) {
if(_from < 0 || _to < 0 || _from > _to) {
System.out.println("Warning (TQuotYear.get()):: entry '" + page_title + "', invalid years: from='"+_from+"', to='"+_to+"'.");
return null;
}
StringBuilder str_sql = new StringBuilder();
str_sql.append("SELECT id FROM quot_year WHERE `from`=");
str_sql.append(_from);
str_sql.append(" AND `to`=");
str_sql.append(_to);
TQuotYear result = null;
try {
Statement s = connect.conn.createStatement ();
try {
ResultSet rs = s.executeQuery (str_sql.toString());
try {
if (rs.next ())
{
int _id = rs.getInt("id");
result = new TQuotYear(_id, _from, _to);
}
} finally {
rs.close();
}
} finally {
s.close();
}
} catch(SQLException ex) {
System.out.println("SQLException (TQuotYear.get()):: entry '" + page_title + "', years: _from="+_from+"; _to="+_to+"; sql='" + ex.getMessage());
}
return result;
}
/** Gets ID of a record or inserts record (if it is absent)
* into the table 'quot_year'.
*
     * @param _from start date of writing of the book containing the quote
     * @param _to finish date of writing of the book containing the quote
     * @param page_title word which is described in this article
*/
public static TQuotYear getOrInsert (Connect connect,int _from,int _to, String page_title) {
if(-1 == _from || -1 == _to) // it means that there is no info about years
return null;
if(_from < 0 || _to < 0 || _from > _to) {
System.out.println("Warning (TQuotYear.getOrInsert()):: invalid years: from='"+_from+"', to='"+_to+"', for the word '" + page_title + "'.");
return null;
}
TQuotYear y = TQuotYear.get(connect, _from, _to, page_title);
if(null == y)
y = TQuotYear.insert(connect, _from, _to);
return y;
}
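    /** Minimal usage sketch (not part of the original API): given an open Connect,
     * get-or-insert a year range for a hypothetical entry, then remove it again.
     * The entry name "example_word" is purely illustrative. */
    public static void usageSketch(Connect connect) {
        TQuotYear y = TQuotYear.getOrInsert(connect, 1956, 1988, "example_word");
        if(null != y)
            y.delete(connect);  // remove the example row again
    }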
/** Deletes row from the table 'quot_year' by a value of ID.<br><br>
* DELETE FROM quot_year WHERE id=4;
*/
public void delete (Connect connect) {
StringBuilder str_sql = new StringBuilder();
str_sql.append("DELETE FROM quot_year WHERE id=");
str_sql.append( id );
try {
Statement s = connect.conn.createStatement ();
try {
s.execute (str_sql.toString());
} finally {
s.close();
}
} catch(SQLException ex) {
System.out.println("SQLException (TQuotYear.delete()):: sql='" + str_sql.toString() + "' " + ex.getMessage());
}
}
}
|
|
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.schedulers;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.mockito.*;
import rx.*;
import rx.Observable.OnSubscribe;
import rx.functions.*;
import rx.observers.TestSubscriber;
public class TestSchedulerTest {
@SuppressWarnings("unchecked")
// mocking is unchecked, unfortunately
@Test
public final void testPeriodicScheduling() {
final Func1<Long, Void> calledOp = mock(Func1.class);
final TestScheduler scheduler = new TestScheduler();
final Scheduler.Worker inner = scheduler.createWorker();
try {
inner.schedulePeriodically(new Action0() {
@Override
public void call() {
System.out.println(scheduler.now());
calledOp.call(scheduler.now());
}
}, 1, 2, TimeUnit.SECONDS);
verify(calledOp, never()).call(anyLong());
InOrder inOrder = Mockito.inOrder(calledOp);
scheduler.advanceTimeBy(999L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, never()).call(anyLong());
scheduler.advanceTimeBy(1L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, times(1)).call(1000L);
scheduler.advanceTimeBy(1999L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, never()).call(3000L);
scheduler.advanceTimeBy(1L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, times(1)).call(3000L);
scheduler.advanceTimeBy(5L, TimeUnit.SECONDS);
inOrder.verify(calledOp, times(1)).call(5000L);
inOrder.verify(calledOp, times(1)).call(7000L);
inner.unsubscribe();
scheduler.advanceTimeBy(11L, TimeUnit.SECONDS);
inOrder.verify(calledOp, never()).call(anyLong());
} finally {
inner.unsubscribe();
}
}
@SuppressWarnings("unchecked")
// mocking is unchecked, unfortunately
@Test
public final void testPeriodicSchedulingUnsubscription() {
final Func1<Long, Void> calledOp = mock(Func1.class);
final TestScheduler scheduler = new TestScheduler();
final Scheduler.Worker inner = scheduler.createWorker();
try {
final Subscription subscription = inner.schedulePeriodically(new Action0() {
@Override
public void call() {
System.out.println(scheduler.now());
calledOp.call(scheduler.now());
}
}, 1, 2, TimeUnit.SECONDS);
verify(calledOp, never()).call(anyLong());
InOrder inOrder = Mockito.inOrder(calledOp);
scheduler.advanceTimeBy(999L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, never()).call(anyLong());
scheduler.advanceTimeBy(1L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, times(1)).call(1000L);
scheduler.advanceTimeBy(1999L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, never()).call(3000L);
scheduler.advanceTimeBy(1L, TimeUnit.MILLISECONDS);
inOrder.verify(calledOp, times(1)).call(3000L);
scheduler.advanceTimeBy(5L, TimeUnit.SECONDS);
inOrder.verify(calledOp, times(1)).call(5000L);
inOrder.verify(calledOp, times(1)).call(7000L);
subscription.unsubscribe();
scheduler.advanceTimeBy(11L, TimeUnit.SECONDS);
inOrder.verify(calledOp, never()).call(anyLong());
} finally {
inner.unsubscribe();
}
}
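    /**
     * Verifies that unsubscribing the worker before any virtual time elapses
     * prevents the self-rescheduling action from ever running.
     */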
@Test
public final void testImmediateUnsubscribes() {
TestScheduler s = new TestScheduler();
final Scheduler.Worker inner = s.createWorker();
final AtomicInteger counter = new AtomicInteger(0);
try {
inner.schedule(new Action0() {
@Override
public void call() {
counter.incrementAndGet();
System.out.println("counter: " + counter.get());
inner.schedule(this);
}
});
inner.unsubscribe();
assertEquals(0, counter.get());
} finally {
inner.unsubscribe();
}
}
@Test
public final void testImmediateUnsubscribes2() {
TestScheduler s = new TestScheduler();
final Scheduler.Worker inner = s.createWorker();
try {
final AtomicInteger counter = new AtomicInteger(0);
final Subscription subscription = inner.schedule(new Action0() {
@Override
public void call() {
counter.incrementAndGet();
System.out.println("counter: " + counter.get());
inner.schedule(this);
}
});
subscription.unsubscribe();
assertEquals(0, counter.get());
} finally {
inner.unsubscribe();
}
}
@Test
public final void testNestedSchedule() {
final TestScheduler scheduler = new TestScheduler();
final Scheduler.Worker inner = scheduler.createWorker();
try {
final Action0 calledOp = mock(Action0.class);
Observable<Object> poller;
poller = Observable.unsafeCreate(new OnSubscribe<Object>() {
@Override
public void call(final Subscriber<? super Object> aSubscriber) {
inner.schedule(new Action0() {
@Override
public void call() {
if (!aSubscriber.isUnsubscribed()) {
calledOp.call();
inner.schedule(this, 5, TimeUnit.SECONDS);
}
}
});
}
});
InOrder inOrder = Mockito.inOrder(calledOp);
Subscription sub;
sub = poller.subscribe();
scheduler.advanceTimeTo(6, TimeUnit.SECONDS);
inOrder.verify(calledOp, times(2)).call();
sub.unsubscribe();
scheduler.advanceTimeTo(11, TimeUnit.SECONDS);
inOrder.verify(calledOp, never()).call();
sub = poller.subscribe();
scheduler.advanceTimeTo(12, TimeUnit.SECONDS);
inOrder.verify(calledOp, times(1)).call();
} finally {
inner.unsubscribe();
}
}
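    /**
     * Runs a 30-unit interval on a TestScheduler for every TimeUnit and checks
     * that exactly the first two emissions have been observed once virtual time
     * reaches 60 units.
     */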
@Test
public void resolution() {
for (final TimeUnit unit : TimeUnit.values()) {
TestScheduler scheduler = new TestScheduler();
TestSubscriber<String> testSubscriber = new TestSubscriber<String>();
Observable.interval(30, unit, scheduler)
.map(new Func1<Long, String>() {
@Override
public String call(Long v) {
return v + "-" + unit;
}
})
.subscribe(testSubscriber);
scheduler.advanceTimeTo(60, unit);
testSubscriber.assertValues("0-" + unit, "1-" + unit);
}
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.dmn.entity.repository;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.Page;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.cfg.auth.ResourceAuthorizationProvider;
import org.camunda.bpm.engine.impl.db.EnginePersistenceLogger;
import org.camunda.bpm.engine.impl.db.ListQueryParameterObject;
import org.camunda.bpm.engine.impl.persistence.AbstractManager;
import org.camunda.bpm.engine.impl.persistence.entity.AuthorizationEntity;
import org.camunda.bpm.engine.repository.DecisionDefinition;
import org.camunda.bpm.engine.repository.DecisionRequirementsDefinition;
public class DecisionDefinitionManager extends AbstractManager {
protected static final EnginePersistenceLogger LOG = ProcessEngineLogger.PERSISTENCE_LOGGER;
public void insertDecisionDefinition(DecisionDefinitionEntity decisionDefinition) {
getDbEntityManager().insert(decisionDefinition);
createDefaultAuthorizations(decisionDefinition);
}
public void deleteDecisionDefinitionsByDeploymentId(String deploymentId) {
getDbEntityManager().delete(DecisionDefinitionEntity.class, "deleteDecisionDefinitionsByDeploymentId", deploymentId);
}
public DecisionDefinitionEntity findDecisionDefinitionById(String decisionDefinitionId) {
return getDbEntityManager().selectById(DecisionDefinitionEntity.class, decisionDefinitionId);
}
/**
* @return the latest version of the decision definition with the given key (from any tenant)
*
* @throws ProcessEngineException if more than one tenant has a decision definition with the given key
*
* @see #findLatestDecisionDefinitionByKeyAndTenantId(String, String)
*/
public DecisionDefinitionEntity findLatestDecisionDefinitionByKey(String decisionDefinitionKey) {
@SuppressWarnings("unchecked")
List<DecisionDefinitionEntity> decisionDefinitions = getDbEntityManager().selectList("selectLatestDecisionDefinitionByKey", configureParameterizedQuery(decisionDefinitionKey));
if (decisionDefinitions.isEmpty()) {
return null;
} else if (decisionDefinitions.size() == 1) {
return decisionDefinitions.iterator().next();
} else {
throw LOG.multipleTenantsForDecisionDefinitionKeyException(decisionDefinitionKey);
}
}
/**
* @return the latest version of the decision definition with the given key and tenant id
*
* @see #findLatestDecisionDefinitionByKey(String)
*/
public DecisionDefinitionEntity findLatestDecisionDefinitionByKeyAndTenantId(String decisionDefinitionKey, String tenantId) {
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("decisionDefinitionKey", decisionDefinitionKey);
parameters.put("tenantId", tenantId);
if (tenantId == null) {
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectLatestDecisionDefinitionByKeyWithoutTenantId", parameters);
} else {
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectLatestDecisionDefinitionByKeyAndTenantId", parameters);
}
}
public DecisionDefinitionEntity findDecisionDefinitionByKeyAndVersion(String decisionDefinitionKey, Integer decisionDefinitionVersion) {
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("decisionDefinitionVersion", decisionDefinitionVersion);
parameters.put("decisionDefinitionKey", decisionDefinitionKey);
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectDecisionDefinitionByKeyAndVersion", configureParameterizedQuery(parameters));
}
public DecisionDefinitionEntity findDecisionDefinitionByKeyVersionAndTenantId(String decisionDefinitionKey, Integer decisionDefinitionVersion, String tenantId) {
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("decisionDefinitionVersion", decisionDefinitionVersion);
parameters.put("decisionDefinitionKey", decisionDefinitionKey);
parameters.put("tenantId", tenantId);
if (tenantId == null) {
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectDecisionDefinitionByKeyVersionWithoutTenantId", parameters);
} else {
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectDecisionDefinitionByKeyVersionAndTenantId", parameters);
}
}
public DecisionDefinitionEntity findDecisionDefinitionByDeploymentAndKey(String deploymentId, String decisionDefinitionKey) {
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("deploymentId", deploymentId);
parameters.put("decisionDefinitionKey", decisionDefinitionKey);
return (DecisionDefinitionEntity) getDbEntityManager().selectOne("selectDecisionDefinitionByDeploymentAndKey", parameters);
}
@SuppressWarnings("unchecked")
public List<DecisionDefinition> findDecisionDefinitionsByQueryCriteria(DecisionDefinitionQueryImpl decisionDefinitionQuery, Page page) {
configureDecisionDefinitionQuery(decisionDefinitionQuery);
return getDbEntityManager().selectList("selectDecisionDefinitionsByQueryCriteria", decisionDefinitionQuery, page);
}
public long findDecisionDefinitionCountByQueryCriteria(DecisionDefinitionQueryImpl decisionDefinitionQuery) {
configureDecisionDefinitionQuery(decisionDefinitionQuery);
return (Long) getDbEntityManager().selectOne("selectDecisionDefinitionCountByQueryCriteria", decisionDefinitionQuery);
}
public String findPreviousDecisionDefinitionId(String decisionDefinitionKey, Integer version, String tenantId) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("key", decisionDefinitionKey);
params.put("version", version);
params.put("tenantId", tenantId);
return (String) getDbEntityManager().selectOne("selectPreviousDecisionDefinitionId", params);
}
@SuppressWarnings("unchecked")
public List<DecisionDefinition> findDecisionDefinitionByDeploymentId(String deploymentId) {
return getDbEntityManager().selectList("selectDecisionDefinitionByDeploymentId", deploymentId);
}
public void insertDecisionRequirementsDefinition(DecisionRequirementsDefinitionEntity decisionRequirementsDefinition) {
getDbEntityManager().insert(decisionRequirementsDefinition);
createDefaultAuthorizations(decisionRequirementsDefinition);
}
public void deleteDecisionRequirementsDefinitionsByDeploymentId(String deploymentId) {
getDbEntityManager().delete(DecisionDefinitionEntity.class, "deleteDecisionRequirementsDefinitionsByDeploymentId", deploymentId);
}
public DecisionRequirementsDefinitionEntity findDecisionRequirementsDefinitionById(String decisionRequirementsDefinitionId) {
return getDbEntityManager().selectById(DecisionRequirementsDefinitionEntity.class, decisionRequirementsDefinitionId);
}
public String findPreviousDecisionRequirementsDefinitionId(String decisionRequirementsDefinitionKey, Integer version, String tenantId) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("key", decisionRequirementsDefinitionKey);
params.put("version", version);
params.put("tenantId", tenantId);
return (String) getDbEntityManager().selectOne("selectPreviousDecisionRequirementsDefinitionId", params);
}
@SuppressWarnings("unchecked")
public List<DecisionRequirementsDefinition> findDecisionRequirementsDefinitionByDeploymentId(String deploymentId) {
return getDbEntityManager().selectList("selectDecisionRequirementsDefinitionByDeploymentId", deploymentId);
}
public DecisionRequirementsDefinitionEntity findDecisionRequirementsDefinitionByDeploymentAndKey(String deploymentId, String decisionRequirementsDefinitionKey) {
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("deploymentId", deploymentId);
parameters.put("decisionRequirementsDefinitionKey", decisionRequirementsDefinitionKey);
return (DecisionRequirementsDefinitionEntity) getDbEntityManager().selectOne("selectDecisionRequirementsDefinitionByDeploymentAndKey", parameters);
}
/**
* @return the latest version of the decision requirements definition with the given key and tenant id
*/
public DecisionRequirementsDefinitionEntity findLatestDecisionRequirementsDefinitionByKeyAndTenantId(String decisionRequirementsDefinitionKey, String tenantId) {
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("decisionRequirementsDefinitionKey", decisionRequirementsDefinitionKey);
parameters.put("tenantId", tenantId);
if (tenantId == null) {
return (DecisionRequirementsDefinitionEntity) getDbEntityManager().selectOne("selectLatestDecisionRequirementsDefinitionByKeyWithoutTenantId", parameters);
} else {
return (DecisionRequirementsDefinitionEntity) getDbEntityManager().selectOne("selectLatestDecisionRequirementsDefinitionByKeyAndTenantId", parameters);
}
}
@SuppressWarnings("unchecked")
public List<DecisionRequirementsDefinition> findDecisionRequirementsDefinitionsByQueryCriteria(DecisionRequirementsDefinitionQueryImpl query, Page page) {
configureDecisionRequirementsDefinitionQuery(query);
return getDbEntityManager().selectList("selectDecisionRequirementsDefinitionsByQueryCriteria", query, page);
}
public long findDecisionRequirementsDefinitionCountByQueryCriteria(DecisionRequirementsDefinitionQueryImpl query) {
configureDecisionRequirementsDefinitionQuery(query);
return (Long) getDbEntityManager().selectOne("selectDecisionRequirementsDefinitionCountByQueryCriteria", query);
}
protected void createDefaultAuthorizations(DecisionDefinition decisionDefinition) {
if(isAuthorizationEnabled()) {
ResourceAuthorizationProvider provider = getResourceAuthorizationProvider();
AuthorizationEntity[] authorizations = provider.newDecisionDefinition(decisionDefinition);
saveDefaultAuthorizations(authorizations);
}
}
protected void createDefaultAuthorizations(DecisionRequirementsDefinition decisionRequirementsDefinition) {
if(isAuthorizationEnabled()) {
ResourceAuthorizationProvider provider = getResourceAuthorizationProvider();
AuthorizationEntity[] authorizations = provider.newDecisionRequirementsDefinition(decisionRequirementsDefinition);
saveDefaultAuthorizations(authorizations);
}
}
protected void configureDecisionDefinitionQuery(DecisionDefinitionQueryImpl query) {
getAuthorizationManager().configureDecisionDefinitionQuery(query);
getTenantManager().configureQuery(query);
}
protected void configureDecisionRequirementsDefinitionQuery(DecisionRequirementsDefinitionQueryImpl query) {
getAuthorizationManager().configureDecisionRequirementsDefinitionQuery(query);
getTenantManager().configureQuery(query);
}
protected ListQueryParameterObject configureParameterizedQuery(Object parameter) {
return getTenantManager().configureQuery(parameter);
}
}
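// Editorial note (hedged usage sketch, not part of the original source): the tenant-aware
// lookup pattern exposed above. A caller that knows its tenant uses the tenant-specific
// query; otherwise it falls back to the cross-tenant query, which throws when more than one
// tenant defines the key. "manager" stands for an instance of the enclosing manager class.
//
//   DecisionDefinitionEntity latest = (tenantId != null)
//       ? manager.findLatestDecisionDefinitionByKeyAndTenantId(decisionDefinitionKey, tenantId)
//       : manager.findLatestDecisionDefinitionByKey(decisionDefinitionKey);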
|
|
/* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.transformer;
import com.google.api.codegen.CollectionConfig;
import com.google.api.codegen.MethodConfig;
import com.google.api.codegen.metacode.InitValueConfig;
import com.google.api.codegen.util.CommonRenderingUtil;
import com.google.api.codegen.util.Name;
import com.google.api.codegen.util.NameFormatter;
import com.google.api.codegen.util.NameFormatterDelegator;
import com.google.api.codegen.util.NamePath;
import com.google.api.codegen.util.TypeNameConverter;
import com.google.api.tools.framework.aspects.documentation.model.DocumentationUtil;
import com.google.api.tools.framework.model.Field;
import com.google.api.tools.framework.model.Interface;
import com.google.api.tools.framework.model.Method;
import com.google.api.tools.framework.model.ProtoElement;
import com.google.api.tools.framework.model.TypeRef;
import java.util.ArrayList;
import java.util.List;
/**
* A SurfaceNamer provides language-specific names for specific components of a view for a surface.
*
* Naming is composed of two steps:
*
* 1. Composing a Name instance with the name pieces
* 2. Formatting the Name for the particular type of identifier needed.
*
* This class delegates step 2 to the provided name formatter, which generally
* would be a language-specific namer.
*/
public class SurfaceNamer extends NameFormatterDelegator {
private ModelTypeFormatter modelTypeFormatter;
private TypeNameConverter typeNameConverter;
public SurfaceNamer(
NameFormatter languageNamer,
ModelTypeFormatter modelTypeFormatter,
TypeNameConverter typeNameConverter) {
super(languageNamer);
this.modelTypeFormatter = modelTypeFormatter;
this.typeNameConverter = typeNameConverter;
}
public ModelTypeFormatter getModelTypeFormatter() {
return modelTypeFormatter;
}
public String getNotImplementedString(String feature) {
return "$ NOT IMPLEMENTED: " + feature + " $";
}
/** The name of the class that implements a particular proto interface. */
public String getApiWrapperClassName(Interface interfaze) {
return className(Name.upperCamel(interfaze.getSimpleName(), "Api"));
}
/**
* The name of a variable that holds an instance of the class that implements
* a particular proto interface.
*/
public String getApiWrapperVariableName(Interface interfaze) {
return varName(Name.upperCamel(interfaze.getSimpleName(), "Api"));
}
/**
* The name of the settings class for a particular proto interface;
* not used in most languages.
*/
public String getApiSettingsClassName(Interface interfaze) {
return className(Name.upperCamel(interfaze.getSimpleName(), "Settings"));
}
/**
* The name of a variable that holds the settings class for a particular
* proto interface; not used in most languages.
*/
public String getApiSettingsVariableName(Interface interfaze) {
return varName(Name.upperCamel(interfaze.getSimpleName(), "Settings"));
}
/**
* The name of the builder class for the settings class for a particular
* proto interface; not used in most languages.
*/
public String getApiSettingsBuilderVarName(Interface interfaze) {
return varName(Name.upperCamel(interfaze.getSimpleName(), "SettingsBuilder"));
}
/**
* The variable name for the given identifier. If it has formatting config
* (specified by initValueConfig), then its name reflects that.
*/
public String getVariableName(Name identifier, InitValueConfig initValueConfig) {
if (initValueConfig == null || !initValueConfig.hasFormattingConfig()) {
return varName(identifier);
} else {
return varName(Name.from("formatted").join(identifier));
}
}
/** The function name to set the given proto field. */
public String getFieldSetFunctionName(Field field) {
return getFieldSetFunctionName(field.getType(), Name.from(field.getSimpleName()));
}
/** The function name to set a field having the given type and name. */
public String getFieldSetFunctionName(TypeRef type, Name identifier) {
if (type.isMap()) {
return methodName(Name.from("put", "all").join(identifier));
} else if (type.isRepeated()) {
return methodName(Name.from("add", "all").join(identifier));
} else {
return methodName(Name.from("set").join(identifier));
}
}
/** The function name to get the given proto field. */
public String getFieldGetFunctionName(Field field) {
return getFieldGetFunctionName(field.getType(), Name.from(field.getSimpleName()));
}
/** The function name to get a field having the given type and name. */
public String getFieldGetFunctionName(TypeRef type, Name identifier) {
if (type.isRepeated()) {
return methodName(Name.from("get").join(identifier).join("list"));
} else {
return methodName(Name.from("get").join(identifier));
}
}
/**
* The function name to get the count of elements in the given field.
*
* @throws IllegalArgumentException if the field is not a repeated field.
*/
public String getFieldCountGetFunctionName(Field field) {
if (field.isRepeated()) {
return methodName(Name.from("get", field.getSimpleName(), "count"));
} else {
throw new IllegalArgumentException(
"Non-repeated field " + field.getSimpleName() + " has no count function.");
}
}
/**
* The function name to get an element by index from the given field.
*
* @throws IllegalArgumentException if the field is not a repeated field.
*/
public String getByIndexGetFunctionName(Field field) {
if (field.isRepeated()) {
return methodName(Name.from("get", field.getSimpleName()));
} else {
throw new IllegalArgumentException(
"Non-repeated field " + field.getSimpleName() + " has no get-by-index function.");
}
}
/**
* The name of a path template constant for the given collection,
* to be held in an API wrapper class.
*/
public String getPathTemplateName(CollectionConfig collectionConfig) {
return inittedConstantName(Name.from(collectionConfig.getEntityName(), "path", "template"));
}
/** The name of a getter function to get a particular path template for the given collection. */
public String getPathTemplateNameGetter(CollectionConfig collectionConfig) {
return methodName(Name.from("get", collectionConfig.getEntityName(), "name", "template"));
}
/** The function name to format the entity for the given collection. */
public String getFormatFunctionName(CollectionConfig collectionConfig) {
return staticFunctionName(Name.from("format", collectionConfig.getEntityName(), "name"));
}
/**
* The function name to parse a variable from the string representing the entity for
* the given collection.
*/
public String getParseFunctionName(String var, CollectionConfig collectionConfig) {
return staticFunctionName(
Name.from("parse", var, "from", collectionConfig.getEntityName(), "name"));
}
/** The entity name for the given collection. */
public String getEntityName(CollectionConfig collectionConfig) {
return varName(Name.from(collectionConfig.getEntityName()));
}
/** The parameter name for the entity for the given collection config. */
public String getEntityNameParamName(CollectionConfig collectionConfig) {
return varName(Name.from(collectionConfig.getEntityName(), "name"));
}
/** The parameter name for the given lower-case field name. */
public String getParamName(String var) {
return varName(Name.from(var));
}
/** The page streaming descriptor name for the given method. */
public String getPageStreamingDescriptorName(Method method) {
return varName(Name.upperCamel(method.getSimpleName(), "PageStreamingDescriptor"));
}
/** The name of the constant to hold the page streaming descriptor for the given method. */
public String getPageStreamingDescriptorConstName(Method method) {
return inittedConstantName(Name.upperCamel(method.getSimpleName()).join("page_str_desc"));
}
/** The name of the constant to hold the bundling descriptor for the given method. */
public String getBundlingDescriptorConstName(Method method) {
return inittedConstantName(Name.upperCamel(method.getSimpleName()).join("bundling_desc"));
}
/** Adds the imports used in the implementation of page streaming descriptors. */
public void addPageStreamingDescriptorImports(ModelTypeTable typeTable) {
// do nothing
}
/** Adds the imports used in the implementation of bundling descriptors. */
public void addBundlingDescriptorImports(ModelTypeTable typeTable) {
// do nothing
}
/** Adds the imports used for page streaming call settings. */
public void addPageStreamingCallSettingsImports(ModelTypeTable typeTable) {
// do nothing
}
/** Adds the imports used for bundling call settings. */
public void addBundlingCallSettingsImports(ModelTypeTable typeTable) {
// do nothing
}
/** The key to use in a dictionary for the given method. */
public String getMethodKey(Method method) {
return keyName(Name.upperCamel(method.getSimpleName()));
}
/** The path to the client config for the given interface. */
public String getClientConfigPath(Interface service) {
return getNotImplementedString("SurfaceNamer.getClientConfigPath");
}
/**
* The type name of the Grpc client class.
* This needs to match what Grpc generates for the particular language.
*/
public String getGrpcClientTypeName(Interface service) {
NamePath namePath = typeNameConverter.getNamePath(modelTypeFormatter.getFullNameFor(service));
String className = className(Name.upperCamel(namePath.getHead(), "Client"));
return qualifiedName(namePath.withHead(className));
}
/**
* The type name of the Grpc container class.
* This needs to match what Grpc generates for the particular language.
*/
public String getGrpcContainerTypeName(Interface service) {
NamePath namePath = typeNameConverter.getNamePath(modelTypeFormatter.getFullNameFor(service));
String className = className(Name.upperCamel(namePath.getHead(), "Grpc"));
return qualifiedName(namePath.withHead(className));
}
/**
* The type name of the method constant in the Grpc container class.
* This needs to match what Grpc generates for the particular language.
*/
public String getGrpcMethodConstant(Method method) {
return inittedConstantName(Name.from("method").join(Name.upperCamel(method.getSimpleName())));
}
/** The name of the surface method which can call the given API method. */
public String getApiMethodName(Method method) {
return methodName(Name.upperCamel(method.getSimpleName()));
}
/**
* The name of a variable to hold a value for the given proto message field
* (such as a flattened parameter).
*/
public String getVariableName(Field field) {
return varName(Name.from(field.getSimpleName()));
}
/**
* Returns true if the request object param type for the given field should be imported.
*/
public boolean shouldImportRequestObjectParamType(Field field) {
return true;
}
/** Converts the given text to doc lines in the format of the current language. */
public List<String> getDocLines(String text) {
return CommonRenderingUtil.getDocLines(text);
}
/** Provides the doc lines for the given proto element in the current language. */
public List<String> getDocLines(ProtoElement element) {
return getDocLines(DocumentationUtil.getDescription(element));
}
/** The doc lines that declare what exception(s) are thrown for an API method. */
public List<String> getThrowsDocLines() {
return new ArrayList<>();
}
/** The public access modifier for the current language. */
public String getPublicAccessModifier() {
return "public";
}
/** The private access modifier for the current language. */
public String getPrivateAccessModifier() {
return "private";
}
/**
* The name used in Grpc for the given API method.
* This needs to match what Grpc generates.
*/
public String getGrpcMethodName(Method method) {
// This might seem silly, but it makes clear what we're dealing with (upper camel).
// This is language-independent because of gRPC conventions.
return Name.upperCamel(method.getSimpleName()).toUpperCamel();
}
/** The type name for retry settings. */
public String getRetrySettingsTypeName() {
return getNotImplementedString("SurfaceNamer.getRetrySettingsClassName");
}
/** The type name for an optional array argument; not used in most languages. */
public String getOptionalArrayTypeName() {
return getNotImplementedString("SurfaceNamer.getOptionalArrayTypeName");
}
/** The return type name in a dynamic language for the given method. */
public String getDynamicLangReturnTypeName(Method method, MethodConfig methodConfig) {
return getNotImplementedString("SurfaceNamer.getDynamicReturnTypeName");
}
/** The return type name in a static language for the given method. */
public String getStaticLangReturnTypeName(Method method, MethodConfig methodConfig) {
return getNotImplementedString("SurfaceNamer.getStaticReturnTypeName");
}
/** The name of the paged callable variant of the given method. */
public String getPagedCallableMethodName(Method method) {
return methodName(Name.upperCamel(method.getSimpleName(), "PagedCallable"));
}
/** The name of the callable for the paged callable variant of the given method. */
public String getPagedCallableName(Method method) {
return varName(Name.upperCamel(method.getSimpleName(), "PagedCallable"));
}
/** The name of the plain callable variant of the given method. */
public String getCallableMethodName(Method method) {
return methodName(Name.upperCamel(method.getSimpleName(), "Callable"));
}
/** The name of the plain callable for the given method. */
public String getCallableName(Method method) {
return varName(Name.upperCamel(method.getSimpleName(), "Callable"));
}
/** The name of the settings member name for the given method. */
public String getSettingsMemberName(Method method) {
return methodName(Name.upperCamel(method.getSimpleName(), "Settings"));
}
/** The getter function name for the settings for the given method. */
public String getSettingsFunctionName(Method method) {
return getSettingsMemberName(method);
}
/**
* The generic-aware response type name for the given type.
* For example, in Java, this will be the type used for ListenableFuture<...>.
*/
public String getGenericAwareResponseTypeName(TypeRef outputType) {
return getNotImplementedString("SurfaceNamer.getGenericAwareResponseType");
}
/**
* The function name to get the given proto field as a list.
*
* @throws IllegalArgumentException if the field is not a repeated field.
*/
public String getGetResourceListCallName(Field resourcesField) {
if (resourcesField.isRepeated()) {
return methodName(Name.from("get", resourcesField.getSimpleName(), "list"));
} else {
throw new IllegalArgumentException(
"Non-repeated field "
+ resourcesField.getSimpleName()
+ " cannot be accessed as a list.");
}
}
/**
* Computes the nickname of the response type name for the given resource type, saves it
* in the given type table, and returns it.
*/
public String getAndSavePagedResponseTypeName(ModelTypeTable typeTable, TypeRef resourceType) {
return getNotImplementedString("SurfaceNamer.getAndSavePagedResponseTypeName");
}
/** The test case name for the given method. */
public String getTestCaseName(Method method) {
return methodName(Name.upperCamel(method.getSimpleName(), "Test"));
}
/** The test class name for the given API service. */
public String getTestClassName(Interface service) {
return className(Name.upperCamel(service.getSimpleName(), "Test"));
}
/** The class name of the mock gRPC service for the given API service. */
public String getMockServiceClassName(Interface service) {
return className(Name.upperCamel("Mock", service.getSimpleName()));
}
/** The name of the getter function for the given name. */
public String getGetFunctionCallName(Name name) {
return methodName(Name.from("get").join(name));
}
}
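// Editorial addition (hedged sketch): a minimal illustration of the two-step naming flow
// described in the class comment, using only methods defined above. The namer instance is
// assumed to have been constructed elsewhere with a concrete language NameFormatter.
class SurfaceNamerUsageSketch {
static String exampleNames(SurfaceNamer namer, Interface interfaze) {
// Step 1 happens inside the call: a Name is composed from the interface's simple name plus "Settings".
// Step 2 formats that Name as a class identifier via the delegated language formatter.
String settingsClass = namer.getApiSettingsClassName(interfaze);
// The same composition formatted as a variable identifier instead.
String settingsVar = namer.getApiSettingsVariableName(interfaze);
return settingsClass + " / " + settingsVar;
}
}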
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.aggregation;
import com.facebook.presto.RowPageBuilder;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.aggregation.groupByAggregations.AggregationTestInput;
import com.facebook.presto.operator.aggregation.groupByAggregations.AggregationTestInputBuilder;
import com.facebook.presto.operator.aggregation.groupByAggregations.AggregationTestOutput;
import com.facebook.presto.operator.aggregation.groupByAggregations.GroupByAggregationTestUtils;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.MapType;
import com.facebook.presto.spi.type.RowType;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.primitives.Ints;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import static com.facebook.presto.metadata.FunctionKind.AGGREGATE;
import static com.facebook.presto.metadata.MetadataManager.createTestMetadataManager;
import static com.facebook.presto.operator.aggregation.AggregationTestUtils.assertAggregation;
import static com.facebook.presto.operator.aggregation.multimapagg.MultimapAggregationFunction.NAME;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
import static com.google.common.base.Preconditions.checkState;
import static org.testng.Assert.assertTrue;
public class TestMultimapAggAggregation
{
private static final MetadataManager metadata = createTestMetadataManager();
@Test
public void testSingleValueMap()
{
testMultimapAgg(DOUBLE, ImmutableList.of(1.0), VARCHAR, ImmutableList.of("a"));
testMultimapAgg(VARCHAR, ImmutableList.of("a"), BIGINT, ImmutableList.of(1L));
}
@Test
public void testMultiValueMap()
{
testMultimapAgg(DOUBLE, ImmutableList.of(1.0, 1.0, 1.0), VARCHAR, ImmutableList.of("a", "b", "c"));
testMultimapAgg(DOUBLE, ImmutableList.of(1.0, 1.0, 2.0), VARCHAR, ImmutableList.of("a", "b", "c"));
}
@Test
public void testOrderValueMap()
{
testMultimapAgg(VARCHAR, ImmutableList.of("a", "a", "a"), BIGINT, ImmutableList.of(1L, 2L, 3L));
testMultimapAgg(VARCHAR, ImmutableList.of("a", "a", "a"), BIGINT, ImmutableList.of(2L, 1L, 3L));
testMultimapAgg(VARCHAR, ImmutableList.of("a", "a", "a"), BIGINT, ImmutableList.of(3L, 2L, 1L));
}
@Test
public void testDuplicateValueMap()
{
testMultimapAgg(VARCHAR, ImmutableList.of("a", "a", "a"), BIGINT, ImmutableList.of(1L, 1L, 1L));
testMultimapAgg(VARCHAR, ImmutableList.of("a", "b", "a", "b", "c"), BIGINT, ImmutableList.of(1L, 1L, 1L, 1L, 1L));
}
@Test
public void testNullMap()
{
testMultimapAgg(DOUBLE, ImmutableList.<Double>of(), VARCHAR, ImmutableList.<String>of());
}
@Test
public void testDoubleMapMultimap()
{
Type mapType = mapType(VARCHAR, BIGINT);
List<Double> expectedKeys = ImmutableList.of(1.0, 2.0, 3.0);
List<Map<String, Long>> expectedValues = ImmutableList.of(ImmutableMap.of("a", 1L), ImmutableMap.of("b", 2L, "c", 3L, "d", 4L), ImmutableMap.of("a", 1L));
testMultimapAgg(DOUBLE, expectedKeys, mapType, expectedValues);
}
@Test
public void testDoubleArrayMultimap()
{
Type arrayType = new ArrayType(VARCHAR);
List<Double> expectedKeys = ImmutableList.of(1.0, 2.0, 3.0);
List<List<String>> expectedValues = ImmutableList.of(ImmutableList.of("a", "b"), ImmutableList.of("c"), ImmutableList.of("d", "e", "f"));
testMultimapAgg(DOUBLE, expectedKeys, arrayType, expectedValues);
}
@Test
public void testDoubleRowMap()
{
RowType innerRowType = RowType.from(ImmutableList.of(
RowType.field("f1", BIGINT),
RowType.field("f2", DOUBLE)));
testMultimapAgg(DOUBLE, ImmutableList.of(1.0, 2.0, 3.0), innerRowType, ImmutableList.of(ImmutableList.of(1L, 1.0), ImmutableList.of(2L, 2.0), ImmutableList.of(3L, 3.0)));
}
@Test
public void testMultiplePages()
{
InternalAggregationFunction aggFunction = getInternalAggregationFunction(BIGINT, BIGINT);
GroupedAccumulator groupedAccumulator = getGroupedAccumulator(aggFunction);
testMultimapAggWithGroupBy(aggFunction, groupedAccumulator, 0, BIGINT, ImmutableList.of(1L, 1L), BIGINT, ImmutableList.of(2L, 3L));
}
@Test
public void testMultiplePagesAndGroups()
{
InternalAggregationFunction aggFunction = getInternalAggregationFunction(BIGINT, BIGINT);
GroupedAccumulator groupedAccumulator = getGroupedAccumulator(aggFunction);
testMultimapAggWithGroupBy(aggFunction, groupedAccumulator, 0, BIGINT, ImmutableList.of(1L, 1L), BIGINT, ImmutableList.of(2L, 3L));
testMultimapAggWithGroupBy(aggFunction, groupedAccumulator, 300, BIGINT, ImmutableList.of(7L, 7L), BIGINT, ImmutableList.of(8L, 9L));
}
@Test
public void testManyValues()
{
InternalAggregationFunction aggFunction = getInternalAggregationFunction(BIGINT, BIGINT);
GroupedAccumulator groupedAccumulator = getGroupedAccumulator(aggFunction);
int numGroups = 30000;
int numKeys = 10;
int numValueArraySize = 2;
Random random = new Random();
for (int group = 0; group < numGroups; group++) {
ImmutableList.Builder<Long> keyBuilder = ImmutableList.builder();
ImmutableList.Builder<Long> valueBuilder = ImmutableList.builder();
for (int i = 0; i < numKeys; i++) {
long key = random.nextLong();
for (int j = 0; j < numValueArraySize; j++) {
long value = random.nextLong();
keyBuilder.add(key);
valueBuilder.add(value);
}
}
testMultimapAggWithGroupBy(aggFunction, groupedAccumulator, group, BIGINT, keyBuilder.build(), BIGINT, valueBuilder.build());
}
}
@Test
public void testEmptyStateOutputIsNull()
{
InternalAggregationFunction aggregationFunction = getInternalAggregationFunction(BIGINT, BIGINT);
GroupedAccumulator groupedAccumulator = aggregationFunction.bind(Ints.asList(), Optional.empty()).createGroupedAccumulator();
BlockBuilder blockBuilder = groupedAccumulator.getFinalType().createBlockBuilder(null, 1);
groupedAccumulator.evaluateFinal(0, blockBuilder);
assertTrue(blockBuilder.isNull(0));
}
private static <K, V> void testMultimapAgg(Type keyType, List<K> expectedKeys, Type valueType, List<V> expectedValues)
{
checkState(expectedKeys.size() == expectedValues.size(), "expectedKeys and expectedValues should have equal size");
InternalAggregationFunction aggFunc = getInternalAggregationFunction(keyType, valueType);
testMultimapAgg(aggFunc, keyType, expectedKeys, valueType, expectedValues);
}
private static InternalAggregationFunction getInternalAggregationFunction(Type keyType, Type valueType)
{
MapType mapType = mapType(keyType, new ArrayType(valueType));
Signature signature = new Signature(NAME, AGGREGATE, mapType.getTypeSignature(), keyType.getTypeSignature(), valueType.getTypeSignature());
return metadata.getFunctionRegistry().getAggregateFunctionImplementation(signature);
}
private static <K, V> void testMultimapAgg(InternalAggregationFunction aggFunc, Type keyType, List<K> expectedKeys, Type valueType, List<V> expectedValues)
{
Map<K, List<V>> map = new HashMap<>();
for (int i = 0; i < expectedKeys.size(); i++) {
map.computeIfAbsent(expectedKeys.get(i), key -> new ArrayList<>()).add(expectedValues.get(i));
}
RowPageBuilder builder = RowPageBuilder.rowPageBuilder(keyType, valueType);
for (int i = 0; i < expectedKeys.size(); i++) {
builder.row(expectedKeys.get(i), expectedValues.get(i));
}
assertAggregation(aggFunc, map.isEmpty() ? null : map, builder.build());
}
private static <K, V> void testMultimapAggWithGroupBy(
InternalAggregationFunction aggregationFunction,
GroupedAccumulator groupedAccumulator,
int groupId,
Type keyType,
List<K> expectedKeys,
Type valueType,
List<V> expectedValues)
{
RowPageBuilder pageBuilder = RowPageBuilder.rowPageBuilder(keyType, valueType);
ImmutableMultimap.Builder<K, V> outputBuilder = ImmutableMultimap.builder();
for (int i = 0; i < expectedValues.size(); i++) {
pageBuilder.row(expectedKeys.get(i), expectedValues.get(i));
outputBuilder.put(expectedKeys.get(i), expectedValues.get(i));
}
Page page = pageBuilder.build();
AggregationTestInput input = new AggregationTestInputBuilder(
new Block[] {page.getBlock(0), page.getBlock(1)},
aggregationFunction).build();
AggregationTestOutput testOutput = new AggregationTestOutput(outputBuilder.build().asMap());
input.runPagesOnAccumulatorWithAssertion(groupId, groupedAccumulator, testOutput);
}
private GroupedAccumulator getGroupedAccumulator(InternalAggregationFunction aggFunction)
{
return aggFunction.bind(Ints.asList(GroupByAggregationTestUtils.createArgs(aggFunction)), Optional.empty()).createGroupedAccumulator();
}
}
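// Editorial addition (hedged sketch): the page-building pattern the helpers above rely on,
// reduced to a single key/value column pair. The literal rows are illustrative only.
class MultimapAggPageSketch {
static Page varcharBigintPage() {
// Each row() call appends one (key, value) pair; multimap_agg later groups values by key.
RowPageBuilder builder = RowPageBuilder.rowPageBuilder(VARCHAR, BIGINT);
builder.row("a", 1L);
builder.row("a", 2L);
builder.row("b", 3L);
return builder.build();
}
}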
|
|
/*
* Copyright (C) The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.ToxicBakery.androidmdemo.fragment.demo;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.example.ToxicBakery.androidmdemo.R;
import com.example.ToxicBakery.androidmdemo.camera.CameraSourcePreview;
import com.example.ToxicBakery.androidmdemo.camera.FaceGraphic;
import com.example.ToxicBakery.androidmdemo.camera.GraphicOverlay;
import com.example.ToxicBakery.androidmdemo.util.PermissionUtil;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
public class FragmentFaceTracking extends Fragment {
private static final int REQUEST_CAMERA = 1;
private static final String[] PERMISSIONS = {
Manifest.permission.CAMERA
};
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_face_tracking, container, false);
mPreview = (CameraSourcePreview) view.findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) view.findViewById(R.id.faceOverlay);
Context context = getActivity().getApplicationContext();
FaceDetector detector = new FaceDetector.Builder(context).build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());
if (!detector.isOperational()) {
// Note: The first time that an app using face API is installed on a device, GMS will
// download a native library to the device in order to do detection. Usually this
// completes before the app is run for the first time. But if that download has not yet
// completed, then the above call will not detect any faces.
//
// isOperational() can be used to check if the required native library is currently
// available. The detector will automatically become operational once the library
// download completes on device.
Log.w(TAG, "Face detector dependencies are not yet available.");
}
mCameraSource = new CameraSource.Builder(context, detector)
// .setRequestedPreviewSize(640, 480)
// .setFacing(CameraSource.CAMERA_FACING_BACK)
// .setRequestedFps(30.0f)
.build();
return view;
}
/**
* Restarts the camera.
*/
@Override
public void onResume() {
super.onResume();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
final boolean hasAllPermissions = PermissionUtil.hasAllPermissions(getActivity(), PERMISSIONS);
if (hasAllPermissions) {
startCameraSource();
} else {
// This is kind of bugged because we can not listen for the return call. Waiting on
// v23.0.0 appcompat in hopes of fragment support here.
getActivity().requestPermissions(
PERMISSIONS
, REQUEST_CAMERA
);
}
} else {
startCameraSource();
}
}
/**
* Stops the camera.
*/
@Override
public void onPause() {
super.onPause();
mPreview.stop();
}
/**
* Releases the resources associated with the camera source, the associated detector, and the
* rest of the processing pipeline.
*/
@Override
public void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case REQUEST_CAMERA:
if (resultCode == Activity.RESULT_OK) {
startCameraSource();
} else {
Toast.makeText(getActivity(), R.string.face_detection_camera_required, Toast.LENGTH_LONG)
.show();
}
break;
default:
super.onActivityResult(requestCode, resultCode, data);
}
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (Exception e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
/**
* Update the position/characteristics of the face within the overlay.
*/
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
}
|
|
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.ws.bridge.filter;
import java.util.Properties;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.future.IoFutureListener;
import org.apache.mina.core.future.WriteFuture;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.session.IoSession;
import org.kaazing.gateway.resource.address.ws.WsResourceAddress;
import org.kaazing.gateway.transport.AbstractBridgeSession;
import org.kaazing.gateway.transport.IoFilterAdapter;
import org.kaazing.gateway.transport.ws.AbstractWsBridgeSession;
import org.kaazing.gateway.transport.ws.WsAcceptor;
import org.kaazing.gateway.transport.ws.WsMessage;
import org.kaazing.gateway.transport.ws.WsPingMessage;
import org.kaazing.mina.core.session.IoSessionConfigEx;
import org.kaazing.mina.core.session.IoSessionEx;
import org.slf4j.Logger;
/**
* The purpose of this filter is to periodically check that the WebSocket connection is still alive.
* This is necessary to implement detection of broken connections in cases where the immediate local network hop
* is not down, so the TCP layer (NioProcessor, etc) does not realize the client has gone (story KG-6379). This is
* done by sending a WebSocket PING to the client if no data has been received for some time, and closing the connection if
* no PONG is received from the client within the expected maximum round-trip time.
*/
public class WsCheckAliveFilter extends IoFilterAdapter<IoSessionEx> {
public static final long DISABLE_INACTIVITY_TIMEOUT = WsResourceAddress.INACTIVITY_TIMEOUT_DEFAULT;
// feature is disabled by default. If in future we want to enable by default, a suitable default would be "30sec".
public static final long DEFAULT_WS_INACTIVITY_TIMEOUT_MILLIS = DISABLE_INACTIVITY_TIMEOUT;
private static String OBSOLETE_INACTIVITY_TIMEOUT_PROPERTY = "org.kaazing.gateway.transport.ws.INACTIVITY_TIMEOUT";
private final Logger logger;
// The following values are in milliseconds
private final long maxExpectedRtt; // how long to wait for pong reply
private final long pingDelay; // how long to wait before sending ping
private final IoSession wsSession;
private enum NextAction {
PONG, // ping has been written, awaiting pong
PING // need to write ping
}
private NextAction nextAction = NextAction.PING;
private long pingSentTime = 0;
private final IoFutureListener<WriteFuture> setPingTimeOnWrite = new IoFutureListener<WriteFuture>() {
@Override
public void operationComplete(WriteFuture future) {
pingWritten(System.currentTimeMillis());
}
};
public static void validateSystemProperties(Properties configuration, Logger logger) {
// Fail gateway startup if the obsolete system property from JMS Edition release 3.5.3 is used (KG-7125)
if (configuration != null && configuration.containsKey(OBSOLETE_INACTIVITY_TIMEOUT_PROPERTY)) {
String message = String.format(
"System property %s is no longer supported, please use accept-option %s instead in the gateway configuration file",
OBSOLETE_INACTIVITY_TIMEOUT_PROPERTY, "ws.inactivity.timeout");
logger.error(message);
throw new RuntimeException(message);
}
}
public static void addIfFeatureEnabled(IoFilterChain filterChain, String filterName, long inactivityTimeoutIn, Logger logger) {
addIfFeatureEnabled(filterChain, filterName, inactivityTimeoutIn, null, logger);
}
public static void addIfFeatureEnabled(IoFilterChain filterChain, String filterName, long inactivityTimeoutIn,
IoSessionEx wsSession, Logger logger) {
long inactivityTimeout = getInactivityTimeoutMillis(inactivityTimeoutIn, logger);
if (inactivityTimeout > 0) {
filterChain.addLast(filterName, new WsCheckAliveFilter(inactivityTimeout, wsSession, logger));
if (logger.isDebugEnabled()) {
logger.debug(String.format("Configured WebSocket inactivity timeout (ws.inactivity.timeout) is %d milliseconds", inactivityTimeout));
}
}
}
public static void moveIfFeatureEnabled(IoFilterChain fromChain, IoFilterChain toChain,
String filterName, long inactivityTimeout, Logger logger) {
if (inactivityTimeout > 0) {
IoFilter filter = fromChain.remove(filterName);
if (logger.isDebugEnabled()) {
logger.debug(String.format("Moving %s filter %s to child filter chain", filterName, filter));
}
toChain.addLast(filterName, filter);
}
}
public static void updateExtensions(IoFilterChain filterChain) {
WsCheckAliveFilter filter = (WsCheckAliveFilter) filterChain.get(WsCheckAliveFilter.class);
if (filter != null) {
filter.init(filterChain);
}
}
WsCheckAliveFilter(long inactivityTimeout, Logger logger) {
this(inactivityTimeout, null, logger);
}
WsCheckAliveFilter(long inactivityTimeout, IoSession wsSession, Logger logger) {
assert inactivityTimeout > 0;
// KG-7057: Assume maximum possible round-trip time is half the configured inactivity timeout, but don't let it be 0
this.maxExpectedRtt = Math.max(inactivityTimeout / 2, 1);
this.pingDelay = maxExpectedRtt;
this.logger = logger;
this.wsSession = wsSession;
}
@Override
public void onPostAdd(IoFilterChain filterChain, String name, NextFilter nextFilter) throws Exception {
init(filterChain);
}
@Override
public void onPreRemove(IoFilterChain filterChain,
String name,
NextFilter nextFilter) {
filterChain.getSession().getConfig().setReaderIdleTime(0);
}
@Override
protected void doMessageReceived(NextFilter nextFilter, IoSessionEx session, Object message) throws Exception {
WsMessage wsMessage = (WsMessage) message;
switch (wsMessage.getKind()) {
case PONG:
if (nextAction != NextAction.PONG) {
if (logger.isTraceEnabled()) {
logger.trace(String.format("WsCheckAliveFilter: Unsolicited PONG received (%s), nextAction = %s",
wsMessage, nextAction));
}
// Unsolicited PONG from (rogue) client, ignore
return;
}
long roundTripTime = System.currentTimeMillis() - pingSentTime;
// Print out observed rtt to satisfy the curious
if (logger.isTraceEnabled()) {
logger.trace(String.format("WsCheckAliveFilter: PONG received (%s), round-trip time = %d msec, nextAction = %s",
wsMessage, roundTripTime, nextAction));
}
if (wsSession != null) {
// wse case where session is not accessible for management
AbstractWsBridgeSession.LAST_ROUND_TRIP_LATENCY.set(wsSession, roundTripTime);
AbstractWsBridgeSession.LAST_ROUND_TRIP_LATENCY_TIMESTAMP.set(wsSession, pingSentTime);
} else {
AbstractWsBridgeSession.LAST_ROUND_TRIP_LATENCY.set(session, roundTripTime);
AbstractWsBridgeSession.LAST_ROUND_TRIP_LATENCY_TIMESTAMP.set(session, pingSentTime);
}
schedulePing(session);
return;
default:
break;
}
nextFilter.messageReceived(session, message);
}
@Override
protected void doSessionIdle(NextFilter nextFilter, IoSessionEx session, IdleStatus status) throws Exception {
if (status == IdleStatus.READER_IDLE) {
switch (nextAction) {
case PONG:
logger.info("Client connection {} has been aborted because network connectivity has been lost", session);
// Disable idle timeout so it doesn't fire while we're closing
session.getConfig().setReaderIdleTime(0);
// Make sure we don't attempt WS CLOSE handshake in wsn case (want to close the transport immediately)
// Alter this once we eliminate WsCloseFilter
IoFilterChain filterChain;
if (session instanceof AbstractBridgeSession<?,?>
&& ((AbstractBridgeSession<?,?>) session).getLocalAddress().getOption(WsResourceAddress.LIGHTWEIGHT)) {
// Extended handshake case, WsCloseFilter is on the parent session
filterChain = ((AbstractBridgeSession<?,?>) session).getParent().getFilterChain();
}
else {
filterChain = session.getFilterChain();
}
if (filterChain.contains(WsAcceptor.CLOSE_FILTER)) {
filterChain.remove(WsAcceptor.CLOSE_FILTER);
}
session.close(true);
break;
case PING:
writePing(nextFilter, session);
}
}
super.doSessionIdle(nextFilter, session, status);
}
private static long getInactivityTimeoutMillis(long inactivityTimeoutIn, Logger logger) {
if (inactivityTimeoutIn == DISABLE_INACTIVITY_TIMEOUT) {
if (logger.isDebugEnabled()) {
logger.debug(String.format("WebSocket inactivity timeout is disabled (you can use accept-option or connect-option \"%s\" to enable it)",
"ws.inactivity.timeout"));
}
}
return inactivityTimeoutIn;
}
private void init(IoFilterChain filterChain) {
IoSessionEx session = (IoSessionEx)filterChain.getSession();
schedulePing(session);
}
private void schedulePing(IoSessionEx session) {
nextAction = NextAction.PING;
setReadIdleTimeInMillis(session, pingDelay);
}
private void setReadIdleTimeInMillis(IoSessionEx session, long delay) {
IoSessionConfigEx config = session.getConfig();
if (logger.isTraceEnabled()) {
logger.trace("WsCheckAliveFilter.setReadIdleTimeInMillis(" + delay + ")");
}
if (delay == 0L) {
config.setIdleTimeInMillis(IdleStatus.READER_IDLE, 1L); // don't pass in 0, that disables idle timeout
}
else {
config.setIdleTimeInMillis(IdleStatus.READER_IDLE, delay);
}
}
void pingWritten(long currentTimeMillis) {
pingSentTime = currentTimeMillis;
if (logger.isTraceEnabled()) {
logger.trace("WsCheckAliveFilter.pingWritten at time " + pingSentTime);
}
}
private void writePing(NextFilter nextFilter, IoSessionEx session) throws Exception {
WsPingMessage emptyPing = new WsPingMessage();
setReadIdleTimeInMillis(session, maxExpectedRtt);
nextAction = NextAction.PONG;
if (logger.isTraceEnabled()) {
logger.trace(String.format("Writing %s at time %d", emptyPing, System.currentTimeMillis()));
}
pingSentTime = System.currentTimeMillis();
session.write(emptyPing);
}
// For unit test only
void flipNextAction() {
nextAction = nextAction == NextAction.PING ? NextAction.PONG : NextAction.PING;
}
}
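// Editorial addition (hedged usage sketch): how the filter is typically installed once the
// ws.inactivity.timeout accept-option has been resolved. The filter name and timeout value
// below are assumptions for illustration; addIfFeatureEnabled is a no-op when the resolved
// timeout is not positive.
class WsCheckAliveFilterUsageSketch {
static void install(IoFilterChain filterChain, IoSessionEx wsSession, Logger logger) {
long inactivityTimeoutMillis = 30000L; // hypothetical 30-second timeout
WsCheckAliveFilter.addIfFeatureEnabled(filterChain, "ws#checkalive", inactivityTimeoutMillis, wsSession, logger);
}
}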
|
|
/*
* Copyright 2012 Cyril A. Karpenko
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.redshape.utils.config;
import com.redshape.utils.config.sources.FileSource;
import com.redshape.utils.config.sources.IConfigSource;
import com.redshape.utils.helpers.XMLHelper;
import org.apache.log4j.Logger;
import org.w3c.dom.*;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.*;
/**
* XML configuration files support
*
* @author nikelin
*/
public class XMLConfig extends AbstractTSConfig {
protected class OnChangeCallback implements IConfigSource.OnChangeCallback {
@Override
public void onChanged() {
try {
init();
} catch ( ConfigException e ) {
log.error( e.getMessage(), e );
}
}
}
private static final Logger log = Logger.getLogger(XMLConfig.class);
private XMLHelper xmlHelper;
private Element node;
public XMLConfig( XMLHelper helper, String filePath ) throws ConfigException {
try {
this.xmlHelper = helper;
this.source = new FileSource( this.getXmlHelper().getLoader().loadFile(filePath), this.createOnChangeCallback() );
this.init();
} catch ( IOException e ) {
throw new ConfigException( e.getMessage(), e );
}
}
public XMLConfig(IConfig parent, String name, String value) {
super(parent, name, value);
}
public XMLConfig(String name, String value) {
super(name, value);
}
@Deprecated
public XMLConfig( XMLHelper helper, File file ) throws ConfigException {
this(helper, new FileSource(file, null) );
}
public XMLConfig( XMLHelper helper, IConfigSource source) throws ConfigException {
this.xmlHelper = helper;
this.source = source;
this.source.setCallback( this.createOnChangeCallback() );
this.init();
}
public XMLConfig(IConfigSource source) throws ConfigException {
super(source);
}
protected IConfigSource.OnChangeCallback createOnChangeCallback() {
return new OnChangeCallback();
}
public void setXmlHelper(XMLHelper helper) {
this.xmlHelper = helper;
}
public XMLHelper getXmlHelper() {
return this.xmlHelper;
}
@Override
protected void actualInit() throws ConfigException {
try {
this.clear();
String data = this.source.read();
if ( data.isEmpty() ) {
return;
}
this.init( this,
this.getXmlHelper().buildDocumentByData(data)
.getDocumentElement()
);
} catch ( Throwable e ) {
throw new ConfigException( e.getMessage(), e );
}
}
protected void init( XMLConfig config, Element element ) throws ConfigException {
/**
* Initialize attributes
*/
NamedNodeMap attributes = element.getAttributes();
for ( int i = 0; i < attributes.getLength(); i++ ) {
Node attribute = attributes.item(i);
config.attributes.put( attribute.getNodeName(), attribute.getNodeValue() );
}
config.set( element.getTextContent() );
config.name = element.getNodeName();
/**
* Initialize child nodes
*/
Node child = element.getFirstChild();
while ( child != null ) {
if ( child.getNodeType() == Node.ELEMENT_NODE ) {
XMLConfig childConfig = (XMLConfig) this.createChild(child.getNodeName());
this.init( childConfig, (Element) child );
config.append(childConfig);
}
child = child.getNextSibling();
}
}
@Override
protected IConfig createNull() {
XMLConfig config = new XMLConfig(null, null, null);
config.nulled = true;
return config;
}
@Override
public IConfig createChild(String name) throws ConfigException {
return new XMLConfig(this, name, null);
}
@Override
public String toString() {
return this.name();
}
@Override
public String serialize() throws ConfigException {
try {
waitReady();
return this.getXmlHelper().parseToXml(this.toDomDocument());
} catch (Throwable e) {
log.error(e.getMessage(), e);
throw new ConfigException(e.getMessage(), e );
}
}
public Document toDomDocument() {
assert !this.isNull();
return this.node.getOwnerDocument();
}
public static void writeConfig(File file, XMLConfig config) throws IOException, ConfigException {
// Serialize first so a serialization failure does not truncate an existing file;
// try-with-resources guarantees the writer is closed even if the write fails.
String result = config.serialize();
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file)))) {
writer.write(result);
}
}
public static XMLConfig build( XMLHelper helper, String declaration ) throws ConfigException {
try {
XMLConfig config = new XMLConfig("config", null);
config.init( config, helper.buildDocumentByData(declaration).getDocumentElement() );
return config;
} catch ( SAXException e ) {
throw new ConfigException("XML data parsing failed", e );
} catch ( ParserConfigurationException e ) {
throw new ConfigException("XML data parsing failed", e );
} catch ( IOException e ) {
throw new ConfigException("I/O related exception", e );
}
}
}
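// Editorial addition (hedged usage sketch): building a config tree from an inline XML
// declaration via the static factory above. The XMLHelper instance and the sample XML are
// assumptions; attributes become config attributes and child elements become child configs.
class XMLConfigUsageSketch {
static XMLConfig fromDeclaration(XMLHelper helper) throws ConfigException {
return XMLConfig.build(helper, "<config><db host=\"localhost\">main</db></config>");
}
}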
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.ext.vp9;
import com.google.android.exoplayer.CodecCounters;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSourceTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.InputBuffer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import com.google.android.exoplayer.util.MimeTypes;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
/**
* Decodes and renders video using the native VP9 decoder.
*/
public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
/**
* Interface definition for a callback to be notified of {@link LibvpxVideoTrackRenderer} events.
*/
public interface EventListener {
/**
* Invoked to report the number of frames dropped by the renderer. Dropped frames are reported
* whenever the renderer is stopped having dropped frames, and optionally, whenever the count
* reaches a specified threshold whilst the renderer is started.
*
* @param count The number of dropped frames.
* @param elapsed The duration in milliseconds over which the frames were dropped. This
* duration is timed from when the renderer was started or from when dropped frames were
* last reported (whichever was more recent), and not from when the first of the reported
* drops occurred.
*/
void onDroppedFrames(int count, long elapsed);
/**
* Invoked each time there's a change in the size of the video being rendered.
*
* @param width The video width in pixels.
* @param height The video height in pixels.
*/
void onVideoSizeChanged(int width, int height);
/**
* Invoked when a frame is rendered to a surface for the first time following that surface
* having been set as the target for the renderer.
*
* @param surface The surface to which a first frame has been rendered.
*/
void onDrawnToSurface(Surface surface);
/**
* Invoked when one of the following happens: libvpx initialization failure, decoder error,
* renderer error.
*
* @param e The corresponding exception.
*/
void onDecoderError(VpxDecoderException e);
}
/**
* The type of a message that can be passed to an instance of this class via
* {@link ExoPlayer#sendMessage} or {@link ExoPlayer#blockingSendMessage}. The message object
* should be the target {@link Surface}, or null.
*/
public static final int MSG_SET_SURFACE = 1;
public static final int MSG_SET_VPX_SURFACE_VIEW = 2;
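  // Illustrative only (the player wiring below is assumed, not shown in this file): applications
  // typically hand the renderer its output target through the player's message mechanism, e.g.
  //   player.sendMessage(vp9Renderer, LibvpxVideoTrackRenderer.MSG_SET_SURFACE, surface);
  // or, when rendering YUV output to a GL surface view,
  //   player.sendMessage(vp9Renderer, LibvpxVideoTrackRenderer.MSG_SET_VPX_SURFACE_VIEW, surfaceView);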
public final CodecCounters codecCounters = new CodecCounters();
private final boolean scaleToFit;
private final Handler eventHandler;
private final EventListener eventListener;
private final int maxDroppedFrameCountToNotify;
private final MediaFormatHolder formatHolder;
private MediaFormat format;
private VpxDecoderWrapper decoder;
private InputBuffer inputBuffer;
private OutputBuffer outputBuffer;
private Bitmap bitmap;
private boolean drawnToSurface;
private boolean renderedFirstFrame;
private Surface surface;
private VpxVideoSurfaceView vpxVideoSurfaceView;
private boolean outputRgb;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean sourceIsReady;
private int previousWidth;
private int previousHeight;
private int droppedFrameCount;
private long droppedFrameAccumulationStartTimeMs;
/**
* @param source The upstream source from which the renderer obtains samples.
* @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
* rendering.
*/
public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit) {
this(source, scaleToFit, null, null, 0);
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
* rendering.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* invocations of {@link EventListener#onDroppedFrames(int, long)}.
*/
public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit,
Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
super(source);
this.scaleToFit = scaleToFit;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
previousWidth = -1;
previousHeight = -1;
formatHolder = new MediaFormatHolder();
}
@Override
protected boolean handlesTrack(MediaFormat mediaFormat) {
return MimeTypes.VIDEO_VP9.equalsIgnoreCase(mediaFormat.mimeType);
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (outputStreamEnded) {
return;
}
sourceIsReady = continueBufferingSource(positionUs);
checkForDiscontinuity(positionUs);
// Try and read a format if we don't have one already.
if (format == null && !readFormat(positionUs)) {
// We can't make progress without one.
return;
}
// If we don't have a decoder yet, we need to instantiate one.
// TODO: Add support for dynamic switching between one type of surface to another.
if (decoder == null) {
decoder = new VpxDecoderWrapper(outputRgb);
decoder.start();
}
// Rendering loop.
try {
processOutputBuffer(positionUs, elapsedRealtimeUs);
while (feedInputBuffer(positionUs)) {}
} catch (VpxDecoderException e) {
notifyDecoderError(e);
throw new ExoPlaybackException(e);
}
}
private void processOutputBuffer(long positionUs, long elapsedRealtimeUs)
throws VpxDecoderException {
if (outputStreamEnded) {
return;
}
if (outputBuffer == null) {
outputBuffer = decoder.dequeueOutputBuffer();
if (outputBuffer == null) {
return;
}
}
if (outputBuffer.flags == VpxDecoderWrapper.FLAG_END_OF_STREAM) {
outputStreamEnded = true;
releaseOutputBuffer();
return;
}
long elapsedSinceStartOfLoop = SystemClock.elapsedRealtime() * 1000 - elapsedRealtimeUs;
long timeToRenderUs = outputBuffer.timestampUs - positionUs - elapsedSinceStartOfLoop;
if (timeToRenderUs < -30000 || outputBuffer.timestampUs < positionUs) {
// Drop frame if we are too late.
codecCounters.droppedOutputBufferCount++;
droppedFrameCount++;
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
notifyAndResetDroppedFrameCount();
}
releaseOutputBuffer();
return;
}
// If we have not rendered any frame so far (either initially or immediately following a seek),
// render one frame irrespective of the state.
if (!renderedFirstFrame) {
renderBuffer();
renderedFirstFrame = true;
return;
}
// Do nothing if we are not playing or if we are too early to render the next frame.
if (getState() != TrackRenderer.STATE_STARTED || timeToRenderUs > 30000) {
return;
}
if (timeToRenderUs > 11000) {
try {
// Subtracting 10000 rather than 11000 ensures that the sleep time will be at least 1ms.
Thread.sleep((timeToRenderUs - 10000) / 1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
renderBuffer();
}
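  // Worked example of the timing windows above (illustrative numbers): with an output buffer
  // timestamped 20ms ahead of positionUs, timeToRenderUs is ~20_000, so the frame is neither
  // dropped (< -30_000) nor deferred (> 30_000); the thread sleeps (20_000 - 10_000) / 1000 = 10ms
  // and then renders. A buffer more than 30ms early is left for a later doSomeWork() pass, and one
  // more than 30ms late is dropped and counted against maxDroppedFrameCountToNotify.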
private void renderBuffer() throws VpxDecoderException {
codecCounters.renderedOutputBufferCount++;
notifyIfVideoSizeChanged(outputBuffer);
if (outputRgb) {
renderRgbFrame(outputBuffer, scaleToFit);
} else {
vpxVideoSurfaceView.renderFrame(outputBuffer);
}
if (!drawnToSurface) {
drawnToSurface = true;
notifyDrawnToSurface(surface);
}
releaseOutputBuffer();
}
private void releaseOutputBuffer() throws VpxDecoderException {
decoder.releaseOutputBuffer(outputBuffer);
outputBuffer = null;
}
private void renderRgbFrame(OutputBuffer outputBuffer, boolean scale) {
if (bitmap == null || bitmap.getWidth() != outputBuffer.width
|| bitmap.getHeight() != outputBuffer.height) {
bitmap = Bitmap.createBitmap(outputBuffer.width, outputBuffer.height, Bitmap.Config.RGB_565);
}
bitmap.copyPixelsFromBuffer(outputBuffer.data);
Canvas canvas = surface.lockCanvas(null);
if (scale) {
canvas.scale(((float) canvas.getWidth()) / outputBuffer.width,
((float) canvas.getHeight()) / outputBuffer.height);
}
canvas.drawBitmap(bitmap, 0, 0, null);
surface.unlockCanvasAndPost(canvas);
}
private boolean feedInputBuffer(long positionUs) throws VpxDecoderException {
if (inputStreamEnded) {
return false;
}
if (inputBuffer == null) {
inputBuffer = decoder.getInputBuffer();
if (inputBuffer == null) {
return false;
}
}
int result = readSource(positionUs, formatHolder, inputBuffer.sampleHolder,
false);
if (result == SampleSource.NOTHING_READ) {
return false;
}
if (result == SampleSource.DISCONTINUITY_READ) {
flushDecoder();
return true;
}
if (result == SampleSource.FORMAT_READ) {
format = formatHolder.format;
return true;
}
if (result == SampleSource.END_OF_STREAM) {
inputBuffer.flags = VpxDecoderWrapper.FLAG_END_OF_STREAM;
decoder.queueInputBuffer(inputBuffer);
inputBuffer = null;
inputStreamEnded = true;
return false;
}
inputBuffer.width = format.width;
inputBuffer.height = format.height;
decoder.queueInputBuffer(inputBuffer);
inputBuffer = null;
return true;
}
private void checkForDiscontinuity(long positionUs) {
if (decoder == null) {
return;
}
int result = readSource(positionUs, formatHolder, null, true);
if (result == SampleSource.DISCONTINUITY_READ) {
flushDecoder();
}
}
private void flushDecoder() {
inputBuffer = null;
outputBuffer = null;
decoder.flush();
}
@Override
protected boolean isEnded() {
return outputStreamEnded;
}
@Override
protected boolean isReady() {
return format != null && sourceIsReady;
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
super.seekTo(positionUs);
seekToInternal();
}
@Override
protected void onEnabled(int track, long positionUs, boolean joining)
throws ExoPlaybackException {
super.onEnabled(track, positionUs, joining);
seekToInternal();
}
private void seekToInternal() {
sourceIsReady = false;
inputStreamEnded = false;
outputStreamEnded = false;
renderedFirstFrame = false;
}
@Override
protected void onStarted() {
droppedFrameCount = 0;
droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
}
@Override
protected void onStopped() {
notifyAndResetDroppedFrameCount();
}
@Override
protected void onDisabled() throws ExoPlaybackException {
inputBuffer = null;
outputBuffer = null;
format = null;
try {
if (decoder != null) {
decoder.release();
decoder = null;
}
} finally {
super.onDisabled();
}
}
private boolean readFormat(long positionUs) {
int result = readSource(positionUs, formatHolder, null, false);
if (result == SampleSource.FORMAT_READ) {
format = formatHolder.format;
return true;
}
return false;
}
@Override
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
if (messageType == MSG_SET_SURFACE) {
surface = (Surface) message;
vpxVideoSurfaceView = null;
outputRgb = true;
} else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
vpxVideoSurfaceView = (VpxVideoSurfaceView) message;
surface = null;
outputRgb = false;
} else {
super.handleMessage(messageType, message);
}
}
private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
if (previousWidth == -1 || previousHeight == -1
|| previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {
previousWidth = outputBuffer.width;
previousHeight = outputBuffer.height;
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onVideoSizeChanged(outputBuffer.width, outputBuffer.height);
}
});
}
}
}
private void notifyAndResetDroppedFrameCount() {
if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
long now = SystemClock.elapsedRealtime();
final int countToNotify = droppedFrameCount;
final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
droppedFrameCount = 0;
droppedFrameAccumulationStartTimeMs = now;
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
}
});
}
}
private void notifyDrawnToSurface(final Surface surface) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDrawnToSurface(surface);
}
});
}
}
private void notifyDecoderError(final VpxDecoderException e) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDecoderError(e);
}
});
}
}
}
|
|
/*
* Copyright 2014-2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.lldp.impl;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onlab.packet.Ethernet;
import org.onosproject.cfg.ComponentConfigService;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.mastership.MastershipEvent;
import org.onosproject.mastership.MastershipListener;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.LinkKey;
import org.onosproject.net.Port;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.link.DefaultLinkDescription;
import org.onosproject.net.link.LinkProvider;
import org.onosproject.net.link.LinkProviderRegistry;
import org.onosproject.net.link.LinkProviderService;
import org.onosproject.net.link.LinkService;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketPriority;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.Dictionary;
import java.util.EnumSet;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.onlab.packet.Ethernet.TYPE_BSN;
import static org.onlab.packet.Ethernet.TYPE_LLDP;
import static org.onlab.util.Tools.get;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.net.Link.Type.DIRECT;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Provider which uses LLDP and BDDP packets to detect network infrastructure links.
*/
@Component(immediate = true)
public class LLDPLinkProvider extends AbstractProvider implements LinkProvider {
private static final String PROVIDER_NAME = "org.onosproject.provider.lldp";
private static final String FORMAT =
"Settings: enabled={}, useBDDP={}, probeRate={}, " +
"staleLinkAge={}, lldpSuppression={}";
private final Logger log = getLogger(getClass());
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected CoreService coreService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected LinkProviderRegistry providerRegistry;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected DeviceService deviceService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected LinkService linkService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected PacketService packetService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected MastershipService masterService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected ComponentConfigService cfgService;
private LinkProviderService providerService;
private ScheduledExecutorService executor;
// TODO: Add sanity checking for the configurable params based on the delays
private static final long DEVICE_SYNC_DELAY = 5;
private static final long LINK_PRUNER_DELAY = 3;
private static final String PROP_ENABLED = "enabled";
@Property(name = PROP_ENABLED, boolValue = true,
label = "If false, link discovery is disabled")
private boolean enabled = false;
private static final String PROP_USE_BDDP = "useBDDP";
@Property(name = PROP_USE_BDDP, boolValue = true,
label = "Use BDDP for link discovery")
private boolean useBDDP = true;
private static final String PROP_PROBE_RATE = "probeRate";
private static final int DEFAULT_PROBE_RATE = 3_000;
@Property(name = PROP_PROBE_RATE, intValue = DEFAULT_PROBE_RATE,
label = "LLDP and BDDP probe rate specified in millis")
private int probeRate = DEFAULT_PROBE_RATE;
private static final String PROP_STALE_LINK_AGE = "staleLinkAge";
private static final int DEFAULT_STALE_LINK_AGE = 10_000;
@Property(name = PROP_STALE_LINK_AGE, intValue = DEFAULT_STALE_LINK_AGE,
label = "Number of millis beyond which links will be considered stale")
private int staleLinkAge = DEFAULT_STALE_LINK_AGE;
// FIXME: convert to use network config subsystem instead
private static final String PROP_LLDP_SUPPRESSION = "lldpSuppression";
private static final String DEFAULT_LLDP_SUPPRESSION_CONFIG = "../config/lldp_suppression.json";
@Property(name = PROP_LLDP_SUPPRESSION, value = DEFAULT_LLDP_SUPPRESSION_CONFIG,
label = "Path to LLDP suppression configuration file")
private String lldpSuppression = DEFAULT_LLDP_SUPPRESSION_CONFIG;
private final DiscoveryContext context = new InternalDiscoveryContext();
private final InternalRoleListener roleListener = new InternalRoleListener();
private final InternalDeviceListener deviceListener = new InternalDeviceListener();
private final InternalPacketProcessor packetProcessor = new InternalPacketProcessor();
// Device link discovery helpers.
protected final Map<DeviceId, LinkDiscovery> discoverers = new ConcurrentHashMap<>();
// Most recent time a tracked link was seen; links are tracked if their
// destination connection point is mastered by this controller instance.
private final Map<LinkKey, Long> linkTimes = Maps.newConcurrentMap();
private SuppressionRules rules;
private ApplicationId appId;
/**
     * Creates an LLDP link provider.
*/
public LLDPLinkProvider() {
super(new ProviderId("lldp", PROVIDER_NAME));
}
@Activate
public void activate(ComponentContext context) {
cfgService.registerProperties(getClass());
appId = coreService.registerApplication(PROVIDER_NAME);
modified(context);
log.info("Started");
}
@Deactivate
public void deactivate() {
cfgService.unregisterProperties(getClass(), false);
disable();
log.info("Stopped");
}
@Modified
public void modified(ComponentContext context) {
Dictionary<?, ?> properties = context != null ? context.getProperties() : new Properties();
boolean newEnabled, newUseBddp;
int newProbeRate, newStaleLinkAge;
String newLldpSuppression;
try {
String s = get(properties, PROP_ENABLED);
newEnabled = isNullOrEmpty(s) || Boolean.parseBoolean(s.trim());
s = get(properties, PROP_USE_BDDP);
newUseBddp = isNullOrEmpty(s) || Boolean.parseBoolean(s.trim());
s = get(properties, PROP_PROBE_RATE);
newProbeRate = isNullOrEmpty(s) ? probeRate : Integer.parseInt(s.trim());
s = get(properties, PROP_STALE_LINK_AGE);
newStaleLinkAge = isNullOrEmpty(s) ? staleLinkAge : Integer.parseInt(s.trim());
s = get(properties, PROP_LLDP_SUPPRESSION);
newLldpSuppression = isNullOrEmpty(s) ? DEFAULT_LLDP_SUPPRESSION_CONFIG : s;
} catch (NumberFormatException e) {
log.warn(e.getMessage());
newEnabled = enabled;
newUseBddp = useBDDP;
newProbeRate = probeRate;
newStaleLinkAge = staleLinkAge;
newLldpSuppression = lldpSuppression;
}
boolean wasEnabled = enabled;
enabled = newEnabled;
useBDDP = newUseBddp;
probeRate = newProbeRate;
staleLinkAge = newStaleLinkAge;
lldpSuppression = newLldpSuppression;
if (!wasEnabled && enabled) {
enable();
} else if (wasEnabled && !enabled) {
disable();
}
log.info(FORMAT, enabled, useBDDP, probeRate, staleLinkAge, lldpSuppression);
}
/**
* Enables link discovery processing.
*/
private void enable() {
providerService = providerRegistry.register(this);
masterService.addListener(roleListener);
deviceService.addListener(deviceListener);
packetService.addProcessor(packetProcessor, PacketProcessor.advisor(0));
loadSuppressionRules();
loadDevices();
executor = newSingleThreadScheduledExecutor(groupedThreads("onos/link", "discovery-%d"));
executor.scheduleAtFixedRate(new SyncDeviceInfoTask(),
DEVICE_SYNC_DELAY, DEVICE_SYNC_DELAY, SECONDS);
executor.scheduleAtFixedRate(new LinkPrunerTask(),
LINK_PRUNER_DELAY, LINK_PRUNER_DELAY, SECONDS);
requestIntercepts();
}
/**
* Disables link discovery processing.
*/
private void disable() {
withdrawIntercepts();
providerRegistry.unregister(this);
masterService.removeListener(roleListener);
deviceService.removeListener(deviceListener);
packetService.removeProcessor(packetProcessor);
if (executor != null) {
executor.shutdownNow();
}
discoverers.values().forEach(LinkDiscovery::stop);
discoverers.clear();
providerService = null;
}
/**
* Loads available devices and registers their ports to be probed.
*/
private void loadDevices() {
for (Device device : deviceService.getAvailableDevices()) {
if (rules.isSuppressed(device)) {
log.debug("LinkDiscovery from {} disabled by configuration", device.id());
continue;
}
LinkDiscovery ld = new LinkDiscovery(device, context);
discoverers.put(device.id(), ld);
addPorts(ld, device.id());
}
}
/**
* Adds ports of the specified device to the specified discovery helper.
*/
private void addPorts(LinkDiscovery discoverer, DeviceId deviceId) {
for (Port p : deviceService.getPorts(deviceId)) {
if (rules.isSuppressed(p)) {
continue;
}
if (!p.number().isLogical()) {
discoverer.addPort(p);
}
}
}
/**
* Loads LLDP suppression rules.
*/
private void loadSuppressionRules() {
// FIXME: convert to use network configuration
SuppressionRulesStore store = new SuppressionRulesStore(lldpSuppression);
try {
log.info("Reading suppression rules from {}", lldpSuppression);
rules = store.read();
} catch (IOException e) {
log.info("Failed to load {}, using built-in rules", lldpSuppression);
// default rule to suppress ROADM to maintain compatibility
rules = new SuppressionRules(ImmutableSet.of(),
EnumSet.of(Device.Type.ROADM),
ImmutableMap.of());
}
// should refresh discoverers when we need dynamic reconfiguration
}
/**
* Requests packet intercepts.
*/
private void requestIntercepts() {
TrafficSelector.Builder selector = DefaultTrafficSelector.builder();
selector.matchEthType(TYPE_LLDP);
packetService.requestPackets(selector.build(), PacketPriority.CONTROL, appId);
selector.matchEthType(TYPE_BSN);
if (useBDDP) {
packetService.requestPackets(selector.build(), PacketPriority.CONTROL, appId);
} else {
packetService.cancelPackets(selector.build(), PacketPriority.CONTROL, appId);
}
}
/**
* Withdraws packet intercepts.
*/
private void withdrawIntercepts() {
TrafficSelector.Builder selector = DefaultTrafficSelector.builder();
selector.matchEthType(TYPE_LLDP);
packetService.cancelPackets(selector.build(), PacketPriority.CONTROL, appId);
selector.matchEthType(TYPE_BSN);
packetService.cancelPackets(selector.build(), PacketPriority.CONTROL, appId);
}
/**
* Processes device mastership role changes.
*/
private class InternalRoleListener implements MastershipListener {
@Override
public void event(MastershipEvent event) {
if (MastershipEvent.Type.BACKUPS_CHANGED.equals(event.type())) {
// only need new master events
return;
}
DeviceId deviceId = event.subject();
Device device = deviceService.getDevice(deviceId);
if (device == null) {
log.debug("Device {} doesn't exist, or isn't there yet", deviceId);
return;
}
if (rules.isSuppressed(device)) {
return;
}
discoverers.computeIfAbsent(deviceId, k -> new LinkDiscovery(device, context));
}
}
/**
* Processes device events.
*/
private class InternalDeviceListener implements DeviceListener {
@Override
public void event(DeviceEvent event) {
LinkDiscovery ld;
Device device = event.subject();
Port port = event.port();
if (device == null) {
log.error("Device is null.");
return;
}
log.trace("{} {} {}", event.type(), event.subject(), event);
final DeviceId deviceId = device.id();
switch (event.type()) {
case DEVICE_ADDED:
case DEVICE_UPDATED:
synchronized (discoverers) {
ld = discoverers.get(deviceId);
if (ld == null) {
if (rules != null && rules.isSuppressed(device)) {
log.debug("LinkDiscovery from {} disabled by configuration", device.id());
return;
}
log.debug("Device added ({}) {}", event.type(), deviceId);
discoverers.put(deviceId, new LinkDiscovery(device, context));
} else {
if (ld.isStopped()) {
log.debug("Device restarted ({}) {}", event.type(), deviceId);
ld.start();
}
}
}
break;
case PORT_ADDED:
case PORT_UPDATED:
if (port.isEnabled()) {
ld = discoverers.get(deviceId);
if (ld == null) {
return;
}
if (rules.isSuppressed(port)) {
log.debug("LinkDiscovery from {}@{} disabled by configuration",
port.number(), device.id());
return;
}
if (!port.number().isLogical()) {
log.debug("Port added {}", port);
ld.addPort(port);
}
} else {
log.debug("Port down {}", port);
ConnectPoint point = new ConnectPoint(deviceId, port.number());
providerService.linksVanished(point);
}
break;
case PORT_REMOVED:
log.debug("Port removed {}", port);
ConnectPoint point = new ConnectPoint(deviceId, port.number());
providerService.linksVanished(point);
break;
case DEVICE_REMOVED:
case DEVICE_SUSPENDED:
log.debug("Device removed {}", deviceId);
ld = discoverers.get(deviceId);
if (ld == null) {
return;
}
ld.stop();
providerService.linksVanished(deviceId);
break;
case DEVICE_AVAILABILITY_CHANGED:
ld = discoverers.get(deviceId);
if (ld == null) {
return;
}
if (deviceService.isAvailable(deviceId)) {
log.debug("Device up {}", deviceId);
ld.start();
} else {
providerService.linksVanished(deviceId);
log.debug("Device down {}", deviceId);
ld.stop();
}
break;
case PORT_STATS_UPDATED:
break;
default:
log.debug("Unknown event {}", event);
}
}
}
/**
* Processes incoming packets.
*/
private class InternalPacketProcessor implements PacketProcessor {
@Override
public void process(PacketContext context) {
if (context == null || context.isHandled()) {
return;
}
Ethernet eth = context.inPacket().parsed();
if (eth == null || (eth.getEtherType() != TYPE_LLDP && eth.getEtherType() != TYPE_BSN)) {
return;
}
LinkDiscovery ld = discoverers.get(context.inPacket().receivedFrom().deviceId());
if (ld == null) {
return;
}
if (ld.handleLLDP(context)) {
context.block();
}
}
}
/**
* Auxiliary task to keep device ports up to date.
*/
private final class SyncDeviceInfoTask implements Runnable {
@Override
public void run() {
if (Thread.currentThread().isInterrupted()) {
log.info("Interrupted, quitting");
return;
}
// check what deviceService sees, to see if we are missing anything
try {
for (Device dev : deviceService.getDevices()) {
if (rules.isSuppressed(dev)) {
continue;
}
DeviceId did = dev.id();
synchronized (discoverers) {
LinkDiscovery ld = discoverers
.computeIfAbsent(did, k -> new LinkDiscovery(dev, context));
addPorts(ld, did);
}
}
} catch (Exception e) {
// Catch all exceptions to avoid task being suppressed
log.error("Exception thrown during synchronization process", e);
}
}
}
/**
* Auxiliary task for pruning stale links.
*/
private class LinkPrunerTask implements Runnable {
@Override
public void run() {
if (Thread.currentThread().isInterrupted()) {
log.info("Interrupted, quitting");
return;
}
try {
                // TODO: There is still a slight possibility of a mastership
                // change occurring right as a link goes stale, in which case
                // the stale link would not be pruned.
Maps.filterEntries(linkTimes, e -> {
if (!masterService.isLocalMaster(e.getKey().dst().deviceId())) {
return true;
}
if (isStale(e.getValue())) {
providerService.linkVanished(new DefaultLinkDescription(e.getKey().src(),
e.getKey().dst(),
DIRECT));
return true;
}
return false;
}).clear();
} catch (Exception e) {
// Catch all exceptions to avoid task being suppressed
log.error("Exception thrown during link pruning process", e);
}
}
private boolean isStale(long lastSeen) {
return lastSeen < System.currentTimeMillis() - staleLinkAge;
}
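        // Worked example using the defaults above: with staleLinkAge = 10_000 ms, a link whose
        // destination connect point is mastered locally and that was last touched more than 10
        // seconds ago is reported as vanished on the next pruner pass (the pruner runs every
        // LINK_PRUNER_DELAY = 3 seconds).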
}
/**
* Provides processing context for the device link discovery helpers.
*/
private class InternalDiscoveryContext implements DiscoveryContext {
@Override
public MastershipService mastershipService() {
return masterService;
}
@Override
public LinkProviderService providerService() {
return providerService;
}
@Override
public PacketService packetService() {
return packetService;
}
@Override
public long probeRate() {
return probeRate;
}
@Override
public boolean useBDDP() {
return useBDDP;
}
@Override
public void touchLink(LinkKey key) {
linkTimes.put(key, System.currentTimeMillis());
}
}
}
|
|
package hadoopGIS.examples;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import hadoopGIS.GIS;
import hadoopGIS.GISInputFormat;
import hadoopGIS.GISOutputFormat;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.filecache.DistributedCache;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;
import com.vividsolutions.jts.io.ParseException;
public class TestGIS extends Configured implements Tool, Mapper<LongWritable, GIS, LongWritable, LongWritable>, Reducer<LongWritable, LongWritable, LongWritable, LongWritable>
{
HashMap<Integer, Geometry> C;
HashMap<Integer, Geometry> I;
HashMap<Integer, Geometry> G;
String parcelDataFile;
ArrayList<String> parcelColumnList;
// For Mapper interface
public void map(LongWritable key, GIS value, OutputCollector<LongWritable, LongWritable> output, Reporter reporter) throws IOException
{
double minDistance = Double.MAX_VALUE, currDistance;
int closestParcel = -1;
/*
GIS myGIS;
String line;
ArrayList<String> parcelColumnList = new ArrayList<String> ();
BufferedReader reader = new BufferedReader(new FileReader(parcelDataFile));
while ((line = reader.readLine()) != null)
{
myGIS.update (new Text (line), parcelColumnList);
currDistance = myGIS.geometry.distance (value.geometry);
if (currDistance < minDistance)
{
minDistance = currDistance;
closestParcel = myGIS.attributes.get("id");
}
}
*/
HashMap<Integer,Geometry> parcels = null;
if (value.attributes.get ("devtype").equals ("C"))
parcels = C;
else if (value.attributes.get ("devtype").equals ("G"))
parcels = G;
else if (value.attributes.get ("devtype").equals ("I"))
parcels = I;
		// Find the nearest parcel of the matching development type.
		for (Map.Entry<Integer, Geometry> entry : parcels.entrySet())
		{
			currDistance = value.geometry.distance (entry.getValue ());
			if (currDistance < minDistance)
			{
				minDistance = currDistance;
				closestParcel = entry.getKey ().intValue ();
			}
		}
LongWritable lngClosestParcel = new LongWritable (closestParcel);
output.collect(key, lngClosestParcel);
}
// For Reducer interface
	public void reduce(LongWritable key, Iterator<LongWritable> values, OutputCollector<LongWritable, LongWritable> output, Reporter reporter) throws IOException
	{
		// The Reducer interface declares IOException, so collection failures propagate to the framework.
		while (values.hasNext()) {
			output.collect(key, values.next());
		}
	}
// For Mapper (via JobConfigurable) interface
public void configure(JobConf job)
{
String columnFilename = job.get ("parcelColumnNames");
String dataFilename = job.get ("parcelData");
Path[] distCacheFiles = new Path[0];
try { distCacheFiles = DistributedCache.getLocalCacheFiles(job); }
catch (IOException e) { return; }
parcelColumnList = new ArrayList<String>();
G = new HashMap<Integer, Geometry>();
C = new HashMap<Integer, Geometry>();
I = new HashMap<Integer, Geometry>();
BufferedReader reader = null;
String line;
for (int i=0; i<distCacheFiles.length; i++)
{
if (distCacheFiles [i].getName ().equals (columnFilename))
{
try
{
reader = new BufferedReader(new FileReader(distCacheFiles [i].toString ()));
while ((line = reader.readLine()) != null)
parcelColumnList.add (line);
}
				catch (Exception e) { /* unreadable column-name cache file; the column list simply stays empty */ }
break;
}
}
GIS myGIS = new GIS ();
for (int i=0; i<distCacheFiles.length; i++)
{
if (distCacheFiles [i].getName ().equals (dataFilename))
{
try
{
reader = new BufferedReader(new FileReader(distCacheFiles [i].toString ()));
while ((line = reader.readLine()) != null)
{
myGIS.update (new Text (line), parcelColumnList);
if (myGIS.attributes.get ("devtype").equals("C"))
{
Geometry geom = (Geometry) myGIS.geometry.clone ();
C.put (new Integer (myGIS.attributes.get ("id")), geom);
}
else if (myGIS.attributes.get ("devtype").equals("I"))
{
Geometry geom = (Geometry) myGIS.geometry.clone ();
I.put (new Integer (myGIS.attributes.get ("id")), geom);
}
else if (myGIS.attributes.get ("devtype").equals("G"))
{
Geometry geom = (Geometry) myGIS.geometry.clone ();
G.put (new Integer (myGIS.attributes.get ("id")), geom);
}
}
}
				catch (Exception e) { /* unreadable parcel data cache file; the lookup maps simply stay empty */ }
break;
}
}
}
// For Mapper (via Closeable) interface
public void close() {}
// For Tool interface
public int run(String[] args) throws Exception
{
JobConf job = new JobConf(new Configuration(), this.getClass());
GISInputFormat.setInputPaths(job, new Path("/user/alaster/gis/jobs.gis"));
GISOutputFormat.setOutputPath(job, new Path("output"));
job.setJobName("test GIS - Large Jobs/Parcels");
job.setMapperClass(this.getClass());
//job.setCombinerClass(this.getClass());
job.setReducerClass(this.getClass());
job.setInputFormat(GISInputFormat.class);
//job.setOutputFormat(TextOutputFormat.class);
job.setOutputValueClass(LongWritable.class);
Path p = new Path ("/user/alaster/gis/jobs.names");
DistributedCache.addCacheFile (p.toUri (), job);
job.set ("columnNames", p.getName ());
p = new Path ("/user/alaster/gis/parcels.names");
DistributedCache.addCacheFile (p.toUri (), job);
job.set ("parcelColumnNames", p.getName ());
p = new Path ("/user/alaster/gis/parcels.gis");
DistributedCache.addCacheFile (p.toUri (), job);
job.set ("parcelData", p.getName ());
return JobClient.runJob(job).getJobState();
}
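	// Illustrative only (jar name assumed, not part of this project): once packaged, the Tool is
	// typically launched through ToolRunner with
	//   hadoop jar hadoopGIS.jar hadoopGIS.examples.TestGIS
	// using the hard-coded HDFS input, output and cache paths set in run() above.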
// Hadoop runner requires this to be a static void!
// Thus must use exit instead of return
// Also must directly use the class name instead of figuring it out
public static void main(String[] args) throws Exception {
System.exit(ToolRunner.run(new Configuration(), new TestGIS(), args));
}
}
|
|
package nam.ui.src.main.java.admin.data;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import nam.ProjectLevelHelper;
import nam.model.Element;
import nam.model.ModelLayerHelper;
import nam.model.Type;
import nam.model.util.ElementUtil;
import nam.model.util.ViewUtil;
import nam.ui.Relation;
import nam.ui.View;
import org.aries.util.NameUtil;
import aries.codegen.AbstractBeanBuilder;
import aries.codegen.util.Buf;
import aries.generation.engine.GenerationContext;
import aries.generation.model.AnnotationUtil;
import aries.generation.model.ModelClass;
import aries.generation.model.ModelConstructor;
import aries.generation.model.ModelOperation;
import aries.generation.model.ModelReference;
/**
 * Builds Element Record Section {@link ModelClass} objects (Overview, Identification, Configuration, Documentation and member-list sections) given an {@link Element} specification as input.
*
* Model construction properties:
* <ul>
* <li>generateJavadoc</li>
* </ul>
*
* @author tfisher
*/
public class ElementRecordSectionBuilder extends AbstractBeanBuilder {
public ElementRecordSectionBuilder(GenerationContext context) {
super(context);
initialize();
}
protected void initialize() {
}
public Collection<ModelClass> buildClasses(Type type) throws Exception {
String elementClassName = ModelLayerHelper.getElementClassName(type);
List<ModelClass> modelClasses = new ArrayList<ModelClass>();
if (!ElementUtil.isEnumeration(type)) {
modelClasses.add(buildOverviewSectionClass(type));
modelClasses.add(buildIdentificationSectionClass(type));
modelClasses.add(buildConfigurationSectionClass(type));
modelClasses.add(buildDocumentationSectionClass(type));
}
View view = context.getModule().getView();
Relation relation = ViewUtil.getMemberOfRelation(view, elementClassName);
if (relation != null) {
List<String> children = relation.getType();
Iterator<String> iterator = children.iterator();
while (iterator.hasNext()) {
String child = iterator.next();
String childNameCapped = NameUtil.capName(child);
String childNameCappedPlural = NameUtil.toPlural(childNameCapped);
modelClasses.add(buildClass(type, childNameCappedPlural));
}
}
return modelClasses;
}
protected ModelClass buildOverviewSectionClass(Type type) throws Exception {
return buildClass(type, "Overview");
}
protected ModelClass buildIdentificationSectionClass(Type type) throws Exception {
return buildClass(type, "Identification");
}
protected ModelClass buildConfigurationSectionClass(Type type) throws Exception {
return buildClass(type, "Configuration");
}
protected ModelClass buildDocumentationSectionClass(Type type) throws Exception {
return buildClass(type, "Documentation");
}
public ModelClass buildClass(Type type, String section) throws Exception {
String namespace = context.getModule().getNamespace();
String elementName = ModelLayerHelper.getElementNameCapped(type);
String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(type);
String elementClassName = ModelLayerHelper.getElementClassName(type);
String elementPackageName = ModelLayerHelper.getElementPackageName(type);
//String namespacePackageName = ProjectLevelHelper.getPackageName(namespace);
//String packageName = elementPackageName + ".ui." + elementNameUncapped;
String packageName = elementPackageName + "." + elementNameUncapped;
String className = elementName + "Record_" + section + "Section";
ModelClass modelClass = createModelClass(namespace, packageName, className);
modelClass.setParentClassName("AbstractWizardPage<"+elementClassName+">");
modelClass.addImplementedInterface("Serializable");
initializeClass(modelClass, type, section);
return modelClass;
}
public void initializeClass(ModelClass modelClass, Type type, String section) throws Exception {
initializeImportedClasses(modelClass, type);
initializeClassAnnotations(modelClass, type, section);
initializeClassConstructors(modelClass, type, section);
initializeInstanceFields(modelClass, type, section);
initializeInstanceOperations(modelClass, type, section);
}
protected void initializeImportedClasses(ModelClass modelClass, Type type) {
String elementPackageName = ModelLayerHelper.getElementPackageName(type);
String elementClassName = ModelLayerHelper.getElementClassName(type);
modelClass.addImportedClass(elementPackageName + "." + elementClassName);
if (ElementUtil.isEnumeration(type)) {
//nothing for now
} else if (ElementUtil.isElement(type)) {
modelClass.addImportedClass(elementPackageName + ".util." + elementClassName + "Util");
}
modelClass.addImportedClass("org.apache.commons.lang.StringUtils");
modelClass.addImportedClass("org.aries.ui.AbstractWizardPage");
modelClass.addImportedClass("java.io.Serializable");
modelClass.addImportedClass("javax.enterprise.context.SessionScoped");
//modelClass.addImportedClass("javax.inject.Inject");
modelClass.addImportedClass("javax.inject.Named");
}
/*
* Class Annotations
*/
protected void initializeClassAnnotations(ModelClass modelClass, Type element, String section) throws Exception {
String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(element);
modelClass.getClassAnnotations().add(AnnotationUtil.createSessionScopedAnnotation());
modelClass.getClassAnnotations().add(AnnotationUtil.createNamedAnnotation(elementNameUncapped + section + "Section"));
}
/*
* Class Constructor(s)
*/
protected void initializeClassConstructors(ModelClass modelClass, Type element, String section) throws Exception {
ModelConstructor modelConstructor = createConstructor(element, section);
modelClass.addInstanceConstructor(modelConstructor);
}
protected ModelConstructor createConstructor(Type element, String section) {
String elementClassName = ModelLayerHelper.getElementClassName(element);
String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(element);
String sectionUncapped = NameUtil.uncapName(section);
ModelConstructor modelConstructor = new ModelConstructor();
modelConstructor.setModifiers(Modifier.PUBLIC);
Buf buf = new Buf();
buf.putLine2("setName(\""+section+"\");");
buf.putLine2("setUrl(\""+sectionUncapped+"\");");
modelConstructor.addInitialSource(buf.get());
return modelConstructor;
}
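	// Rough shape of the generated constructor for a hypothetical element named "Project" and the
	// "Overview" section (a sketch inferred from createConstructor() above, not captured generator output):
	//   public ProjectRecord_OverviewSection() {
	//       setName("Overview");
	//       setUrl("overview");
	//   }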
/*
* Instance fields
*/
protected void initializeInstanceFields(ModelClass modelClass, Type element, String section) throws Exception {
modelClass.addInstanceReference(createReference_Element(element));
}
public ModelReference createReference_Element(Type element) {
String namespace = context.getModule().getNamespace();
String elementName = ModelLayerHelper.getElementNameUncapped(element);
String packageName = ProjectLevelHelper.getPackageName(namespace);
String className = ModelLayerHelper.getElementClassName(element);
ModelReference modelReference = new ModelReference();
modelReference.setModifiers(Modifier.PRIVATE);
modelReference.setPackageName(packageName);
modelReference.setClassName(className);
modelReference.setName(elementName);
modelReference.setGenerateGetter(true);
modelReference.setGenerateSetter(true);
return modelReference;
}
/*
* Instance operations
*/
protected void initializeInstanceOperations(ModelClass modelClass, Type element, String section) throws Exception {
modelClass.addInstanceOperation(createOperation_initialize(element, section));
modelClass.addInstanceOperation(createOperation_validate(element, section));
}
protected ModelOperation createOperation_initialize(Type element, String section) throws Exception {
String elementClassName = ModelLayerHelper.getElementClassName(element);
String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(element);
//TODO this is "too crude" - externalize this logic for all buttons
boolean backEnabled = !section.equals("Identification") && !section.equals("Overview");
boolean populateVisible = section.equals("Identification");
boolean populateEnabled = populateVisible; //&& testModeEnabled;
ModelOperation modelOperation = new ModelOperation();
modelOperation.addAnnotation(AnnotationUtil.createOverrideAnnotation());
modelOperation.setModifiers(Modifier.PUBLIC);
modelOperation.setName("initialize");
modelOperation.addParameter(createParameter(elementClassName, elementNameUncapped));
Buf buf = new Buf();
buf.putLine2("set"+elementClassName+"("+elementNameUncapped+");");
buf.putLine2("setEnabled(true);");
buf.putLine2("setBackEnabled("+backEnabled+");");
buf.putLine2("setNextEnabled(true);");
buf.putLine2("setFinishEnabled(false);");
if (populateVisible)
buf.putLine2("setPopulateVisible(true);");
if (populateEnabled)
buf.putLine2("setPopulateEnabled(true);");
buf.putLine2("super.initialize("+elementNameUncapped+");");
modelOperation.addInitialSource(buf.get());
return modelOperation;
}
protected ModelOperation createOperation_validate(Type element, String section) throws Exception {
String elementClassName = ModelLayerHelper.getElementClassName(element);
String elementNameCapped = ModelLayerHelper.getElementNameCapped(element);
String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(element);
ModelOperation modelOperation = new ModelOperation();
modelOperation.addAnnotation(AnnotationUtil.createOverrideAnnotation());
modelOperation.setModifiers(Modifier.PUBLIC);
modelOperation.setName("validate");
Buf buf = new Buf();
buf.putLine2("if ("+elementNameUncapped+" == null) {");
buf.putLine2(" validator.missing(\""+elementNameCapped+"\");");
buf.putLine2("} else {");
buf.putLine2("}");
modelOperation.addInitialSource(buf.get());
return modelOperation;
}
}
|
|
package com.education.mora_ice_12.i_jiffy;
import android.support.v7.app.ActionBarActivity;
import android.app.Activity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
/**
* Fragment used for managing interactions for and presentation of a navigation drawer.
* See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
* design guidelines</a> for a complete explanation of the behaviors implemented here.
*/
public class NavigationDrawerFragment extends Fragment {
/**
* Remember the position of the selected item.
*/
private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
/**
* Per the design guidelines, you should show the drawer on launch until the user manually
* expands it. This shared preference tracks this.
*/
private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
/**
* A pointer to the current callbacks instance (the Activity).
*/
private NavigationDrawerCallbacks mCallbacks;
/**
* Helper component that ties the action bar to the navigation drawer.
*/
private ActionBarDrawerToggle mDrawerToggle;
private DrawerLayout mDrawerLayout;
private ListView mDrawerListView;
private View mFragmentContainerView;
private int mCurrentSelectedPosition = 0;
private boolean mFromSavedInstanceState;
private boolean mUserLearnedDrawer;
public NavigationDrawerFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Read in the flag indicating whether or not the user has demonstrated awareness of the
// drawer. See PREF_USER_LEARNED_DRAWER for details.
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);
if (savedInstanceState != null) {
mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
mFromSavedInstanceState = true;
}
// Select either the default item (0) or the last selected item.
selectItem(mCurrentSelectedPosition);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Indicate that this fragment would like to influence the set of actions in the action bar.
setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
mDrawerListView = (ListView) inflater.inflate(
R.layout.fragment_navigation_drawer, container, false);
mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
selectItem(position);
}
});
mDrawerListView.setAdapter(new ArrayAdapter<String>(
getActionBar().getThemedContext(),
android.R.layout.simple_list_item_activated_1,
android.R.id.text1,
new String[]{
getString(R.string.title_section1),
getString(R.string.title_section2),
getString(R.string.title_section3),
getString(R.string.title_section4),
getString(R.string.title_section5),
getString(R.string.title_section6),
}));
mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
return mDrawerListView;
}
public boolean isDrawerOpen() {
return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
}
/**
* Users of this fragment must call this method to set up the navigation drawer interactions.
*
* @param fragmentId The android:id of this fragment in its activity's layout.
* @param drawerLayout The DrawerLayout containing this fragment's UI.
*/
public void setUp(int fragmentId, DrawerLayout drawerLayout) {
mFragmentContainerView = getActivity().findViewById(fragmentId);
mDrawerLayout = drawerLayout;
// set a custom shadow that overlays the main content when the drawer opens
mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
// set up the drawer's list view with items and click listener
ActionBar actionBar = getActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setHomeButtonEnabled(true);
        // ActionBarDrawerToggle ties together the proper interactions
// between the navigation drawer and the action bar app icon.
mDrawerToggle = new ActionBarDrawerToggle(
getActivity(), /* host Activity */
mDrawerLayout, /* DrawerLayout object */
R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */
R.string.navigation_drawer_open, /* "open drawer" description for accessibility */
R.string.navigation_drawer_close /* "close drawer" description for accessibility */
) {
@Override
public void onDrawerClosed(View drawerView) {
super.onDrawerClosed(drawerView);
if (!isAdded()) {
return;
}
getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
}
@Override
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
if (!isAdded()) {
return;
}
if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent the
                    // navigation drawer from auto-showing again in the future.
mUserLearnedDrawer = true;
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(getActivity());
sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
}
getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
}
};
// If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
// per the navigation drawer design guidelines.
if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
mDrawerLayout.openDrawer(mFragmentContainerView);
}
// Defer code dependent on restoration of previous instance state.
mDrawerLayout.post(new Runnable() {
@Override
public void run() {
mDrawerToggle.syncState();
}
});
mDrawerLayout.setDrawerListener(mDrawerToggle);
}
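    // Illustrative caller-side sketch (resource ids assumed, not defined in this file): the host
    // activity typically wires the drawer up from onCreate() with
    //   mNavigationDrawerFragment.setUp(R.id.navigation_drawer,
    //           (DrawerLayout) findViewById(R.id.drawer_layout));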
private void selectItem(int position) {
mCurrentSelectedPosition = position;
if (mDrawerListView != null) {
mDrawerListView.setItemChecked(position, true);
}
if (mDrawerLayout != null) {
mDrawerLayout.closeDrawer(mFragmentContainerView);
}
if (mCallbacks != null) {
mCallbacks.onNavigationDrawerItemSelected(position);
}
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mCallbacks = (NavigationDrawerCallbacks) activity;
} catch (ClassCastException e) {
throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
}
}
@Override
public void onDetach() {
super.onDetach();
mCallbacks = null;
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
mDrawerToggle.onConfigurationChanged(newConfig);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
// If the drawer is open, show the global app actions in the action bar. See also
// showGlobalContextActionBar, which controls the top-left area of the action bar.
if (mDrawerLayout != null && isDrawerOpen()) {
inflater.inflate(R.menu.global, menu);
showGlobalContextActionBar();
}
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (mDrawerToggle.onOptionsItemSelected(item)) {
return true;
}
if (item.getItemId() == R.id.action_example) {
Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Per the navigation drawer design guidelines, updates the action bar to show the global app
* 'context', rather than just what's in the current screen.
*/
private void showGlobalContextActionBar() {
ActionBar actionBar = getActionBar();
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
actionBar.setTitle(R.string.app_name);
}
private ActionBar getActionBar() {
return ((ActionBarActivity) getActivity()).getSupportActionBar();
}
/**
* Callbacks interface that all activities using this fragment must implement.
*/
public static interface NavigationDrawerCallbacks {
/**
* Called when an item in the navigation drawer is selected.
*/
void onNavigationDrawerItemSelected(int position);
}
}
|
|
// Portions copyright 2002, Google, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.freshplanet.inapppurchase.utils;
// This code was converted from code at http://iharder.sourceforge.net/base64/
// Lots of extraneous features were removed.
/* The original code said:
* <p>
* I am placing this code in the Public Domain. Do with it as you will.
* This software comes with no guarantees or warranties but with
* plenty of well-wishing instead!
* Please visit
* <a href="http://iharder.net/xmlizable">http://iharder.net/xmlizable</a>
* periodically to check for updates or to contribute improvements.
* </p>
*
* @author Robert Harder
* @author rharder@usa.net
* @version 1.3
*/
/**
* Base64 converter class. This code is not a complete MIME encoder;
* it simply converts binary data to base64 data and back.
*
* <p>Note {@link CharBase64} is a GWT-compatible implementation of this
* class.
*/
public class Base64 {
/** Specify encoding (value is {@code true}). */
public final static boolean ENCODE = true;
/** Specify decoding (value is {@code false}). */
public final static boolean DECODE = false;
/** The equals sign (=) as a byte. */
private final static byte EQUALS_SIGN = (byte) '=';
/** The new line character (\n) as a byte. */
private final static byte NEW_LINE = (byte) '\n';
/**
* The 64 valid Base64 values.
*/
private final static byte[] ALPHABET =
{(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F',
(byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K',
(byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P',
(byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U',
(byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z',
(byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e',
(byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j',
(byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o',
(byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't',
(byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y',
(byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3',
(byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8',
(byte) '9', (byte) '+', (byte) '/'};
/**
* The 64 valid web safe Base64 values.
*/
private final static byte[] WEBSAFE_ALPHABET =
{(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F',
(byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K',
(byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P',
(byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U',
(byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z',
(byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e',
(byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j',
(byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o',
(byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't',
(byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y',
(byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3',
(byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8',
(byte) '9', (byte) '-', (byte) '_'};
/**
* Translates a Base64 value to either its 6-bit reconstruction value
* or a negative number indicating some other meaning.
**/
private final static byte[] DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
-5, -5, // Whitespace: Tab and Linefeed
-9, -9, // Decimal 11 - 12
-5, // Whitespace: Carriage Return
-9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
-9, -9, -9, -9, -9, // Decimal 27 - 31
-5, // Whitespace: Space
-9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42
62, // Plus sign at decimal 43
-9, -9, -9, // Decimal 44 - 46
63, // Slash at decimal 47
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
-9, -9, -9, // Decimal 58 - 60
-1, // Equals sign at decimal 61
-9, -9, -9, // Decimal 62 - 64
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
-9, -9, -9, -9, -9, -9, // Decimal 91 - 96
26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
-9, -9, -9, -9, -9 // Decimal 123 - 127
/* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */
};
/** The web safe decodabet */
private final static byte[] WEBSAFE_DECODABET =
{-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
-5, -5, // Whitespace: Tab and Linefeed
-9, -9, // Decimal 11 - 12
-5, // Whitespace: Carriage Return
-9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
-9, -9, -9, -9, -9, // Decimal 27 - 31
-5, // Whitespace: Space
-9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 44
62, // Dash '-' sign at decimal 45
-9, -9, // Decimal 46-47
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
-9, -9, -9, // Decimal 58 - 60
-1, // Equals sign at decimal 61
-9, -9, -9, // Decimal 62 - 64
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
-9, -9, -9, -9, // Decimal 91-94
63, // Underscore '_' at decimal 95
-9, // Decimal 96
26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
-9, -9, -9, -9, -9 // Decimal 123 - 127
/* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243
-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */
};
// Indicates white space in encoding
private final static byte WHITE_SPACE_ENC = -5;
// Indicates equals sign in encoding
private final static byte EQUALS_SIGN_ENC = -1;
/** Defeats instantiation. */
private Base64() {
}
/* ******** E N C O D I N G M E T H O D S ******** */
/**
* Encodes up to three bytes of the array <var>source</var>
* and writes the resulting four Base64 bytes to <var>destination</var>.
* The source and destination arrays can be manipulated
* anywhere along their length by specifying
* <var>srcOffset</var> and <var>destOffset</var>.
* This method does not check to make sure your arrays
* are large enough to accommodate <var>srcOffset</var> + 3 for
* the <var>source</var> array or <var>destOffset</var> + 4 for
* the <var>destination</var> array.
* The actual number of significant bytes in your array is
* given by <var>numSigBytes</var>.
*
* @param source the array to convert
* @param srcOffset the index where conversion begins
* @param numSigBytes the number of significant bytes in your array
* @param destination the array to hold the conversion
* @param destOffset the index where output will be put
* @param alphabet is the encoding alphabet
* @return the <var>destination</var> array
* @since 1.3
*/
private static byte[] encode3to4(byte[] source, int srcOffset,
int numSigBytes, byte[] destination, int destOffset, byte[] alphabet) {
// 1 2 3
// 01234567890123456789012345678901 Bit position
// --------000000001111111122222222 Array position from threeBytes
// --------| || || || | Six bit groups to index alphabet
// >>18 >>12 >> 6 >> 0 Right shift necessary
// 0x3f 0x3f 0x3f Additional AND
// Create buffer with zero-padding if there are only one or two
// significant bytes passed in the array.
// We have to shift left 24 in order to flush out the 1's that appear
// when Java treats a value as negative that is cast from a byte to an int.
int inBuff =
(numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0)
| (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0)
| (numSigBytes > 2 ? ((source[srcOffset + 2] << 24) >>> 24) : 0);
switch (numSigBytes) {
case 3:
destination[destOffset] = alphabet[(inBuff >>> 18)];
destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
destination[destOffset + 3] = alphabet[(inBuff) & 0x3f];
return destination;
case 2:
destination[destOffset] = alphabet[(inBuff >>> 18)];
destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
destination[destOffset + 3] = EQUALS_SIGN;
return destination;
case 1:
destination[destOffset] = alphabet[(inBuff >>> 18)];
destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
destination[destOffset + 2] = EQUALS_SIGN;
destination[destOffset + 3] = EQUALS_SIGN;
return destination;
default:
return destination;
} // end switch
} // end encode3to4
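// Worked example (added for illustration): encoding the ASCII bytes "Man"
// (0x4D 0x61 0x6E) packs the 24 bits 01001101 01100001 01101110 into the
// 6-bit groups 010011|010110|000101|101110 = 19, 22, 5, 46, which index
// ALPHABET to 'T', 'W', 'F', 'u' -- so "Man" encodes to "TWFu". With only two
// significant bytes the last group becomes '=', and with one significant byte
// the last two groups are '=' padding.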
/**
* Encodes a byte array into Base64 notation.
* Equivalent to calling
* {@code encode(source, 0, source.length, ALPHABET, true)}.
*
* @param source The data to convert
* @since 1.4
*/
public static String encode(byte[] source) {
return encode(source, 0, source.length, ALPHABET, true);
}
/**
* Encodes a byte array into web safe Base64 notation.
*
* @param source The data to convert
* @param doPadding is {@code true} to pad result with '=' chars
* if it does not fall on 3 byte boundaries
*/
public static String encodeWebSafe(byte[] source, boolean doPadding) {
return encode(source, 0, source.length, WEBSAFE_ALPHABET, doPadding);
}
/**
* Encodes a byte array into Base64 notation.
*
* @param source the data to convert
* @param off offset in array where conversion should begin
* @param len length of data to convert
* @param alphabet the encoding alphabet
* @param doPadding is {@code true} to pad result with '=' chars
* if it does not fall on 3 byte boundaries
* @since 1.4
*/
public static String encode(byte[] source, int off, int len, byte[] alphabet,
boolean doPadding) {
byte[] outBuff = encode(source, off, len, alphabet, Integer.MAX_VALUE);
int outLen = outBuff.length;
// If doPadding is false, set length to truncate '='
// padding characters
while (!doPadding && outLen > 0) {
if (outBuff[outLen - 1] != '=') {
break;
}
outLen -= 1;
}
return new String(outBuff, 0, outLen);
}
/**
* Encodes a byte array into Base64 notation.
*
* @param source the data to convert
* @param off offset in array where conversion should begin
* @param len length of data to convert
* @param alphabet is the encoding alphabet
* @param maxLineLength maximum length of one line.
* @return the BASE64-encoded byte array
*/
public static byte[] encode(byte[] source, int off, int len, byte[] alphabet,
int maxLineLength) {
int lenDiv3 = (len + 2) / 3; // ceil(len / 3)
int len43 = lenDiv3 * 4;
byte[] outBuff = new byte[len43 // Main 4:3
+ (len43 / maxLineLength)]; // New lines
int d = 0;
int e = 0;
int len2 = len - 2;
int lineLength = 0;
for (; d < len2; d += 3, e += 4) {
// The following block of code is the same as
// encode3to4( source, d + off, 3, outBuff, e, alphabet );
// but inlined for faster encoding (~20% improvement)
int inBuff =
((source[d + off] << 24) >>> 8)
| ((source[d + 1 + off] << 24) >>> 16)
| ((source[d + 2 + off] << 24) >>> 24);
outBuff[e] = alphabet[(inBuff >>> 18)];
outBuff[e + 1] = alphabet[(inBuff >>> 12) & 0x3f];
outBuff[e + 2] = alphabet[(inBuff >>> 6) & 0x3f];
outBuff[e + 3] = alphabet[(inBuff) & 0x3f];
lineLength += 4;
if (lineLength == maxLineLength) {
outBuff[e + 4] = NEW_LINE;
e++;
lineLength = 0;
} // end if: end of line
} // end for: each piece of array
if (d < len) {
encode3to4(source, d + off, len - d, outBuff, e, alphabet);
lineLength += 4;
if (lineLength == maxLineLength) {
// Add a last newline
outBuff[e + 4] = NEW_LINE;
e++;
}
e += 4;
}
assert (e == outBuff.length);
return outBuff;
}
/* ******** D E C O D I N G M E T H O D S ******** */
/**
* Decodes four bytes from array <var>source</var>
* and writes the resulting bytes (up to three of them)
* to <var>destination</var>.
* The source and destination arrays can be manipulated
* anywhere along their length by specifying
* <var>srcOffset</var> and <var>destOffset</var>.
* This method does not check to make sure your arrays
* are large enough to accommodate <var>srcOffset</var> + 4 for
* the <var>source</var> array or <var>destOffset</var> + 3 for
* the <var>destination</var> array.
* This method returns the actual number of bytes that
* were converted from the Base64 encoding.
*
*
* @param source the array to convert
* @param srcOffset the index where conversion begins
* @param destination the array to hold the conversion
* @param destOffset the index where output will be put
* @param decodabet the decodabet for decoding Base64 content
* @return the number of decoded bytes converted
* @since 1.3
*/
private static int decode4to3(byte[] source, int srcOffset,
byte[] destination, int destOffset, byte[] decodabet) {
// Example: Dk==
if (source[srcOffset + 2] == EQUALS_SIGN) {
int outBuff =
((decodabet[source[srcOffset]] << 24) >>> 6)
| ((decodabet[source[srcOffset + 1]] << 24) >>> 12);
destination[destOffset] = (byte) (outBuff >>> 16);
return 1;
} else if (source[srcOffset + 3] == EQUALS_SIGN) {
// Example: DkL=
int outBuff =
((decodabet[source[srcOffset]] << 24) >>> 6)
| ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
| ((decodabet[source[srcOffset + 2]] << 24) >>> 18);
destination[destOffset] = (byte) (outBuff >>> 16);
destination[destOffset + 1] = (byte) (outBuff >>> 8);
return 2;
} else {
// Example: DkLE
int outBuff =
((decodabet[source[srcOffset]] << 24) >>> 6)
| ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
| ((decodabet[source[srcOffset + 2]] << 24) >>> 18)
| ((decodabet[source[srcOffset + 3]] << 24) >>> 24);
destination[destOffset] = (byte) (outBuff >> 16);
destination[destOffset + 1] = (byte) (outBuff >> 8);
destination[destOffset + 2] = (byte) (outBuff);
return 3;
}
} // end decodeToBytes
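// Worked example (added for illustration): decoding "TWFu" reverses the encoding
// example above. DECODABET['T']=19, ['W']=22, ['F']=5, ['u']=46; packing the four
// 6-bit groups 010011 010110 000101 101110 yields the bytes 0x4D 0x61 0x6E,
// i.e. the ASCII string "Man".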
/**
* Decodes data from Base64 notation.
*
* @param s the string to decode (decoded in default encoding)
* @return the decoded data
* @since 1.4
*/
public static byte[] decode(String s) throws Base64DecoderException {
byte[] bytes = s.getBytes();
return decode(bytes, 0, bytes.length);
}
/**
* Decodes data from web safe Base64 notation.
* Web safe encoding uses '-' instead of '+', '_' instead of '/'
*
* @param s the string to decode (decoded in default encoding)
* @return the decoded data
*/
public static byte[] decodeWebSafe(String s) throws Base64DecoderException {
byte[] bytes = s.getBytes();
return decodeWebSafe(bytes, 0, bytes.length);
}
/**
* Decodes Base64 content in byte array format and returns
* the decoded byte array.
*
* @param source The Base64 encoded data
* @return decoded data
* @since 1.3
* @throws Base64DecoderException
*/
public static byte[] decode(byte[] source) throws Base64DecoderException {
return decode(source, 0, source.length);
}
/**
* Decodes web safe Base64 content in byte array format and returns
* the decoded data.
* Web safe encoding uses '-' instead of '+', '_' instead of '/'
*
* @param source the string to decode (decoded in default encoding)
* @return the decoded data
*/
public static byte[] decodeWebSafe(byte[] source)
throws Base64DecoderException {
return decodeWebSafe(source, 0, source.length);
}
/**
* Decodes Base64 content in byte array format and returns
* the decoded byte array.
*
* @param source the Base64 encoded data
* @param off the offset of where to begin decoding
* @param len the length of characters to decode
* @return decoded data
* @since 1.3
* @throws Base64DecoderException
*/
public static byte[] decode(byte[] source, int off, int len)
throws Base64DecoderException {
return decode(source, off, len, DECODABET);
}
/**
* Decodes web safe Base64 content in byte array format and returns
* the decoded byte array.
* Web safe encoding uses '-' instead of '+', '_' instead of '/'
*
* @param source the Base64 encoded data
* @param off the offset of where to begin decoding
* @param len the length of characters to decode
* @return decoded data
*/
public static byte[] decodeWebSafe(byte[] source, int off, int len)
throws Base64DecoderException {
return decode(source, off, len, WEBSAFE_DECODABET);
}
/**
* Decodes Base64 content using the supplied decodabet and returns
* the decoded byte array.
*
* @param source the Base64 encoded data
* @param off the offset of where to begin decoding
* @param len the length of characters to decode
* @param decodabet the decodabet for decoding Base64 content
* @return decoded data
*/
public static byte[] decode(byte[] source, int off, int len, byte[] decodabet)
throws Base64DecoderException {
int len34 = len * 3 / 4;
byte[] outBuff = new byte[2 + len34]; // Upper limit on size of output
int outBuffPosn = 0;
byte[] b4 = new byte[4];
int b4Posn = 0;
int i = 0;
byte sbiCrop = 0;
byte sbiDecode = 0;
for (i = 0; i < len; i++) {
sbiCrop = (byte) (source[i + off] & 0x7f); // Only the low seven bits
sbiDecode = decodabet[sbiCrop];
if (sbiDecode >= WHITE_SPACE_ENC) { // White space Equals sign or better
if (sbiDecode >= EQUALS_SIGN_ENC) {
// An equals sign (for padding) must not occur at position 0 or 1
// and must be the last byte[s] in the encoded value
if (sbiCrop == EQUALS_SIGN) {
int bytesLeft = len - i;
byte lastByte = (byte) (source[len - 1 + off] & 0x7f);
if (b4Posn == 0 || b4Posn == 1) {
throw new Base64DecoderException(
"invalid padding byte '=' at byte offset " + i);
} else if ((b4Posn == 3 && bytesLeft > 2)
|| (b4Posn == 4 && bytesLeft > 1)) {
throw new Base64DecoderException(
"padding byte '=' falsely signals end of encoded value "
+ "at offset " + i);
} else if (lastByte != EQUALS_SIGN && lastByte != NEW_LINE) {
throw new Base64DecoderException(
"encoded value has invalid trailing byte");
}
break;
}
b4[b4Posn++] = sbiCrop;
if (b4Posn == 4) {
outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
b4Posn = 0;
}
}
} else {
throw new Base64DecoderException("Bad Base64 input character at " + i
+ ": " + source[i + off] + "(decimal)");
}
}
// Because web safe encoding allows non padding base64 encodes, we
// need to pad the rest of the b4 buffer with equal signs when
// b4Posn != 0. There can be at most 2 equal signs at the end of
// four characters, so the b4 buffer must have two or three
// characters. This also catches the case where the input is
// padded with EQUALS_SIGN
if (b4Posn != 0) {
if (b4Posn == 1) {
throw new Base64DecoderException("single trailing character at offset "
+ (len - 1));
}
b4[b4Posn++] = EQUALS_SIGN;
outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
}
byte[] out = new byte[outBuffPosn];
System.arraycopy(outBuff, 0, out, 0, outBuffPosn);
return out;
}
}
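// Illustrative usage sketch (not part of the original file): round-trips a small
// payload through the standard and web-safe codecs defined above. The class name
// Base64UsageExample is hypothetical scaffolding added only for this example.
class Base64UsageExample {
    public static void main(String[] args) throws Base64DecoderException {
        byte[] data = "hello base64".getBytes();
        // Standard alphabet, padded with '=' as needed.
        String standard = Base64.encode(data);
        // Web-safe alphabet ('-' and '_'), here without '=' padding.
        String webSafe = Base64.encodeWebSafe(data, false);
        byte[] fromStandard = Base64.decode(standard);
        byte[] fromWebSafe = Base64.decodeWebSafe(webSafe);
        System.out.println(new String(fromStandard)); // hello base64
        System.out.println(new String(fromWebSafe));  // hello base64
    }
}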
|
|
package com.primeradiants.oniri.test.user;
import javax.servlet.Filter;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Restrictions;
import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.ResultMatcher;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import com.primeradiants.hibernate.util.HibernateUtil;
import com.primeradiants.oniri.config.ApplicationConfig;
import com.primeradiants.oniri.test.novent.NoventTestUtil;
import com.primeradiants.oniri.user.UserEntity;
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(classes = ApplicationConfig.class)
public class AllSignUpRestControllerTest {
@Autowired
private WebApplicationContext webApplicationContext;
@Autowired
private Filter springSecurityFilterChain;
private MockMvc mockMvc;
private static final String USERNAME = "username";
private static final String EMAIL = "email";
private static final String PASSWORD = "password";
private static final String EMPTY_STRING = "";
private static final String VALID_USERNAME = "gbiaux";
private static final String INVALID_USERNAME_WITH_SPACE = "gabit bol";
private static final String INVALID_USERNAME_TOO_SHORT = "ga";
private static final String INVALID_USERNAME_TOO_LONG = "gabitbolgabitbolgabitbolgabitbolgaba";
private static final String VALID_PASSWORD = "abcd123A";
private static final String INVALID_PASSWORD_MISSING_DIGIT = "abcdadsA";
private static final String INVALID_PASSWORD_MISSING_UPPERCASE = "abcdads1";
private static final String INVALID_PASSWORD_MISSING_LOWERCASE = "ABCDEFG1";
private static final String INVALID_PASSWORD_TOO_SHORT = "A1a";
private static final String INVALID_PASSWORD_WITH_SPACE = "ABCDa FG1";
private static final String VALID_EMAIL = "test@prime-radiants.com";
private static final String INVALID_EMAIL_MISSING_AT = "georges.biauxprime-radiants.com";
private static final String INVALID_EMAIL_MISSING_DOMAIN = "georges.biaux@.com";
private static final String INVALID_EMAIL_MISSING_EXT = "georges.biaux@prime-radiantscom";
private static final String INVALID_EMAIL_MISSING_LOCAL = "@prime-radiants.com";
private static final String INVALID_EMAIL_DOT_BEFORE_AT = "georges.@prime-radiants.com";
private static final String INVALID_EMAIL_TWO_DOTS = "georges..biaux@prime-radiants.com";
@Before
public void initEachTest() {
UserTestUtil.cleanEmailValidationTokenTable();
NoventTestUtil.cleanUserNoventTable();
UserTestUtil.cleanUserTable();
NoventTestUtil.cleanNoventTable();
UserTestUtil.insertUserInDatabase(UserTestData.USER_USERNAME, UserTestData.USER_EMAIL, UserTestData.USER_PASSWORD, true, false);
this.mockMvc = MockMvcBuilders
.webAppContextSetup(this.webApplicationContext)
.addFilters(springSecurityFilterChain)
.build();
}
@Test
public void SignUpReturns302WhenNotSecured() throws Exception {
ResultMatcher redirection = MockMvcResultMatchers.status().is3xxRedirection();
JSONObject request = new JSONObject();
request.put(USERNAME, VALID_USERNAME);
request.put(PASSWORD, VALID_PASSWORD);
request.put(EMAIL, VALID_EMAIL);
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.post("/signUp")
.contentType(MediaType.APPLICATION_JSON_UTF8)
.content(request.toString())
.secure(false);
this.mockMvc.perform(builder)
.andExpect(redirection);
}
@Test
public void SignUpReturnsOkWithValidArguments() throws Exception {
ResultMatcher ok = MockMvcResultMatchers.status().isOk();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(ok);
}
@Test
public void SignUpCreatesDisabledUserInDatabase() throws Exception {
sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, VALID_EMAIL);
SessionFactory sessionFactory = HibernateUtil.getSessionAnnotationFactory();
Session session = sessionFactory.openSession();
session.beginTransaction();
Criteria criteria = session.createCriteria(UserEntity.class)
.add(Restrictions.eq(USERNAME, VALID_USERNAME))
.setMaxResults(1);
UserEntity user = (UserEntity) criteria.uniqueResult();
session.getTransaction().commit();
session.close();
Assert.assertNotNull(user);
Assert.assertFalse(user.getEnabled());
}
@Test
public void SignUpReturns400WithAlreadyExistingUsername() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(UserTestData.USER_USERNAME, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmptyUsername() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(EMPTY_STRING, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithNullUsername() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(null, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithUsernameWithSpace() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(INVALID_USERNAME_WITH_SPACE, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithTooShortUsername() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(INVALID_USERNAME_TOO_SHORT, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithTooLongUsername() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(INVALID_USERNAME_TOO_LONG, VALID_PASSWORD, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmptyPassword() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, EMPTY_STRING, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithNullPassword() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, null, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithPasswordWithMissingDigits() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, INVALID_PASSWORD_MISSING_DIGIT, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithPasswordWithMissingUppercase() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, INVALID_PASSWORD_MISSING_UPPERCASE, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithPasswordWithMissingLowercase() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, INVALID_PASSWORD_MISSING_LOWERCASE, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithTooShortPassword() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, INVALID_PASSWORD_TOO_SHORT, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithPasswordWithSpace() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, INVALID_PASSWORD_WITH_SPACE, VALID_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithAlreadyExistingEmail() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, UserTestData.USER_EMAIL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmptyEmail() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, EMPTY_STRING);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithNullEmail() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, null);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithMissingAt() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_MISSING_AT);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithMissingDomain() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_MISSING_DOMAIN);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithMissingExtension() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_MISSING_EXT);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithMissingLocal() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_MISSING_LOCAL);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithDotBeforeAt() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_DOT_BEFORE_AT);
result.andExpect(badRequest);
}
@Test
public void SignUpReturns400WithEmailWithTwoDots() throws Exception {
ResultMatcher badRequest = MockMvcResultMatchers.status().isBadRequest();
ResultActions result = sendSignUpRequest(VALID_USERNAME, VALID_PASSWORD, INVALID_EMAIL_TWO_DOTS);
result.andExpect(badRequest);
}
@AfterClass
public static void endingAllTests() {
NoventTestUtil.cleanUserNoventTable();
UserTestUtil.cleanUserTable();
NoventTestUtil.cleanNoventTable();
}
private ResultActions sendSignUpRequest(String username, String password, String email) throws Exception {
JSONObject request = new JSONObject();
request.put(USERNAME, username);
request.put(PASSWORD, password);
request.put(EMAIL, email);
MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.post("/signUp")
.contentType(MediaType.APPLICATION_JSON_UTF8)
.content(request.toString())
.secure(true);
return this.mockMvc.perform(builder);
}
}
|
|
/*
* Encog(tm) Core v3.2 - Java Version
* http://www.heatonresearch.com/encog/
* https://github.com/encog/encog-java-core
* Copyright 2008-2013 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.neural.networks.training.pnn;
import org.encog.Encog;
import org.encog.util.EngineArray;
import org.encog.util.logging.EncogLogging;
/**
* This class determines optimal values for multiple sigmas in a PNN kernel.
* This is done using a CJ (conjugate gradient) method.
* <p/>
* <p/>
* Some of the algorithms in this class are based on C++ code from:
* <p/>
* Advanced Algorithms for Neural Networks: A C++ Sourcebook by Timothy Masters
* John Wiley & Sons Inc (Computers); April 3, 1995 ISBN: 0471105880
*/
public class DeriveMinimum {
/**
* Derive the minimum, using a conjugate gradient method.
* <p/>
* @param maxIterations
* The max iterations.
* @param maxError
* Stop at this error rate.
* @param eps
* The machine's precision.
* @param tol
* The convergence tolerance.
* @param network
* The network to get the error from.
* @param n
* The number of variables.
* @param x
* The independent variable.
* @param ystart
* The start for y.
* @param base
* Work vector, must have n elements.
* @param direc
* Work vector, must have n elements.
* @param g
* Work vector, must have n elements.
* @param h
* Work vector, must have n elements.
* @param deriv2
* Work vector, must have n elements.
* <p/>
* @return The best error.
*/
public double calculate(final int maxIterations, final double maxError,
final double eps, final double tol,
final CalculationCriteria network,
final int n, final double[] x, final double ystart,
final double[] base, final double[] direc,
final double[] g,
final double[] h, final double[] deriv2) {
double prevBest, toler, gam, improvement;
final GlobalMinimumSearch globalMinimum = new GlobalMinimumSearch();
double fbest = network
.calcErrorWithMultipleSigma(x, direc, deriv2, true);
prevBest = 1.e30;
for (int i = 0; i < n; i++) {
direc[i] = -direc[i];
}
EngineArray.arrayCopy(direc, g);
EngineArray.arrayCopy(direc, h);
int convergenceCounter = 0;
int poorCJ = 0;
// Main loop
for (int iteration = 0; iteration < maxIterations; iteration++) {
if (fbest < maxError) {
break;
}
EncogLogging.log(EncogLogging.LEVEL_INFO,
"Beginning internal Iteration #" + iteration +
", currentError=" + fbest + ",target=" + maxError);
// Check for convergence
if (prevBest <= 1.0) {
toler = tol;
} else {
toler = tol * prevBest;
}
// Stop if there is little improvement
if ((prevBest - fbest) <= toler) {
if (++convergenceCounter >= 3) {
break;
}
} else {
convergenceCounter = 0;
}
double dot1 = 0.0;
double dot2 = 0.0;
double dlen = 0.0;
double high = 1.e-4;
for (int i = 0; i < n; i++) {
base[i] = x[i];
if (deriv2[i] > high) {
high = deriv2[i];
}
dot1 += direc[i] * g[i]; // Directional first derivative
dot2 += direc[i] * direc[i] * deriv2[i]; // and second
dlen += direc[i] * direc[i]; // Length of search vector
}
dlen = Math.sqrt(dlen);
double scale;
if (Math.abs(dot2) < Encog.DEFAULT_DOUBLE_EQUAL) {
scale = 0;
} else {
scale = dot1 / dot2;
}
high = 1.5 / high;
if (high < 1.e-4) {
high = 1.e-4;
}
if (scale < 0.0) {
scale = high;
} else if (scale < 0.1 * high) {
scale = 0.1 * high;
} else if (scale > 10.0 * high) {
scale = 10.0 * high;
}
prevBest = fbest;
globalMinimum.setY2(fbest);
globalMinimum.findBestRange(0.0, 2.0 * scale, -3, false, maxError,
network);
if (globalMinimum.getY2() < maxError) {
if (globalMinimum.getY2() < fbest) {
for (int i = 0; i < n; i++) {
x[i] = base[i] + globalMinimum.getY2() * direc[i];
if (x[i] < 1.e-10) {
x[i] = 1.e-10;
}
}
fbest = globalMinimum.getY2();
} else {
System.arraycopy(base, 0, x, 0, n);
}
break;
}
if (convergenceCounter > 0) {
fbest = globalMinimum.brentmin(20, maxError, eps, 1.e-7,
network, globalMinimum.getY2());
} else {
fbest = globalMinimum.brentmin(10, maxError, 1.e-6, 1.e-5,
network, globalMinimum.getY2());
}
for (int i = 0; i < n; i++) {
x[i] = base[i] + globalMinimum.getX2() * direc[i];
if (x[i] < 1.e-10) {
x[i] = 1.e-10;
}
}
improvement = (prevBest - fbest) / prevBest;
if (fbest < maxError) {
break;
}
for (int i = 0; i < n; i++) {
direc[i] = -direc[i]; // negative gradient
}
gam = gamma(n, g, direc);
if (gam < 0.0) {
gam = 0.0;
}
if (gam > 10.0) {
gam = 10.0;
}
if (improvement < 0.001) {
++poorCJ;
} else {
poorCJ = 0;
}
if (poorCJ >= 2) {
if (gam > 1.0) {
gam = 1.0;
}
}
if (poorCJ >= 6) {
poorCJ = 0;
gam = 0.0;
}
findNewDir(n, gam, g, h, direc);
}
return fbest;
}
/**
* Find the new conjugate-gradient search direction for the next iteration.
* The previous direction (held in h) is combined with the current negated
* gradient as h = g + gam * h, and the result is also written into grad.
* <p/>
* @param n
* The number of variables.
* @param gam
* The gamma (conjugate gradient correction) value.
* @param g
* The "g" value, used for CJ algorithm.
* @param h
* The "h" value, used for CJ algorithm.
* @param grad
* The gradients.
*/
private void findNewDir(final int n, final double gam, final double[] g,
final double[] h, final double[] grad) {
int i;
System.arraycopy(grad, 0, g, 0, n);
for (i = 0; i < n; i++) {
grad[i] = h[i] = g[i] + gam * h[i];
}
}
/**
* Find the correction (gamma) for the next iteration. This is the
* Polak-Ribiere style coefficient ((grad - g) . grad) / (g . g), where
* grad holds the new negated gradient and g the previous one.
* <p/>
* @param n
* The number of variables.
* @param g
* The "g" value, used for CJ algorithm.
* @param grad
* The gradients.
* <p/>
* @return The correction for the next iteration.
*/
private double gamma(final int n, final double[] g, final double[] grad) {
int i;
double denom, numer;
numer = denom = 0.0;
for (i = 0; i < n; i++) {
denom += g[i] * g[i];
numer += (grad[i] - g[i]) * grad[i]; // Grad is neg gradient
}
if (denom == 0.0) {
return 0.0;
} else {
return numer / denom;
}
}
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/common/criteria.proto
package com.google.ads.googleads.v8.common;
/**
* <pre>
* A custom affinity criterion.
* A criterion of this type is only targetable.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.common.CustomAffinityInfo}
*/
public final class CustomAffinityInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.common.CustomAffinityInfo)
CustomAffinityInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use CustomAffinityInfo.newBuilder() to construct.
private CustomAffinityInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CustomAffinityInfo() {
customAffinity_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CustomAffinityInfo();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CustomAffinityInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
java.lang.String s = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
customAffinity_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CustomAffinityInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CustomAffinityInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.common.CustomAffinityInfo.class, com.google.ads.googleads.v8.common.CustomAffinityInfo.Builder.class);
}
private int bitField0_;
public static final int CUSTOM_AFFINITY_FIELD_NUMBER = 2;
private volatile java.lang.Object customAffinity_;
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return Whether the customAffinity field is set.
*/
@java.lang.Override
public boolean hasCustomAffinity() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return The customAffinity.
*/
@java.lang.Override
public java.lang.String getCustomAffinity() {
java.lang.Object ref = customAffinity_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customAffinity_ = s;
return s;
}
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return The bytes for customAffinity.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomAffinityBytes() {
java.lang.Object ref = customAffinity_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customAffinity_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, customAffinity_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, customAffinity_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v8.common.CustomAffinityInfo)) {
return super.equals(obj);
}
com.google.ads.googleads.v8.common.CustomAffinityInfo other = (com.google.ads.googleads.v8.common.CustomAffinityInfo) obj;
if (hasCustomAffinity() != other.hasCustomAffinity()) return false;
if (hasCustomAffinity()) {
if (!getCustomAffinity()
.equals(other.getCustomAffinity())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCustomAffinity()) {
hash = (37 * hash) + CUSTOM_AFFINITY_FIELD_NUMBER;
hash = (53 * hash) + getCustomAffinity().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v8.common.CustomAffinityInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A custom affinity criterion.
* A criterion of this type is only targetable.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.common.CustomAffinityInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.common.CustomAffinityInfo)
com.google.ads.googleads.v8.common.CustomAffinityInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CustomAffinityInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CustomAffinityInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.common.CustomAffinityInfo.class, com.google.ads.googleads.v8.common.CustomAffinityInfo.Builder.class);
}
// Construct using com.google.ads.googleads.v8.common.CustomAffinityInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
customAffinity_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CustomAffinityInfo_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v8.common.CustomAffinityInfo getDefaultInstanceForType() {
return com.google.ads.googleads.v8.common.CustomAffinityInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v8.common.CustomAffinityInfo build() {
com.google.ads.googleads.v8.common.CustomAffinityInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v8.common.CustomAffinityInfo buildPartial() {
com.google.ads.googleads.v8.common.CustomAffinityInfo result = new com.google.ads.googleads.v8.common.CustomAffinityInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.customAffinity_ = customAffinity_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v8.common.CustomAffinityInfo) {
return mergeFrom((com.google.ads.googleads.v8.common.CustomAffinityInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v8.common.CustomAffinityInfo other) {
if (other == com.google.ads.googleads.v8.common.CustomAffinityInfo.getDefaultInstance()) return this;
if (other.hasCustomAffinity()) {
bitField0_ |= 0x00000001;
customAffinity_ = other.customAffinity_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v8.common.CustomAffinityInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v8.common.CustomAffinityInfo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object customAffinity_ = "";
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return Whether the customAffinity field is set.
*/
public boolean hasCustomAffinity() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return The customAffinity.
*/
public java.lang.String getCustomAffinity() {
java.lang.Object ref = customAffinity_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customAffinity_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return The bytes for customAffinity.
*/
public com.google.protobuf.ByteString
getCustomAffinityBytes() {
java.lang.Object ref = customAffinity_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customAffinity_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @param value The customAffinity to set.
* @return This builder for chaining.
*/
public Builder setCustomAffinity(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
customAffinity_ = value;
onChanged();
return this;
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @return This builder for chaining.
*/
public Builder clearCustomAffinity() {
bitField0_ = (bitField0_ & ~0x00000001);
customAffinity_ = getDefaultInstance().getCustomAffinity();
onChanged();
return this;
}
/**
* <pre>
* The CustomInterest resource name.
* </pre>
*
* <code>optional string custom_affinity = 2;</code>
* @param value The bytes for customAffinity to set.
* @return This builder for chaining.
*/
public Builder setCustomAffinityBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
bitField0_ |= 0x00000001;
customAffinity_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.common.CustomAffinityInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.common.CustomAffinityInfo)
private static final com.google.ads.googleads.v8.common.CustomAffinityInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v8.common.CustomAffinityInfo();
}
public static com.google.ads.googleads.v8.common.CustomAffinityInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CustomAffinityInfo>
PARSER = new com.google.protobuf.AbstractParser<CustomAffinityInfo>() {
@java.lang.Override
public CustomAffinityInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CustomAffinityInfo(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CustomAffinityInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CustomAffinityInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v8.common.CustomAffinityInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
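// Illustrative usage sketch (not part of the generated file): building and reading
// the message above via its builder API. The resource name string below is a
// made-up placeholder, not a real CustomInterest resource name.
class CustomAffinityInfoUsageExample {
    static void demo() {
        CustomAffinityInfo info = CustomAffinityInfo.newBuilder()
            .setCustomAffinity("customInterests/1234567890") // hypothetical value
            .build();
        if (info.hasCustomAffinity()) {
            System.out.println(info.getCustomAffinity());
        }
    }
}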
|
|
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.dom;
import org.apache.batik.dom.util.DOMUtilities;
import org.w3c.dom.Attr;
import org.w3c.dom.DOMException;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.TypeInfo;
import org.w3c.dom.events.MutationEvent;
/**
* This class implements the {@link org.w3c.dom.Attr} interface.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id$
*/
public abstract class AbstractAttr extends AbstractParentNode implements Attr {
/**
* The name of this node.
*/
protected String nodeName;
/**
* Whether this attribute was not specified in the original document.
*/
protected boolean unspecified;
/**
* Whether this attribute is an ID attribute.
*/
protected boolean isIdAttr;
/**
* The owner element.
*/
protected AbstractElement ownerElement;
/**
* The attribute type information.
*/
protected TypeInfo typeInfo;
/**
* Creates a new Attr object.
*/
protected AbstractAttr() {
}
/**
* Creates a new Attr object.
* @param name The attribute name for validation purposes.
* @param owner The owner document.
* @exception DOMException
* INVALID_CHARACTER_ERR: if name contains invalid characters.
*/
protected AbstractAttr(String name, AbstractDocument owner)
throws DOMException {
ownerDocument = owner;
if (owner.getStrictErrorChecking() && !DOMUtilities.isValidName(name)) {
throw createDOMException(DOMException.INVALID_CHARACTER_ERR,
"xml.name",
new Object[] { name });
}
}
/**
* Sets the node name.
*/
public void setNodeName(String v) {
nodeName = v;
isIdAttr = ownerDocument.isId(this);
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Node#getNodeName()}.
* @return {@link #nodeName}.
*/
public String getNodeName() {
return nodeName;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Node#getNodeType()}.
* @return {@link org.w3c.dom.Node#ATTRIBUTE_NODE}
*/
public short getNodeType() {
return ATTRIBUTE_NODE;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Node#getNodeValue()}.
* @return The content of the attribute.
*/
public String getNodeValue() throws DOMException {
Node first = getFirstChild();
if (first == null) {
return "";
}
Node n = first.getNextSibling();
if (n == null) {
return first.getNodeValue();
}
StringBuffer result = new StringBuffer(first.getNodeValue());
do {
result.append(n.getNodeValue());
n = n.getNextSibling();
} while (n != null);
return result.toString();
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Node#setNodeValue(String)}.
*/
public void setNodeValue(String nodeValue) throws DOMException {
if (isReadonly()) {
throw createDOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR,
"readonly.node",
new Object[] { new Integer(getNodeType()),
getNodeName() });
}
String s = getNodeValue();
// Remove all the children
Node n;
while ((n = getFirstChild()) != null) {
removeChild(n);
}
String val = (nodeValue == null) ? "" : nodeValue;
// Create and append a new child.
n = getOwnerDocument().createTextNode(val);
appendChild(n);
if (ownerElement != null) {
ownerElement.fireDOMAttrModifiedEvent(nodeName,
this,
s,
val,
MutationEvent.MODIFICATION);
}
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#getName()}.
* @return {@link #getNodeName()}.
*/
public String getName() {
return getNodeName();
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#getSpecified()}.
* @return !{@link #unspecified}.
*/
public boolean getSpecified() {
return !unspecified;
}
/**
* Sets the specified attribute.
*/
public void setSpecified(boolean v) {
unspecified = !v;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#getValue()}.
* @return {@link #getNodeValue()}.
*/
public String getValue() {
return getNodeValue();
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#setValue(String)}.
*/
public void setValue(String value) throws DOMException {
setNodeValue(value);
}
/**
* Sets the owner element.
*/
public void setOwnerElement(AbstractElement v) {
ownerElement = v;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#getOwnerElement()}.
*/
public Element getOwnerElement() {
return ownerElement;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#getSchemaTypeInfo()}.
*/
public TypeInfo getSchemaTypeInfo() {
if (typeInfo == null) {
typeInfo = new AttrTypeInfo();
}
return typeInfo;
}
/**
* <b>DOM</b>: Implements {@link org.w3c.dom.Attr#isId()}.
*/
public boolean isId() {
return isIdAttr;
}
/**
* Sets whether this attribute is an ID attribute.
*/
public void setIsId(boolean isId) {
isIdAttr = isId;
}
/**
* Called when a child node has been added.
*/
protected void nodeAdded(Node n) {
setSpecified(true);
}
/**
* Called when a child node is going to be removed.
*/
protected void nodeToBeRemoved(Node n) {
setSpecified(true);
}
/**
* Exports this node to the given document.
*/
protected Node export(Node n, AbstractDocument d) {
super.export(n, d);
AbstractAttr aa = (AbstractAttr)n;
aa.nodeName = nodeName;
aa.unspecified = false;
aa.isIdAttr = d.isId(aa);
return n;
}
/**
* Deeply exports this node to the given document.
*/
protected Node deepExport(Node n, AbstractDocument d) {
super.deepExport(n, d);
AbstractAttr aa = (AbstractAttr)n;
aa.nodeName = nodeName;
aa.unspecified = false;
aa.isIdAttr = d.isId(aa);
return n;
}
/**
* Copy the fields of the current node into the given node.
* @param n a node of the type of this.
*/
protected Node copyInto(Node n) {
super.copyInto(n);
AbstractAttr aa = (AbstractAttr)n;
aa.nodeName = nodeName;
aa.unspecified = unspecified;
aa.isIdAttr = isIdAttr;
return n;
}
/**
* Deeply copy the fields of the current node into the given node.
* @param n a node of the type of this.
*/
protected Node deepCopyInto(Node n) {
super.deepCopyInto(n);
AbstractAttr aa = (AbstractAttr)n;
aa.nodeName = nodeName;
aa.unspecified = unspecified;
aa.isIdAttr = isIdAttr;
return n;
}
/**
* Checks the validity of a node to be inserted.
*/
protected void checkChildType(Node n, boolean replace) {
switch (n.getNodeType()) {
case TEXT_NODE:
case ENTITY_REFERENCE_NODE:
case DOCUMENT_FRAGMENT_NODE:
break;
default:
throw createDOMException
(DOMException.HIERARCHY_REQUEST_ERR,
"child.type",
new Object[] { new Integer(getNodeType()),
getNodeName(),
new Integer(n.getNodeType()),
n.getNodeName() });
}
}
/**
* Fires a DOMSubtreeModified event.
*/
protected void fireDOMSubtreeModifiedEvent() {
AbstractDocument doc = getCurrentDocument();
if (doc.getEventsEnabled()) {
super.fireDOMSubtreeModifiedEvent();
if (getOwnerElement() != null) {
((AbstractElement)getOwnerElement()).
fireDOMSubtreeModifiedEvent();
}
}
}
/**
* Inner class to hold type information about this attribute.
*/
public class AttrTypeInfo implements TypeInfo {
/**
* Type namespace.
*/
public String getTypeNamespace() {
return null;
}
/**
* Type name.
*/
public String getTypeName() {
return null;
}
/**
* Returns whether this type derives from the given type.
*/
public boolean isDerivedFrom(String ns, String name, int method) {
return false;
}
}
}
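/*
 * Illustrative sketch, not part of the original source: an Attr stores its value
 * as child Text nodes, so getNodeValue() concatenates the children and
 * setNodeValue() replaces them with a single Text node. The snippet below shows
 * the same contract through the standard JDK DOM API; the class and variable
 * names are hypothetical.
 */
class AbstractAttrUsageSketch {
    public static void main(String[] args) throws Exception {
        org.w3c.dom.Document doc = javax.xml.parsers.DocumentBuilderFactory
                .newInstance().newDocumentBuilder().newDocument();
        org.w3c.dom.Element root = doc.createElement("root");
        doc.appendChild(root);
        root.setAttribute("id", "first");
        org.w3c.dom.Attr attr = root.getAttributeNode("id");
        // The attribute value is readable both through Attr and through the Node API.
        System.out.println(attr.getValue());   // prints "first"
        attr.setNodeValue("second");           // replaces the child Text node
        System.out.println(attr.getValue());   // prints "second"
    }
}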
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.queryparser.classic;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.Version;
import org.apache.lucene.util.automaton.RegExp;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.QueryParsers;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
/**
* A query parser that uses the {@link MapperService} in order to build smarter
* queries based on the mapping information.
* <p>
* Also breaks fields with [type].[name] into a boolean query that must include the type
* as well as the query on the name.
*/
public class MapperQueryParser extends QueryParser {
public static final ImmutableMap<String, FieldQueryExtension> fieldQueryExtensions;
static {
fieldQueryExtensions = ImmutableMap.<String, FieldQueryExtension>builder()
.put(ExistsFieldQueryExtension.NAME, new ExistsFieldQueryExtension())
.put(MissingFieldQueryExtension.NAME, new MissingFieldQueryExtension())
.build();
}
private final QueryParseContext parseContext;
private QueryParserSettings settings;
private Analyzer quoteAnalyzer;
private boolean forcedAnalyzer;
private boolean forcedQuoteAnalyzer;
private MappedFieldType currentFieldType;
private boolean analyzeWildcard;
private String quoteFieldSuffix;
public MapperQueryParser(QueryParseContext parseContext) {
super(null, null);
this.parseContext = parseContext;
}
public void reset(QueryParserSettings settings) {
this.settings = settings;
this.field = settings.defaultField();
if (settings.fields() != null) {
if (settings.fields.size() == 1) {
// just mark it as the default field
this.field = settings.fields().get(0);
} else {
// otherwise, we need to have the default field being null...
this.field = null;
}
}
this.forcedAnalyzer = settings.forcedAnalyzer() != null;
this.setAnalyzer(forcedAnalyzer ? settings.forcedAnalyzer() : settings.defaultAnalyzer());
if (settings.forcedQuoteAnalyzer() != null) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedQuoteAnalyzer();
} else if (forcedAnalyzer) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedAnalyzer();
} else {
this.forcedAnalyzer = false;
this.quoteAnalyzer = settings.defaultQuoteAnalyzer();
}
this.quoteFieldSuffix = settings.quoteFieldSuffix();
setMultiTermRewriteMethod(settings.rewriteMethod());
setEnablePositionIncrements(settings.enablePositionIncrements());
setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
setMaxDeterminizedStates(settings.maxDeterminizedStates());
setAllowLeadingWildcard(settings.allowLeadingWildcard());
setLowercaseExpandedTerms(settings.lowercaseExpandedTerms());
setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator());
setFuzzyMinSim(settings.getFuzziness().asFloat());
setFuzzyPrefixLength(settings.fuzzyPrefixLength());
setLocale(settings.locale());
this.analyzeWildcard = settings.analyzeWildcard();
}
/**
     * We override this one so the fuzzy part is treated as a string, so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
*/
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
if (fuzzySlop.image.length() == 1) {
return getFuzzyQuery(qfield, termImage, Float.toString(fuzzyMinSim));
}
return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1));
}
@Override
protected Query newTermQuery(Term term) {
if (currentFieldType != null) {
Query termQuery = currentFieldType.queryStringTermQuery(term);
if (termQuery != null) {
return termQuery;
}
}
return super.newTermQuery(term);
}
@Override
protected Query newMatchAllDocsQuery() {
return Queries.newMatchAllQuery();
}
@Override
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field);
if (fieldQueryExtension != null) {
return fieldQueryExtension.query(parseContext, queryText);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getFieldQuerySingle(fields.iterator().next(), queryText, quoted);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getFieldQuerySingle(field, queryText, quoted);
}
}
private Query getFieldQuerySingle(String field, String queryText, boolean quoted) throws ParseException {
if (!quoted && queryText.length() > 1) {
if (queryText.charAt(0) == '>') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuerySingle(field, queryText.substring(2), null, true, true);
}
}
return getRangeQuerySingle(field, queryText.substring(1), null, false, true);
} else if (queryText.charAt(0) == '<') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuerySingle(field, null, queryText.substring(2), true, true);
}
}
return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
}
}
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
if (quoted) {
setAnalyzer(quoteAnalyzer);
if (quoteFieldSuffix != null) {
currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
}
}
if (currentFieldType == null) {
currentFieldType = parseContext.fieldMapper(field);
}
if (currentFieldType != null) {
if (quoted) {
if (!forcedQuoteAnalyzer) {
setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
}
} else {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
}
if (currentFieldType != null) {
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
try {
query = currentFieldType.termQuery(queryText, parseContext);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
} else {
throw e;
}
}
}
if (query == null) {
query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted);
}
return query;
}
}
return super.getFieldQuery(field, queryText, quoted);
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException {
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
added = true;
applyBoost(mField, q);
q = applySlop(q, slop);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
applyBoost(mField, q);
q = applySlop(q, slop);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return super.getFieldQuery(field, queryText, slop);
}
}
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) throws ParseException {
if ("*".equals(part1)) {
part1 = null;
}
if ("*".equals(part2)) {
part2 = null;
}
Collection<String> fields = extractMultiFields(field);
if (fields == null) {
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive);
}
if (fields.size() == 1) {
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
part2 = part2 == null ? null : part2.toLowerCase(locale);
}
try {
Query rangeQuery;
if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
} else {
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive);
}
return rangeQuery;
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
}
}
return newRangeQuery(field, part1, part2, startInclusive, endInclusive);
}
protected Query getFuzzyQuery(String field, String termStr, String minSimilarity) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getFuzzyQuerySingle(fields.iterator().next(), termStr, minSimilarity);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses, true);
}
} else {
return getFuzzyQuerySingle(field, termStr, minSimilarity);
}
}
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
try {
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
}
}
return super.getFuzzyQuery(field, termStr, Float.parseFloat(minSimilarity));
}
@Override
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
String text = term.text();
int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length()));
FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
QueryParsers.setRewriteMethod(query, settings.fuzzyRewriteMethod());
return query;
}
@Override
protected Query getPrefixQuery(String field, String termStr) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getPrefixQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getPrefixQuerySingle(field, termStr);
}
}
private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
}
return query;
}
return getPossiblyAnalyzedPrefixQuery(field, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
return super.getPrefixQuery(field, termStr);
}
List<List<String> > tlist;
// get Analyzer from superclass and tokenize the term
TokenStream source = null;
try {
try {
source = getAnalyzer().tokenStream(field, termStr);
source.reset();
} catch (IOException e) {
return super.getPrefixQuery(field, termStr);
}
tlist = new ArrayList<>();
List<String> currentPos = new ArrayList<>();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);
while (true) {
try {
if (!source.incrementToken()) break;
} catch (IOException e) {
break;
}
if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {
tlist.add(currentPos);
currentPos = new ArrayList<>();
}
currentPos.add(termAtt.toString());
}
if (currentPos.isEmpty() == false) {
tlist.add(currentPos);
}
} finally {
if (source != null) {
IOUtils.closeWhileHandlingException(source);
}
}
if (tlist.size() == 0) {
return null;
}
if (tlist.size() == 1 && tlist.get(0).size() == 1) {
return super.getPrefixQuery(field, tlist.get(0).get(0));
}
// build a boolean query with prefix on the last position only.
List<BooleanClause> clauses = new ArrayList<>();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size() - 1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = super.getPrefixQuery(field, plist.get(0));
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)));
}
} else if (isLastPos == false) {
// build a synonym query for terms in the same position.
List<Query> terms = new ArrayList<> ();
for (int i = 0; i < plist.size(); i++) {
terms.add(new TermQuery(new Term(field, plist.get(i))));
}
posQuery = new DisjunctionMaxQuery(terms, 0.0f);
} else {
List<BooleanClause> innerClauses = new ArrayList<>();
for (String token : plist) {
innerClauses.add(new BooleanClause(getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQuery(innerClauses, true);
}
clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses);
}
@Override
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
if (termStr.equals("*")) {
// we want to optimize for match all query for the "*:*", and "*" cases
if ("*".equals(field) || Objects.equals(field, this.field)) {
String actualField = field;
if (actualField == null) {
actualField = this.field;
}
if (actualField == null) {
return newMatchAllDocsQuery();
}
if ("*".equals(actualField) || "_all".equals(actualField)) {
return newMatchAllDocsQuery();
}
// effectively, we check if a field exists or not
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);
}
}
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getWildcardQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getWildcardQuerySingle(field, termStr);
}
}
private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
String indexedNameField = field;
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
indexedNameField = currentFieldType.names().indexName();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
}
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
private Query getPossiblyAnalyzedWildcardQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
return super.getWildcardQuery(field, termStr);
}
boolean isWithinToken = (!termStr.startsWith("?") && !termStr.startsWith("*"));
StringBuilder aggStr = new StringBuilder();
StringBuilder tmp = new StringBuilder();
for (int i = 0; i < termStr.length(); i++) {
char c = termStr.charAt(i);
if (c == '?' || c == '*') {
if (isWithinToken) {
try (TokenStream source = getAnalyzer().tokenStream(field, tmp.toString())) {
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
if (source.incrementToken()) {
String term = termAtt.toString();
if (term.length() == 0) {
// no tokens, just use what we have now
aggStr.append(tmp);
} else {
aggStr.append(term);
}
} else {
// no tokens, just use what we have now
aggStr.append(tmp);
}
} catch (IOException e) {
aggStr.append(tmp);
}
tmp.setLength(0);
}
isWithinToken = false;
aggStr.append(c);
} else {
tmp.append(c);
isWithinToken = true;
}
}
if (isWithinToken) {
try {
try (TokenStream source = getAnalyzer().tokenStream(field, tmp.toString())) {
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
if (source.incrementToken()) {
String term = termAtt.toString();
if (term.length() == 0) {
// no tokens, just use what we have now
aggStr.append(tmp);
} else {
aggStr.append(term);
}
} else {
// no tokens, just use what we have now
aggStr.append(tmp);
}
}
} catch (IOException e) {
aggStr.append(tmp);
}
}
return super.getWildcardQuery(field, aggStr.toString());
}
@Override
protected Query getRegexpQuery(String field, String termStr) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getRegexpQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getRegexpQuerySingle(field, termStr);
}
}
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);
}
return query;
}
return super.getRegexpQuery(field, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoord) throws ParseException {
Query q = super.getBooleanQuery(clauses, disableCoord);
if (q == null) {
return null;
}
return fixNegativeQueryIfNeeded(q);
}
private void applyBoost(String field, Query q) {
if (settings.boosts() != null) {
float boost = settings.boosts().getOrDefault(field, 1f);
q.setBoost(boost);
}
}
private Query applySlop(Query q, int slop) {
if (q instanceof PhraseQuery) {
PhraseQuery pq = (PhraseQuery) q;
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
final Term[] terms = pq.getTerms();
final int[] positions = pq.getPositions();
for (int i = 0; i < terms.length; ++i) {
builder.add(terms[i], positions[i]);
}
pq = builder.build();
pq.setBoost(q.getBoost());
return pq;
} else if (q instanceof MultiPhraseQuery) {
((MultiPhraseQuery) q).setSlop(slop);
return q;
} else {
return q;
}
}
private Collection<String> extractMultiFields(String field) {
Collection<String> fields = null;
if (field != null) {
fields = parseContext.simpleMatchToIndexNames(field);
} else {
fields = settings.fields();
}
return fields;
}
@Override
public Query parse(String query) throws ParseException {
if (query.trim().isEmpty()) {
// if the query string is empty we return no docs / empty result
            // the behavior is simple to change in the client if all docs are required
            // or a default query is desired
return new MatchNoDocsQuery();
}
return super.parse(query);
}
}
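/*
 * Illustrative sketch, not part of the original source: getFieldQuerySingle()
 * interprets an unquoted term that starts with '>', '>=', '<' or '<=' as a
 * single-sided range query. The plain-Java helper below mirrors that parsing;
 * the RangeBounds type and its field names are hypothetical.
 */
class QueryTextRangeSketch {
    static final class RangeBounds {
        final String lower;
        final String upper;
        final boolean includeLower;
        final boolean includeUpper;
        RangeBounds(String lower, String upper, boolean includeLower, boolean includeUpper) {
            this.lower = lower;
            this.upper = upper;
            this.includeLower = includeLower;
            this.includeUpper = includeUpper;
        }
    }
    /** Returns null when the text carries no range operator (handled as a plain field query). */
    static RangeBounds parse(String queryText) {
        if (queryText.length() > 1 && queryText.charAt(0) == '>') {
            if (queryText.length() > 2 && queryText.charAt(1) == '=') {
                return new RangeBounds(queryText.substring(2), null, true, true);
            }
            return new RangeBounds(queryText.substring(1), null, false, true);
        }
        if (queryText.length() > 1 && queryText.charAt(0) == '<') {
            if (queryText.length() > 2 && queryText.charAt(1) == '=') {
                return new RangeBounds(null, queryText.substring(2), true, true);
            }
            return new RangeBounds(null, queryText.substring(1), true, false);
        }
        return null;
    }
    public static void main(String[] args) {
        RangeBounds b = parse(">=10");
        System.out.println(b.lower + " includeLower=" + b.includeLower); // 10 includeLower=true
        System.out.println(parse("<5").upper);                           // 5
        System.out.println(parse("plain"));                              // null
    }
}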
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.operators.rank;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.util.RowDataUtil;
import org.apache.flink.table.runtime.generated.GeneratedRecordEqualiser;
import org.apache.flink.table.runtime.generated.RecordEqualiser;
import org.apache.flink.table.runtime.keyselector.RowDataKeySelector;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.runtime.typeutils.SortedMapTypeInfo;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
 * A TopN function that can handle a retractable (updating) input stream.
 *
 * <p>The input stream can contain any change kind: INSERT, DELETE, UPDATE_BEFORE and UPDATE_AFTER.
*/
public class RetractableTopNFunction extends AbstractTopNFunction {
private static final long serialVersionUID = 1365312180599454479L;
private static final Logger LOG = LoggerFactory.getLogger(RetractableTopNFunction.class);
    // Message indicating that the state was cleared because of the TTL restriction. The message can
    // be written to the log.
private static final String STATE_CLEARED_WARN_MSG =
"The state is cleared because of state ttl. "
+ "This will result in incorrect result. You can increase the state ttl to avoid this.";
private final InternalTypeInfo<RowData> sortKeyType;
    // flag to skip records that hit a non-existent-record error instead of failing; true by default.
private final boolean lenient = true;
// a map state stores mapping from sort key to records list
private transient MapState<RowData, List<RowData>> dataState;
// a sorted map stores mapping from sort key to records count
private transient ValueState<SortedMap<RowData, Long>> treeMap;
    // The utility used to check whether two RowData records are equal to each other.
private GeneratedRecordEqualiser generatedEqualiser;
private RecordEqualiser equaliser;
private final ComparableRecordComparator serializableComparator;
public RetractableTopNFunction(
long minRetentionTime,
long maxRetentionTime,
InternalTypeInfo<RowData> inputRowType,
ComparableRecordComparator comparableRecordComparator,
RowDataKeySelector sortKeySelector,
RankType rankType,
RankRange rankRange,
GeneratedRecordEqualiser generatedEqualiser,
boolean generateUpdateBefore,
boolean outputRankNumber) {
super(
minRetentionTime,
maxRetentionTime,
inputRowType,
comparableRecordComparator.getGeneratedRecordComparator(),
sortKeySelector,
rankType,
rankRange,
generateUpdateBefore,
outputRankNumber);
this.sortKeyType = sortKeySelector.getProducedType();
this.serializableComparator = comparableRecordComparator;
this.generatedEqualiser = generatedEqualiser;
}
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
// compile equaliser
equaliser = generatedEqualiser.newInstance(getRuntimeContext().getUserCodeClassLoader());
generatedEqualiser = null;
ListTypeInfo<RowData> valueTypeInfo = new ListTypeInfo<>(inputRowType);
MapStateDescriptor<RowData, List<RowData>> mapStateDescriptor =
new MapStateDescriptor<>("data-state", sortKeyType, valueTypeInfo);
dataState = getRuntimeContext().getMapState(mapStateDescriptor);
ValueStateDescriptor<SortedMap<RowData, Long>> valueStateDescriptor =
new ValueStateDescriptor<>(
"sorted-map",
new SortedMapTypeInfo<>(
sortKeyType, BasicTypeInfo.LONG_TYPE_INFO, serializableComparator));
treeMap = getRuntimeContext().getState(valueStateDescriptor);
}
@Override
public void processElement(RowData input, Context ctx, Collector<RowData> out)
throws Exception {
long currentTime = ctx.timerService().currentProcessingTime();
// register state-cleanup timer
registerProcessingCleanupTimer(ctx, currentTime);
initRankEnd(input);
SortedMap<RowData, Long> sortedMap = treeMap.value();
if (sortedMap == null) {
sortedMap = new TreeMap<>(sortKeyComparator);
}
RowData sortKey = sortKeySelector.getKey(input);
boolean isAccumulate = RowDataUtil.isAccumulateMsg(input);
input.setRowKind(RowKind.INSERT); // erase row kind for further state accessing
if (isAccumulate) {
// update sortedMap
if (sortedMap.containsKey(sortKey)) {
sortedMap.put(sortKey, sortedMap.get(sortKey) + 1);
} else {
sortedMap.put(sortKey, 1L);
}
// emit
if (outputRankNumber || hasOffset()) {
// the without-number-algorithm can't handle topN with offset,
// so use the with-number-algorithm to handle offset
emitRecordsWithRowNumber(sortedMap, sortKey, input, out);
} else {
emitRecordsWithoutRowNumber(sortedMap, sortKey, input, out);
}
// update data state
List<RowData> inputs = dataState.get(sortKey);
if (inputs == null) {
                // the sort key has not been seen before
inputs = new ArrayList<>();
}
inputs.add(input);
dataState.put(sortKey, inputs);
} else {
final boolean stateRemoved;
// emit updates first
if (outputRankNumber || hasOffset()) {
// the without-number-algorithm can't handle topN with offset,
// so use the with-number-algorithm to handle offset
stateRemoved = retractRecordWithRowNumber(sortedMap, sortKey, input, out);
} else {
stateRemoved = retractRecordWithoutRowNumber(sortedMap, sortKey, input, out);
}
// and then update sortedMap
if (sortedMap.containsKey(sortKey)) {
long count = sortedMap.get(sortKey) - 1;
if (count == 0) {
sortedMap.remove(sortKey);
} else {
sortedMap.put(sortKey, count);
}
} else {
if (sortedMap.isEmpty()) {
if (lenient) {
LOG.warn(STATE_CLEARED_WARN_MSG);
} else {
throw new RuntimeException(STATE_CLEARED_WARN_MSG);
}
} else {
throw new RuntimeException(
"Can not retract a non-existent record. This should never happen.");
}
}
if (!stateRemoved) {
// the input record has not been removed from state
// should update the data state
List<RowData> inputs = dataState.get(sortKey);
if (inputs != null) {
// comparing record by equaliser
Iterator<RowData> inputsIter = inputs.iterator();
while (inputsIter.hasNext()) {
if (equaliser.equals(inputsIter.next(), input)) {
inputsIter.remove();
break;
}
}
if (inputs.isEmpty()) {
dataState.remove(sortKey);
} else {
dataState.put(sortKey, inputs);
}
}
}
}
treeMap.update(sortedMap);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<RowData> out)
throws Exception {
if (stateCleaningEnabled) {
cleanupState(dataState, treeMap);
}
}
// ------------- ROW_NUMBER-------------------------------
private void emitRecordsWithRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long currentRank = 0L;
RowData currentRow = null;
boolean findsSortKey = false;
while (iterator.hasNext() && isInRankEnd(currentRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
currentRank += entry.getValue();
currentRow = inputRow;
findsSortKey = true;
} else if (findsSortKey) {
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
                    // Skip the data if its state has been cleared because of state ttl.
if (lenient) {
LOG.warn(STATE_CLEARED_WARN_MSG);
} else {
throw new RuntimeException(STATE_CLEARED_WARN_MSG);
}
} else {
int i = 0;
while (i < inputs.size() && isInRankEnd(currentRank)) {
RowData prevRow = inputs.get(i);
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
currentRow = prevRow;
currentRank += 1;
i++;
}
}
} else {
currentRank += entry.getValue();
}
}
if (isInRankEnd(currentRank)) {
            // there are not enough elements in the Top-N, emit an INSERT message for the new record.
collectInsert(out, currentRow, currentRank);
}
}
private void emitRecordsWithoutRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long curRank = 0L;
boolean findsSortKey = false;
RowData toCollect = null;
RowData toDelete = null;
while (iterator.hasNext() && isInRankEnd(curRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
curRank += entry.getValue();
if (isInRankRange(curRank)) {
toCollect = inputRow;
}
findsSortKey = true;
} else if (findsSortKey) {
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
                    // Skip the data if its state has been cleared because of state ttl.
if (lenient) {
LOG.warn(STATE_CLEARED_WARN_MSG);
} else {
throw new RuntimeException(STATE_CLEARED_WARN_MSG);
}
} else {
long count = entry.getValue();
// gets the rank of last record with same sortKey
long rankOfLastRecord = curRank + count;
                    // deletes the record that was just pushed down to Top-(N+1) by the new record
if (isInRankEnd(rankOfLastRecord)) {
curRank = rankOfLastRecord;
} else {
int index = Long.valueOf(rankEnd - curRank).intValue();
toDelete = inputs.get(index);
break;
}
}
} else {
curRank += entry.getValue();
}
}
if (toDelete != null) {
collectDelete(out, toDelete);
}
if (toCollect != null) {
collectInsert(out, inputRow);
}
}
/**
* Retract the input record and emit updated records. This works for outputting with row_number.
*
* @return true if the input record has been removed from {@link #dataState}.
*/
private boolean retractRecordWithRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long currentRank = 0L;
RowData prevRow = null;
boolean findsSortKey = false;
while (iterator.hasNext() && isInRankEnd(currentRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
                    // Skip the data if its state has been cleared because of state ttl.
if (lenient) {
LOG.warn(STATE_CLEARED_WARN_MSG);
} else {
throw new RuntimeException(STATE_CLEARED_WARN_MSG);
}
} else {
Iterator<RowData> inputIter = inputs.iterator();
while (inputIter.hasNext() && isInRankEnd(currentRank)) {
RowData currentRow = inputIter.next();
if (!findsSortKey && equaliser.equals(currentRow, inputRow)) {
prevRow = currentRow;
findsSortKey = true;
inputIter.remove();
} else if (findsSortKey) {
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
prevRow = currentRow;
}
currentRank += 1;
}
if (inputs.isEmpty()) {
dataState.remove(key);
} else {
dataState.put(key, inputs);
}
}
} else if (findsSortKey) {
List<RowData> inputs = dataState.get(key);
int i = 0;
while (i < inputs.size() && isInRankEnd(currentRank)) {
RowData currentRow = inputs.get(i);
collectUpdateBefore(out, prevRow, currentRank);
collectUpdateAfter(out, currentRow, currentRank);
prevRow = currentRow;
currentRank += 1;
i++;
}
} else {
currentRank += entry.getValue();
}
}
if (isInRankEnd(currentRank)) {
            // there are not enough elements in the Top-N, emit a DELETE message for the retracted record.
collectDelete(out, prevRow, currentRank);
}
return findsSortKey;
}
/**
* Retract the input record and emit updated records. This works for outputting without
* row_number.
*
* @return true if the input record has been removed from {@link #dataState}.
*/
private boolean retractRecordWithoutRowNumber(
SortedMap<RowData, Long> sortedMap,
RowData sortKey,
RowData inputRow,
Collector<RowData> out)
throws Exception {
Iterator<Map.Entry<RowData, Long>> iterator = sortedMap.entrySet().iterator();
long nextRank = 1L; // the next rank number, should be in the rank range
boolean findsSortKey = false;
while (iterator.hasNext() && isInRankEnd(nextRank)) {
Map.Entry<RowData, Long> entry = iterator.next();
RowData key = entry.getKey();
if (!findsSortKey && key.equals(sortKey)) {
List<RowData> inputs = dataState.get(key);
if (inputs == null) {
                    // Skip the data if its state has been cleared because of state ttl.
if (lenient) {
LOG.warn(STATE_CLEARED_WARN_MSG);
} else {
throw new RuntimeException(STATE_CLEARED_WARN_MSG);
}
} else {
Iterator<RowData> inputIter = inputs.iterator();
while (inputIter.hasNext() && isInRankEnd(nextRank)) {
RowData prevRow = inputIter.next();
if (!findsSortKey && equaliser.equals(prevRow, inputRow)) {
collectDelete(out, prevRow, nextRank);
nextRank -= 1;
findsSortKey = true;
inputIter.remove();
} else if (findsSortKey) {
if (nextRank == rankEnd) {
collectInsert(out, prevRow, nextRank);
}
}
nextRank += 1;
}
if (inputs.isEmpty()) {
dataState.remove(key);
} else {
dataState.put(key, inputs);
}
}
} else if (findsSortKey) {
long count = entry.getValue();
// gets the rank of last record with same sortKey
long rankOfLastRecord = nextRank + count - 1;
if (rankOfLastRecord < rankEnd) {
nextRank = rankOfLastRecord + 1;
} else {
                    // emits the record that was just promoted into the Top-N
int index = Long.valueOf(rankEnd - nextRank).intValue();
List<RowData> inputs = dataState.get(key);
RowData toAdd = inputs.get(index);
collectInsert(out, toAdd);
break;
}
} else {
nextRank += entry.getValue();
}
}
return findsSortKey;
}
}
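/*
 * Illustrative sketch, not part of the original source: the function keeps a
 * SortedMap from sort key to the number of records sharing that key. Accumulate
 * messages increment the count, retract messages decrement it and drop the entry
 * once it reaches zero. The helper below mirrors that bookkeeping with plain
 * java.util types; the class and method names are hypothetical.
 */
class SortedCountBookkeepingSketch {
    static void accumulate(SortedMap<String, Long> counts, String sortKey) {
        if (counts.containsKey(sortKey)) {
            counts.put(sortKey, counts.get(sortKey) + 1);
        } else {
            counts.put(sortKey, 1L);
        }
    }
    static void retract(SortedMap<String, Long> counts, String sortKey) {
        if (counts.containsKey(sortKey)) {
            long count = counts.get(sortKey) - 1;
            if (count == 0) {
                counts.remove(sortKey);
            } else {
                counts.put(sortKey, count);
            }
        }
        // In the operator, a retraction for an unknown key is either tolerated
        // (lenient mode, state expired by TTL) or raises an exception.
    }
    public static void main(String[] args) {
        SortedMap<String, Long> counts = new TreeMap<>();
        accumulate(counts, "a");
        accumulate(counts, "a");
        accumulate(counts, "b");
        retract(counts, "a");
        System.out.println(counts); // {a=1, b=1}
    }
}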
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
/**
 * Mapper for the {@code _parent} metadata field, which links child documents to their parent document type.
 */
public class ParentFieldMapper extends MetadataFieldMapper {
public static final String NAME = "_parent";
public static final String CONTENT_TYPE = "_parent";
public static class Defaults {
public static final String NAME = ParentFieldMapper.NAME;
public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
public static final MappedFieldType JOIN_FIELD_TYPE = new ParentFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
JOIN_FIELD_TYPE.setHasDocValues(true);
JOIN_FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
JOIN_FIELD_TYPE.freeze();
}
}
public static class Builder extends MetadataFieldMapper.Builder<Builder, ParentFieldMapper> {
private String parentType;
protected String indexName;
private final String documentType;
private final MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
public Builder(String documentType) {
super(Defaults.NAME, Defaults.FIELD_TYPE);
this.indexName = name;
this.documentType = documentType;
builder = this;
}
public Builder type(String type) {
this.parentType = type;
return builder;
}
@Override
public Builder fieldDataSettings(Settings fieldDataSettings) {
Settings settings = Settings.builder().put(childJoinFieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
childJoinFieldType.setFieldDataType(new FieldDataType(childJoinFieldType.fieldDataType().getType(), settings));
return this;
}
@Override
public ParentFieldMapper build(BuilderContext context) {
if (parentType == null) {
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
}
parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(documentType)));
parentJoinFieldType.setFieldDataType(null);
childJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.type());
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// ignore before 2.0, reject on and after 2.0
iterator.remove();
} else if (fieldName.equals("fielddata")) {
                    // Only take over `loading`, since that is the only option that is still configurable:
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) {
Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build();
builder.fieldDataSettings(settings);
}
iterator.remove();
}
}
return builder;
}
}
static final class ParentFieldType extends MappedFieldType {
public ParentFieldType() {
setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
}
protected ParentFieldType(ParentFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new ParentFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
@Override
public Object valueForSearch(Object value) {
if (value == null) {
return null;
}
String sValue = value.toString();
if (sValue == null) {
return null;
}
int index = sValue.indexOf(Uid.DELIMITER);
if (index == -1) {
return sValue;
}
return sValue.substring(index + 1);
}
/**
         * We don't need to analyze the text; we just need to convert it to a UID...
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
return termsQuery(Collections.singletonList(value), context);
}
@Override
public Query termsQuery(List values, @Nullable QueryShardContext context) {
if (context == null) {
return super.termsQuery(values, context);
}
List<String> types = new ArrayList<>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
List<BytesRef> bValues = new ArrayList<>(values.size());
for (Object value : values) {
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
bValues.add(bValue);
} else {
                    // we use all non-child types, because we don't know whether it is exact or not...
for (String type : types) {
bValues.add(Uid.createUidAsBytes(type, bValue));
}
}
}
return new TermsQuery(names().indexName(), bValues);
}
}
private final String parentType;
// determines the field data settings
private MappedFieldType childJoinFieldType;
    // has no impact on field data settings; it is just here for creating a join field. The parent field mapper in the child type pointing to this type determines the field data settings for this join field
private final MappedFieldType parentJoinFieldType;
protected ParentFieldMapper(MappedFieldType fieldType, MappedFieldType parentJoinFieldType, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
this.parentType = parentType;
this.parentJoinFieldType = parentJoinFieldType;
this.parentJoinFieldType.freeze();
this.childJoinFieldType = childJoinFieldType;
if (childJoinFieldType != null) {
this.childJoinFieldType.freeze();
}
}
public ParentFieldMapper(Settings indexSettings, MappedFieldType existing, String parentType) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), joinFieldTypeForParentType(parentType, indexSettings), null, null, indexSettings);
}
private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
parentJoinFieldType.freeze();
return parentJoinFieldType;
}
public MappedFieldType getParentJoinFieldType() {
return parentJoinFieldType;
}
public MappedFieldType getChildJoinFieldType() {
return childJoinFieldType;
}
public String type() {
return parentType;
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
if (context.sourceToParse().flyweight() == false) {
parse(context);
}
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
boolean parent = context.docMapper().isParent(context.type());
if (parent) {
addJoinFieldIfNeeded(fields, parentJoinFieldType, context.id());
}
if (!active()) {
return;
}
if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else {
            // otherwise, we are running in the post-processing of the xcontent
String parsedParentId = context.doc().get(Defaults.NAME);
if (context.sourceToParse().parent() != null) {
String parentId = context.sourceToParse().parent();
if (parsedParentId == null) {
if (parentId == null) {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
}
}
}
// we have parent mapping, yet no value was set, ignore it...
}
private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) {
if (fieldType.hasDocValues()) {
fields.add(new SortedDocValuesField(fieldType.names().indexName(), new BytesRef(id)));
}
}
public static String joinField(String parentType) {
return ParentFieldMapper.NAME + "#" + parentType;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
private boolean joinFieldHasCustomFieldDataSettings() {
return childJoinFieldType != null && childJoinFieldType.fieldDataType() != null && childJoinFieldType.fieldDataType().equals(Defaults.JOIN_FIELD_TYPE.fieldDataType()) == false;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!active()) {
return builder;
}
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
builder.startObject(CONTENT_TYPE);
builder.field("type", parentType);
if (includeDefaults || joinFieldHasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) childJoinFieldType.fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
if (Objects.equals(parentType, fieldMergeWith.parentType) == false) {
mergeResult.addConflict("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]");
}
List<String> conflicts = new ArrayList<>();
fieldType().checkCompatibility(fieldMergeWith.fieldType(), conflicts, true); // always strict, this cannot change
parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here
if (childJoinFieldType != null) {
// TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type.
childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, mergeResult.updateAllTypes() == false);
}
for (String conflict : conflicts) {
mergeResult.addConflict(conflict);
}
if (active() && mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
childJoinFieldType = fieldMergeWith.childJoinFieldType.clone();
}
}
/**
* @return Whether the _parent field is actually configured.
*/
public boolean active() {
return parentType != null;
}
}
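/*
 * Illustrative sketch, not part of the original source: the mapper derives join
 * fields named "_parent#<type>" (see joinField()), and the stored _parent value
 * is a uid of the form "<parentType>#<parentId>"; valueForSearch() strips
 * everything up to the delimiter. The snippet below mirrors only the string
 * handling and assumes '#' is the uid delimiter; the class name is hypothetical.
 */
class ParentFieldNamingSketch {
    public static void main(String[] args) {
        String parentType = "blog";
        String parentId = "1";
        String joinField = ParentFieldMapper.joinField(parentType);   // "_parent#blog"
        String uid = parentType + "#" + parentId;                     // stored _parent value, e.g. "blog#1"
        String searchValue = uid.substring(uid.indexOf('#') + 1);     // what valueForSearch() would return
        System.out.println(joinField + " " + uid + " " + searchValue);
    }
}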
|
|
/*
* Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*/
package jline.console.completer;
import jline.internal.Log;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
/**
* A {@link Completer} implementation that invokes a child completer using the appropriate <i>separator</i> argument.
* This can be used instead of the individual completers having to know about argument parsing semantics.
*
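 * <p>A minimal usage sketch (illustrative only; it assumes the sibling {@code StringsCompleter}
 * and {@code FileNameCompleter} implementations from this package and a {@code ConsoleReader}
 * named {@code reader}):
 * <pre>{@code
 * Completer completer = new ArgumentCompleter(
 *     new StringsCompleter("open", "close"),  // completes the command name (first argument)
 *     new FileNameCompleter());               // completes the remaining arguments
 * reader.addCompleter(completer);
 * }</pre>
 *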
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
* @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
* @since 2.3
*/
public class ArgumentCompleter
implements Completer
{
private final ArgumentDelimiter delimiter;
private final List<Completer> completers = new ArrayList<Completer>();
private boolean strict = true;
/**
* Create a new completer with the specified argument delimiter.
*
* @param delimiter The delimiter for parsing arguments
* @param completers The embedded completers
*/
public ArgumentCompleter(final ArgumentDelimiter delimiter, final Collection<Completer> completers) {
assert delimiter != null;
this.delimiter = delimiter;
assert completers != null;
this.completers.addAll(completers);
}
/**
* Create a new completer with the specified argument delimiter.
*
* @param delimiter The delimiter for parsing arguments
* @param completers The embedded completers
*/
public ArgumentCompleter(final ArgumentDelimiter delimiter, final Completer... completers) {
this(delimiter, Arrays.asList(completers));
}
/**
* Create a new completer with the default {@link WhitespaceArgumentDelimiter}.
*
* @param completers The embedded completers
*/
public ArgumentCompleter(final Completer... completers) {
this(new WhitespaceArgumentDelimiter(), completers);
}
/**
* Create a new completer with the default {@link WhitespaceArgumentDelimiter}.
*
* @param completers The embedded completers
*/
public ArgumentCompleter(final List<Completer> completers) {
this(new WhitespaceArgumentDelimiter(), completers);
}
/**
* If true, a completion at argument index N will only succeed
* if all the completions from 0-(N-1) also succeed.
*/
public void setStrict(final boolean strict) {
this.strict = strict;
}
/**
     * Returns whether a completion at argument index N will only succeed
* if all the completions from arguments 0-(N-1) also succeed.
*
* @return True if strict.
* @since 2.3
*/
public boolean isStrict() {
return this.strict;
}
/**
* @since 2.3
*/
public ArgumentDelimiter getDelimiter() {
return delimiter;
}
/**
* @since 2.3
*/
public List<Completer> getCompleters() {
return completers;
}
public int complete(final String buffer, final int cursor, final List<CharSequence> candidates) {
// buffer can be null
assert candidates != null;
ArgumentDelimiter delim = getDelimiter();
ArgumentList list = delim.delimit(buffer, cursor);
int argpos = list.getArgumentPosition();
int argIndex = list.getCursorArgumentIndex();
if (argIndex < 0) {
return -1;
}
List<Completer> completers = getCompleters();
Completer completer;
// if we are beyond the end of the completers, just use the last one
if (argIndex >= completers.size()) {
completer = completers.get(completers.size() - 1);
}
else {
completer = completers.get(argIndex);
}
// ensure that all the previous completers are successful before allowing this completer to pass (only if strict).
for (int i = 0; isStrict() && (i < argIndex); i++) {
Completer sub = completers.get(i >= completers.size() ? (completers.size() - 1) : i);
String[] args = list.getArguments();
String arg = (args == null || i >= args.length) ? "" : args[i];
List<CharSequence> subCandidates = new LinkedList<CharSequence>();
if (sub.complete(arg, arg.length(), subCandidates) == -1) {
return -1;
}
if (subCandidates.size() == 0) {
return -1;
}
}
int ret = completer.complete(list.getCursorArgument(), argpos, candidates);
if (ret == -1) {
return -1;
}
int pos = ret + list.getBufferPosition() - argpos;
// Special case: when completing in the middle of a line, and the area under the cursor is a delimiter,
// then trim any delimiters from the candidates, since we do not need to have an extra delimiter.
//
// E.g., if we have a completion for "foo", and we enter "f bar" into the buffer, and move to after the "f"
     * and hit TAB, we want "foo bar" instead of "foo  bar" (with a doubled delimiter).
if ((cursor != buffer.length()) && delim.isDelimiter(buffer, cursor)) {
for (int i = 0; i < candidates.size(); i++) {
CharSequence val = candidates.get(i);
while (val.length() > 0 && delim.isDelimiter(val, val.length() - 1)) {
val = val.subSequence(0, val.length() - 1);
}
candidates.set(i, val);
}
}
Log.trace("Completing ", buffer, " (pos=", cursor, ") with: ", candidates, ": offset=", pos);
return pos;
}
/**
* The {@link ArgumentCompleter.ArgumentDelimiter} allows custom breaking up of a {@link String} into individual
* arguments in order to dispatch the arguments to the nested {@link Completer}.
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
*/
public static interface ArgumentDelimiter
{
/**
* Break the specified buffer into individual tokens that can be completed on their own.
*
* @param buffer The buffer to split
* @param pos The current position of the cursor in the buffer
* @return The tokens
*/
ArgumentList delimit(CharSequence buffer, int pos);
/**
         * Returns true if the character at the specified position is a delimiter.
*
* @param buffer The complete command buffer
* @param pos The index of the character in the buffer
* @return True if the character should be a delimiter
*/
boolean isDelimiter(CharSequence buffer, int pos);
}
/**
* Abstract implementation of a delimiter that uses the {@link #isDelimiter} method to determine if a particular
* character should be used as a delimiter.
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
*/
public abstract static class AbstractArgumentDelimiter
implements ArgumentDelimiter
{
// TODO: handle argument quoting and escape characters
private char[] quoteChars = {'\'', '"'};
private char[] escapeChars = {'\\'};
public void setQuoteChars(final char[] chars) {
this.quoteChars = chars;
}
public char[] getQuoteChars() {
return this.quoteChars;
}
public void setEscapeChars(final char[] chars) {
this.escapeChars = chars;
}
public char[] getEscapeChars() {
return this.escapeChars;
}
public ArgumentList delimit(final CharSequence buffer, final int cursor) {
List<String> args = new LinkedList<String>();
StringBuilder arg = new StringBuilder();
int argpos = -1;
int bindex = -1;
for (int i = 0; (buffer != null) && (i <= buffer.length()); i++) {
// once we reach the cursor, set the
// position of the selected index
if (i == cursor) {
bindex = args.size();
// the position in the current argument is just the
// length of the current argument
argpos = arg.length();
}
if ((i == buffer.length()) || isDelimiter(buffer, i)) {
if (arg.length() > 0) {
args.add(arg.toString());
arg.setLength(0); // reset the arg
}
}
else {
arg.append(buffer.charAt(i));
}
}
return new ArgumentList(args.toArray(new String[args.size()]), bindex, argpos, cursor);
}
/**
         * Returns true if the character at the specified position is a delimiter: it must not be
         * quoted by any of {@link #getQuoteChars}, must not be escaped by any of the
         * {@link #getEscapeChars}, and {@link #isDelimiterChar} must return true for it.
*
* @param buffer The complete command buffer
* @param pos The index of the character in the buffer
* @return True if the character should be a delimiter
*/
public boolean isDelimiter(final CharSequence buffer, final int pos) {
return !isQuoted(buffer, pos) && !isEscaped(buffer, pos) && isDelimiterChar(buffer, pos);
}
public boolean isQuoted(final CharSequence buffer, final int pos) {
return false;
}
public boolean isEscaped(final CharSequence buffer, final int pos) {
if (pos <= 0) {
return false;
}
            for (int i = 0; (escapeChars != null) && (i < escapeChars.length); i++) {
if (buffer.charAt(pos) == escapeChars[i]) {
return !isEscaped(buffer, pos - 1); // escape escape
}
}
return false;
}
/**
         * Returns true if the character at the specified position is a delimiter. This method will only be called if
         * the character is not enclosed in any of the {@link #getQuoteChars}, and is not escaped by any of the
         * {@link #getEscapeChars}. To perform escaping manually, override {@link #isDelimiter} instead.
*/
public abstract boolean isDelimiterChar(CharSequence buffer, int pos);
}
/**
* {@link ArgumentCompleter.ArgumentDelimiter} implementation that counts all whitespace (as reported by
* {@link Character#isWhitespace}) as being a delimiter.
*
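     * <p>Illustrative sketch of the resulting {@link ArgumentList} (values follow from the
     * delimiting rules implemented above):
     * <pre>{@code
     * ArgumentList list = new WhitespaceArgumentDelimiter().delimit("git checkout ma", 15);
     * list.getArguments();           // ["git", "checkout", "ma"]
     * list.getCursorArgumentIndex(); // 2
     * list.getArgumentPosition();    // 2 (cursor offset inside "ma")
     * }</pre>
     *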
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
*/
public static class WhitespaceArgumentDelimiter
extends AbstractArgumentDelimiter
{
/**
* The character is a delimiter if it is whitespace, and the
* preceding character is not an escape character.
*/
@Override
public boolean isDelimiterChar(final CharSequence buffer, final int pos) {
return Character.isWhitespace(buffer.charAt(pos));
}
}
/**
* The result of a delimited buffer.
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
*/
public static class ArgumentList
{
private String[] arguments;
private int cursorArgumentIndex;
private int argumentPosition;
private int bufferPosition;
/**
* @param arguments The array of tokens
* @param cursorArgumentIndex The token index of the cursor
* @param argumentPosition The position of the cursor in the current token
* @param bufferPosition The position of the cursor in the whole buffer
*/
public ArgumentList(final String[] arguments, final int cursorArgumentIndex, final int argumentPosition, final int bufferPosition) {
assert arguments != null;
this.arguments = arguments;
this.cursorArgumentIndex = cursorArgumentIndex;
this.argumentPosition = argumentPosition;
this.bufferPosition = bufferPosition;
}
public void setCursorArgumentIndex(final int i) {
this.cursorArgumentIndex = i;
}
public int getCursorArgumentIndex() {
return this.cursorArgumentIndex;
}
public String getCursorArgument() {
if ((cursorArgumentIndex < 0) || (cursorArgumentIndex >= arguments.length)) {
return null;
}
return arguments[cursorArgumentIndex];
}
public void setArgumentPosition(final int pos) {
this.argumentPosition = pos;
}
public int getArgumentPosition() {
return this.argumentPosition;
}
public void setArguments(final String[] arguments) {
this.arguments = arguments;
}
public String[] getArguments() {
return this.arguments;
}
public void setBufferPosition(final int pos) {
this.bufferPosition = pos;
}
public int getBufferPosition() {
return this.bufferPosition;
}
}
}
|
|
// **** GENERATED CODE, DO NOT MODIFY ****
// This file was generated via preprocessing from input:
// java/com/google/common/html/types/SafeUrls.java.tpl
// ***************************************
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.html.types;
import com.google.common.annotations.GwtCompatible;
import com.google.common.io.BaseEncoding;
import com.google.common.net.UrlEscapers;
import com.google.errorprone.annotations.CompileTimeConstant;
import java.io.UnsupportedEncodingException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.CheckReturnValue;
/**
* Protocol conversions and factory methods for {@link SafeUrl}.
*/
@CheckReturnValue
@GwtCompatible
public final class SafeUrls {
// Don't forget to update the public docs when updating this set.
private static final Set<String> DEFAULT_SAFE_SCHEMES = createUnmodifiableSet(
"http", "https", "mailto", "ftp");
private static final Set<CustomSafeUrlScheme> EMPTY_CUSTOM_SCHEMES = Collections.emptySet();
private SafeUrls() {}
private static final Set<String> createUnmodifiableSet(String ...schemes) {
HashSet<String> set = new HashSet<String>();
for (String scheme : schemes) {
set.add(scheme);
}
return Collections.unmodifiableSet(set);
}
/**
* Deserializes a SafeUrlProto into a SafeUrl instance.
*
* <p>Protocol-message forms are intended to be opaque. The fields of the protocol message should
* be considered encapsulated and are not intended for direct inspection or manipulation. Protocol
* message forms of this type should be produced by {@link #toProto(SafeUrl)} or its
* equivalent in other implementation languages.
*
* <p><b>Important:</b> It is unsafe to invoke this method on a protocol message that has been
* received from an entity outside the application's trust domain. Data coming from the browser
* is outside the application's trust domain.
*/
public static SafeUrl fromProto(SafeUrlProto proto) {
return create(proto.getPrivateDoNotAccessOrElseSafeUrlWrappedValue());
}
/**
* Serializes a SafeUrl into its opaque protocol message representation.
*
* <p>Protocol message forms of this type are intended to be opaque. The fields of the returned
* protocol message should be considered encapsulated and are not intended for direct inspection
* or manipulation. Protocol messages can be converted back into a SafeUrl using
* {@link #fromProto(SafeUrlProto)}.
*/
public static SafeUrlProto toProto(SafeUrl url) {
return SafeUrlProto.newBuilder()
.setPrivateDoNotAccessOrElseSafeUrlWrappedValue(url.getSafeUrlString())
.build();
}
/**
* Creates a SafeUrl from the given compile-time constant string {@code url}.
*
* <p>No runtime validation or sanitization is performed on {@code url}; being under application
* control, it is simply assumed to comply with the SafeUrl contract.
*/
public static SafeUrl fromConstant(@CompileTimeConstant final String url) {
return create(url);
}
/**
* Creates a SafeUrl object from the given {@code url}, validating that the input string matches
* a pattern of commonly used safe URLs. If {@code url} fails validation, this method returns a
* SafeUrl, {@link SafeUrl#INNOCUOUS}, which contains an innocuous string,
* {@link SafeUrl#INNOCUOUS_STRING}.
*
* <p>Specifically, {@code url} may be a URL with any of the default safe schemes (http, https,
* ftp, mailto), or a relative URL (i.e., a URL without a scheme; specifically, a scheme-relative,
* absolute-path-relative, or path-relative URL).
*
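   * <p>Illustrative examples of the resulting behavior:
   * <pre>{@code
   * SafeUrls.sanitize("https://example.com/");  // kept: https is a default safe scheme
   * SafeUrls.sanitize("/docs/index.html");      // kept: relative URL, no scheme
   * SafeUrls.sanitize("javascript:alert(1)");   // rejected: returns SafeUrl.INNOCUOUS
   * }</pre>
   *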
* @see http://url.spec.whatwg.org/#concept-relative-url
*/
public static SafeUrl sanitize(String url) {
return sanitize(url, EMPTY_CUSTOM_SCHEMES);
}
/**
* Creates a SafeUrl object from the given {@code url}, validating that the input string matches
* a pattern of commonly used safe URLs. If {@code url} fails validation, this method returns a
* SafeUrl, {@link SafeUrl#INNOCUOUS}, which contains an innocuous string,
* {@link SafeUrl#INNOCUOUS_STRING}.
*
* <p>{@code url} is sanitized as in {@link #sanitize(String)}, additionally permitting the
* custom schemes listed in {@code extraAllowedSchemes}.
*/
public static SafeUrl sanitize(String url, Set<CustomSafeUrlScheme> extraAllowedSchemes) {
if (!isSafeUrl(url, extraAllowedSchemes)) {
return SafeUrl.INNOCUOUS;
}
return create(url);
}
/**
* Sanitizes the given {@code url}, validating that the input string matches a pattern of commonly
* used safe URLs. If {@code url} fails validation, this method returns
* {@code about:invalid#identifier}, with the given {@code identifier}. The identifier allows
* users to trace a sanitized value to the library that performed the sanitization and hence
* should be a unique string like "zLibraryNamez".
*
* <p>Specifically, {@code url} may be a URL with any of the default safe schemes (http, https,
* ftp, mailto), or a relative URL (i.e., a URL without a scheme; specifically, a scheme-relative,
* absolute-path-relative, or path-relative URL).
*
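   * <p>For example (illustrative), {@code sanitizeAsString("javascript:alert(1)", "zLibraryNamez")}
   * would return {@code "about:invalid#zLibraryNamez"}, while a safe URL is returned unchanged.
   *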
* @see http://url.spec.whatwg.org/#concept-relative-url
*/
public static String sanitizeAsString(String url, @CompileTimeConstant final String identifier) {
if (!isSafeUrl(url, EMPTY_CUSTOM_SCHEMES)) {
return "about:invalid#" + identifier;
}
return url;
}
/**
* Creates a {@code data:text/html} URL whose content is populated from the given
* {@code SafeHtml} object.
*
* <p>The resulting {@code data}-scheme URL's content is UTF-8-encoded, but the
* encoding of non-ASCII characters is done using the standard %xx hex encoding.
*
* @see http://tools.ietf.org/html/rfc2397
* @see https://developer.mozilla.org/en-US/docs/Web/HTTP/data_URIs
*/
public static SafeUrl createHtmlDataUrl(SafeHtml html) {
// Use urlPathSegmentEscaper because all other Escapers convert spaces to "+" instead of "%20",
// which are rendered as normal "+"s in the browser instead of being rendered as spaces.
String dataUrl =
"data:text/html;charset=UTF-8,"
+ UrlEscapers.urlPathSegmentEscaper().escape(html.getSafeHtmlString());
return create(dataUrl);
}
/**
* Creates a {@code data:text/html} URL whose content is populated from the given
* {@code SafeHtml} object.
*
* <p>The resulting {@code data}-scheme URL's content is UTF-8-encoded, and further encoded using
* base-64 transfer encoding.
*
* @see http://tools.ietf.org/html/rfc2397
* @see https://developer.mozilla.org/en-US/docs/Web/HTTP/data_URIs
*/
public static SafeUrl createHtmlDataUrlBase64(SafeHtml html) {
try {
String dataUrl =
"data:text/html;charset=UTF-8;base64,"
+ BaseEncoding.base64().encode(html.getSafeHtmlString().getBytes("UTF-8"));
return create(dataUrl);
} catch (UnsupportedEncodingException e) {
// Should never happen. We use getBytes(String) instead of getBytes(CharSet) because
// there's no java.nio.charset.StandardCharsets in older Android SDKs.
throw new RuntimeException(e);
}
}
/**
* Matches a subset of URLs that will not cause script execution if used in URL context within a
* HTML document. Specifically, this method returns true if the {@code url}:
* <ul>
* <li>Starts with a default safe protocol (http, https, ftp, mailto) or one of the schemes
* specified in {@code extraAllowedSchemes}.
* <li>Contains no protocol. A protocol must be followed by a colon and colons are allowed
* only after one of the characters [/?#].
* A colon after a hash (#) must be in the fragment.
* Otherwise, a colon after a (?) must be in a query.
* Otherwise, a colon after a single solidus (/) must be in a path.
   *         Otherwise, a colon after a double solidus (//) must be in the authority (before port).
* </ul>
*
* <p>We don't use a regex so that we don't need to depend on GWT, which does not support Java's
* Pattern and requires using its RegExp class.
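   *
   * <p>Illustrative examples under these rules: {@code "foo/bar:baz"} is accepted (the colon
   * follows a solidus, so it is part of the path), while {@code "tel:123"} is rejected unless a
   * matching scheme is present in {@code extraAllowedSchemes}.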
*/
private static boolean isSafeUrl(String url, Set<CustomSafeUrlScheme> extraAllowedSchemes) {
String lowerCased = url.toLowerCase();
// If some Unicode character lower cases to something that ends up matching these ASCII ones,
// it's harmless.
for (String scheme : DEFAULT_SAFE_SCHEMES) {
if (lowerCased.startsWith(scheme + ":")) {
return true;
}
}
for (CustomSafeUrlScheme scheme : extraAllowedSchemes) {
/**
* For "-" in a custom URL scheme, it's not possible to write a proto enum with "-" in the
* field name. In proto, it has to be "_". But we can safely convert all "_" in the proto name
* to "-", since according to the URL Living Standard, a URL-scheme string must be one ASCII
* alpha, followed by zero or more of ASCII alphanumeric, "+", "-", and ".".
*
* @see https://url.spec.whatwg.org/#url-syntax
*/
if (lowerCased.startsWith(scheme.name().toLowerCase().replace('_', '-') + ":")) {
return true;
}
}
for (int i = 0; i < url.length(); i++) {
switch (url.charAt(i)) {
case '/':
case '?':
case '#':
// After this the string can end or contain anything else, it won't be interpreted
// as the scheme.
return true;
case ':':
// This character is not allowed before seeing one of the above characters.
return false;
default:
// Other characters ok.
continue;
}
}
return true;
}
/**
* Creates a SafeUrl by doing an unchecked conversion from the given {@code url}. Also called
* from SafeUrlBuilder.
*/
static SafeUrl create(String url) {
return new SafeUrl(url);
}
}
|
|
/*
Derby - Class org.apache.derby.impl.store.access.sort.MergeScan
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.access.sort;
import java.util.Enumeration;
import java.util.Vector;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.store.access.conglomerate.TransactionManager;
import org.apache.derby.iapi.store.access.SortObserver;
import org.apache.derby.iapi.store.raw.StreamContainerHandle;
import org.apache.derby.iapi.store.raw.Transaction;
import org.apache.derby.iapi.types.DataValueDescriptor;
// For JavaDoc references (i.e. @see)
import org.apache.derby.iapi.store.access.conglomerate.ScanManager;
/**
A sort scan that is capable of merging as many merge runs
as will fit in the passed-in sort buffer.
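
   In outline (as implemented below): init() opens a stream scan on each merge
   run and primes the sort buffer with one row from each; next() then removes
   the smallest buffered row and refills the buffer from the run that row came
   from, which is tracked through the buffer's aux value (setNextAux/getLastAux).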
**/
public class MergeScan extends SortScan
{
/**
The sort buffer we will use.
**/
protected SortBuffer sortBuffer;
/**
The merge runs.
**/
protected Vector<Long> mergeRuns;
/**
Array of scan controllers for the merge runs.
Entries in the array become null as the last
row is pulled out and the scan is closed.
**/
protected StreamContainerHandle openScans[];
private SortObserver sortObserver;
/*
* Constructors.
*/
MergeScan(
MergeSort sort,
TransactionManager tran,
SortBuffer sortBuffer,
Vector<Long> mergeRuns,
SortObserver sortObserver,
boolean hold)
{
super(sort, tran, hold);
this.sortBuffer = sortBuffer;
this.mergeRuns = mergeRuns;
this.tran = tran;
this.sortObserver = sortObserver;
}
/*
* Methods of MergeSortScan
*/
/**
Move to the next position in the scan.
@see org.apache.derby.iapi.store.access.ScanController#next
**/
public boolean next()
throws StandardException
{
current = sortBuffer.removeFirst();
if (current != null)
mergeARow(sortBuffer.getLastAux());
return (current != null);
}
/**
Close the scan.
@see org.apache.derby.iapi.store.access.ScanController#close
**/
public void close()
{
if (openScans != null)
{
for (int i = 0; i < openScans.length; i++)
{
if (openScans[i] != null)
{
openScans[i].close();
}
openScans[i] = null;
}
openScans = null;
}
// Hand sort buffer and remaining merge runs to sort.
if (super.sort != null)
{
sort.doneScanning(this, sortBuffer, mergeRuns);
sortBuffer = null;
mergeRuns = null;
}
// Sets sort to null
super.close();
}
/**
Close the scan.
@see ScanManager#closeForEndTransaction
**/
public boolean closeForEndTransaction(boolean closeHeldScan)
{
if (!hold || closeHeldScan)
{
close();
return(true);
}
else
{
return(false);
}
}
/*
* Methods of MergeScan
*/
/**
Initialize the scan, returning false if there
was some error.
**/
public boolean init(TransactionManager tran)
throws StandardException
{
if (SanityManager.DEBUG)
{
// We really expect to have at least one
// merge run.
SanityManager.ASSERT(mergeRuns != null);
SanityManager.ASSERT(mergeRuns.size() > 0);
// This sort scan also expects that the
// caller has ensured that the sort buffer
// capacity will hold a row from all the
// merge runs.
SanityManager.ASSERT(sortBuffer.capacity() >= mergeRuns.size());
}
// Clear the sort buffer.
sortBuffer.reset();
// Create an array to hold a scan controller
// for each merge run.
openScans = new StreamContainerHandle[mergeRuns.size()];
if (openScans == null)
return false;
// Open a scan on each merge run.
int scanindex = 0;
Enumeration<Long> e = mergeRuns.elements();
while (e.hasMoreElements())
{
// get the container id
long id = (e.nextElement()).longValue();
Transaction rawTran = tran.getRawStoreXact(); // get raw transaction
int segmentId = StreamContainerHandle.TEMPORARY_SEGMENT;
openScans[scanindex++] =
rawTran.openStreamContainer(segmentId, id, hold);
}
// Load the initial rows.
for (scanindex = 0; scanindex < openScans.length; scanindex++)
mergeARow(scanindex);
// Success!
return true;
}
/**
Insert rows while we keep getting duplicates
from the merge run whose scan is in the
open scan array entry indexed by scanindex.
**/
void mergeARow(int scanindex)
throws StandardException
{
if (SanityManager.DEBUG)
{
// Unless there's a bug, the scan index will refer
// to an open scan. That's because we never put
// a scan index for a closed scan into the sort
// buffer (via setNextAux).
SanityManager.ASSERT(openScans[scanindex] != null);
}
DataValueDescriptor[] row;
// Read rows from the merge run and stuff them into the
// sort buffer for as long as we encounter duplicates.
do
{
row = sortObserver.getArrayClone();
// Fetch the row from the merge run.
if (!openScans[scanindex].fetchNext(row))
{
// If we're out of rows in the merge run, close the scan.
openScans[scanindex].close();
openScans[scanindex] = null;
return;
}
// Save the index of this merge run with
// the row we're putting in the sort buffer.
sortBuffer.setNextAux(scanindex);
}
while (sortBuffer.insert(row) == SortBuffer.INSERT_DUPLICATE);
}
}
|
|
/*
* Copyright (C) 2022 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package buildtests;
import static com.google.common.truth.Truth.assertThat;
import static org.gradle.testkit.runner.TaskOutcome.SUCCESS;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
// This is a regression test for https://github.com/google/dagger/issues/3136
@RunWith(Parameterized.class)
public class TransitiveBindsQualifierTest {
@Parameters(name = "{0}")
public static Collection<Object[]> parameters() {
return Arrays.asList(new Object[][] {{ "implementation" }, { "api" }});
}
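  // With "implementation", library2 (which declares @MyQualifier) is not exported to :app's
  // compile classpath, so dagger-compiler cannot resolve the qualifier and compilation fails;
  // with "api" the dependency is exported transitively and the component builds successfully.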
@Rule public TemporaryFolder folder = new TemporaryFolder();
private final String transitiveDependencyType;
public TransitiveBindsQualifierTest(String transitiveDependencyType) {
this.transitiveDependencyType = transitiveDependencyType;
}
@Test
public void testQualifierOnBindsMethod() throws IOException {
BuildResult result;
switch (transitiveDependencyType) {
case "implementation":
result = setupRunner().buildAndFail();
assertThat(result.getOutput()).contains("Task :app:compileJava FAILED");
assertThat(result.getOutput())
.contains(
"ComponentProcessingStep was unable to process 'app.MyComponent' because "
+ "'library2.MyQualifier' could not be resolved."
+ "\n "
+ "\n Dependency trace:"
+ "\n => element (INTERFACE): library1.MyModule"
+ "\n => element (METHOD): bindObject(java.lang.Number)"
+ "\n => element (PARAMETER): arg0"
+ "\n => annotation: @library2.MyQualifier");
break;
case "api":
result = setupRunner().build();
assertThat(result.task(":app:assemble").getOutcome()).isEqualTo(SUCCESS);
assertThat(result.getOutput())
.contains("BINDINGS: ["
+ "@library2.MyQualifier java.lang.Object, "
+ "@library2.MyQualifier java.lang.Number, "
+ "java.lang.Integer"
+ "]");
break;
}
}
private GradleRunner setupRunner() throws IOException {
File projectDir = folder.getRoot();
GradleModule.create(projectDir)
.addSettingsFile(
"include 'app'",
"include 'library1'",
"include 'library2'",
"include 'spi-plugin'")
.addBuildFile(
"buildscript {",
" ext {",
String.format("dagger_version = \"%s\"", System.getProperty("dagger_version")),
" }",
"}",
"",
"allprojects {",
" repositories {",
" mavenCentral()",
" mavenLocal()",
" }",
"}");
GradleModule.create(projectDir, "app")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'application'",
"}",
"compileJava {",
" options.compilerArgs << '-Adagger.pluginsVisitFullBindingGraphs=ENABLED'",
"}",
"dependencies {",
" implementation project(':library1')",
" annotationProcessor project(':spi-plugin')",
" implementation \"com.google.dagger:dagger:$dagger_version\"",
" annotationProcessor \"com.google.dagger:dagger-compiler:$dagger_version\"",
"}")
.addSrcFile(
"MyComponent.java",
"package app;",
"",
"import dagger.BindsInstance;",
"import dagger.Component;",
"import library1.MyModule;",
"",
"@Component(modules = MyModule.class)",
"public interface MyComponent {",
" @Component.Factory",
" interface Factory {",
" MyComponent create(@BindsInstance int i);",
" }",
"}");
GradleModule.create(projectDir, "library1")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'java-library'",
"}",
"dependencies {",
transitiveDependencyType + " project(':library2')",
" implementation \"com.google.dagger:dagger:$dagger_version\"",
" annotationProcessor \"com.google.dagger:dagger-compiler:$dagger_version\"",
"}")
.addSrcFile(
"MyModule.java",
"package library1;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"import dagger.Provides;",
"import library2.MyQualifier;",
"",
"@Module",
"public interface MyModule {",
" @Binds",
" @MyQualifier",
" Object bindObject(@MyQualifier Number number);",
"",
" @Binds",
" @MyQualifier",
" Number bindNumber(int i);",
"}");
GradleModule.create(projectDir, "library2")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'java-library'",
"}",
"dependencies {",
" implementation 'javax.inject:javax.inject:1'",
"}")
.addSrcFile(
"MyQualifier.java",
"package library2;",
"",
"import javax.inject.Qualifier;",
"",
"@Qualifier",
"public @interface MyQualifier {}");
// This plugin is used to print output about bindings that we can assert on in tests.
GradleModule.create(projectDir, "spi-plugin")
.addBuildFile(
"plugins {",
" id 'java'",
"}",
"dependencies {",
" implementation \"com.google.dagger:dagger-spi:$dagger_version\"",
" implementation 'com.google.auto.service:auto-service-annotations:1.0.1'",
" annotationProcessor 'com.google.auto.service:auto-service:1.0.1'",
"}")
.addSrcFile(
"TestBindingGraphPlugin.java",
"package spiplugin;",
"",
"import com.google.auto.service.AutoService;",
"import dagger.model.Binding;",
"import dagger.model.BindingGraph;",
"import dagger.model.BindingGraph.DependencyEdge;",
"import dagger.model.DependencyRequest;",
"import dagger.spi.BindingGraphPlugin;",
"import dagger.spi.DiagnosticReporter;",
"import java.util.stream.Collectors;",
"",
"@AutoService(BindingGraphPlugin.class)",
"public class TestBindingGraphPlugin implements BindingGraphPlugin {",
" @Override",
" public void visitGraph(",
" BindingGraph bindingGraph, DiagnosticReporter diagnosticReporter) {",
" if (!bindingGraph.isFullBindingGraph() || bindingGraph.isModuleBindingGraph()) {",
" return;",
" }",
" System.out.print(",
" \"BINDINGS: \"",
" + bindingGraph.bindings().stream()",
" .map(Binding::key)",
" .collect(Collectors.toList()));",
" }",
"}");
return GradleRunner.create()
.withArguments("--stacktrace", "build")
.withProjectDir(projectDir);
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import static java.util.Collections.singletonList;
import static org.elasticsearch.cluster.DataStreamTestHelper.createTimestampField;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anySetOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class MetadataIndexAliasesServiceTests extends ESTestCase {
private final AliasValidator aliasValidator = new AliasValidator();
private final MetadataDeleteIndexService deleteIndexService = mock(MetadataDeleteIndexService.class);
private final MetadataIndexAliasesService service = new MetadataIndexAliasesService(null, null, aliasValidator,
deleteIndexService, xContentRegistry());
public MetadataIndexAliasesServiceTests() {
// Mock any deletes so we don't need to worry about how MetadataDeleteIndexService does its job
when(deleteIndexService.deleteIndices(any(ClusterState.class), anySetOf(Index.class))).then(i -> {
ClusterState state = (ClusterState) i.getArguments()[0];
@SuppressWarnings("unchecked")
Collection<Index> indices = (Collection<Index>) i.getArguments()[1];
Metadata.Builder meta = Metadata.builder(state.metadata());
for (Index index : indices) {
assertTrue("index now found", state.metadata().hasConcreteIndex(index.getName()));
meta.remove(index.getName()); // We only think about metadata for this test. Not routing or any other fun stuff.
}
return ClusterState.builder(state).metadata(meta).build();
});
}
public void testAddAndRemove() {
// Create a state with a single index
String index = randomAlphaOfLength(5);
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
// Add an alias to it
ClusterState after = service.applyAliasActions(before, singletonList(new AliasAction.Add(index, "test", null, null, null, null,
null)));
IndexAbstraction alias = after.metadata().getIndicesLookup().get("test");
assertNotNull(alias);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), contains(after.metadata().index(index)));
assertAliasesVersionIncreased(index, before, after);
// Remove the alias from it while adding another one
before = after;
after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Remove(index, "test", null),
new AliasAction.Add(index, "test_2", null, null, null, null, null)));
assertNull(after.metadata().getIndicesLookup().get("test"));
alias = after.metadata().getIndicesLookup().get("test_2");
assertNotNull(alias);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), contains(after.metadata().index(index)));
assertAliasesVersionIncreased(index, before, after);
// Now just remove on its own
before = after;
after = service.applyAliasActions(before, singletonList(new AliasAction.Remove(index, "test_2", randomBoolean())));
assertNull(after.metadata().getIndicesLookup().get("test"));
assertNull(after.metadata().getIndicesLookup().get("test_2"));
assertAliasesVersionIncreased(index, before, after);
}
public void testMustExist() {
// Create a state with a single index
String index = randomAlphaOfLength(5);
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
// Add an alias to it
ClusterState after = service.applyAliasActions(before, singletonList(new AliasAction.Add(index, "test", null, null, null, null,
null)));
IndexAbstraction alias = after.metadata().getIndicesLookup().get("test");
assertNotNull(alias);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), contains(after.metadata().index(index)));
assertAliasesVersionIncreased(index, before, after);
// Remove the alias from it with mustExist == true while adding another one
before = after;
after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Remove(index, "test", true),
new AliasAction.Add(index, "test_2", null, null, null, null, null)));
assertNull(after.metadata().getIndicesLookup().get("test"));
alias = after.metadata().getIndicesLookup().get("test_2");
assertNotNull(alias);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), contains(after.metadata().index(index)));
assertAliasesVersionIncreased(index, before, after);
// Now just remove on its own
before = after;
after = service.applyAliasActions(before, singletonList(new AliasAction.Remove(index, "test_2", randomBoolean())));
assertNull(after.metadata().getIndicesLookup().get("test"));
assertNull(after.metadata().getIndicesLookup().get("test_2"));
assertAliasesVersionIncreased(index, before, after);
// Show that removing non-existing alias with mustExist == true fails
final ClusterState finalCS = after;
final ResourceNotFoundException iae = expectThrows(ResourceNotFoundException.class,
() -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))));
assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist"));
}
public void testMultipleIndices() {
final var length = randomIntBetween(2, 8);
final var indices = new HashSet<String>(length);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT).build();
final var addActions = new ArrayList<AliasAction>(length);
for (int i = 0; i < length; i++) {
final String index = randomValueOtherThanMany(v -> indices.add(v) == false, () -> randomAlphaOfLength(8));
before = createIndex(before, index);
addActions.add(new AliasAction.Add(index, "alias-" + index, null, null, null, null, null));
}
final ClusterState afterAddingAliasesToAll = service.applyAliasActions(before, addActions);
assertAliasesVersionIncreased(indices.toArray(new String[0]), before, afterAddingAliasesToAll);
// now add some aliases randomly
final var randomIndices = new HashSet<String>(length);
final var randomAddActions = new ArrayList<AliasAction>(length);
for (var index : indices) {
if (randomBoolean()) {
randomAddActions.add(new AliasAction.Add(index, "random-alias-" + index, null, null, null, null, null));
randomIndices.add(index);
}
}
final ClusterState afterAddingRandomAliases = service.applyAliasActions(afterAddingAliasesToAll, randomAddActions);
assertAliasesVersionIncreased(randomIndices.toArray(new String[0]), afterAddingAliasesToAll, afterAddingRandomAliases);
assertAliasesVersionUnchanged(
Sets.difference(indices, randomIndices).toArray(new String[0]),
afterAddingAliasesToAll,
afterAddingRandomAliases);
}
public void testChangingWriteAliasStateIncreasesAliasesVersion() {
final String index = randomAlphaOfLength(8);
final ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
final ClusterState afterAddWriteAlias =
service.applyAliasActions(before, singletonList(new AliasAction.Add(index, "test", null, null, null, true, null)));
assertAliasesVersionIncreased(index, before, afterAddWriteAlias);
final ClusterState afterChangeWriteAliasToNonWriteAlias =
service.applyAliasActions(afterAddWriteAlias, singletonList(new AliasAction.Add(index, "test", null, null, null, false,
null)));
assertAliasesVersionIncreased(index, afterAddWriteAlias, afterChangeWriteAliasToNonWriteAlias);
final ClusterState afterChangeNonWriteAliasToWriteAlias =
service.applyAliasActions(
afterChangeWriteAliasToNonWriteAlias,
singletonList(new AliasAction.Add(index, "test", null, null, null, true, null)));
assertAliasesVersionIncreased(index, afterChangeWriteAliasToNonWriteAlias, afterChangeNonWriteAliasToWriteAlias);
}
public void testAddingAliasMoreThanOnceShouldOnlyIncreaseAliasesVersionByOne() {
final String index = randomAlphaOfLength(8);
final ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
// add an alias to the index multiple times
final int length = randomIntBetween(2, 8);
final var addActions = new ArrayList<AliasAction>(length);
for (int i = 0; i < length; i++) {
addActions.add(new AliasAction.Add(index, "test", null, null, null, null, null));
}
final ClusterState afterAddingAliases = service.applyAliasActions(before, addActions);
assertAliasesVersionIncreased(index, before, afterAddingAliases);
}
public void testAliasesVersionUnchangedWhenActionsAreIdempotent() {
final String index = randomAlphaOfLength(8);
final ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
// add some aliases to the index
final int length = randomIntBetween(1, 8);
final var aliasNames = new HashSet<String>();
final var addActions = new ArrayList<AliasAction>(length);
for (int i = 0; i < length; i++) {
final String aliasName = randomValueOtherThanMany(v -> aliasNames.add(v) == false, () -> randomAlphaOfLength(8));
addActions.add(new AliasAction.Add(index, aliasName, null, null, null, null, null));
}
final ClusterState afterAddingAlias = service.applyAliasActions(before, addActions);
// now perform a remove and add for each alias which is idempotent, the resulting aliases are unchanged
final var removeAndAddActions = new ArrayList<AliasAction>(2 * length);
for (final var aliasName : aliasNames) {
removeAndAddActions.add(new AliasAction.Remove(index, aliasName, null));
removeAndAddActions.add(new AliasAction.Add(index, aliasName, null, null, null, null, null));
}
final ClusterState afterRemoveAndAddAlias = service.applyAliasActions(afterAddingAlias, removeAndAddActions);
assertAliasesVersionUnchanged(index, afterAddingAlias, afterRemoveAndAddAlias);
}
public void testSwapIndexWithAlias() {
// Create "test" and "test_2"
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), "test");
before = createIndex(before, "test_2");
// Now remove "test" and add an alias to "test" to "test_2" in one go
ClusterState after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test_2", "test", null, null, null, null, null),
new AliasAction.RemoveIndex("test")));
IndexAbstraction alias = after.metadata().getIndicesLookup().get("test");
assertNotNull(alias);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), contains(after.metadata().index("test_2")));
assertAliasesVersionIncreased("test_2", before, after);
}
public void testAddAliasToRemovedIndex() {
// Create "test"
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), "test");
// Attempt to add an alias to "test" at the same time as we remove it
IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, null, null),
new AliasAction.RemoveIndex("test"))));
assertEquals("test", e.getIndex().getName());
}
public void testRemoveIndexTwice() {
// Create "test"
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), "test");
// Try to remove an index twice. This should just remove the index once....
ClusterState after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.RemoveIndex("test"),
new AliasAction.RemoveIndex("test")));
assertNull(after.metadata().getIndicesLookup().get("test"));
}
public void testAddWriteOnlyWithNoExistingAliases() {
ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), "test");
ClusterState after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, false, null)));
assertFalse(after.metadata().index("test").getAliases().get("alias").writeIndex());
assertNull(after.metadata().getIndicesLookup().get("alias").getWriteIndex());
assertAliasesVersionIncreased("test", before, after);
after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, null, null)));
assertNull(after.metadata().index("test").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test")));
assertAliasesVersionIncreased("test", before, after);
after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, true, null)));
assertTrue(after.metadata().index("test").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test")));
assertAliasesVersionIncreased("test", before, after);
}
public void testAddWriteOnlyWithExistingWriteIndex() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.putAlias(AliasMetadata.builder("alias").writeIndex(true).build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2)).build();
ClusterState after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, null, null)));
assertNull(after.metadata().index("test").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test2")));
assertAliasesVersionIncreased("test", before, after);
assertAliasesVersionUnchanged("test2", before, after);
Exception exception = expectThrows(IllegalStateException.class, () -> service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, true, null))));
assertThat(exception.getMessage(), startsWith("alias [alias] has more than one write index ["));
}
public void testSwapWriteOnlyIndex() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.putAlias(AliasMetadata.builder("alias").writeIndex(true).build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2)).build();
Boolean unsetValue = randomBoolean() ? null : false;
List<AliasAction> swapActions = Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, unsetValue, null),
new AliasAction.Add("test2", "alias", null, null, null, true, null)
);
Collections.shuffle(swapActions, random());
ClusterState after = service.applyAliasActions(before, swapActions);
assertThat(after.metadata().index("test").getAliases().get("alias").writeIndex(), equalTo(unsetValue));
assertTrue(after.metadata().index("test2").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test2")));
assertAliasesVersionIncreased("test", before, after);
assertAliasesVersionIncreased("test2", before, after);
}
public void testAddWriteOnlyWithExistingNonWriteIndices() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.putAlias(AliasMetadata.builder("alias").writeIndex(randomBoolean() ? null : false).build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.putAlias(AliasMetadata.builder("alias").writeIndex(randomBoolean() ? null : false).build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata3 = IndexMetadata.builder("test3")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2).put(indexMetadata3)).build();
assertNull(before.metadata().getIndicesLookup().get("alias").getWriteIndex());
ClusterState after = service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test3", "alias", null, null, null, true, null)));
assertTrue(after.metadata().index("test3").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test3")));
assertAliasesVersionUnchanged("test", before, after);
assertAliasesVersionUnchanged("test2", before, after);
assertAliasesVersionIncreased("test3", before, after);
}
public void testAddWriteOnlyWithIndexRemoved() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.putAlias(AliasMetadata.builder("alias").build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.putAlias(AliasMetadata.builder("alias").build())
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2)).build();
assertNull(before.metadata().index("test").getAliases().get("alias").writeIndex());
assertNull(before.metadata().index("test2").getAliases().get("alias").writeIndex());
assertNull(before.metadata().getIndicesLookup().get("alias").getWriteIndex());
ClusterState after = service.applyAliasActions(before, Collections.singletonList(new AliasAction.RemoveIndex("test")));
assertNull(after.metadata().index("test2").getAliases().get("alias").writeIndex());
assertThat(after.metadata().getIndicesLookup().get("alias").getWriteIndex(),
equalTo(after.metadata().index("test2")));
assertAliasesVersionUnchanged("test2", before, after);
}
public void testAddWriteOnlyValidatesAgainstMetadataBuilder() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2)).build();
Exception exception = expectThrows(IllegalStateException.class, () -> service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, true, null),
new AliasAction.Add("test2", "alias", null, null, null, true, null)
)));
assertThat(exception.getMessage(), startsWith("alias [alias] has more than one write index ["));
}
public void testHiddenPropertyValidation() {
ClusterState originalState = ClusterState.EMPTY_STATE;
originalState = createIndex(originalState, "test1");
originalState = createIndex(originalState, "test2");
{
// Add a non-hidden alias to one index
ClusterState testState = service.applyAliasActions(originalState, Collections.singletonList(
new AliasAction.Add("test1", "alias", null, null, null, null, randomFrom(false, null))
));
// Adding the same alias as hidden to another index should throw
Exception ex = expectThrows(IllegalStateException.class, () -> // Add a non-hidden alias to one index
service.applyAliasActions(testState, Collections.singletonList(
new AliasAction.Add("test2", "alias", null, null, null, null, true)
)));
assertThat(ex.getMessage(), containsString("alias [alias] has is_hidden set to true on indices"));
}
{
// Add a hidden alias to one index
ClusterState testState = service.applyAliasActions(originalState, Collections.singletonList(
new AliasAction.Add("test1", "alias", null, null, null, null, true)
));
// Adding the same alias as non-hidden to another index should throw
Exception ex = expectThrows(IllegalStateException.class, () -> // Add a non-hidden alias to one index
service.applyAliasActions(testState, Collections.singletonList(
new AliasAction.Add("test2", "alias", null, null, null, null, randomFrom(false, null))
)));
assertThat(ex.getMessage(), containsString("alias [alias] has is_hidden set to true on indices"));
}
{
// Add a non-hidden alias to one index
ClusterState testState = service.applyAliasActions(originalState, Collections.singletonList(
new AliasAction.Add("test1", "alias", null, null, null, null, randomFrom(false, null))
));
// Adding the same alias as non-hidden should be OK
service.applyAliasActions(testState, Collections.singletonList(
new AliasAction.Add("test2", "alias", null, null, null, null, randomFrom(false, null))
));
}
{
// Add a hidden alias to one index
ClusterState testState = service.applyAliasActions(originalState, Collections.singletonList(
new AliasAction.Add("test1", "alias", null, null, null, null, true)
));
// Adding the same alias as hidden should be OK
service.applyAliasActions(testState, Collections.singletonList(
new AliasAction.Add("test2", "alias", null, null, null, null, true)
));
}
}
public void testSimultaneousHiddenPropertyValidation() {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder("test")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
IndexMetadata.Builder indexMetadata2 = IndexMetadata.builder("test2")
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1);
ClusterState before = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().put(indexMetadata).put(indexMetadata2)).build();
{
// These should all be fine
applyHiddenAliasMix(before, null, null);
applyHiddenAliasMix(before, false, false);
applyHiddenAliasMix(before, false, null);
applyHiddenAliasMix(before, null, false);
applyHiddenAliasMix(before, true, true);
}
{
Exception exception = expectThrows(IllegalStateException.class,
() -> applyHiddenAliasMix(before, true, randomFrom(false, null)));
assertThat(exception.getMessage(), startsWith("alias [alias] has is_hidden set to true on indices ["));
}
{
Exception exception = expectThrows(IllegalStateException.class,
() -> applyHiddenAliasMix(before, randomFrom(false, null), true));
assertThat(exception.getMessage(), startsWith("alias [alias] has is_hidden set to true on indices ["));
}
}
public void testAliasesForDataStreamBackingIndicesNotSupported() {
long epochMillis = randomLongBetween(1580536800000L, 1583042400000L);
String dataStreamName = "foo-stream";
String backingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis);
IndexMetadata indexMetadata = IndexMetadata.builder(backingIndexName)
.settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1).build();
ClusterState state = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.put(indexMetadata, true)
.put(new DataStream(dataStreamName, createTimestampField("@timestamp"), singletonList(indexMetadata.getIndex()))))
.build();
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> service.applyAliasActions(state,
singletonList(new AliasAction.Add(backingIndexName, "test", null, null, null, null, null))));
assertThat(exception.getMessage(), is("The provided index [" + backingIndexName + "] is a backing index belonging to data " +
"stream [foo-stream]. Data streams and their backing indices don't support alias operations."));
}
private ClusterState applyHiddenAliasMix(ClusterState before, Boolean isHidden1, Boolean isHidden2) {
return service.applyAliasActions(before, Arrays.asList(
new AliasAction.Add("test", "alias", null, null, null, null, isHidden1),
new AliasAction.Add("test2", "alias", null, null, null, null, isHidden2)
));
}
private ClusterState createIndex(ClusterState state, String index) {
IndexMetadata indexMetadata = IndexMetadata.builder(index)
.settings(Settings.builder().put("index.version.created", VersionUtils.randomVersion(random())))
.numberOfShards(1)
.numberOfReplicas(1)
.build();
return ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata()).put(indexMetadata, false))
.build();
}
private void assertAliasesVersionUnchanged(final String index, final ClusterState before, final ClusterState after) {
assertAliasesVersionUnchanged(new String[]{index}, before, after);
}
private void assertAliasesVersionUnchanged(final String[] indices, final ClusterState before, final ClusterState after) {
for (final var index : indices) {
final long expected = before.metadata().index(index).getAliasesVersion();
final long actual = after.metadata().index(index).getAliasesVersion();
assertThat("index metadata aliases version mismatch", actual, equalTo(expected));
}
}
private void assertAliasesVersionIncreased(final String index, final ClusterState before, final ClusterState after) {
assertAliasesVersionIncreased(new String[]{index}, before, after);
}
private void assertAliasesVersionIncreased(final String[] indices, final ClusterState before, final ClusterState after) {
for (final var index : indices) {
final long expected = 1 + before.metadata().index(index).getAliasesVersion();
final long actual = after.metadata().index(index).getAliasesVersion();
assertThat("index metadata aliases version mismatch", actual, equalTo(expected));
}
}
}
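The seven positional arguments in the AliasAction.Add calls above are hard to read at a glance. The fragment below labels them; the parameter names and their order (index, alias, filter, indexRouting, searchRouting, writeIndex, isHidden) are an assumption inferred from how the tests use the final argument as the hidden flag, not taken verbatim from the AliasAction source.
// Minimal sketch; parameter names are assumed from the usage in the tests above.
AliasAction.Add hiddenAliasAdd = new AliasAction.Add(
    "test1",  // index the alias should point at
    "alias",  // alias name
    null,     // filter (none)
    null,     // index routing (none)
    null,     // search routing (none)
    null,     // write-index flag (unset)
    true      // isHidden: marks the alias as hidden
);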
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.stream.kafka;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeoutException;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.SystemTime$;
import kafka.utils.TestUtils;
import kafka.utils.ZkUtils;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.ZkConnection;
import org.apache.curator.test.TestingServer;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import scala.Tuple2;
/**
* Kafka Test Broker.
*/
public class TestKafkaBroker {
/** ZooKeeper connection timeout. */
private static final int ZK_CONNECTION_TIMEOUT = 6000;
/** ZooKeeper session timeout. */
private static final int ZK_SESSION_TIMEOUT = 6000;
/** ZooKeeper port. */
private static final int ZK_PORT = 21811;
/** Broker host. */
private static final String BROKER_HOST = "localhost";
/** Broker port. */
private static final int BROKER_PORT = 11092;
/** Kafka config. */
private KafkaConfig kafkaCfg;
/** Kafka server. */
private KafkaServer kafkaSrv;
/** ZooKeeper. */
private TestingServer zkServer;
/** Kafka Zookeeper utils. */
private ZkUtils zkUtils;
/**
* Kafka broker constructor.
*/
public TestKafkaBroker() {
try {
setupZooKeeper();
setupKafkaServer();
}
catch (Exception e) {
throw new RuntimeException("Failed to start Kafka: " + e);
}
}
/**
* Creates a topic.
*
* @param topic Topic name.
* @param partitions Number of partitions for the topic.
* @param replicationFactor Replication factor.
* @throws TimeoutException If operation is timed out.
* @throws InterruptedException If interrupted.
*/
public void createTopic(String topic, int partitions, int replicationFactor)
throws TimeoutException, InterruptedException {
List<KafkaServer> servers = new ArrayList<>();
servers.add(kafkaSrv);
TestUtils.createTopic(zkUtils, topic, partitions, replicationFactor,
scala.collection.JavaConversions.asScalaBuffer(servers), new Properties());
}
/**
     * Sends messages to the Kafka broker. The producer used to send them is
     * created, flushed and closed inside this method.
     *
     * @param records List of records to send.
*/
public void sendMessages(List<ProducerRecord<String, String>> records) {
Producer<String, String> producer = new KafkaProducer<>(getProducerConfig());
for (ProducerRecord<String, String> rec : records)
producer.send(rec);
producer.flush();
producer.close();
}
/**
* Shuts down test Kafka broker.
*/
public void shutdown() {
if (zkUtils != null)
zkUtils.close();
if (kafkaSrv != null)
kafkaSrv.shutdown();
if (zkServer != null) {
try {
zkServer.stop();
}
catch (IOException ignored) {
// No-op.
}
}
List<String> logDirs = scala.collection.JavaConversions.seqAsJavaList(kafkaCfg.logDirs());
for (String logDir : logDirs)
U.delete(new File(logDir));
}
/**
* Sets up test Kafka broker.
*
* @throws IOException If failed.
*/
private void setupKafkaServer() throws IOException {
kafkaCfg = new KafkaConfig(getKafkaConfig());
kafkaSrv = TestUtils.createServer(kafkaCfg, SystemTime$.MODULE$);
kafkaSrv.startup();
}
/**
* Sets up ZooKeeper test server.
*
* @throws Exception If failed.
*/
private void setupZooKeeper() throws Exception {
zkServer = new TestingServer(ZK_PORT, true);
Tuple2<ZkClient, ZkConnection> zkTuple = ZkUtils.createZkClientAndConnection(zkServer.getConnectString(),
ZK_SESSION_TIMEOUT, ZK_CONNECTION_TIMEOUT);
zkUtils = new ZkUtils(zkTuple._1(), zkTuple._2(), false);
}
/**
* Obtains Kafka config.
*
* @return Kafka config.
* @throws IOException If failed.
*/
private Properties getKafkaConfig() throws IOException {
Properties props = new Properties();
props.put("broker.id", "0");
props.put("zookeeper.connect", zkServer.getConnectString());
props.put("host.name", BROKER_HOST);
props.put("port", BROKER_PORT);
props.put("offsets.topic.replication.factor", "1");
props.put("log.dir", createTmpDir("_cfg").getAbsolutePath());
props.put("log.flush.interval.messages", "1");
props.put("log.flush.interval.ms", "10");
return props;
}
/**
* Obtains broker address.
*
* @return Kafka broker address.
*/
public String getBrokerAddress() {
return BROKER_HOST + ":" + BROKER_PORT;
}
/**
* Obtains Zookeeper address.
*
* @return Zookeeper address.
*/
public String getZookeeperAddress() {
return BROKER_HOST + ":" + ZK_PORT;
}
/**
* Obtains producer config.
*
* @return Kafka Producer config.
*/
private Properties getProducerConfig() {
Properties props = new Properties();
props.put("bootstrap.servers", getBrokerAddress());
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
return props;
}
/**
* Creates temporary directory.
*
* @param prefix Prefix.
* @return Created file.
* @throws IOException If failed.
*/
private static File createTmpDir(String prefix) throws IOException {
Path path = Files.createTempDirectory(prefix);
return path.toFile();
}
}
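A minimal usage sketch for the broker above. The class name, topic name and record contents are illustrative assumptions for this sketch, not part of the Ignite test suite.
import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.producer.ProducerRecord;
/** Hypothetical driver showing the TestKafkaBroker lifecycle. */
public class TestKafkaBrokerUsageSketch {
    public static void main(String[] args) throws Exception {
        // The constructor starts both the embedded ZooKeeper and the Kafka server.
        TestKafkaBroker broker = new TestKafkaBroker();
        try {
            // One partition, replication factor 1 -- enough for a single embedded broker.
            broker.createTopic("demo-topic", 1, 1);
            List<ProducerRecord<String, String>> records = new ArrayList<>();
            records.add(new ProducerRecord<>("demo-topic", "key-1", "value-1"));
            // sendMessages() creates, flushes and closes its own producer.
            broker.sendMessages(records);
            // Consumers would bootstrap against broker.getBrokerAddress().
        }
        finally {
            // Stops Kafka and ZooKeeper and deletes the temporary log directories.
            broker.shutdown();
        }
    }
}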
|